CI boilerplate reduction (#34272)

* CI configuration boilerplate reduction and refactor

Configuration:
- New notation for list concatenation (prepend/append)
- New notation for string concatenation (prepend/append); see the sketch
after this list
- Break out configuration files for: ci.yaml, cdash.yaml, view.yaml
- Spack CI section refactored to improve self-consistency and
composability
  - Scripts are now lists of lists and/or lists of strings
  - Job attributes are now specified in a precedence-ordered list and
  are composed/merged using Spack config merge rules.
  - "service-jobs" are identified explicitly rather than as a batch

CI:
- Consolidate common, platform, and architecture configurations for all CI stacks into composable configuration files
- Make padding consistent across all stacks (256)
- Merge all package -> runner mappings to be consistent across all
stacks

Unit Test:
- Refactor CI module unit tests for the refactored configuration

Docs:
- Add docs for new notations in configuration.rst
- Rewrite docs on CI pipelines to be consistent with the refactored CI
workflow

* Add an environment toggle (SPACK_VERBOSE_SCRIPT) for script verbosity; bootstrap with --dev

* Port #35409
Author: kwryankrattiger
Date: 2023-03-10 13:25:35 -06:00 (committed by GitHub)
Parent: 16c67ff9b4
Commit: f3595da600
40 changed files with 1781 additions and 3135 deletions


@@ -364,59 +364,6 @@ def _spec_matches(spec, match_string):
return spec.intersects(match_string)
def _remove_attributes(src_dict, dest_dict):
if "tags" in src_dict and "tags" in dest_dict:
# For 'tags', we remove any tags that are listed for removal
for tag in src_dict["tags"]:
while tag in dest_dict["tags"]:
dest_dict["tags"].remove(tag)
def _copy_attributes(attrs_list, src_dict, dest_dict):
for runner_attr in attrs_list:
if runner_attr in src_dict:
if runner_attr in dest_dict and runner_attr == "tags":
# For 'tags', we combine the lists of tags, while
# avoiding duplicates
for tag in src_dict[runner_attr]:
if tag not in dest_dict[runner_attr]:
dest_dict[runner_attr].append(tag)
elif runner_attr in dest_dict and runner_attr == "variables":
# For 'variables', we merge the dictionaries. Any conflicts
# (i.e. 'runner-attributes' has same variable key as the
# higher level) we resolve by keeping the more specific
# 'runner-attributes' version.
for src_key, src_val in src_dict[runner_attr].items():
dest_dict[runner_attr][src_key] = copy.deepcopy(src_dict[runner_attr][src_key])
else:
dest_dict[runner_attr] = copy.deepcopy(src_dict[runner_attr])
def _find_matching_config(spec, gitlab_ci):
runner_attributes = {}
overridable_attrs = ["image", "tags", "variables", "before_script", "script", "after_script"]
_copy_attributes(overridable_attrs, gitlab_ci, runner_attributes)
matched = False
only_first = gitlab_ci.get("match_behavior", "first") == "first"
for ci_mapping in gitlab_ci["mappings"]:
for match_string in ci_mapping["match"]:
if _spec_matches(spec, match_string):
matched = True
if "remove-attributes" in ci_mapping:
_remove_attributes(ci_mapping["remove-attributes"], runner_attributes)
if "runner-attributes" in ci_mapping:
_copy_attributes(
overridable_attrs, ci_mapping["runner-attributes"], runner_attributes
)
break
if matched and only_first:
break
return runner_attributes if matched else None
def _format_job_needs(
phase_name,
strip_compilers,
@@ -536,6 +483,224 @@ def get_spec_filter_list(env, affected_pkgs, dependent_traverse_depth=None):
return affected_specs
def _build_jobs(phases, staged_phases):
for phase in phases:
phase_name = phase["name"]
spec_labels, dependencies, stages = staged_phases[phase_name]
for stage_jobs in stages:
for spec_label in stage_jobs:
spec_record = spec_labels[spec_label]
release_spec = spec_record["spec"]
release_spec_dag_hash = release_spec.dag_hash()
yield release_spec, release_spec_dag_hash
def _noop(x):
return x
def _unpack_script(script_section, op=_noop):
script = []
for cmd in script_section:
if isinstance(cmd, list):
for subcmd in cmd:
script.append(op(subcmd))
else:
script.append(op(cmd))
return script
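# Example (sketch): a script section may mix plain commands and nested
# lists of commands; _unpack_script flattens one level and applies `op`
# to every command:
#   _unpack_script([["cd {env_dir}", "spack env activate ."], "spack ci rebuild"])
#   -> ["cd {env_dir}", "spack env activate .", "spack ci rebuild"]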
class SpackCI:
"""Spack CI object used to generate intermediate representation
used by the CI generator(s).
"""
def __init__(self, ci_config, phases, staged_phases):
"""Given the information from the ci section of the config
and the job phases setup meta data needed for generating Spack
CI IR.
"""
self.ci_config = ci_config
self.named_jobs = ["any", "build", "cleanup", "noop", "reindex", "signing"]
self.ir = {
"jobs": {},
"temporary-storage-url-prefix": self.ci_config.get(
"temporary-storage-url-prefix", None
),
"enable-artifacts-buildcache": self.ci_config.get(
"enable-artifacts-buildcache", False
),
"bootstrap": self.ci_config.get(
"bootstrap", []
), # This is deprecated and should be removed
"rebuild-index": self.ci_config.get("rebuild-index", True),
"broken-specs-url": self.ci_config.get("broken-specs-url", None),
"broken-tests-packages": self.ci_config.get("broken-tests-packages", []),
"target": self.ci_config.get("target", "gitlab"),
}
jobs = self.ir["jobs"]
for spec, dag_hash in _build_jobs(phases, staged_phases):
jobs[dag_hash] = self.__init_job(spec)
for name in self.named_jobs:
# Initialize all named jobs except the 'any' and 'build' pseudo-jobs
if name not in ["any", "build"]:
jobs[name] = self.__init_job("")
def __init_job(self, spec):
"""Initialize job object"""
return {"spec": spec, "attributes": {}}
def __is_named(self, section):
"""Check if a pipeline-gen configuration section is for a named job,
and if so return the name otherwise return none.
"""
for _name in self.named_jobs:
keys = ["{0}-job".format(_name), "{0}-job-remove".format(_name)]
if any(key in section for key in keys):
return _name
return None
@staticmethod
def __job_name(name, suffix=""):
"""Compute the name of a named job with appropriate suffix.
Valid suffixes are either '-remove' or empty string or None
"""
assert type(name) == str
jname = name
if suffix:
jname = "{0}-job{1}".format(name, suffix)
else:
jname = "{0}-job".format(name)
return jname
def __apply_submapping(self, dest, spec, section):
"""Apply submapping setion to the IR dict"""
matched = False
only_first = section.get("match_behavior", "first") == "first"
for match_attrs in reversed(section["submapping"]):
attrs = cfg.InternalConfigScope._process_dict_keyname_overrides(match_attrs)
for match_string in match_attrs["match"]:
if _spec_matches(spec, match_string):
matched = True
if "build-job-remove" in match_attrs:
spack.config.remove_yaml(dest, attrs["build-job-remove"])
if "build-job" in match_attrs:
spack.config.merge_yaml(dest, attrs["build-job"])
break
if matched and only_first:
break
return dest
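# Note (sketch of the semantics): submapping entries are visited in
# reverse file order and merged with Spack config precedence, so with
# match_behavior "merge" earlier entries in the file win; with
# match_behavior "first" only one matching entry is applied.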
# Generate IR from the configs
def generate_ir(self):
"""Generate the IR from the Spack CI configurations."""
jobs = self.ir["jobs"]
# Implicit job defaults
defaults = [
{
"build-job": {
"script": [
"cd {env_dir}",
"spack env activate --without-view .",
"spack ci rebuild",
]
}
},
{"noop-job": {"script": ['echo "All specs already up to date, nothing to rebuild."']}},
]
# Job overrides
overrides = [
# Reindex script
{
"reindex-job": {
"script:": [
"spack buildcache update-index --keys --mirror-url {index_target_mirror}"
]
}
},
# Cleanup script
{
"cleanup-job": {
"script:": [
"spack -d mirror destroy --mirror-url {mirror_prefix}/$CI_PIPELINE_ID"
]
}
},
# Add signing job tags
{"signing-job": {"tags": ["aws", "protected", "notary"]}},
# Remove reserved tags
{"any-job-remove": {"tags": SPACK_RESERVED_TAGS}},
]
pipeline_gen = overrides + self.ci_config.get("pipeline-gen", []) + defaults
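# Sections are applied from lowest to highest precedence: implicit
# defaults first, then the user's pipeline-gen configuration, then the
# hard overrides above; each later merge takes precedence.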
for section in reversed(pipeline_gen):
name = self.__is_named(section)
has_submapping = "submapping" in section
section = cfg.InternalConfigScope._process_dict_keyname_overrides(section)
if name:
remove_job_name = self.__job_name(name, suffix="-remove")
merge_job_name = self.__job_name(name)
do_remove = remove_job_name in section
do_merge = merge_job_name in section
def _apply_section(dest, src):
if do_remove:
dest = spack.config.remove_yaml(dest, src[remove_job_name])
if do_merge:
dest = copy.copy(spack.config.merge_yaml(dest, src[merge_job_name]))
if name == "build":
# Apply attributes to all build jobs
for _, job in jobs.items():
if job["spec"]:
_apply_section(job["attributes"], section)
elif name == "any":
# Apply section attributes to all jobs
for _, job in jobs.items():
_apply_section(job["attributes"], section)
else:
# Create a signing job if there is a script and the job hasn't
# been initialized yet
if name == "signing" and name not in jobs:
if "signing-job" in section:
if "script" not in section["signing-job"]:
continue
else:
jobs[name] = self.__init_job("")
# Apply attributes to named job
_apply_section(jobs[name]["attributes"], section)
elif has_submapping:
# Apply submapping to jobs that have specs to match
for _, job in jobs.items():
if job["spec"]:
job["attributes"] = self.__apply_submapping(
job["attributes"], job["spec"], section
)
for _, job in jobs.items():
if job["spec"]:
job["spec"] = job["spec"].name
return self.ir
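# Sketch of the generated IR (the hash and attributes are illustrative):
#   {
#     "jobs": {
#       "<dag_hash>": {"spec": "pkg-name", "attributes": {"tags": [...]}},
#       "noop": {"spec": "", "attributes": {...}},
#       ...
#     },
#     "rebuild-index": True,
#     "target": "gitlab",
#     ...
#   }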
def generate_gitlab_ci_yaml(
env,
print_summary,
@@ -585,12 +750,18 @@ def generate_gitlab_ci_yaml(
yaml_root = ev.config_dict(env.yaml)
if "gitlab-ci" not in yaml_root:
tty.die('Environment yaml does not have "gitlab-ci" section')
# Get the joined "ci" config with all of the current scopes resolved
ci_config = cfg.get("ci")
gitlab_ci = yaml_root["gitlab-ci"]
if not ci_config:
tty.die('Environment yaml does not have "ci" section')
cdash_handler = CDashHandler(yaml_root.get("cdash")) if "cdash" in yaml_root else None
# The default target is gitlab, and it is currently the only target
if "target" in ci_config and ci_config["target"] != "gitlab":
tty.die('Spack CI module only generates target "gitlab"')
cdash_config = cfg.get("cdash")
cdash_handler = CDashHandler(cdash_config) if "build-group" in cdash_config else None
build_group = cdash_handler.build_group if cdash_handler else None
dependent_depth = os.environ.get("SPACK_PRUNE_UNTOUCHED_DEPENDENT_DEPTH", None)
@@ -664,25 +835,25 @@ def generate_gitlab_ci_yaml(
# trying to build.
broken_specs_url = ""
known_broken_specs_encountered = []
if "broken-specs-url" in gitlab_ci:
broken_specs_url = gitlab_ci["broken-specs-url"]
if "broken-specs-url" in ci_config:
broken_specs_url = ci_config["broken-specs-url"]
enable_artifacts_buildcache = False
if "enable-artifacts-buildcache" in gitlab_ci:
enable_artifacts_buildcache = gitlab_ci["enable-artifacts-buildcache"]
if "enable-artifacts-buildcache" in ci_config:
enable_artifacts_buildcache = ci_config["enable-artifacts-buildcache"]
rebuild_index_enabled = True
if "rebuild-index" in gitlab_ci and gitlab_ci["rebuild-index"] is False:
if "rebuild-index" in ci_config and ci_config["rebuild-index"] is False:
rebuild_index_enabled = False
temp_storage_url_prefix = None
if "temporary-storage-url-prefix" in gitlab_ci:
temp_storage_url_prefix = gitlab_ci["temporary-storage-url-prefix"]
if "temporary-storage-url-prefix" in ci_config:
temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"]
bootstrap_specs = []
phases = []
if "bootstrap" in gitlab_ci:
for phase in gitlab_ci["bootstrap"]:
if "bootstrap" in ci_config:
for phase in ci_config["bootstrap"]:
try:
phase_name = phase.get("name")
strip_compilers = phase.get("compiler-agnostic")
@@ -747,6 +918,27 @@ def generate_gitlab_ci_yaml(
shutil.copyfile(env.manifest_path, os.path.join(concrete_env_dir, "spack.yaml"))
shutil.copyfile(env.lock_path, os.path.join(concrete_env_dir, "spack.lock"))
with open(env.manifest_path, "r") as env_fd:
env_yaml_root = syaml.load(env_fd)
# Add config scopes to environment
env_includes = env_yaml_root["spack"].get("include", [])
cli_scopes = [
os.path.abspath(s.path)
for s in cfg.scopes().values()
if type(s) == cfg.ImmutableConfigScope
and s.path not in env_includes
and os.path.exists(s.path)
]
include_scopes = []
for scope in cli_scopes:
if scope not in include_scopes and scope not in env_includes:
include_scopes.insert(0, scope)
env_includes.extend(include_scopes)
env_yaml_root["spack"]["include"] = env_includes
with open(os.path.join(concrete_env_dir, "spack.yaml"), "w") as fd:
fd.write(syaml.dump_config(env_yaml_root, default_flow_style=False))
job_log_dir = os.path.join(pipeline_artifacts_dir, "logs")
job_repro_dir = os.path.join(pipeline_artifacts_dir, "reproduction")
job_test_dir = os.path.join(pipeline_artifacts_dir, "tests")
@@ -758,7 +950,7 @@ def generate_gitlab_ci_yaml(
# generation job and the rebuild jobs. This can happen when gitlab
# checks out the project into a runner-specific directory, for example,
# and different runners are picked for generate and rebuild jobs.
ci_project_dir = os.environ.get("CI_PROJECT_DIR")
ci_project_dir = os.environ.get("CI_PROJECT_DIR", os.getcwd())
rel_artifacts_root = os.path.relpath(pipeline_artifacts_dir, ci_project_dir)
rel_concrete_env_dir = os.path.relpath(concrete_env_dir, ci_project_dir)
rel_job_log_dir = os.path.relpath(job_log_dir, ci_project_dir)
@@ -772,7 +964,7 @@ def generate_gitlab_ci_yaml(
try:
bindist.binary_index.update()
except bindist.FetchCacheError as e:
tty.error(e)
tty.warn(e)
staged_phases = {}
try:
@@ -829,6 +1021,9 @@ def generate_gitlab_ci_yaml(
else:
broken_spec_urls = web_util.list_url(broken_specs_url)
spack_ci = SpackCI(ci_config, phases, staged_phases)
spack_ci_ir = spack_ci.generate_ir()
before_script, after_script = None, None
for phase in phases:
phase_name = phase["name"]
@@ -856,7 +1051,7 @@ def generate_gitlab_ci_yaml(
spec_record["needs_rebuild"] = False
continue
runner_attribs = _find_matching_config(release_spec, gitlab_ci)
runner_attribs = spack_ci_ir["jobs"][release_spec_dag_hash]["attributes"]
if not runner_attribs:
tty.warn("No match found for {0}, skipping it".format(release_spec))
@@ -887,23 +1082,21 @@ def generate_gitlab_ci_yaml(
except AttributeError:
image_name = build_image
job_script = ["spack env activate --without-view ."]
if "script" not in runner_attribs:
raise AttributeError
if artifacts_root:
job_script.insert(0, "cd {0}".format(concrete_env_dir))
def main_script_replacements(cmd):
return cmd.replace("{env_dir}", concrete_env_dir)
job_script.extend(["spack ci rebuild"])
if "script" in runner_attribs:
job_script = [s for s in runner_attribs["script"]]
job_script = _unpack_script(runner_attribs["script"], op=main_script_replacements)
before_script = None
if "before_script" in runner_attribs:
before_script = [s for s in runner_attribs["before_script"]]
before_script = _unpack_script(runner_attribs["before_script"])
after_script = None
if "after_script" in runner_attribs:
after_script = [s for s in runner_attribs["after_script"]]
after_script = _unpack_script(runner_attribs["after_script"])
osname = str(release_spec.architecture)
job_name = get_job_name(
@@ -1147,19 +1340,6 @@ def generate_gitlab_ci_yaml(
else:
tty.warn("Unable to populate buildgroup without CDash credentials")
service_job_config = None
if "service-job-attributes" in gitlab_ci:
service_job_config = gitlab_ci["service-job-attributes"]
default_attrs = [
"image",
"tags",
"variables",
"before_script",
# 'script',
"after_script",
]
service_job_retries = {
"max": 2,
"when": ["runner_system_failure", "stuck_or_timeout_failure", "script_failure"],
@@ -1171,55 +1351,29 @@ def generate_gitlab_ci_yaml(
# schedule a job to clean up the temporary storage location
# associated with this pipeline.
stage_names.append("cleanup-temp-storage")
cleanup_job = {}
if service_job_config:
_copy_attributes(default_attrs, service_job_config, cleanup_job)
if "tags" in cleanup_job:
service_tags = _remove_reserved_tags(cleanup_job["tags"])
cleanup_job["tags"] = service_tags
cleanup_job = copy.deepcopy(spack_ci_ir["jobs"]["cleanup"]["attributes"])
cleanup_job["stage"] = "cleanup-temp-storage"
cleanup_job["script"] = [
"spack -d mirror destroy --mirror-url {0}/$CI_PIPELINE_ID".format(
temp_storage_url_prefix
)
]
cleanup_job["when"] = "always"
cleanup_job["retry"] = service_job_retries
cleanup_job["interruptible"] = True
cleanup_job["script"] = _unpack_script(
cleanup_job["script"],
op=lambda cmd: cmd.replace("mirror_prefix", temp_storage_url_prefix),
)
output_object["cleanup"] = cleanup_job
if (
"signing-job-attributes" in gitlab_ci
"script" in spack_ci_ir["jobs"]["signing"]["attributes"]
and spack_pipeline_type == "spack_protected_branch"
):
# External signing: generate a job to check and sign binary pkgs
stage_names.append("stage-sign-pkgs")
signing_job_config = gitlab_ci["signing-job-attributes"]
signing_job = {}
signing_job = spack_ci_ir["jobs"]["signing"]["attributes"]
signing_job_attrs_to_copy = [
"image",
"tags",
"variables",
"before_script",
"script",
"after_script",
]
_copy_attributes(signing_job_attrs_to_copy, signing_job_config, signing_job)
signing_job_tags = []
if "tags" in signing_job:
signing_job_tags = _remove_reserved_tags(signing_job["tags"])
for tag in ["aws", "protected", "notary"]:
if tag not in signing_job_tags:
signing_job_tags.append(tag)
signing_job["tags"] = signing_job_tags
signing_job["script"] = _unpack_script(signing_job["script"])
signing_job["stage"] = "stage-sign-pkgs"
signing_job["when"] = "always"
@@ -1231,23 +1385,17 @@ def generate_gitlab_ci_yaml(
if rebuild_index_enabled:
# Add a final job to regenerate the index
stage_names.append("stage-rebuild-index")
final_job = {}
if service_job_config:
_copy_attributes(default_attrs, service_job_config, final_job)
if "tags" in final_job:
service_tags = _remove_reserved_tags(final_job["tags"])
final_job["tags"] = service_tags
final_job = spack_ci_ir["jobs"]["reindex"]["attributes"]
index_target_mirror = mirror_urls[0]
if remote_mirror_override:
index_target_mirror = remote_mirror_override
final_job["stage"] = "stage-rebuild-index"
final_job["script"] = [
"spack buildcache update-index --keys --mirror-url {0}".format(index_target_mirror)
]
final_job["script"] = _unpack_script(
final_job["script"],
op=lambda cmd: cmd.replace("{index_target_mirror}", index_target_mirror),
)
final_job["when"] = "always"
final_job["retry"] = service_job_retries
final_job["interruptible"] = True
@@ -1328,13 +1476,7 @@ def generate_gitlab_ci_yaml(
else:
# No jobs were generated
tty.debug("No specs to rebuild, generating no-op job")
noop_job = {}
if service_job_config:
_copy_attributes(default_attrs, service_job_config, noop_job)
if "script" not in noop_job:
noop_job["script"] = ['echo "All specs already up to date, nothing to rebuild."']
noop_job = spack_ci_ir["jobs"]["noop"]["attributes"]
noop_job["retry"] = service_job_retries
@@ -1348,7 +1490,7 @@ def generate_gitlab_ci_yaml(
sys.exit(1)
with open(output_file, "w") as outf:
outf.write(syaml.dump_config(sorted_output, default_flow_style=True))
outf.write(syaml.dump(sorted_output, default_flow_style=True))
def _url_encode_string(input_string):
@@ -1528,7 +1670,10 @@ def copy_files_to_artifacts(src, artifacts_dir):
try:
fs.copy(src, artifacts_dir)
except Exception as err:
tty.warn(f"Unable to copy files ({src}) to artifacts {artifacts_dir} due to: {err}")
msg = ("Unable to copy files ({0}) to artifacts {1} due to " "exception: {2}").format(
src, artifacts_dir, str(err)
)
tty.warn(msg)
def copy_stage_logs_to_artifacts(job_spec, job_log_dir):
@@ -1748,6 +1893,7 @@ def reproduce_ci_job(url, work_dir):
function is a set of printed instructions for running docker and then
commands to run to reproduce the build once inside the container.
"""
work_dir = os.path.realpath(work_dir)
download_and_extract_artifacts(url, work_dir)
lock_file = fs.find(work_dir, "spack.lock")[0]
@@ -1912,7 +2058,9 @@ def reproduce_ci_job(url, work_dir):
if job_image:
inst_list.append("\nRun the following command:\n\n")
inst_list.append(
" $ docker run --rm -v {0}:{1} -ti {2}\n".format(work_dir, mount_as_dir, job_image)
" $ docker run --rm --name spack_reproducer -v {0}:{1}:Z -ti {2}\n".format(
work_dir, mount_as_dir, job_image
)
)
inst_list.append("\nOnce inside the container:\n\n")
else:
@@ -1963,13 +2111,16 @@ def process_command(name, commands, repro_dir):
# Create a string [command 1] && [command 2] && ... && [command n] with commands
# quoted using double quotes.
args_to_string = lambda args: " ".join('"{}"'.format(arg) for arg in args)
full_command = " && ".join(map(args_to_string, commands))
full_command = " \n ".join(map(args_to_string, commands))
# Write the command to a shell script
script = "{0}.sh".format(name)
with open(script, "w") as fd:
fd.write("#!/bin/sh\n\n")
fd.write("\n# spack {0} command\n".format(name))
fd.write("set -e\n")
if os.environ.get("SPACK_VERBOSE_SCRIPT"):
fd.write("set -x\n")
fd.write(full_command)
fd.write("\n")


@@ -255,10 +255,9 @@ def ci_rebuild(args):
# Make sure the environment is "gitlab-enabled", or else there's nothing
# to do.
yaml_root = ev.config_dict(env.yaml)
gitlab_ci = yaml_root["gitlab-ci"] if "gitlab-ci" in yaml_root else None
if not gitlab_ci:
tty.die("spack ci rebuild requires an env containing gitlab-ci cfg")
ci_config = cfg.get("ci")
if not ci_config:
tty.die("spack ci rebuild requires an env containing ci cfg")
tty.msg(
"SPACK_BUILDCACHE_DESTINATION={0}".format(
@@ -306,8 +305,10 @@ def ci_rebuild(args):
# Query the environment manifest to find out whether we're reporting to a
# CDash instance, and if so, gather some information from the manifest to
# support that task.
cdash_handler = spack_ci.CDashHandler(yaml_root.get("cdash")) if "cdash" in yaml_root else None
if cdash_handler:
cdash_config = cfg.get("cdash")
cdash_handler = None
if "build-group" in cdash_config:
cdash_handler = spack_ci.CDashHandler(cdash_config)
tty.debug("cdash url = {0}".format(cdash_handler.url))
tty.debug("cdash project = {0}".format(cdash_handler.project))
tty.debug("cdash project_enc = {0}".format(cdash_handler.project_enc))
@@ -340,13 +341,13 @@ def ci_rebuild(args):
pipeline_mirror_url = None
temp_storage_url_prefix = None
if "temporary-storage-url-prefix" in gitlab_ci:
temp_storage_url_prefix = gitlab_ci["temporary-storage-url-prefix"]
if "temporary-storage-url-prefix" in ci_config:
temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"]
pipeline_mirror_url = url_util.join(temp_storage_url_prefix, ci_pipeline_id)
enable_artifacts_mirror = False
if "enable-artifacts-buildcache" in gitlab_ci:
enable_artifacts_mirror = gitlab_ci["enable-artifacts-buildcache"]
if "enable-artifacts-buildcache" in ci_config:
enable_artifacts_mirror = ci_config["enable-artifacts-buildcache"]
if enable_artifacts_mirror or (
spack_is_pr_pipeline and not enable_artifacts_mirror and not temp_storage_url_prefix
):
@@ -551,7 +552,7 @@ def ci_rebuild(args):
commands = [
# apparently there's a race when spack bootstraps? do it up front once
[SPACK_COMMAND, "-e", env.path, "bootstrap", "now"],
[SPACK_COMMAND, "-e", env.path, "bootstrap", "now", "--dev"],
[
SPACK_COMMAND,
"-e",
@@ -593,8 +594,8 @@ def ci_rebuild(args):
# avoid wasting compute cycles attempting to build those hashes.
if install_exit_code == INSTALL_FAIL_CODE and spack_is_develop_pipeline:
tty.debug("Install failed on develop")
if "broken-specs-url" in gitlab_ci:
broken_specs_url = gitlab_ci["broken-specs-url"]
if "broken-specs-url" in ci_config:
broken_specs_url = ci_config["broken-specs-url"]
dev_fail_hash = job_spec.dag_hash()
broken_spec_path = url_util.join(broken_specs_url, dev_fail_hash)
tty.msg("Reporting broken develop build as: {0}".format(broken_spec_path))
@@ -615,17 +616,14 @@ def ci_rebuild(args):
# the package, run them and copy the output. Failures of any kind should
# *not* terminate the build process or preclude creating the build cache.
broken_tests = (
"broken-tests-packages" in gitlab_ci
and job_spec.name in gitlab_ci["broken-tests-packages"]
"broken-tests-packages" in ci_config
and job_spec.name in ci_config["broken-tests-packages"]
)
reports_dir = fs.join_path(os.getcwd(), "cdash_report")
if args.tests and broken_tests:
tty.warn(
"Unable to run stand-alone tests since listed in "
"gitlab-ci's 'broken-tests-packages'"
)
tty.warn("Unable to run stand-alone tests since listed in " "ci's 'broken-tests-packages'")
if cdash_handler:
msg = "Package is listed in gitlab-ci's broken-tests-packages"
msg = "Package is listed in ci's broken-tests-packages"
cdash_handler.report_skipped(job_spec, reports_dir, reason=msg)
cdash_handler.copy_test_results(reports_dir, job_test_dir)
elif args.tests:
@@ -688,8 +686,8 @@ def ci_rebuild(args):
# If this is a develop pipeline, check if the spec that we just built is
# on the broken-specs list. If so, remove it.
if spack_is_develop_pipeline and "broken-specs-url" in gitlab_ci:
broken_specs_url = gitlab_ci["broken-specs-url"]
if spack_is_develop_pipeline and "broken-specs-url" in ci_config:
broken_specs_url = ci_config["broken-specs-url"]
just_built_hash = job_spec.dag_hash()
broken_spec_path = url_util.join(broken_specs_url, just_built_hash)
if web_util.url_exists(broken_spec_path):


@@ -77,6 +77,8 @@
"config": spack.schema.config.schema,
"upstreams": spack.schema.upstreams.schema,
"bootstrap": spack.schema.bootstrap.schema,
"ci": spack.schema.ci.schema,
"cdash": spack.schema.cdash.schema,
}
# Same as above, but including keys for environments
@@ -360,6 +362,12 @@ def _process_dict_keyname_overrides(data):
if sk.endswith(":"):
key = syaml.syaml_str(sk[:-1])
key.override = True
elif sk.endswith("+"):
key = syaml.syaml_str(sk[:-1])
key.prepend = True
elif sk.endswith("-"):
key = syaml.syaml_str(sk[:-1])
key.append = True
else:
key = sk
@@ -1040,6 +1048,33 @@ def _override(string):
return hasattr(string, "override") and string.override
def _append(string):
"""Test if a spack YAML string is an override.
See ``spack_yaml`` for details. Keys in Spack YAML can end in `+:`,
and if they do, their values append lower-precedence
configs.
str, str : concatenate strings.
[obj], [obj] : append lists.
"""
return getattr(string, "append", False)
def _prepend(string):
"""Test if a spack YAML string is an override.
See ``spack_yaml`` for details. Keys in Spack YAML can end in `+:`,
and if they do, their values prepend lower-precedence
configs.
str, str : concatenate strings.
[obj], [obj] : prepend lists. (default behavior)
"""
return getattr(string, "prepend", False)
def _mark_internal(data, name):
"""Add a simple name mark to raw YAML/JSON data.
@@ -1102,7 +1137,57 @@ def get_valid_type(path):
raise ConfigError("Cannot determine valid type for path '%s'." % path)
def merge_yaml(dest, source):
def remove_yaml(dest, source):
"""UnMerges source from dest; entries in source take precedence over dest.
This routine may modify dest and should be assigned to dest, in
case dest was None to begin with, e.g.:
dest = remove_yaml(dest, source)
In the result, elements from lists from ``source`` will not appear
as elements of lists from ``dest``. Likewise, when iterating over keys
or items in merged ``OrderedDict`` objects, keys from ``source`` will not
appear as keys in ``dest``.
Config file authors can optionally end any attribute in a dict
with `::` instead of `:`, and the key will remove the entire section
from ``dest``
"""
def they_are(t):
return isinstance(dest, t) and isinstance(source, t)
# If source is None, there is nothing to remove.
if source is None:
return dest
# Remove any elements of the source list from the dest list
if they_are(list):
dest[:] = [x for x in dest if x not in source]
return dest
# Source dict is unmerged from dest.
elif they_are(dict):
for sk, sv in source.items():
# Pop the key from dest; if the value is not fully overridden
# (`::`), put back whatever remains after unmerging.
unmerge = sk in dest
old_dest_value = dest.pop(sk, None)
if unmerge and not spack.config._override(sk):
dest[sk] = remove_yaml(old_dest_value, sv)
return dest
# If we reach here source and dest are either different types or are
# not both lists or dicts: there is nothing to remove, keep dest.
return dest
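# Example (sketch): entries present in source are removed from dest:
#   remove_yaml({"tags": ["a", "b"], "image": "x"}, {"tags": ["b"]})
#   -> {"tags": ["a"], "image": "x"}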
def merge_yaml(dest, source, prepend=False, append=False):
"""Merges source into dest; entries in source take precedence over dest.
This routine may modify dest and should be assigned to dest, in
@@ -1118,6 +1203,9 @@ def merge_yaml(dest, source):
Config file authors can optionally end any attribute in a dict
with `::` instead of `:`, and the key will override that of the
parent instead of merging.
`+:` extends the default prepend merge strategy to include string concatenation.
`-:` changes the merge strategy to append; it also supports string concatenation.
"""
def they_are(t):
@@ -1129,8 +1217,12 @@ def they_are(t):
# Source list is prepended (for precedence)
if they_are(list):
# Make sure to copy ruamel comments
dest[:] = source + [x for x in dest if x not in source]
if append:
# Make sure to copy ruamel comments
dest[:] = [x for x in dest if x not in source] + source
else:
# Make sure to copy ruamel comments
dest[:] = source + [x for x in dest if x not in source]
return dest
# Source dict is merged into dest.
@@ -1147,7 +1239,7 @@ def they_are(t):
old_dest_value = dest.pop(sk, None)
if merge and not _override(sk):
dest[sk] = merge_yaml(old_dest_value, sv)
dest[sk] = merge_yaml(old_dest_value, sv, _prepend(sk), _append(sk))
else:
# if sk ended with ::, or if it's new, completely override
dest[sk] = copy.deepcopy(sv)
@@ -1158,6 +1250,13 @@ def they_are(t):
return dest
elif they_are(str):
# Concatenate strings in prepend or append mode
if prepend:
return source + dest
elif append:
return dest + source
# If we reach here source and dest are either different types or are
# not both lists or dicts: replace with source.
return copy.copy(source)
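# Examples (sketch):
#   merge_yaml(["base"], ["new"])              -> ["new", "base"]  (default prepend)
#   merge_yaml(["base"], ["new"], append=True) -> ["base", "new"]
#   merge_yaml("dest", "src", prepend=True)    -> "srcdest"
#   merge_yaml("dest", "src", append=True)     -> "destsrc"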
@@ -1183,6 +1282,17 @@ def process_config_path(path):
front = syaml.syaml_str(front)
front.override = True
seen_override_in_path = True
elif front.endswith("+"):
front = front.rstrip("+")
front = syaml.syaml_str(front)
front.prepend = True
elif front.endswith("-"):
front = front.rstrip("-")
front = syaml.syaml_str(front)
front.append = True
result.append(front)
return result


@@ -2193,6 +2193,7 @@ def _update_and_write_manifest(self, raw_yaml_dict, yaml_dict):
view = dict((name, view.to_dict()) for name, view in self.views.items())
else:
view = False
yaml_dict["view"] = view
if self.dev_specs:


@@ -15,7 +15,8 @@
"cdash": {
"type": "object",
"additionalProperties": False,
"required": ["build-group", "url", "project", "site"],
# "required": ["build-group", "url", "project", "site"],
"required": ["build-group"],
"patternProperties": {
r"build-group": {"type": "string"},
r"url": {"type": "string"},


@@ -0,0 +1,181 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for gitlab-ci.yaml configuration file.
.. literalinclude:: ../spack/schema/ci.py
:lines: 13-
"""
from llnl.util.lang import union_dicts
# Schema for script fields
# List of lists and/or strings
# This is similar to what is allowed in
# the gitlab schema
script_schema = {
"type": "array",
"items": {"anyOf": [{"type": "string"}, {"type": "array", "items": {"type": "string"}}]},
}
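# Both of these script forms validate against script_schema (sketch):
#   script: ["spack ci rebuild"]
#   script: [["cd {env_dir}", "spack env activate ."], "spack ci rebuild"]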
# Additional attributes are allowed
# and will be forwarded directly to the
# CI target YAML for each job.
attributes_schema = {
"type": "object",
"properties": {
"image": {
"oneOf": [
{"type": "string"},
{
"type": "object",
"properties": {
"name": {"type": "string"},
"entrypoint": {"type": "array", "items": {"type": "string"}},
},
},
]
},
"tags": {"type": "array", "items": {"type": "string"}},
"variables": {
"type": "object",
"patternProperties": {r"[\w\d\-_\.]+": {"type": "string"}},
},
"before_script": script_schema,
"script": script_schema,
"after_script": script_schema,
},
}
submapping_schema = {
"type": "object",
"additinoalProperties": False,
"required": ["submapping"],
"properties": {
"match_behavior": {"type": "string", "enum": ["first", "merge"], "default": "first"},
"submapping": {
"type": "array",
"items": {
"type": "object",
"additionalProperties": False,
"required": ["match"],
"properties": {
"match": {"type": "array", "items": {"type": "string"}},
"build-job": attributes_schema,
"build-job-remove": attributes_schema,
},
},
},
},
}
named_attributes_schema = {
"oneOf": [
{
"type": "object",
"additionalProperties": False,
"properties": {"noop-job": attributes_schema, "noop-job-remove": attributes_schema},
},
{
"type": "object",
"additionalProperties": False,
"properties": {"build-job": attributes_schema, "build-job-remove": attributes_schema},
},
{
"type": "object",
"additionalProperties": False,
"properties": {
"reindex-job": attributes_schema,
"reindex-job-remove": attributes_schema,
},
},
{
"type": "object",
"additionalProperties": False,
"properties": {
"signing-job": attributes_schema,
"signing-job-remove": attributes_schema,
},
},
{
"type": "object",
"additionalProperties": False,
"properties": {
"cleanup-job": attributes_schema,
"cleanup-job-remove": attributes_schema,
},
},
{
"type": "object",
"additionalProperties": False,
"properties": {"any-job": attributes_schema, "any-job-remove": attributes_schema},
},
]
}
pipeline_gen_schema = {
"type": "array",
"items": {"oneOf": [submapping_schema, named_attributes_schema]},
}
core_shared_properties = union_dicts(
{
"pipeline-gen": pipeline_gen_schema,
"bootstrap": {
"type": "array",
"items": {
"anyOf": [
{"type": "string"},
{
"type": "object",
"additionalProperties": False,
"required": ["name"],
"properties": {
"name": {"type": "string"},
"compiler-agnostic": {"type": "boolean", "default": False},
},
},
]
},
},
"rebuild-index": {"type": "boolean"},
"broken-specs-url": {"type": "string"},
"broken-tests-packages": {"type": "array", "items": {"type": "string"}},
"target": {"type": "string", "enum": ["gitlab"], "default": "gitlab"},
}
)
ci_properties = {
"anyOf": [
{
"type": "object",
"additionalProperties": False,
# "required": ["mappings"],
"properties": union_dicts(
core_shared_properties, {"enable-artifacts-buildcache": {"type": "boolean"}}
),
},
{
"type": "object",
"additionalProperties": False,
# "required": ["mappings"],
"properties": union_dicts(
core_shared_properties, {"temporary-storage-url-prefix": {"type": "string"}}
),
},
]
}
#: Properties for inclusion in other schemas
properties = {"ci": ci_properties}
#: Full schema with metadata
schema = {
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "Spack CI configuration file schema",
"type": "object",
"additionalProperties": False,
"properties": properties,
}


@@ -12,11 +12,11 @@
import spack.schema.bootstrap
import spack.schema.cdash
import spack.schema.ci
import spack.schema.compilers
import spack.schema.concretizer
import spack.schema.config
import spack.schema.container
import spack.schema.gitlab_ci
import spack.schema.mirrors
import spack.schema.modules
import spack.schema.packages
@@ -31,7 +31,7 @@
spack.schema.concretizer.properties,
spack.schema.config.properties,
spack.schema.container.properties,
spack.schema.gitlab_ci.properties,
spack.schema.ci.properties,
spack.schema.mirrors.properties,
spack.schema.modules.properties,
spack.schema.packages.properties,


@@ -28,8 +28,8 @@
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
from spack.schema.buildcache_spec import schema as specfile_schema
from spack.schema.ci import schema as ci_schema
from spack.schema.database_index import schema as db_idx_schema
from spack.schema.gitlab_ci import schema as gitlab_ci_schema
from spack.spec import CompilerSpec, Spec
from spack.util.pattern import Bunch
@@ -177,26 +177,29 @@ def test_ci_generate_with_env(
- [$old-gcc-pkgs]
mirrors:
some-mirror: {0}
gitlab-ci:
ci:
bootstrap:
- name: bootstrap
compiler-agnostic: true
mappings:
pipeline-gen:
- submapping:
- match:
- arch=test-debian6-core2
runner-attributes:
build-job:
tags:
- donotcare
image: donotcare
- match:
- arch=test-debian6-m1
runner-attributes:
build-job:
tags:
- donotcare
image: donotcare
service-job-attributes:
image: donotcare
tags: [donotcare]
- cleanup-job:
image: donotcare
tags: [donotcare]
- reindex-job:
script:: [hello, world]
cdash:
build-group: Not important
url: https://my.fake.cdash
@@ -239,6 +242,10 @@ def test_ci_generate_with_env(
def _validate_needs_graph(yaml_contents, needs_graph, artifacts):
"""Validate the needs graph in the generate CI"""
# TODO: Fix the logic to catch errors where expected packages/needs are not
# found.
for job_name, job_def in yaml_contents.items():
for needs_def_name, needs_list in needs_graph.items():
if job_name.startswith(needs_def_name):
@@ -269,27 +276,30 @@ def test_ci_generate_bootstrap_gcc(
spack:
definitions:
- bootstrap:
- gcc@9.5
- gcc@9.0
- gcc@3.0
specs:
- dyninst%gcc@9.5
- dyninst%gcc@3.0
mirrors:
some-mirror: https://my.fake.mirror
gitlab-ci:
ci:
bootstrap:
- name: bootstrap
compiler-agnostic: true
mappings:
pipeline-gen:
- submapping:
- match:
- arch=test-debian6-x86_64
runner-attributes:
build-job:
tags:
- donotcare
- match:
- arch=test-debian6-aarch64
runner-attributes:
build-job:
tags:
- donotcare
- any-job:
tags:
- donotcare
"""
)
@@ -326,26 +336,30 @@ def test_ci_generate_bootstrap_artifacts_buildcache(
spack:
definitions:
- bootstrap:
- gcc@9.5
- gcc@3.0
specs:
- dyninst%gcc@9.5
- dyninst%gcc@3.0
mirrors:
some-mirror: https://my.fake.mirror
gitlab-ci:
ci:
bootstrap:
- name: bootstrap
compiler-agnostic: true
mappings:
pipeline-gen:
- submapping:
- match:
- arch=test-debian6-x86_64
runner-attributes:
build-job:
tags:
- donotcare
- match:
- arch=test-debian6-aarch64
runner-attributes:
build-job:
tags:
- donotcare
- any-job:
tags:
- donotcare
enable-artifacts-buildcache: True
"""
)
@@ -398,7 +412,7 @@ def test_ci_generate_with_env_missing_section(
"""
)
expect_out = 'Error: Environment yaml does not have "gitlab-ci" section'
expect_out = 'Error: Environment yaml does not have "ci" section'
with tmpdir.as_cwd():
env_cmd("create", "test", "./spack.yaml")
@@ -427,12 +441,13 @@ def test_ci_generate_with_cdash_token(
- archive-files
mirrors:
some-mirror: https://my.fake.mirror
gitlab-ci:
ci:
enable-artifacts-buildcache: True
mappings:
pipeline-gen:
- submapping:
- match:
- archive-files
runner-attributes:
build-job:
tags:
- donotcare
image: donotcare
@@ -485,11 +500,12 @@ def test_ci_generate_with_custom_scripts(
- archive-files
mirrors:
some-mirror: https://my.fake.mirror
gitlab-ci:
mappings:
ci:
pipeline-gen:
- submapping:
- match:
- archive-files
runner-attributes:
build-job:
tags:
- donotcare
variables:
@@ -576,17 +592,18 @@ def test_ci_generate_pkg_with_deps(
- flatten-deps
mirrors:
some-mirror: https://my.fake.mirror
gitlab-ci:
ci:
enable-artifacts-buildcache: True
mappings:
pipeline-gen:
- submapping:
- match:
- flatten-deps
runner-attributes:
build-job:
tags:
- donotcare
- match:
- dependency-install
runner-attributes:
build-job:
tags:
- donotcare
"""
@@ -642,22 +659,23 @@ def test_ci_generate_for_pr_pipeline(
- flatten-deps
mirrors:
some-mirror: https://my.fake.mirror
gitlab-ci:
ci:
enable-artifacts-buildcache: True
mappings:
pipeline-gen:
- submapping:
- match:
- flatten-deps
runner-attributes:
build-job:
tags:
- donotcare
- match:
- dependency-install
runner-attributes:
build-job:
tags:
- donotcare
service-job-attributes:
image: donotcare
tags: [donotcare]
- cleanup-job:
image: donotcare
tags: [donotcare]
rebuild-index: False
"""
)
@@ -703,12 +721,13 @@ def test_ci_generate_with_external_pkg(
- externaltest
mirrors:
some-mirror: https://my.fake.mirror
gitlab-ci:
mappings:
ci:
pipeline-gen:
- submapping:
- match:
- archive-files
- externaltest
runner-attributes:
build-job:
tags:
- donotcare
image: donotcare
@@ -744,7 +763,7 @@ def test_ci_rebuild_missing_config(tmpdir, working_env, mutable_mock_env_path):
env_cmd("create", "test", "./spack.yaml")
env_cmd("activate", "--without-view", "--sh", "test")
out = ci_cmd("rebuild", fail_on_error=False)
assert "env containing gitlab-ci" in out
assert "env containing ci" in out
env_cmd("deactivate")
@@ -785,17 +804,18 @@ def create_rebuild_env(tmpdir, pkg_name, broken_tests=False):
- $packages
mirrors:
test-mirror: {1}
gitlab-ci:
ci:
broken-specs-url: {2}
broken-tests-packages: {3}
temporary-storage-url-prefix: {4}
mappings:
- match:
- {0}
runner-attributes:
tags:
- donotcare
image: donotcare
pipeline-gen:
- submapping:
- match:
- {0}
build-job:
tags:
- donotcare
image: donotcare
cdash:
build-group: Not important
url: https://my.fake.cdash
@@ -875,10 +895,9 @@ def activate_rebuild_env(tmpdir, pkg_name, rebuild_env):
@pytest.mark.parametrize("broken_tests", [True, False])
def test_ci_rebuild_mock_success(
tmpdir,
config,
working_env,
mutable_mock_env_path,
install_mockery,
install_mockery_mutable_config,
mock_gnupghome,
mock_stage,
mock_fetch,
@@ -914,7 +933,7 @@ def test_ci_rebuild(
tmpdir,
working_env,
mutable_mock_env_path,
install_mockery,
install_mockery_mutable_config,
mock_packages,
monkeypatch,
mock_gnupghome,
@@ -1014,12 +1033,13 @@ def test_ci_nothing_to_rebuild(
- $packages
mirrors:
test-mirror: {0}
gitlab-ci:
ci:
enable-artifacts-buildcache: True
mappings:
pipeline-gen:
- submapping:
- match:
- archive-files
runner-attributes:
build-job:
tags:
- donotcare
image: donotcare
@@ -1101,18 +1121,19 @@ def test_ci_generate_mirror_override(
- $packages
mirrors:
test-mirror: {0}
gitlab-ci:
mappings:
ci:
pipeline-gen:
- submapping:
- match:
- patchelf
runner-attributes:
build-job:
tags:
- donotcare
image: donotcare
service-job-attributes:
tags:
- nonbuildtag
image: basicimage
- cleanup-job:
tags:
- nonbuildtag
image: basicimage
""".format(
mirror_url
)
@@ -1183,19 +1204,24 @@ def test_push_mirror_contents(
- $packages
mirrors:
test-mirror: {0}
gitlab-ci:
ci:
enable-artifacts-buildcache: True
mappings:
pipeline-gen:
- submapping:
- match:
- patchelf
runner-attributes:
build-job:
tags:
- donotcare
image: donotcare
service-job-attributes:
tags:
- nonbuildtag
image: basicimage
- cleanup-job:
tags:
- nonbuildtag
image: basicimage
- any-job:
tags:
- nonbuildtag
image: basicimage
""".format(
mirror_url
)
@@ -1345,56 +1371,58 @@ def test_ci_generate_override_runner_attrs(
- a
mirrors:
some-mirror: https://my.fake.mirror
gitlab-ci:
tags:
- toplevel
- toplevel2
variables:
ONE: toplevelvarone
TWO: toplevelvartwo
before_script:
- pre step one
- pre step two
script:
- main step
after_script:
- post step one
match_behavior: {0}
mappings:
- match:
- flatten-deps
runner-attributes:
tags:
- specific-one
variables:
THREE: specificvarthree
- match:
- dependency-install
- match:
- a
remove-attributes:
tags:
- toplevel2
runner-attributes:
tags:
- specific-a
variables:
ONE: specificvarone
TWO: specificvartwo
before_script:
- custom pre step one
script:
- custom main step
after_script:
- custom post step one
- match:
- a
runner-attributes:
tags:
- specific-a-2
service-job-attributes:
image: donotcare
tags: [donotcare]
ci:
pipeline-gen:
- match_behavior: {0}
submapping:
- match:
- flatten-deps
build-job:
tags:
- specific-one
variables:
THREE: specificvarthree
- match:
- dependency-install
- match:
- a
build-job:
tags:
- specific-a-2
- match:
- a
build-job-remove:
tags:
- toplevel2
build-job:
tags:
- specific-a
variables:
ONE: specificvarone
TWO: specificvartwo
before_script::
- - custom pre step one
script::
- - custom main step
after_script::
- custom post step one
- build-job:
tags:
- toplevel
- toplevel2
variables:
ONE: toplevelvarone
TWO: toplevelvartwo
before_script:
- - pre step one
- pre step two
script::
- - main step
after_script:
- - post step one
- cleanup-job:
image: donotcare
tags: [donotcare]
""".format(
match_behavior
)
@@ -1420,8 +1448,6 @@ def test_ci_generate_override_runner_attrs(
assert global_vars["SPACK_CHECKOUT_VERSION"] == "12ad69eb1"
for ci_key in yaml_contents.keys():
if "(specs) b" in ci_key:
assert False
if "(specs) a" in ci_key:
# Make sure a's attributes override variables, and all the
# scripts. Also, make sure the 'toplevel' tag doesn't
@@ -1495,10 +1521,11 @@ def test_ci_generate_with_workarounds(
- callpath%gcc@9.5
mirrors:
some-mirror: https://my.fake.mirror
gitlab-ci:
mappings:
ci:
pipeline-gen:
- submapping:
- match: ['%gcc@9.5']
runner-attributes:
build-job:
tags:
- donotcare
image: donotcare
@@ -1550,11 +1577,12 @@ def test_ci_rebuild_index(
- callpath
mirrors:
test-mirror: {0}
gitlab-ci:
mappings:
ci:
pipeline-gen:
- submapping:
- match:
- patchelf
runner-attributes:
build-job:
tags:
- donotcare
image: donotcare
@@ -1642,29 +1670,30 @@ def test_ci_generate_bootstrap_prune_dag(
- b%gcc@12.2.0
mirrors:
atestm: {0}
gitlab-ci:
ci:
bootstrap:
- name: bootstrap
compiler-agnostic: true
mappings:
pipeline-gen:
- submapping:
- match:
- arch=test-debian6-x86_64
runner-attributes:
build-job:
tags:
- donotcare
- match:
- arch=test-debian6-core2
runner-attributes:
build-job:
tags:
- meh
- match:
- arch=test-debian6-aarch64
runner-attributes:
build-job:
tags:
- donotcare
- match:
- arch=test-debian6-m1
runner-attributes:
build-job:
tags:
- meh
""".format(
@@ -1743,14 +1772,12 @@ def test_ci_generate_prune_untouched(
- callpath
mirrors:
some-mirror: {0}
gitlab-ci:
mappings:
- match:
- arch=test-debian6-core2
runner-attributes:
tags:
- donotcare
image: donotcare
ci:
pipeline-gen:
- build-job:
tags:
- donotcare
image: donotcare
""".format(
mirror_url
)
@@ -1879,11 +1906,12 @@ def test_ci_subcommands_without_mirror(
spack:
specs:
- archive-files
gitlab-ci:
mappings:
ci:
pipeline-gen:
- submapping:
- match:
- archive-files
runner-attributes:
build-job:
tags:
- donotcare
image: donotcare
@@ -1912,12 +1940,13 @@ def test_ensure_only_one_temporary_storage():
"""Make sure 'gitlab-ci' section of env does not allow specification of
both 'enable-artifacts-buildcache' and 'temporary-storage-url-prefix'."""
gitlab_ci_template = """
gitlab-ci:
ci:
{0}
mappings:
pipeline-gen:
- submapping:
- match:
- notcheckedhere
runner-attributes:
build-job:
tags:
- donotcare
"""
@@ -1933,21 +1962,21 @@ def test_ensure_only_one_temporary_storage():
# User can specify "enable-artifacts-buildcache" (boolean)
yaml_obj = syaml.load(gitlab_ci_template.format(enable_artifacts))
jsonschema.validate(yaml_obj, gitlab_ci_schema)
jsonschema.validate(yaml_obj, ci_schema)
# User can also specify "temporary-storage-url-prefix" (string)
yaml_obj = syaml.load(gitlab_ci_template.format(temp_storage))
jsonschema.validate(yaml_obj, gitlab_ci_schema)
jsonschema.validate(yaml_obj, ci_schema)
# However, specifying both should fail to validate
yaml_obj = syaml.load(gitlab_ci_template.format(specify_both))
with pytest.raises(jsonschema.ValidationError):
jsonschema.validate(yaml_obj, gitlab_ci_schema)
jsonschema.validate(yaml_obj, ci_schema)
# Specifying neither should be fine too, as neither of these properties
# should be required
yaml_obj = syaml.load(gitlab_ci_template.format(specify_neither))
jsonschema.validate(yaml_obj, gitlab_ci_schema)
jsonschema.validate(yaml_obj, ci_schema)
def test_ci_generate_temp_storage_url(
@@ -1969,12 +1998,13 @@ def test_ci_generate_temp_storage_url(
- archive-files
mirrors:
some-mirror: https://my.fake.mirror
gitlab-ci:
ci:
temporary-storage-url-prefix: file:///work/temp/mirror
mappings:
pipeline-gen:
- submapping:
- match:
- archive-files
runner-attributes:
build-job:
tags:
- donotcare
image: donotcare
@@ -2040,15 +2070,16 @@ def test_ci_generate_read_broken_specs_url(
- a
mirrors:
some-mirror: https://my.fake.mirror
gitlab-ci:
ci:
broken-specs-url: "{0}"
mappings:
pipeline-gen:
- submapping:
- match:
- a
- flatten-deps
- b
- dependency-install
runner-attributes:
build-job:
tags:
- donotcare
image: donotcare
@@ -2089,26 +2120,27 @@ def test_ci_generate_external_signing_job(
- archive-files
mirrors:
some-mirror: https://my.fake.mirror
gitlab-ci:
ci:
temporary-storage-url-prefix: file:///work/temp/mirror
mappings:
pipeline-gen:
- submapping:
- match:
- archive-files
runner-attributes:
build-job:
tags:
- donotcare
image: donotcare
signing-job-attributes:
tags:
- nonbuildtag
- secretrunner
image:
name: customdockerimage
entrypoint: []
variables:
IMPORTANT_INFO: avalue
script:
- echo hello
- signing-job:
tags:
- nonbuildtag
- secretrunner
image:
name: customdockerimage
entrypoint: []
variables:
IMPORTANT_INFO: avalue
script::
- echo hello
"""
)
@@ -2151,11 +2183,12 @@ def test_ci_reproduce(
- $packages
mirrors:
test-mirror: file:///some/fake/mirror
gitlab-ci:
mappings:
ci:
pipeline-gen:
- submapping:
- match:
- archive-files
runner-attributes:
build-job:
tags:
- donotcare
image: {0}
@@ -2232,7 +2265,9 @@ def fake_download_and_extract_artifacts(url, work_dir):
working_dir.strpath,
output=str,
)
expect_out = "docker run --rm -v {0}:{0} -ti {1}".format(working_dir.strpath, image_name)
expect_out = "docker run --rm --name spack_reproducer -v {0}:{0}:Z -ti {1}".format(
os.path.realpath(working_dir.strpath), image_name
)
assert expect_out in rep_out