spack ci: preserve custom attributes in build jobs (#36651)
* Simplify test/cmd/ci.py::test_ci_generate_with_custom_scripts
* Rearrange the build-job logic in generate_gitlab_ci_yaml
* Preserve all unknown attributes in build jobs
* Slip tests for custom attributes in the tests for other job types
* Support custom artifacts
* [@spackbot] updating style on behalf of blue42u
* Don't bother sorting needs

---------

Co-authored-by: blue42u <blue42u@users.noreply.github.com>
parent eb72217228
commit 08fd8c8d0a
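Background for the diff below: after this change, keys that the CI job schema does not explicitly model are carried through unmodified into the generated GitLab build jobs, and a job's artifacts section is merged with the paths Spack adds itself. A minimal sketch of the kind of configuration this enables, assuming the ci/pipeline-gen layout used by the test fixtures in this commit (the attribute name and artifact path are illustrative values taken from those fixtures, not a recommended setup):

    spack:
      ci:
        pipeline-gen:
        - build-job:
            tags: [donotcare]
            custom_attribute: custom!   # unknown key, now preserved in the generated job
            artifacts:
              paths:
              - some/custom/artifact    # merged with the artifact paths Spack adds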
@@ -1040,7 +1040,6 @@ def generate_gitlab_ci_yaml(
     spack_ci = SpackCI(ci_config, phases, staged_phases)
     spack_ci_ir = spack_ci.generate_ir()

-    before_script, after_script = None, None
     for phase in phases:
         phase_name = phase["name"]
         strip_compilers = phase["strip-compilers"]
@@ -1067,52 +1066,35 @@ def generate_gitlab_ci_yaml(
                         spec_record["needs_rebuild"] = False
                         continue

-                runner_attribs = spack_ci_ir["jobs"][release_spec_dag_hash]["attributes"]
+                job_object = spack_ci_ir["jobs"][release_spec_dag_hash]["attributes"]

-                if not runner_attribs:
+                if not job_object:
                     tty.warn("No match found for {0}, skipping it".format(release_spec))
                     continue

-                tags = [tag for tag in runner_attribs["tags"]]
-
                 if spack_pipeline_type is not None:
                     # For spack pipelines "public" and "protected" are reserved tags
-                    tags = _remove_reserved_tags(tags)
+                    job_object["tags"] = _remove_reserved_tags(job_object.get("tags", []))
                     if spack_pipeline_type == "spack_protected_branch":
-                        tags.extend(["protected"])
+                        job_object["tags"].extend(["protected"])
                     elif spack_pipeline_type == "spack_pull_request":
-                        tags.extend(["public"])
+                        job_object["tags"].extend(["public"])

-                variables = {}
-                if "variables" in runner_attribs:
-                    variables.update(runner_attribs["variables"])
-
-                image_name = None
-                image_entry = None
-                if "image" in runner_attribs:
-                    build_image = runner_attribs["image"]
-                    try:
-                        image_name = build_image.get("name")
-                        entrypoint = build_image.get("entrypoint")
-                        image_entry = [p for p in entrypoint]
-                    except AttributeError:
-                        image_name = build_image
-
-                if "script" not in runner_attribs:
+                if "script" not in job_object:
                     raise AttributeError

                 def main_script_replacements(cmd):
                     return cmd.replace("{env_dir}", concrete_env_dir)

-                job_script = _unpack_script(runner_attribs["script"], op=main_script_replacements)
+                job_object["script"] = _unpack_script(
+                    job_object["script"], op=main_script_replacements
+                )

-                before_script = None
-                if "before_script" in runner_attribs:
-                    before_script = _unpack_script(runner_attribs["before_script"])
+                if "before_script" in job_object:
+                    job_object["before_script"] = _unpack_script(job_object["before_script"])

-                after_script = None
-                if "after_script" in runner_attribs:
-                    after_script = _unpack_script(runner_attribs["after_script"])
+                if "after_script" in job_object:
+                    job_object["after_script"] = _unpack_script(job_object["after_script"])

                 osname = str(release_spec.architecture)
                 job_name = get_job_name(
@@ -1125,13 +1107,12 @@ def main_script_replacements(cmd):
                 if _is_main_phase(phase_name):
                     compiler_action = "INSTALL_MISSING"

-                job_vars = {
-                    "SPACK_JOB_SPEC_DAG_HASH": release_spec_dag_hash,
-                    "SPACK_JOB_SPEC_PKG_NAME": release_spec.name,
-                    "SPACK_COMPILER_ACTION": compiler_action,
-                }
+                job_vars = job_object.setdefault("variables", {})
+                job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec_dag_hash
+                job_vars["SPACK_JOB_SPEC_PKG_NAME"] = release_spec.name
+                job_vars["SPACK_COMPILER_ACTION"] = compiler_action

-                job_dependencies = []
+                job_object["needs"] = []
                 if spec_label in dependencies:
                     if enable_artifacts_buildcache:
                         # Get dependencies transitively, so they're all
@@ -1144,7 +1125,7 @@ def main_script_replacements(cmd):
                         for dep_label in dependencies[spec_label]:
                             dep_jobs.append(spec_labels[dep_label]["spec"])

-                    job_dependencies.extend(
+                    job_object["needs"].extend(
                         _format_job_needs(
                             phase_name,
                             strip_compilers,
@@ -1201,7 +1182,7 @@ def main_script_replacements(cmd):
                             if enable_artifacts_buildcache:
                                 dep_jobs = [d for d in c_spec.traverse(deptype=all)]

-                            job_dependencies.extend(
+                            job_object["needs"].extend(
                                 _format_job_needs(
                                     bs["phase-name"],
                                     bs["strip-compilers"],
@@ -1267,7 +1248,7 @@ def main_script_replacements(cmd):
                 ]

                 if artifacts_root:
-                    job_dependencies.append(
+                    job_object["needs"].append(
                         {"job": generate_job_name, "pipeline": "{0}".format(parent_pipeline_id)}
                     )

@@ -1282,18 +1263,22 @@ def main_script_replacements(cmd):
                     build_stamp = cdash_handler.build_stamp
                     job_vars["SPACK_CDASH_BUILD_STAMP"] = build_stamp

-                variables.update(job_vars)
-
-                artifact_paths = [
-                    rel_job_log_dir,
-                    rel_job_repro_dir,
-                    rel_job_test_dir,
-                    rel_user_artifacts_dir,
-                ]
+                job_object["artifacts"] = spack.config.merge_yaml(
+                    job_object.get("artifacts", {}),
+                    {
+                        "when": "always",
+                        "paths": [
+                            rel_job_log_dir,
+                            rel_job_repro_dir,
+                            rel_job_test_dir,
+                            rel_user_artifacts_dir,
+                        ],
+                    },
+                )

                 if enable_artifacts_buildcache:
                     bc_root = os.path.join(local_mirror_dir, "build_cache")
-                    artifact_paths.extend(
+                    job_object["artifacts"]["paths"].extend(
                         [
                             os.path.join(bc_root, p)
                             for p in [
@@ -1303,33 +1288,15 @@ def main_script_replacements(cmd):
                         ]
                     )

-                job_object = {
-                    "stage": stage_name,
-                    "variables": variables,
-                    "script": job_script,
-                    "tags": tags,
-                    "artifacts": {"paths": artifact_paths, "when": "always"},
-                    "needs": sorted(job_dependencies, key=lambda d: d["job"]),
-                    "retry": {"max": 2, "when": JOB_RETRY_CONDITIONS},
-                    "interruptible": True,
-                }
+                job_object["stage"] = stage_name
+                job_object["retry"] = {"max": 2, "when": JOB_RETRY_CONDITIONS}
+                job_object["interruptible"] = True

-                length_needs = len(job_dependencies)
+                length_needs = len(job_object["needs"])
                 if length_needs > max_length_needs:
                     max_length_needs = length_needs
                     max_needs_job = job_name

-                if before_script:
-                    job_object["before_script"] = before_script
-
-                if after_script:
-                    job_object["after_script"] = after_script
-
-                if image_name:
-                    job_object["image"] = image_name
-                    if image_entry is not None:
-                        job_object["image"] = {"name": image_name, "entrypoint": image_entry}
-
                 output_object[job_name] = job_object
                 job_id += 1

@@ -41,6 +41,7 @@
 # CI target YAML for each job.
 attributes_schema = {
     "type": "object",
+    "additionalProperties": True,
     "properties": {
         "image": image_schema,
         "tags": {"type": "array", "items": {"type": "string"}},
@@ -56,7 +57,7 @@

 submapping_schema = {
     "type": "object",
-    "additinoalProperties": False,
+    "additionalProperties": False,
     "required": ["submapping"],
     "properties": {
         "match_behavior": {"type": "string", "enum": ["first", "merge"], "default": "first"},
@@ -201,6 +201,7 @@ def test_ci_generate_with_env(
           tags: [donotcare]
     - reindex-job:
         script:: [hello, world]
+        custom_attribute: custom!
   cdash:
     build-group: Not important
     url: https://my.fake.cdash
@@ -235,6 +236,7 @@ def test_ci_generate_with_env(
             rebuild_job = yaml_contents["rebuild-index"]
             expected = "spack buildcache update-index --keys --mirror-url {0}".format(mirror_url)
             assert rebuild_job["script"][0] == expected
+            assert rebuild_job["custom_attribute"] == "custom!"

             assert "variables" in yaml_contents
             assert "SPACK_ARTIFACTS_ROOT" in yaml_contents["variables"]
@@ -481,7 +483,7 @@ def test_ci_generate_with_cdash_token(
     assert filecmp.cmp(orig_file, copy_to_file) is True


-def test_ci_generate_with_custom_scripts(
+def test_ci_generate_with_custom_settings(
     tmpdir,
     working_env,
     mutable_mock_env_path,
@@ -491,7 +493,7 @@ def test_ci_generate_with_custom_scripts(
     ci_base_environment,
     mock_binary_index,
 ):
-    """Test use of user-provided scripts"""
+    """Test use of user-provided scripts and attributes"""
     filename = str(tmpdir.join("spack.yaml"))
     with open(filename, "w") as f:
         f.write(
@@ -523,6 +525,10 @@ def test_ci_generate_with_custom_scripts(
             - spack -d ci rebuild
           after_script:
             - rm -rf /some/path/spack
+          custom_attribute: custom!
+          artifacts:
+            paths:
+              - some/custom/artifact
 """
         )

@@ -540,40 +546,40 @@ def test_ci_generate_with_custom_scripts(

             found_it = False

-            assert "variables" in yaml_contents
             global_vars = yaml_contents["variables"]
-            assert "SPACK_VERSION" in global_vars
             assert global_vars["SPACK_VERSION"] == "0.15.3"
-            assert "SPACK_CHECKOUT_VERSION" in global_vars
             assert global_vars["SPACK_CHECKOUT_VERSION"] == "v0.15.3"

             for ci_key in yaml_contents.keys():
                 ci_obj = yaml_contents[ci_key]
                 if "archive-files" in ci_key:
                     # Ensure we have variables, possibly interpolated
-                    assert "variables" in ci_obj
                     var_d = ci_obj["variables"]
-                    assert "ONE" in var_d
                     assert var_d["ONE"] == "plain-string-value"
-                    assert "TWO" in var_d
                     assert var_d["TWO"] == "${INTERP_ON_BUILD}"

                     # Ensure we have scripts verbatim
-                    assert "before_script" in ci_obj
-                    before_script = ci_obj["before_script"]
-                    assert before_script[0] == "mkdir /some/path"
-                    assert before_script[1] == "pushd /some/path"
-                    assert before_script[2] == "git clone ${SPACK_REPO}"
-                    assert before_script[3] == "cd spack"
-                    assert before_script[4] == "git checkout ${SPACK_REF}"
-                    assert before_script[5] == "popd"
+                    assert ci_obj["before_script"] == [
+                        "mkdir /some/path",
+                        "pushd /some/path",
+                        "git clone ${SPACK_REPO}",
+                        "cd spack",
+                        "git checkout ${SPACK_REF}",
+                        "popd",
+                    ]
+                    assert ci_obj["script"][1].startswith("cd ")
+                    ci_obj["script"][1] = "cd ENV"
+                    assert ci_obj["script"] == [
+                        "spack -d ci rebuild",
+                        "cd ENV",
+                        "spack env activate --without-view .",
+                        "spack ci rebuild",
+                    ]
+                    assert ci_obj["after_script"] == ["rm -rf /some/path/spack"]

-                    assert "script" in ci_obj
-                    assert ci_obj["script"][0] == "spack -d ci rebuild"
-
-                    assert "after_script" in ci_obj
-                    after_script = ci_obj["after_script"][0]
-                    assert after_script == "rm -rf /some/path/spack"
+                    # Ensure we have the custom attributes
+                    assert "some/custom/artifact" in ci_obj["artifacts"]["paths"]
+                    assert ci_obj["custom_attribute"] == "custom!"

                     found_it = True

@@ -1223,6 +1229,7 @@ def test_push_mirror_contents(
           tags:
             - nonbuildtag
           image: basicimage
+          custom_attribute: custom!
 """.format(
                 mirror_url
             )
@@ -1264,6 +1271,7 @@ def test_push_mirror_contents(
             assert "nonbuildtag" in the_elt["tags"]
             assert "image" in the_elt
             assert the_elt["image"] == "basicimage"
+            assert the_elt["custom_attribute"] == "custom!"

         outputfile_not_pruned = str(tmpdir.join("unpruned_pipeline.yml"))
         ci_cmd("generate", "--no-prune-dag", "--output-file", outputfile_not_pruned)
@@ -1282,6 +1290,7 @@ def test_push_mirror_contents(
                 job_vars = the_elt["variables"]
                 assert "SPACK_SPEC_NEEDS_REBUILD" in job_vars
                 assert job_vars["SPACK_SPEC_NEEDS_REBUILD"] == "False"
+                assert the_elt["custom_attribute"] == "custom!"
                 found_spec_job = True

         assert found_spec_job
@@ -2005,6 +2014,8 @@ def test_ci_generate_temp_storage_url(
           tags:
             - donotcare
           image: donotcare
+    - cleanup-job:
+        custom_attribute: custom!
 """
         )

@@ -2021,6 +2032,8 @@ def test_ci_generate_temp_storage_url(
         assert "cleanup" in pipeline_doc
         cleanup_job = pipeline_doc["cleanup"]

+        assert cleanup_job["custom_attribute"] == "custom!"
+
         assert "script" in cleanup_job
         cleanup_task = cleanup_job["script"][0]

@@ -2138,6 +2151,7 @@ def test_ci_generate_external_signing_job(
           IMPORTANT_INFO: avalue
         script::
           - echo hello
+        custom_attribute: custom!
 """
         )

@@ -2157,6 +2171,7 @@ def test_ci_generate_external_signing_job(
     signing_job_tags = signing_job["tags"]
     for expected_tag in ["notary", "protected", "aws"]:
         assert expected_tag in signing_job_tags
+    assert signing_job["custom_attribute"] == "custom!"


 def test_ci_reproduce(