gitlab ci: release fixes and improvements (#37601)

* gitlab ci: release fixes and improvements

  - use rules to reduce boilerplate in .gitlab-ci.yml (see the sketch after this list)
  - support copy-only pipeline jobs
  - make pipelines for release branches rebuild everything
  - make pipelines for protected tags copy-only
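
A minimal sketch of how shared rules might cut the per-job boilerplate and steer
release branches and protected tags into different pipeline types. The branch/tag
patterns and the "spack_protected_branch" value are illustrative assumptions; only
"spack_copy_only" is taken from the code changes below.

    # Illustrative only -- patterns, job names, and values are assumptions.
    .release-rules:
      rules:
        # Pipelines for release branches rebuild everything.
        - if: '$CI_COMMIT_BRANCH =~ /^releases\//'
          variables:
            SPACK_PIPELINE_TYPE: "spack_protected_branch"
        # Pipelines for protected tags only copy existing binaries.
        - if: '$CI_COMMIT_TAG =~ /^v[\d.]+$/'
          variables:
            SPACK_PIPELINE_TYPE: "spack_copy_only"

    generate-pipeline:
      extends: [".release-rules"]
      script:
        - spack ci generate --output-file generated-pipeline.yml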

* gitlab ci: remove url changes used in testing

* gitlab ci: tag mirrors need public key

Make sure that mirrors associated with release branches and tags
contain the public key needed to verify the signed binaries.  This
also ensures that when stack-specific mirror contents are copied
to the root, the root mirror has the public key as well.
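
A rough sketch of how the key could be pushed to a mirror from a pipeline job.
The job name, stage, and mirror variable are hypothetical, and the exact
spack gpg publish flags should be checked against the Spack version in use.

    # Hypothetical job: publish the signing public key to the buildcache
    # mirror so consumers of the signed binaries can verify them.
    publish-signing-key:
      stage: signing
      script:
        # --mirror-url is an assumed flag spelling; a configured mirror
        # name may be accepted instead, depending on the Spack version.
        - spack gpg publish --mirror-url "${SPACK_BUILDCACHE_MIRROR_URL}"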

* review: be more specific about tags, curl flags

* Make the check in ci.yaml consistent with the .gitlab-ci.yml

---------

Co-authored-by: Ryan Krattiger <ryan.krattiger@kitware.com>
Author: Scott Wittenburg
Date: 2023-05-12 14:22:42 -06:00
Committed by: GitHub
Commit: c08be95d5e (parent 4e5fb62679)
4 changed files with 218 additions and 464 deletions


@@ -531,7 +531,7 @@ def __init__(self, ci_config, phases, staged_phases):
         """
         self.ci_config = ci_config
-        self.named_jobs = ["any", "build", "cleanup", "noop", "reindex", "signing"]
+        self.named_jobs = ["any", "build", "copy", "cleanup", "noop", "reindex", "signing"]
         self.ir = {
             "jobs": {},
@@ -1207,7 +1207,7 @@ def main_script_replacements(cmd):
                 ).format(c_spec, release_spec)
                 tty.debug(debug_msg)
-            if prune_dag and not rebuild_spec:
+            if prune_dag and not rebuild_spec and spack_pipeline_type != "spack_copy_only":
                 tty.debug(
                     "Pruning {0}/{1}, does not need rebuild.".format(
                         release_spec.name, release_spec.dag_hash()
@@ -1298,8 +1298,9 @@ def main_script_replacements(cmd):
                     max_length_needs = length_needs
                     max_needs_job = job_name
-            output_object[job_name] = job_object
-            job_id += 1
+            if spack_pipeline_type != "spack_copy_only":
+                output_object[job_name] = job_object
+                job_id += 1
     if print_summary:
         for phase in phases:
@@ -1329,6 +1330,17 @@ def main_script_replacements(cmd):
         "when": ["runner_system_failure", "stuck_or_timeout_failure", "script_failure"],
     }
+    if spack_pipeline_type == "spack_copy_only":
+        stage_names.append("copy")
+        sync_job = copy.deepcopy(spack_ci_ir["jobs"]["copy"]["attributes"])
+        sync_job["stage"] = "copy"
+        if artifacts_root:
+            sync_job["needs"] = [
+                {"job": generate_job_name, "pipeline": "{0}".format(parent_pipeline_id)}
+            ]
+        output_object["copy"] = sync_job
+        job_id += 1
     if job_id > 0:
         if temp_storage_url_prefix:
             # There were some rebuild jobs scheduled, so we will need to
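
For reference, a hedged sketch of roughly what the generated copy job could look
like in the child pipeline, based on the fields set above; the job name and
pipeline id are placeholders, not actual output.

    # Approximate shape only -- derived from sync_job above.
    copy:
      stage: copy
      needs:
        - job: generate-pipeline     # placeholder for generate_job_name
          pipeline: "123456789"      # placeholder for parent_pipeline_id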


@@ -89,6 +89,11 @@
            "additionalProperties": False,
            "properties": {"build-job": attributes_schema, "build-job-remove": attributes_schema},
        },
+       {
+           "type": "object",
+           "additionalProperties": False,
+           "properties": {"copy-job": attributes_schema, "copy-job-remove": attributes_schema},
+       },
        {
            "type": "object",
            "additionalProperties": False,