ci: Support secure binary signing on protected pipelines (#30753)

This PR supports the creation of securely signed binaries built from spack
develop as well as release branches and tags. Specifically:

- remove internal pr mirror url generation logic in favor of buildcache destination
on command line
    - with a single mirror url specified in the spack.yaml, this makes it clearer where 
    binaries from various pipelines are pushed
- designate some tags as reserved: ['public', 'protected', 'notary']
    - these tags are stripped from all jobs by default and provisioned internally
    based on pipeline type
- update gitlab ci yaml to include pipelines on more protected branches than just
develop (so include releases and tags)
    - binaries from all protected pipelines are pushed into mirrors including the
    branch name so releases, tags, and develop binaries are kept separate
- update rebuild jobs running on protected pipelines to run on special runners
provisioned with an intermediate signing key
    - protected rebuild jobs no longer use "SPACK_SIGNING_KEY" env var to
    obtain signing key (in fact, final signing key is nowhere available to rebuild jobs)
    - these intermediate signatures are verified at the end of each pipeline by a new
    signing job to ensure binaries were produced by a protected pipeline
- optionally schedule a signing/notary job at the end of the pipeline to sign all
packages in the mirror
    - add signing-job-attributes to gitlab-ci section of spack environment to allow
    configuration
    - signing job runs on special runner (separate from protected rebuild runners)
    provisioned with public intermediate key and secret signing key
This commit is contained in:
Scott Wittenburg 2022-05-26 08:31:22 -06:00 committed by GitHub
parent b5a519fa51
commit 85e13260cf
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
13 changed files with 534 additions and 172 deletions

View File

@ -210,7 +210,7 @@ def get_all_built_specs(self):
return spec_list
def find_built_spec(self, spec):
def find_built_spec(self, spec, mirrors_to_check=None):
"""Look in our cache for the built spec corresponding to ``spec``.
If the spec can be found among the configured binary mirrors, a
@ -225,6 +225,8 @@ def find_built_spec(self, spec):
Args:
spec (spack.spec.Spec): Concrete spec to find
mirrors_to_check: Optional mapping containing mirrors to check. If
None, just assumes all configured mirrors.
Returns:
An list of objects containing the found specs and mirror url where
@ -240,17 +242,23 @@ def find_built_spec(self, spec):
]
"""
self.regenerate_spec_cache()
return self.find_by_hash(spec.dag_hash())
return self.find_by_hash(spec.dag_hash(), mirrors_to_check=mirrors_to_check)
def find_by_hash(self, find_hash):
def find_by_hash(self, find_hash, mirrors_to_check=None):
    """Same as find_built_spec but uses the hash of a spec.

    Args:
        find_hash (str): hash of the spec to search
        mirrors_to_check: Optional mapping containing mirrors to check. If
            None, just assumes all configured mirrors.
    """
    # Look the hash up in the cached mapping; an unknown hash means no
    # mirror has a binary for this spec.
    try:
        cached_entries = self._mirrors_for_spec[find_hash]
    except KeyError:
        return None

    # With no restriction requested, every cached entry is fair game.
    if not mirrors_to_check:
        return cached_entries

    # Otherwise keep only entries whose mirror url belongs to one of the
    # mirrors the caller asked us to consider.
    allowed_urls = set(mirrors_to_check.values())
    return [entry for entry in cached_entries
            if entry['mirror_url'] in allowed_urls]
def update_spec(self, spec, found_list):
"""
@ -1822,7 +1830,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
tty.debug("No Spack mirrors are currently configured")
return {}
results = binary_index.find_built_spec(spec)
results = binary_index.find_built_spec(spec, mirrors_to_check=mirrors_to_check)
# Maybe we just didn't have the latest information from the mirror, so
# try to fetch directly, unless we are only considering the indices.

View File

@ -33,7 +33,6 @@
import spack.util.executable as exe
import spack.util.gpg as gpg_util
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
import spack.util.web as web_util
from spack.error import SpackError
from spack.spec import Spec
@ -42,10 +41,8 @@
'always',
]
SPACK_PR_MIRRORS_ROOT_URL = 's3://spack-binaries-prs'
SPACK_SHARED_PR_MIRROR_URL = url_util.join(SPACK_PR_MIRRORS_ROOT_URL,
'shared_pr_mirror')
TEMP_STORAGE_MIRROR_NAME = 'ci_temporary_mirror'
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
spack_gpg = spack.main.SpackCommand('gpg')
spack_compiler = spack.main.SpackCommand('compiler')
@ -199,6 +196,11 @@ def _get_cdash_build_name(spec, build_group):
spec.name, spec.version, spec.compiler, spec.architecture, build_group)
def _remove_reserved_tags(tags):
"""Convenience function to strip reserved tags from jobs"""
return [tag for tag in tags if tag not in SPACK_RESERVED_TAGS]
def _get_spec_string(spec):
format_elements = [
'{name}{@version}',
@ -231,8 +233,10 @@ def _add_dependency(spec_label, dep_label, deps):
deps[spec_label].add(dep_label)
def _get_spec_dependencies(specs, deps, spec_labels, check_index_only=False):
spec_deps_obj = _compute_spec_deps(specs, check_index_only=check_index_only)
def _get_spec_dependencies(specs, deps, spec_labels, check_index_only=False,
mirrors_to_check=None):
spec_deps_obj = _compute_spec_deps(specs, check_index_only=check_index_only,
mirrors_to_check=mirrors_to_check)
if spec_deps_obj:
dependencies = spec_deps_obj['dependencies']
@ -249,7 +253,7 @@ def _get_spec_dependencies(specs, deps, spec_labels, check_index_only=False):
_add_dependency(entry['spec'], entry['depends'], deps)
def stage_spec_jobs(specs, check_index_only=False):
def stage_spec_jobs(specs, check_index_only=False, mirrors_to_check=None):
"""Take a set of release specs and generate a list of "stages", where the
jobs in any stage are dependent only on jobs in previous stages. This
allows us to maximize build parallelism within the gitlab-ci framework.
@ -261,6 +265,8 @@ def stage_spec_jobs(specs, check_index_only=False):
are up to date on those mirrors. This flag limits that search to
the binary cache indices on those mirrors to speed the process up,
even though there is no guarantee the index is up to date.
mirrors_to_check: Optional mapping giving mirrors to check instead of
any configured mirrors.
Returns: A tuple of information objects describing the specs, dependencies
and stages:
@ -297,8 +303,8 @@ def _remove_satisfied_deps(deps, satisfied_list):
deps = {}
spec_labels = {}
_get_spec_dependencies(
specs, deps, spec_labels, check_index_only=check_index_only)
_get_spec_dependencies(specs, deps, spec_labels, check_index_only=check_index_only,
mirrors_to_check=mirrors_to_check)
# Save the original deps, as we need to return them at the end of the
# function. In the while loop below, the "dependencies" variable is
@ -340,7 +346,7 @@ def _print_staging_summary(spec_labels, dependencies, stages):
_get_spec_string(s)))
def _compute_spec_deps(spec_list, check_index_only=False):
def _compute_spec_deps(spec_list, check_index_only=False, mirrors_to_check=None):
"""
Computes all the dependencies for the spec(s) and generates a JSON
object which provides both a list of unique spec names as well as a
@ -413,7 +419,7 @@ def append_dep(s, d):
continue
up_to_date_mirrors = bindist.get_mirrors_for_spec(
spec=s, index_only=check_index_only)
spec=s, mirrors_to_check=mirrors_to_check, index_only=check_index_only)
skey = _spec_deps_key(s)
spec_labels[skey] = {
@ -602,8 +608,8 @@ def get_spec_filter_list(env, affected_pkgs, dependencies=True, dependents=True)
def generate_gitlab_ci_yaml(env, print_summary, output_file,
prune_dag=False, check_index_only=False,
run_optimizer=False, use_dependencies=False,
artifacts_root=None):
""" Generate a gitlab yaml file to run a dynamic chile pipeline from
artifacts_root=None, remote_mirror_override=None):
""" Generate a gitlab yaml file to run a dynamic child pipeline from
the spec matrix in the active environment.
Arguments:
@ -629,6 +635,10 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
artifacts_root (str): Path where artifacts like logs, environment
files (spack.yaml, spack.lock), etc should be written. GitLab
requires this to be within the project directory.
remote_mirror_override (str): Typically only needed when one spack.yaml
is used to populate several mirrors with binaries, based on some
criteria. Spack protected pipelines populate different mirrors based
on branch name, facilitated by this option.
"""
with spack.concretize.disable_compiler_existence_check():
with env.write_transaction():
@ -678,17 +688,19 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
for s in affected_specs:
tty.debug(' {0}'.format(s.name))
generate_job_name = os.environ.get('CI_JOB_NAME', None)
parent_pipeline_id = os.environ.get('CI_PIPELINE_ID', None)
# Downstream jobs will "need" (depend on, for both scheduling and
# artifacts, which include spack.lock file) this pipeline generation
# job by both name and pipeline id. If those environment variables
# do not exist, then maybe this is just running in a shell, in which
# case, there is no expectation gitlab will ever run the generated
# pipeline and those environment variables do not matter.
generate_job_name = os.environ.get('CI_JOB_NAME', 'job-does-not-exist')
parent_pipeline_id = os.environ.get('CI_PIPELINE_ID', 'pipeline-does-not-exist')
# Values: "spack_pull_request", "spack_protected_branch", or not set
spack_pipeline_type = os.environ.get('SPACK_PIPELINE_TYPE', None)
is_pr_pipeline = spack_pipeline_type == 'spack_pull_request'
spack_pr_branch = os.environ.get('SPACK_PR_BRANCH', None)
pr_mirror_url = None
if spack_pr_branch:
pr_mirror_url = url_util.join(SPACK_PR_MIRRORS_ROOT_URL,
spack_pr_branch)
spack_buildcache_copy = os.environ.get('SPACK_COPY_BUILDCACHE', None)
if 'mirrors' not in yaml_root or len(yaml_root['mirrors'].values()) < 1:
tty.die('spack ci generate requires an env containing a mirror')
@ -743,14 +755,25 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
'strip-compilers': False,
})
# Add per-PR mirror (and shared PR mirror) if enabled, as some specs might
# be up to date in one of those and thus not need to be rebuilt.
if pr_mirror_url:
spack.mirror.add(
'ci_pr_mirror', pr_mirror_url, cfg.default_modify_scope())
spack.mirror.add('ci_shared_pr_mirror',
SPACK_SHARED_PR_MIRROR_URL,
cfg.default_modify_scope())
# If a remote mirror override (alternate buildcache destination) was
# specified, add it here in case it has already built hashes we might
# generate.
mirrors_to_check = None
if remote_mirror_override:
if spack_pipeline_type == 'spack_protected_branch':
# Overriding the main mirror in this case might result
# in skipping jobs on a release pipeline because specs are
# up to date in develop. Eventually we want to notice and take
# advantage of this by scheduling a job to copy the spec from
# develop to the release, but until we have that, this makes
# sure we schedule a rebuild job if the spec isn't already in
# override mirror.
mirrors_to_check = {
'override': remote_mirror_override
}
else:
spack.mirror.add(
'ci_pr_mirror', remote_mirror_override, cfg.default_modify_scope())
pipeline_artifacts_dir = artifacts_root
if not pipeline_artifacts_dir:
@ -825,11 +848,13 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
phase_spec.concretize()
staged_phases[phase_name] = stage_spec_jobs(
concrete_phase_specs,
check_index_only=check_index_only)
check_index_only=check_index_only,
mirrors_to_check=mirrors_to_check)
finally:
# Clean up PR mirror if enabled
if pr_mirror_url:
spack.mirror.remove('ci_pr_mirror', cfg.default_modify_scope())
# Clean up remote mirror override if enabled
if remote_mirror_override:
if spack_pipeline_type != 'spack_protected_branch':
spack.mirror.remove('ci_pr_mirror', cfg.default_modify_scope())
all_job_names = []
output_object = {}
@ -889,6 +914,14 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
tags = [tag for tag in runner_attribs['tags']]
if spack_pipeline_type is not None:
# For spack pipelines "public" and "protected" are reserved tags
tags = _remove_reserved_tags(tags)
if spack_pipeline_type == 'spack_protected_branch':
tags.extend(['aws', 'protected'])
elif spack_pipeline_type == 'spack_pull_request':
tags.extend(['public'])
variables = {}
if 'variables' in runner_attribs:
variables.update(runner_attribs['variables'])
@ -1174,6 +1207,10 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
service_job_config,
cleanup_job)
if 'tags' in cleanup_job:
service_tags = _remove_reserved_tags(cleanup_job['tags'])
cleanup_job['tags'] = service_tags
cleanup_job['stage'] = 'cleanup-temp-storage'
cleanup_job['script'] = [
'spack -d mirror destroy --mirror-url {0}/$CI_PIPELINE_ID'.format(
@ -1181,9 +1218,74 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
]
cleanup_job['when'] = 'always'
cleanup_job['retry'] = service_job_retries
cleanup_job['interruptible'] = True
output_object['cleanup'] = cleanup_job
if ('signing-job-attributes' in gitlab_ci and
spack_pipeline_type == 'spack_protected_branch'):
# External signing: generate a job to check and sign binary pkgs
stage_names.append('stage-sign-pkgs')
signing_job_config = gitlab_ci['signing-job-attributes']
signing_job = {}
signing_job_attrs_to_copy = [
'image',
'tags',
'variables',
'before_script',
'script',
'after_script',
]
_copy_attributes(signing_job_attrs_to_copy,
signing_job_config,
signing_job)
signing_job_tags = []
if 'tags' in signing_job:
signing_job_tags = _remove_reserved_tags(signing_job['tags'])
for tag in ['aws', 'protected', 'notary']:
if tag not in signing_job_tags:
signing_job_tags.append(tag)
signing_job['tags'] = signing_job_tags
signing_job['stage'] = 'stage-sign-pkgs'
signing_job['when'] = 'always'
signing_job['retry'] = {
'max': 2,
'when': ['always']
}
signing_job['interruptible'] = True
output_object['sign-pkgs'] = signing_job
if spack_buildcache_copy:
# Generate a job to copy the contents from wherever the builds are getting
# pushed to the url specified in the "SPACK_COPY_BUILDCACHE" environment
# variable.
src_url = remote_mirror_override or remote_mirror_url
dest_url = spack_buildcache_copy
stage_names.append('stage-copy-buildcache')
copy_job = {
'stage': 'stage-copy-buildcache',
'tags': ['spack', 'public', 'medium', 'aws', 'x86_64'],
'image': 'ghcr.io/spack/python-aws-bash:0.0.1',
'when': 'on_success',
'interruptible': True,
'retry': service_job_retries,
'script': [
'. ./share/spack/setup-env.sh',
'spack --version',
'aws s3 sync --exclude *index.json* --exclude *pgp* {0} {1}'.format(
src_url, dest_url)
]
}
output_object['copy-mirror'] = copy_job
if rebuild_index_enabled:
# Add a final job to regenerate the index
stage_names.append('stage-rebuild-index')
@ -1194,9 +1296,13 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
service_job_config,
final_job)
if 'tags' in final_job:
service_tags = _remove_reserved_tags(final_job['tags'])
final_job['tags'] = service_tags
index_target_mirror = mirror_urls[0]
if is_pr_pipeline:
index_target_mirror = pr_mirror_url
if remote_mirror_override:
index_target_mirror = remote_mirror_override
final_job['stage'] = 'stage-rebuild-index'
final_job['script'] = [
@ -1205,6 +1311,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
]
final_job['when'] = 'always'
final_job['retry'] = service_job_retries
final_job['interruptible'] = True
output_object['rebuild-index'] = final_job
@ -1237,8 +1344,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
'SPACK_PIPELINE_TYPE': str(spack_pipeline_type)
}
if pr_mirror_url:
output_object['variables']['SPACK_PR_MIRROR_URL'] = pr_mirror_url
if remote_mirror_override:
(output_object['variables']
['SPACK_REMOTE_MIRROR_OVERRIDE']) = remote_mirror_override
spack_stack_name = os.environ.get('SPACK_CI_STACK_NAME', None)
if spack_stack_name:

View File

@ -64,6 +64,11 @@ def setup_parser(subparser):
'--dependencies', action='store_true', default=False,
help="(Experimental) disable DAG scheduling; use "
' "plain" dependencies.')
generate.add_argument(
'--buildcache-destination', default=None,
help="Override the mirror configured in the environment (spack.yaml) " +
"in order to push binaries from the generated pipeline to a " +
"different location.")
prune_group = generate.add_mutually_exclusive_group()
prune_group.add_argument(
'--prune-dag', action='store_true', dest='prune_dag',
@ -127,6 +132,7 @@ def ci_generate(args):
prune_dag = args.prune_dag
index_only = args.index_only
artifacts_root = args.artifacts_root
buildcache_destination = args.buildcache_destination
if not output_file:
output_file = os.path.abspath(".gitlab-ci.yml")
@ -140,7 +146,8 @@ def ci_generate(args):
spack_ci.generate_gitlab_ci_yaml(
env, True, output_file, prune_dag=prune_dag,
check_index_only=index_only, run_optimizer=run_optimizer,
use_dependencies=use_dependencies, artifacts_root=artifacts_root)
use_dependencies=use_dependencies, artifacts_root=artifacts_root,
remote_mirror_override=buildcache_destination)
if copy_yaml_to:
copy_to_dir = os.path.dirname(copy_yaml_to)
@ -180,6 +187,9 @@ def ci_rebuild(args):
if not gitlab_ci:
tty.die('spack ci rebuild requires an env containing gitlab-ci cfg')
tty.msg('SPACK_BUILDCACHE_DESTINATION={0}'.format(
os.environ.get('SPACK_BUILDCACHE_DESTINATION', None)))
# Grab the environment variables we need. These either come from the
# pipeline generation step ("spack ci generate"), where they were written
# out as variables, or else provided by GitLab itself.
@ -196,7 +206,7 @@ def ci_rebuild(args):
compiler_action = get_env_var('SPACK_COMPILER_ACTION')
cdash_build_name = get_env_var('SPACK_CDASH_BUILD_NAME')
spack_pipeline_type = get_env_var('SPACK_PIPELINE_TYPE')
pr_mirror_url = get_env_var('SPACK_PR_MIRROR_URL')
remote_mirror_override = get_env_var('SPACK_REMOTE_MIRROR_OVERRIDE')
remote_mirror_url = get_env_var('SPACK_REMOTE_MIRROR_URL')
# Construct absolute paths relative to current $CI_PROJECT_DIR
@ -244,6 +254,10 @@ def ci_rebuild(args):
tty.debug('Pipeline type - PR: {0}, develop: {1}'.format(
spack_is_pr_pipeline, spack_is_develop_pipeline))
# If no override url exists, then just push binary package to the
# normal remote mirror url.
buildcache_mirror_url = remote_mirror_override or remote_mirror_url
# Figure out what is our temporary storage mirror: Is it artifacts
# buildcache? Or temporary-storage-url-prefix? In some cases we need to
# force something or pipelines might not have a way to propagate build
@ -373,7 +387,24 @@ def ci_rebuild(args):
cfg.default_modify_scope())
# Check configured mirrors for a built spec with a matching hash
matches = bindist.get_mirrors_for_spec(job_spec, index_only=False)
mirrors_to_check = None
if remote_mirror_override and spack_pipeline_type == 'spack_protected_branch':
# Passing "mirrors_to_check" below means we *only* look in the override
# mirror to see if we should skip building, which is what we want.
mirrors_to_check = {
'override': remote_mirror_override
}
# Adding this mirror to the list of configured mirrors means dependencies
# could be installed from either the override mirror or any other configured
# mirror (e.g. remote_mirror_url which is defined in the environment or
# pipeline_mirror_url), which is also what we want.
spack.mirror.add('mirror_override',
remote_mirror_override,
cfg.default_modify_scope())
matches = bindist.get_mirrors_for_spec(
job_spec, mirrors_to_check=mirrors_to_check, index_only=False)
if matches:
# Got a hash match on at least one configured mirror. All
@ -517,13 +548,6 @@ def ci_rebuild(args):
# any logs from the staging directory to artifacts now
spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)
# Create buildcache on remote mirror, either on pr-specific mirror or
# on the main mirror defined in the gitlab-enabled spack environment
if spack_is_pr_pipeline:
buildcache_mirror_url = pr_mirror_url
else:
buildcache_mirror_url = remote_mirror_url
# If the install succeeded, create a buildcache entry for this job spec
# and push it to one or more mirrors. If the install did not succeed,
# print out some instructions on how to reproduce this build failure

View File

@ -110,6 +110,7 @@
},
},
'service-job-attributes': runner_selector_schema,
'signing-job-attributes': runner_selector_schema,
'rebuild-index': {'type': 'boolean'},
'broken-specs-url': {'type': 'string'},
},

View File

@ -635,10 +635,6 @@ def test_ci_generate_for_pr_pipeline(tmpdir, mutable_mock_env_path,
outputfile = str(tmpdir.join('.gitlab-ci.yml'))
with ev.read('test'):
monkeypatch.setattr(
ci, 'SPACK_PR_MIRRORS_ROOT_URL', r"file:///fake/mirror")
monkeypatch.setattr(
ci, 'SPACK_SHARED_PR_MIRROR_URL', r"file:///fake/mirror_two")
ci_cmd('generate', '--output-file', outputfile)
with open(outputfile) as f:
@ -683,10 +679,6 @@ def test_ci_generate_with_external_pkg(tmpdir, mutable_mock_env_path,
outputfile = str(tmpdir.join('.gitlab-ci.yml'))
with ev.read('test'):
monkeypatch.setattr(
ci, 'SPACK_PR_MIRRORS_ROOT_URL', r"file:///fake/mirror")
monkeypatch.setattr(
ci, 'SPACK_SHARED_PR_MIRROR_URL', r"file:///fake/mirror_two")
ci_cmd('generate', '--output-file', outputfile)
with open(outputfile) as f:
@ -920,6 +912,77 @@ def fake_dl_method(spec, *args, **kwargs):
env_cmd('deactivate')
def test_ci_generate_mirror_override(tmpdir, mutable_mock_env_path,
install_mockery_mutable_config, mock_packages,
mock_fetch, mock_stage, mock_binary_index,
ci_base_environment):
"""Ensure that protected pipelines using --buildcache-destination do not
skip building specs that are not in the override mirror when they are
found in the main mirror."""
# Protected pipelines are the pipeline type for which the override mirror
# is checked exclusively when deciding whether to rebuild a spec.
os.environ.update({
'SPACK_PIPELINE_TYPE': 'spack_protected_branch',
})
# Local file:// mirror that will receive the pre-built patchelf binary.
working_dir = tmpdir.join('working_dir')
mirror_dir = working_dir.join('mirror')
mirror_url = 'file://{0}'.format(mirror_dir.strpath)
spack_yaml_contents = """
spack:
definitions:
- packages: [patchelf]
specs:
- $packages
mirrors:
test-mirror: {0}
gitlab-ci:
mappings:
- match:
- patchelf
runner-attributes:
tags:
- donotcare
image: donotcare
service-job-attributes:
tags:
- nonbuildtag
image: basicimage
""".format(mirror_url)
filename = str(tmpdir.join('spack.yaml'))
with open(filename, 'w') as f:
f.write(spack_yaml_contents)
with tmpdir.as_cwd():
env_cmd('create', 'test', './spack.yaml')
# Two generated pipelines: one without and one with the override.
first_ci_yaml = str(tmpdir.join('.gitlab-ci-1.yml'))
second_ci_yaml = str(tmpdir.join('.gitlab-ci-2.yml'))
with ev.read('test'):
# Install patchelf and publish it (plus an index) to the main mirror.
install_cmd()
buildcache_cmd('create', '-u', '--mirror-url', mirror_url, 'patchelf')
buildcache_cmd('update-index', '--mirror-url', mirror_url, output=str)
# This generate should not trigger a rebuild of patchelf, since it's in
# the main mirror referenced in the environment.
ci_cmd('generate', '--check-index-only', '--output-file', first_ci_yaml)
# Because we used a mirror override (--buildcache-destination) on a
# spack protected pipeline, we expect to only look in the override
# mirror for the spec, and thus the patchelf job should be generated in
# this pipeline
ci_cmd('generate', '--check-index-only', '--output-file', second_ci_yaml,
'--buildcache-destination', 'file:///mirror/not/exist')
with open(first_ci_yaml) as fd1:
first_yaml = fd1.read()
# No rebuild expected: spec already lives in the configured mirror.
assert 'no-specs-to-rebuild' in first_yaml
with open(second_ci_yaml) as fd2:
second_yaml = fd2.read()
# Rebuild expected: the override mirror does not contain the spec.
assert 'no-specs-to-rebuild' not in second_yaml
@pytest.mark.disable_clean_stage_check
def test_push_mirror_contents(tmpdir, mutable_mock_env_path,
install_mockery_mutable_config, mock_packages,
@ -1151,10 +1214,6 @@ def test_ci_generate_override_runner_attrs(tmpdir, mutable_mock_env_path,
with ev.read('test'):
monkeypatch.setattr(
spack.main, 'get_version', lambda: '0.15.3-416-12ad69eb1')
monkeypatch.setattr(
ci, 'SPACK_PR_MIRRORS_ROOT_URL', r"file:///fake/mirror")
monkeypatch.setattr(
ci, 'SPACK_SHARED_PR_MIRROR_URL', r"file:///fake/mirror_two")
ci_cmd('generate', '--output-file', outputfile)
with open(outputfile) as f:
@ -1256,10 +1315,6 @@ def test_ci_generate_with_workarounds(tmpdir, mutable_mock_env_path,
outputfile = str(tmpdir.join('.gitlab-ci.yml'))
with ev.read('test'):
monkeypatch.setattr(
ci, 'SPACK_PR_MIRRORS_ROOT_URL', r"file:///fake/mirror")
monkeypatch.setattr(
ci, 'SPACK_SHARED_PR_MIRROR_URL', r"file:///fake/mirror_two")
ci_cmd('generate', '--output-file', outputfile, '--dependencies')
with open(outputfile) as f:
@ -1417,11 +1472,6 @@ def fake_get_mirrors_for_spec(spec=None, mirrors_to_check=None,
outputfile = str(tmpdir.join('.gitlab-ci.yml'))
with ev.read('test'):
monkeypatch.setattr(
ci, 'SPACK_PR_MIRRORS_ROOT_URL', r"file:///fake/mirror")
monkeypatch.setattr(
ci, 'SPACK_SHARED_PR_MIRROR_URL', r"file:///fake/mirror_two")
ci_cmd('generate', '--output-file', outputfile)
with open(outputfile) as of:
@ -1630,11 +1680,6 @@ def test_ci_generate_temp_storage_url(tmpdir, mutable_mock_env_path,
env_cmd('create', 'test', './spack.yaml')
outputfile = str(tmpdir.join('.gitlab-ci.yml'))
monkeypatch.setattr(
ci, 'SPACK_PR_MIRRORS_ROOT_URL', r"file:///fake/mirror")
monkeypatch.setattr(
ci, 'SPACK_SHARED_PR_MIRROR_URL', r"file:///fake/mirror_two")
with ev.read('test'):
ci_cmd('generate', '--output-file', outputfile)
@ -1715,6 +1760,64 @@ def test_ci_generate_read_broken_specs_url(tmpdir, mutable_mock_env_path,
assert(ex not in output)
def test_ci_generate_external_signing_job(tmpdir, mutable_mock_env_path,
install_mockery,
mock_packages, monkeypatch,
ci_base_environment):
"""Verify that in external signing mode: 1) each rebuild jobs includes
the location where the binary hash information is written and 2) we
properly generate a final signing job in the pipeline."""
# Signing jobs are only scheduled on protected pipelines, so force
# that pipeline type via the environment.
os.environ.update({
'SPACK_PIPELINE_TYPE': 'spack_protected_branch'
})
# Environment includes a signing-job-attributes section, which is what
# should cause a 'sign-pkgs' job to appear in the generated pipeline.
filename = str(tmpdir.join('spack.yaml'))
with open(filename, 'w') as f:
f.write("""\
spack:
specs:
- archive-files
mirrors:
some-mirror: https://my.fake.mirror
gitlab-ci:
temporary-storage-url-prefix: file:///work/temp/mirror
mappings:
- match:
- archive-files
runner-attributes:
tags:
- donotcare
image: donotcare
signing-job-attributes:
tags:
- nonbuildtag
- secretrunner
image:
name: customdockerimage
entrypoint: []
variables:
IMPORTANT_INFO: avalue
script:
- echo hello
""")
with tmpdir.as_cwd():
env_cmd('create', 'test', './spack.yaml')
outputfile = str(tmpdir.join('.gitlab-ci.yml'))
with ev.read('test'):
ci_cmd('generate', '--output-file', outputfile)
with open(outputfile) as of:
pipeline_doc = syaml.load(of.read())
assert 'sign-pkgs' in pipeline_doc
signing_job = pipeline_doc['sign-pkgs']
assert 'tags' in signing_job
signing_job_tags = signing_job['tags']
# The generator provisions the reserved 'notary'/'protected' tags (plus
# 'aws') on the signing job in addition to the user-supplied tags.
for expected_tag in ['notary', 'protected', 'aws']:
assert expected_tag in signing_job_tags
def test_ci_reproduce(tmpdir, mutable_mock_env_path,
install_mockery, mock_packages, monkeypatch,
last_two_git_commits, ci_base_environment, mock_binary_index):

View File

@ -1,4 +1,4 @@
stages: [ "generate", "build" ]
stages: [ "generate", "build", "publish" ]
default:
image: { "name": "ghcr.io/spack/e4s-ubuntu-18.04:v2021-10-18", "entrypoint": [""] }
@ -9,16 +9,25 @@ default:
.pr:
only:
- /^pr[\d]+_.*$/
- /^github\/pr[\d]+_.*$/
variables:
SPACK_PR_BRANCH: ${CI_COMMIT_REF_NAME}
SPACK_BUILDCACHE_DESTINATION: "s3://spack-binaries-prs/${CI_COMMIT_REF_NAME}"
SPACK_PIPELINE_TYPE: "spack_pull_request"
SPACK_PRUNE_UNTOUCHED: "True"
.develop:
.protected-refs:
only:
- /^develop$/
- /^releases\/v.*/
- /^v.*/
- /^github\/develop$/
.protected:
extends: [ ".protected-refs" ]
variables:
SPACK_BUILDCACHE_DESTINATION: "s3://spack-binaries/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME}"
SPACK_COPY_BUILDCACHE: "s3://spack-binaries/${CI_COMMIT_REF_NAME}"
SPACK_PIPELINE_TYPE: "spack_protected_branch"
.generate:
@ -29,12 +38,13 @@ default:
- cd share/spack/gitlab/cloud_pipelines/stacks/${SPACK_CI_STACK_NAME}
- spack env activate --without-view .
- spack ci generate --check-index-only
--buildcache-destination "${SPACK_BUILDCACHE_DESTINATION}"
--artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir"
--output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/cloud-ci-pipeline.yml"
artifacts:
paths:
- "${CI_PROJECT_DIR}/jobs_scratch_dir"
tags: ["spack", "public", "medium", "x86_64"]
tags: ["spack", "aws", "public", "medium", "x86_64"]
interruptible: true
retry:
max: 2
@ -45,8 +55,8 @@ default:
.pr-generate:
extends: [ ".pr", ".generate" ]
.develop-generate:
extends: [ ".develop", ".generate" ]
.protected-generate:
extends: [ ".protected", ".generate" ]
.build:
stage: build
@ -57,12 +67,24 @@ default:
AWS_ACCESS_KEY_ID: ${PR_MIRRORS_AWS_ACCESS_KEY_ID}
AWS_SECRET_ACCESS_KEY: ${PR_MIRRORS_AWS_SECRET_ACCESS_KEY}
.develop-build:
extends: [ ".develop", ".build" ]
.protected-build:
extends: [ ".protected", ".build" ]
variables:
AWS_ACCESS_KEY_ID: ${PROTECTED_MIRRORS_AWS_ACCESS_KEY_ID}
AWS_SECRET_ACCESS_KEY: ${PROTECTED_MIRRORS_AWS_SECRET_ACCESS_KEY}
SPACK_SIGNING_KEY: ${PACKAGE_SIGNING_KEY}
protected-publish:
stage: publish
extends: [ ".protected-refs" ]
image: "ghcr.io/spack/python-aws-bash:0.0.1"
tags: ["spack", "public", "medium", "aws", "x86_64"]
variables:
AWS_ACCESS_KEY_ID: ${PROTECTED_MIRRORS_AWS_ACCESS_KEY_ID}
AWS_SECRET_ACCESS_KEY: ${PROTECTED_MIRRORS_AWS_SECRET_ACCESS_KEY}
script:
- . "./share/spack/setup-env.sh"
- spack --version
- spack buildcache update-index --mirror-url "s3://spack-binaries/${CI_COMMIT_REF_NAME}"
########################################
# TEMPLATE FOR ADDING ANOTHER PIPELINE
@ -83,8 +105,8 @@ default:
# my-super-cool-stack-pr-generate:
# extends: [ ".my-super-cool-stack", ".pr-generate"]
#
# my-super-cool-stack-develop-generate:
# extends: [ ".my-super-cool-stack", ".develop-generate"]
# my-super-cool-stack-protected-generate:
# extends: [ ".my-super-cool-stack", ".protected-generate"]
#
# my-super-cool-stack-pr-build:
# extends: [ ".my-super-cool-stack", ".pr-build" ]
@ -94,24 +116,62 @@ default:
# job: my-super-cool-stack-pr-generate
# strategy: depend
#
# my-super-cool-stack-develop-build:
# extends: [ ".my-super-cool-stack", ".develop-build" ]
# my-super-cool-stack-protected-build:
# extends: [ ".my-super-cool-stack", ".protected-build" ]
# trigger:
# include:
# - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
# job: my-super-cool-stack-develop-generate
# job: my-super-cool-stack-protected-generate
# strategy: depend
########################################
# E4S Mac Stack
# E4S Mac Stack
#
# With no near-future plans to have
# protected aws runners running mac
# builds, it seems best to decouple
# them from the rest of the stacks for
# the time being. This way they can
# still run on UO runners and be signed
# using the previous approach.
########################################
.e4s-mac:
variables:
SPACK_CI_STACK_NAME: e4s-mac
allow_failure: True
.mac-pr:
only:
- /^pr[\d]+_.*$/
- /^github\/pr[\d]+_.*$/
variables:
SPACK_BUILDCACHE_DESTINATION: "s3://spack-binaries-prs/${CI_COMMIT_REF_NAME}"
SPACK_PRUNE_UNTOUCHED: "True"
.mac-protected:
only:
- /^develop$/
- /^releases\/v.*/
- /^v.*/
- /^github\/develop$/
variables:
SPACK_BUILDCACHE_DESTINATION: "s3://spack-binaries/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME}"
.mac-pr-build:
extends: [ ".mac-pr", ".build" ]
variables:
AWS_ACCESS_KEY_ID: ${PR_MIRRORS_AWS_ACCESS_KEY_ID}
AWS_SECRET_ACCESS_KEY: ${PR_MIRRORS_AWS_SECRET_ACCESS_KEY}
.mac-protected-build:
extends: [ ".mac-protected", ".build" ]
variables:
AWS_ACCESS_KEY_ID: ${PROTECTED_MIRRORS_AWS_ACCESS_KEY_ID}
AWS_SECRET_ACCESS_KEY: ${PROTECTED_MIRRORS_AWS_SECRET_ACCESS_KEY}
SPACK_SIGNING_KEY: ${PACKAGE_SIGNING_KEY}
e4s-mac-pr-generate:
extends: [".e4s-mac", ".pr"]
extends: [".e4s-mac", ".mac-pr"]
stage: generate
script:
- tmp="$(mktemp -d)"; export SPACK_USER_CONFIG_PATH="$tmp"; export SPACK_USER_CACHE_PATH="$tmp"
@ -135,8 +195,8 @@ e4s-mac-pr-generate:
- stuck_or_timeout_failure
timeout: 60 minutes
e4s-mac-develop-generate:
extends: [".e4s-mac", ".develop"]
e4s-mac-protected-generate:
extends: [".e4s-mac", ".mac-protected"]
stage: generate
script:
- tmp="$(mktemp -d)"; export SPACK_USER_CONFIG_PATH="$tmp"; export SPACK_USER_CACHE_PATH="$tmp"
@ -161,7 +221,7 @@ e4s-mac-develop-generate:
timeout: 60 minutes
e4s-mac-pr-build:
extends: [ ".e4s-mac", ".pr-build" ]
extends: [ ".e4s-mac", ".mac-pr-build" ]
trigger:
include:
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
@ -171,16 +231,16 @@ e4s-mac-pr-build:
- artifacts: True
job: e4s-mac-pr-generate
e4s-mac-develop-build:
extends: [ ".e4s-mac", ".develop-build" ]
e4s-mac-protected-build:
extends: [ ".e4s-mac", ".mac-protected-build" ]
trigger:
include:
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
job: e4s-mac-develop-generate
job: e4s-mac-protected-generate
strategy: depend
needs:
- artifacts: True
job: e4s-mac-develop-generate
job: e4s-mac-protected-generate
########################################
# E4S pipeline
@ -192,8 +252,8 @@ e4s-mac-develop-build:
e4s-pr-generate:
extends: [ ".e4s", ".pr-generate"]
e4s-develop-generate:
extends: [ ".e4s", ".develop-generate"]
e4s-protected-generate:
extends: [ ".e4s", ".protected-generate"]
e4s-pr-build:
extends: [ ".e4s", ".pr-build" ]
@ -206,16 +266,16 @@ e4s-pr-build:
- artifacts: True
job: e4s-pr-generate
e4s-develop-build:
extends: [ ".e4s", ".develop-build" ]
e4s-protected-build:
extends: [ ".e4s", ".protected-build" ]
trigger:
include:
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
job: e4s-develop-generate
job: e4s-protected-generate
strategy: depend
needs:
- artifacts: True
job: e4s-develop-generate
job: e4s-protected-generate
########################################
# E4S on Power
@ -231,8 +291,8 @@ e4s-develop-build:
# e4s-on-power-pr-generate:
# extends: [ ".e4s-on-power", ".pr-generate", ".power-e4s-generate-tags-and-image"]
# e4s-on-power-develop-generate:
# extends: [ ".e4s-on-power", ".develop-generate", ".power-e4s-generate-tags-and-image"]
# e4s-on-power-protected-generate:
# extends: [ ".e4s-on-power", ".protected-generate", ".power-e4s-generate-tags-and-image"]
# e4s-on-power-pr-build:
# extends: [ ".e4s-on-power", ".pr-build" ]
@ -245,16 +305,16 @@ e4s-develop-build:
# - artifacts: True
# job: e4s-on-power-pr-generate
# e4s-on-power-develop-build:
# extends: [ ".e4s-on-power", ".develop-build" ]
# e4s-on-power-protected-build:
# extends: [ ".e4s-on-power", ".protected-build" ]
# trigger:
# include:
# - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
# job: e4s-on-power-develop-generate
# job: e4s-on-power-protected-generate
# strategy: depend
# needs:
# - artifacts: True
# job: e4s-on-power-develop-generate
# job: e4s-on-power-protected-generate
#########################################
# Build tests for different build-systems
@ -266,8 +326,8 @@ e4s-develop-build:
build_systems-pr-generate:
extends: [ ".build_systems", ".pr-generate"]
build_systems-develop-generate:
extends: [ ".build_systems", ".develop-generate"]
build_systems-protected-generate:
extends: [ ".build_systems", ".protected-generate"]
build_systems-pr-build:
extends: [ ".build_systems", ".pr-build" ]
@ -280,16 +340,16 @@ build_systems-pr-build:
- artifacts: True
job: build_systems-pr-generate
build_systems-develop-build:
extends: [ ".build_systems", ".develop-build" ]
build_systems-protected-build:
extends: [ ".build_systems", ".protected-build" ]
trigger:
include:
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
job: build_systems-develop-generate
job: build_systems-protected-generate
strategy: depend
needs:
- artifacts: True
job: build_systems-develop-generate
job: build_systems-protected-generate
#########################################
# RADIUSS
@ -313,20 +373,20 @@ radiuss-pr-build:
- artifacts: True
job: radiuss-pr-generate
# --------- Develop ---------
radiuss-develop-generate:
extends: [ ".radiuss", ".develop-generate" ]
# --------- Protected ---------
radiuss-protected-generate:
extends: [ ".radiuss", ".protected-generate" ]
radiuss-develop-build:
extends: [ ".radiuss", ".develop-build" ]
radiuss-protected-build:
extends: [ ".radiuss", ".protected-build" ]
trigger:
include:
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
job: radiuss-develop-generate
job: radiuss-protected-generate
strategy: depend
needs:
- artifacts: True
job: radiuss-develop-generate
job: radiuss-protected-generate
########################################
# ECP Data & Vis SDK
@ -338,8 +398,8 @@ radiuss-develop-build:
data-vis-sdk-pr-generate:
extends: [ ".data-vis-sdk", ".pr-generate"]
data-vis-sdk-develop-generate:
extends: [ ".data-vis-sdk", ".develop-generate"]
data-vis-sdk-protected-generate:
extends: [ ".data-vis-sdk", ".protected-generate"]
data-vis-sdk-pr-build:
extends: [ ".data-vis-sdk", ".pr-build" ]
@ -352,16 +412,16 @@ data-vis-sdk-pr-build:
- artifacts: True
job: data-vis-sdk-pr-generate
data-vis-sdk-develop-build:
extends: [ ".data-vis-sdk", ".develop-build" ]
data-vis-sdk-protected-build:
extends: [ ".data-vis-sdk", ".protected-build" ]
trigger:
include:
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
job: data-vis-sdk-develop-generate
job: data-vis-sdk-protected-generate
strategy: depend
needs:
- artifacts: True
job: data-vis-sdk-develop-generate
job: data-vis-sdk-protected-generate
########################################
# Spack Tutorial
@ -373,8 +433,8 @@ data-vis-sdk-develop-build:
tutorial-pr-generate:
extends: [ ".tutorial", ".pr-generate"]
tutorial-develop-generate:
extends: [ ".tutorial", ".develop-generate"]
tutorial-protected-generate:
extends: [ ".tutorial", ".protected-generate"]
tutorial-pr-build:
extends: [ ".tutorial", ".pr-build" ]
@ -387,13 +447,13 @@ tutorial-pr-build:
- artifacts: True
job: tutorial-pr-generate
tutorial-develop-build:
extends: [ ".tutorial", ".develop-build" ]
tutorial-protected-build:
extends: [ ".tutorial", ".protected-build" ]
trigger:
include:
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
job: tutorial-develop-generate
job: tutorial-protected-generate
strategy: depend
needs:
- artifacts: True
job: tutorial-develop-generate
job: tutorial-protected-generate

View File

@ -29,7 +29,7 @@ spack:
- - $default_specs
- - $arch
mirrors: { "mirror": "s3://spack-binaries/build_systems" }
mirrors: { "mirror": "s3://spack-binaries/develop/build_systems" }
gitlab-ci:
script:
@ -38,6 +38,8 @@ spack:
- cd ${SPACK_CONCRETE_ENV_DIR}
- spack env activate --without-view .
- spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
- if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi
- if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi
- spack -d ci rebuild
image:
@ -48,7 +50,7 @@ spack:
- match:
- cmake
runner-attributes:
tags: [ "spack", "public", "large", "x86_64"]
tags: [ "spack", "large", "x86_64"]
variables:
CI_JOB_SIZE: large
KUBERNETES_CPU_REQUEST: 8000m
@ -61,7 +63,7 @@ spack:
- openjpeg
- sqlite
runner-attributes:
tags: [ "spack", "public", "medium", "x86_64" ]
tags: [ "spack", "medium", "x86_64" ]
variables:
CI_JOB_SIZE: "medium"
KUBERNETES_CPU_REQUEST: "2000m"
@ -85,7 +87,7 @@ spack:
- xz
- zlib
runner-attributes:
tags: [ "spack", "public", "medium", "x86_64" ]
tags: [ "spack", "medium", "x86_64" ]
variables:
CI_JOB_SIZE: "small"
KUBERNETES_CPU_REQUEST: "500m"
@ -94,18 +96,27 @@ spack:
- match:
- 'os=ubuntu18.04'
runner-attributes:
tags: ["spack", "public", "x86_64"]
tags: ["spack", "x86_64"]
variables:
CI_JOB_SIZE: "default"
broken-specs-url: "s3://spack-binaries/broken-specs"
broken-specs-url: "s3://spack-binaries-develop/broken-specs"
service-job-attributes:
before_script:
- . "./share/spack/setup-env.sh"
- spack --version
image: { "name": "ghcr.io/spack/e4s-ubuntu-18.04:v2021-10-18", "entrypoint": [""] }
tags: ["spack", "public", "x86_64"]
signing-job-attributes:
image: { "name": "ghcr.io/spack/notary:latest", "entrypoint": [""] }
tags: ["spack", "aws"]
script:
- aws s3 sync --exclude "*" --include "*spec.json*" ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache /tmp
- /sign.sh
- aws s3 sync --exclude "*" --include "*spec.json.sig*" /tmp ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache
cdash:
build-group: Build tests for different build systems
url: https://cdash.spack.io

View File

@ -42,7 +42,7 @@ spack:
+zfp
+visit
mirrors: { "mirror": "s3://spack-binaries/data-vis-sdk" }
mirrors: { "mirror": "s3://spack-binaries/develop/data-vis-sdk" }
gitlab-ci:
image: { "name": "ghcr.io/spack/e4s-ubuntu-18.04:v2021-10-18", "entrypoint": [""] }
@ -52,13 +52,15 @@ spack:
- cd ${SPACK_CONCRETE_ENV_DIR}
- spack env activate --without-view .
- spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
- if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi
- if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi
- spack -d ci rebuild
mappings:
- match:
- llvm
- qt
runner-attributes:
tags: [ "spack", "public", "huge", "x86_64" ]
tags: [ "spack", "huge", "x86_64" ]
variables:
CI_JOB_SIZE: huge
KUBERNETES_CPU_REQUEST: 11000m
@ -72,7 +74,7 @@ spack:
- visit
- vtk-m
runner-attributes:
tags: [ "spack", "public", "large", "x86_64" ]
tags: [ "spack", "large", "x86_64" ]
variables:
CI_JOB_SIZE: large
KUBERNETES_CPU_REQUEST: 8000m
@ -98,7 +100,7 @@ spack:
- raja
- vtk-h
runner-attributes:
tags: [ "spack", "public", "medium", "x86_64" ]
tags: [ "spack", "medium", "x86_64" ]
variables:
CI_JOB_SIZE: "medium"
KUBERNETES_CPU_REQUEST: "2000m"
@ -133,7 +135,7 @@ spack:
- util-linux-uuid
runner-attributes:
tags: [ "spack", "public", "small", "x86_64" ]
tags: [ "spack", "small", "x86_64" ]
variables:
CI_JOB_SIZE: "small"
KUBERNETES_CPU_REQUEST: "500m"
@ -141,11 +143,12 @@ spack:
- match: ['@:']
runner-attributes:
tags: ["spack", "public", "x86_64"]
tags: ["spack", "x86_64"]
variables:
CI_JOB_SIZE: "default"
broken-specs-url: "s3://spack-binaries-develop/broken-specs"
broken-specs-url: "s3://spack-binaries/broken-specs"
service-job-attributes:
image: { "name": "ghcr.io/spack/e4s-ubuntu-18.04:v2021-10-18", "entrypoint": [""] }
before_script:
@ -153,6 +156,14 @@ spack:
- spack --version
tags: ["spack", "public", "medium", "x86_64"]
signing-job-attributes:
image: { "name": "ghcr.io/spack/notary:latest", "entrypoint": [""] }
tags: ["spack", "aws"]
script:
- aws s3 sync --exclude "*" --include "*spec.json*" ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache /tmp
- /sign.sh
- aws s3 sync --exclude "*" --include "*spec.json.sig*" /tmp ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache
cdash:
build-group: Data and Vis SDK
url: https://cdash.spack.io

View File

@ -32,7 +32,7 @@ spack:
- - $easy_specs
- - $arch
mirrors: { "mirror": "s3://spack-binaries/e4s-mac" }
mirrors: { "mirror": "s3://spack-binaries/develop/e4s-mac" }
gitlab-ci:
@ -51,7 +51,9 @@ spack:
runner-attributes:
tags:
- omicron
broken-specs-url: "s3://spack-binaries-develop/broken-specs"
broken-specs-url: "s3://spack-binaries/broken-specs"
service-job-attributes:
before_script:
- . "./share/spack/setup-env.sh"

View File

@ -222,7 +222,7 @@ spack:
- - $cuda_specs
- - $arch
mirrors: { "mirror": "s3://spack-binaries/e4s" }
mirrors: { "mirror": "s3://spack-binaries/develop/e4s" }
gitlab-ci:
@ -233,6 +233,8 @@ spack:
- spack env activate --without-view .
- spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
- mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data
- if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi
- if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi
- spack -d ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)
image: { "name": "ghcr.io/spack/e4s-ubuntu-18.04:v2021-10-18", "entrypoint": [""] }
@ -240,7 +242,7 @@ spack:
- match:
- llvm
runner-attributes:
tags: [ "spack", "public", "huge", "x86_64" ]
tags: [ "spack", "huge", "x86_64" ]
variables:
CI_JOB_SIZE: huge
KUBERNETES_CPU_REQUEST: 11000m
@ -265,7 +267,7 @@ spack:
- vtk-m
- warpx
runner-attributes:
tags: [ "spack", "public", "large", "x86_64" ]
tags: [ "spack", "large", "x86_64" ]
variables:
CI_JOB_SIZE: large
KUBERNETES_CPU_REQUEST: 8000m
@ -333,7 +335,7 @@ spack:
- vtk-h
- zfp
runner-attributes:
tags: [ "spack", "public", "medium", "x86_64" ]
tags: [ "spack", "medium", "x86_64" ]
variables:
CI_JOB_SIZE: "medium"
KUBERNETES_CPU_REQUEST: "2000m"
@ -394,7 +396,7 @@ spack:
- zlib
- zstd
runner-attributes:
tags: [ "spack", "public", "small", "x86_64" ]
tags: [ "spack", "small", "x86_64" ]
variables:
CI_JOB_SIZE: "small"
KUBERNETES_CPU_REQUEST: "500m"
@ -402,11 +404,12 @@ spack:
- match: ['os=ubuntu18.04']
runner-attributes:
tags: ["spack", "public", "x86_64"]
tags: ["spack", "x86_64"]
variables:
CI_JOB_SIZE: "default"
broken-specs-url: "s3://spack-binaries-develop/broken-specs"
broken-specs-url: "s3://spack-binaries/broken-specs"
service-job-attributes:
before_script:
- . "./share/spack/setup-env.sh"
@ -414,6 +417,14 @@ spack:
image: { "name": "ghcr.io/spack/e4s-ubuntu-18.04:v2021-10-18", "entrypoint": [""] }
tags: ["spack", "public", "x86_64"]
signing-job-attributes:
image: { "name": "ghcr.io/spack/notary:latest", "entrypoint": [""] }
tags: ["spack", "aws"]
script:
- aws s3 sync --exclude "*" --include "*spec.json*" ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache /tmp
- /sign.sh
- aws s3 sync --exclude "*" --include "*spec.json.sig*" /tmp ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache
cdash:
build-group: New PR testing workflow
url: https://cdash.spack.io

View File

@ -54,7 +54,7 @@ spack:
- zfp
mirrors:
mirror: "s3://spack-binaries/radiuss"
mirror: "s3://spack-binaries/develop/radiuss"
specs:
- matrix:
@ -69,6 +69,8 @@ spack:
- cd ${SPACK_CONCRETE_ENV_DIR}
- spack env activate --without-view .
- spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
- if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi
- if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi
- spack -d ci rebuild
mappings:
- match:
@ -76,7 +78,7 @@ spack:
- openblas
- rust
runner-attributes:
tags: ["spack", "public", "large", "x86_64"]
tags: ["spack", "large", "x86_64"]
variables:
CI_JOB_SIZE: large
KUBERNETES_CPU_REQUEST: 8000m
@ -96,7 +98,7 @@ spack:
- vtk-h
- vtk-m
runner-attributes:
tags: ["spack", "public", "medium", "x86_64"]
tags: ["spack", "medium", "x86_64"]
variables:
CI_JOB_SIZE: "medium"
KUBERNETES_CPU_REQUEST: "2000m"
@ -150,7 +152,7 @@ spack:
- zfp
- zlib
runner-attributes:
tags: ["spack", "public", "small", "x86_64"]
tags: ["spack", "small", "x86_64"]
variables:
CI_JOB_SIZE: "small"
KUBERNETES_CPU_REQUEST: "500m"
@ -158,10 +160,12 @@ spack:
- match: ['os=ubuntu18.04']
runner-attributes:
tags: ["spack", "public", "x86_64"]
tags: ["spack", "x86_64"]
variables:
CI_JOB_SIZE: "default"
broken-specs-url: "s3://spack-binaries/broken-specs"
service-job-attributes:
before_script:
- . "./share/spack/setup-env.sh"
@ -169,6 +173,14 @@ spack:
image: { "name": "ghcr.io/spack/e4s-ubuntu-18.04:v2021-10-18", "entrypoint": [""] }
tags: ["spack", "public", "x86_64"]
signing-job-attributes:
image: { "name": "ghcr.io/spack/notary:latest", "entrypoint": [""] }
tags: ["spack", "aws"]
script:
- aws s3 sync --exclude "*" --include "*spec.json*" ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache /tmp
- /sign.sh
- aws s3 sync --exclude "*" --include "*spec.json.sig*" /tmp ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache
cdash:
build-group: RADIUSS
url: https://cdash.spack.io

View File

@ -59,7 +59,7 @@ spack:
- $gcc_spack_built_packages
mirrors:
mirror: 's3://spack-binaries/tutorial'
mirror: 's3://spack-binaries/develop/tutorial'
gitlab-ci:
script:
@ -69,6 +69,8 @@ spack:
- cd ${SPACK_CONCRETE_ENV_DIR}
- spack env activate --without-view .
- spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
- if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi
- if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi
- spack -d ci rebuild
image: { "name": "ghcr.io/spack/tutorial-ubuntu-18.04:v2021-11-02", "entrypoint": [""] }
@ -81,7 +83,7 @@ spack:
- netlib-lapack
- trilinos
runner-attributes:
tags: ["spack", "public", "large", "x86_64"]
tags: ["spack", "large", "x86_64"]
variables:
CI_JOB_SIZE: large
KUBERNETES_CPU_REQUEST: 8000m
@ -99,7 +101,7 @@ spack:
- py-scipy
- slurm
runner-attributes:
tags: ["spack", "public", "medium", "x86_64"]
tags: ["spack", "medium", "x86_64"]
variables:
CI_JOB_SIZE: "medium"
KUBERNETES_CPU_REQUEST: "2000m"
@ -129,7 +131,7 @@ spack:
- tar
- util-linux-uuid
runner-attributes:
tags: ["spack", "public", "small", "x86_64"]
tags: ["spack", "small", "x86_64"]
variables:
CI_JOB_SIZE: "small"
KUBERNETES_CPU_REQUEST: "500m"
@ -137,11 +139,12 @@ spack:
- match: ['@:']
runner-attributes:
tags: ["spack", "public", "x86_64"]
tags: ["spack", "x86_64"]
variables:
CI_JOB_SIZE: default
broken-specs-url: "s3://spack-binaries-develop/broken-specs"
broken-specs-url: "s3://spack-binaries/broken-specs"
service-job-attributes:
image: { "name": "ghcr.io/spack/tutorial-ubuntu-18.04:v2021-11-02", "entrypoint": [""] }
before_script:
@ -149,6 +152,14 @@ spack:
- spack --version
tags: ["spack", "public", "x86_64"]
signing-job-attributes:
image: { "name": "ghcr.io/spack/notary:latest", "entrypoint": [""] }
tags: ["spack", "aws"]
script:
- aws s3 sync --exclude "*" --include "*spec.json*" ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache /tmp
- /sign.sh
- aws s3 sync --exclude "*" --include "*spec.json.sig*" /tmp ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache
cdash:
build-group: Spack Tutorial
url: https://cdash.spack.io

View File

@ -626,7 +626,7 @@ _spack_ci() {
}
_spack_ci_generate() {
SPACK_COMPREPLY="-h --help --output-file --copy-to --optimize --dependencies --prune-dag --no-prune-dag --check-index-only --artifacts-root"
SPACK_COMPREPLY="-h --help --output-file --copy-to --optimize --dependencies --buildcache-destination --prune-dag --no-prune-dag --check-index-only --artifacts-root"
}
_spack_ci_rebuild_index() {