Implement an optional compiler bootstrapping phase

committed by Todd Gamblin

parent 5323a5cff9
commit 6d745a56fd
@@ -1,11 +1,19 @@
spack:
  definitions:
    - compiler-pkgs:
      - 'llvm+clang@6.0.1 os=centos7'
      - 'gcc@6.5.0 os=centos7'
      - 'llvm+clang@6.0.1 os=ubuntu18.04'
      - 'gcc@6.5.0 os=ubuntu18.04'
    - pkgs:
      - readline@7.0
      # - xsdk@0.4.0
    - compilers:
      - '%gcc@5.5.0'
      - '%gcc@6.5.0'
      - '%gcc@7.3.0'
      - '%clang@6.0.0'
      - '%clang@6.0.1'
    - oses:
      - os=ubuntu18.04
      - os=centos7
@@ -17,15 +25,15 @@ spack:
      - [$oses]
    exclude:
      - '%gcc@7.3.0 os=centos7'
      - '%gcc@5.5.0 os=ubuntu18.04'

  mirrors:
    cloud_gitlab: https://mirror.spack.io

  compilers:
    # The .gitlab-ci.yml for this project picks a Docker container which is
    # based on ubuntu18.04 and which already has some compilers configured.
    # Here we just add some of the ones which are defined on a different
    # builder image.
    # The .gitlab-ci.yml for this project picks a Docker container which does
    # not have any compilers pre-built and ready to use, so we need to fake the
    # existence of those here.
    - compiler:
        operating_system: centos7
        modules: []
@@ -36,6 +44,16 @@ spack:
          fc: /not/used
        spec: gcc@5.5.0
        target: x86_64
    - compiler:
        operating_system: centos7
        modules: []
        paths:
          cc: /not/used
          cxx: /not/used
          f77: /not/used
          fc: /not/used
        spec: gcc@6.5.0
        target: x86_64
    - compiler:
        operating_system: centos7
        modules: []
@@ -46,11 +64,64 @@ spack:
          fc: /not/used
        spec: clang@6.0.0
        target: x86_64
    - compiler:
        operating_system: centos7
        modules: []
        paths:
          cc: /not/used
          cxx: /not/used
          f77: /not/used
          fc: /not/used
        spec: clang@6.0.1
        target: x86_64

    - compiler:
        operating_system: ubuntu18.04
        modules: []
        paths:
          cc: /not/used
          cxx: /not/used
          f77: /not/used
          fc: /not/used
        spec: clang@6.0.0
        target: x86_64
    - compiler:
        operating_system: ubuntu18.04
        modules: []
        paths:
          cc: /not/used
          cxx: /not/used
          f77: /not/used
          fc: /not/used
        spec: clang@6.0.1
        target: x86_64
    - compiler:
        operating_system: ubuntu18.04
        modules: []
        paths:
          cc: /not/used
          cxx: /not/used
          f77: /not/used
          fc: /not/used
        spec: gcc@6.5.0
        target: x86_64
    - compiler:
        operating_system: ubuntu18.04
        modules: []
        paths:
          cc: /not/used
          cxx: /not/used
          f77: /not/used
          fc: /not/used
        spec: gcc@7.3.0
        target: x86_64

  gitlab-ci:
    bootstrap:
      - name: compiler-pkgs
        compiler-agnostic: true
    mappings:
      - spack-cloud-ubuntu:
      - # spack-cloud-ubuntu
        match:
          # these are specs, if *any* match the spec under consideration, this
          # 'mapping' will be used to generate the CI job
@@ -61,8 +132,10 @@ spack:
          # a part of the CI workflow
          tags:
            - spack-k8s
          image: scottwittenburg/spack_builder_ubuntu_18.04
      - spack-cloud-centos:
          image:
            name: scottwittenburg/spack_builder_ubuntu_18.04
            entrypoint: [""]
      - # spack-cloud-centos
        match:
          # these are specs, if *any* match the spec under consideration, this
          # 'mapping' will be used to generate the CI job
@@ -70,28 +143,15 @@ spack:
        runner-attributes:
          tags:
            - spack-k8s
          image: spack/centos:7
      - summit:
          match:
            - os=rhel7
            - target=power9
            - platform=secret-sauce
          runner-attributes:
            tags:
              # this is a set of tags
              - summit
              - '{os}-{target}'
              - rhel7
              - centos7
              - x86_64
            variables:
              SCHEDULER_ARGS: "arg2 arg2"
          image:
            name: scottwittenburg/spack_builder_centos_7
            entrypoint: [""]

  cdash:
    build-group: Release Testing
    url: https://cdash.spack.io
    url: http://cdash
    project: Spack Testing
    site: Spack AWS Gitlab Instance
    site: Spack Docker-Compose Workflow

  repos: []
  upstreams: {}
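
The two comment lines about 'match' above describe the selection rule the generator applies: the first mapping whose 'match' list contains *any* spec string the spec under consideration satisfies supplies the runner attributes. A minimal sketch of that rule, assuming mappings shaped like the yaml above (the real logic lives in find_matching_config in the generator):

    def find_matching_config(spec, ci_mappings):
        # First mapping with *any* matching spec string wins
        for ci_mapping in ci_mappings:
            for match_string in ci_mapping['match']:
                if spec.satisfies(match_string):
                    return ci_mapping['runner-attributes']
        return None
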
@@ -801,7 +801,7 @@ def _download_buildcache_entry(mirror_root, descriptions):
    for description in descriptions:
        url = os.path.join(mirror_root, description['url'])
        path = description['path']
        fail_if_missing = not description['required']
        fail_if_missing = description['required']

        mkdirp(path)

@@ -5,6 +5,7 @@

import argparse
import os
import shutil
import sys

import llnl.util.tty as tty
@@ -176,8 +177,11 @@ def setup_parser(subparser):
    saveyaml = subparsers.add_parser('save-yaml',
                                     help=save_spec_yamls.__doc__)
    saveyaml.add_argument(
        '-r', '--root-spec', default=None,
        '--root-spec', default=None,
        help='Root spec of dependent spec')
    saveyaml.add_argument(
        '--root-spec-yaml', default=None,
        help='Path to yaml file containing root spec of dependent spec')
    saveyaml.add_argument(
        '-s', '--specs', default=None,
        help='List of dependent specs for which saved yaml is desired')
@@ -186,6 +190,19 @@ def setup_parser(subparser):
        help='Path to directory where spec yamls should be saved')
    saveyaml.set_defaults(func=save_spec_yamls)

    # Copy buildcache from some directory to another mirror url
    copy = subparsers.add_parser('copy', help=buildcache_copy.__doc__)
    copy.add_argument(
        '--base-dir', default=None,
        help='Path to mirror directory (root of existing buildcache)')
    copy.add_argument(
        '--spec-yaml', default=None,
        help='Path to spec yaml file representing buildcache entry to copy')
    copy.add_argument(
        '--destination-url', default=None,
        help='Destination mirror url')
    copy.set_defaults(func=buildcache_copy)


def find_matching_specs(pkgs, allow_multiple_matches=False, env=None):
    """Returns a list of specs matching the not necessarily
@@ -526,7 +543,7 @@ def save_spec_yamls(args):
    successful. If any errors or exceptions are encountered, or if expected
    command-line arguments are not provided, then the exit code will be
    non-zero."""
    if not args.root_spec:
    if not args.root_spec and not args.root_spec_yaml:
        tty.msg('No root spec provided, exiting.')
        sys.exit(1)

@@ -538,9 +555,13 @@ def save_spec_yamls(args):
        tty.msg('No yaml directory provided, exiting.')
        sys.exit(1)

    root_spec = Spec(args.root_spec)
    root_spec.concretize()
    root_spec_as_yaml = root_spec.to_yaml(hash=ht.build_hash)
    if args.root_spec_yaml:
        with open(args.root_spec_yaml) as fd:
            root_spec_as_yaml = fd.read()
    else:
        root_spec = Spec(args.root_spec)
        root_spec.concretize()
        root_spec_as_yaml = root_spec.to_yaml(hash=ht.build_hash)

    save_dependency_spec_yamls(
        root_spec_as_yaml, args.yaml_dir, args.specs.split())
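
The new '--root-spec-yaml' branch lets a downstream CI job reuse a root spec that was concretized earlier in the pipeline instead of re-concretizing it locally. An illustrative invocation, with hypothetical file names and assuming the yaml-directory option whose help string appears above is spelled '--yaml-dir':

    spack buildcache save-yaml --root-spec-yaml root.yaml --specs 'readline' --yaml-dir ./yamls
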
@@ -548,6 +569,78 @@ def save_spec_yamls(args):
    sys.exit(0)


def buildcache_copy(args):
    """Copy a buildcache entry and all its files from one mirror, given as
    '--base-dir', to some other mirror, specified as '--destination-url'.
    The specific buildcache entry to be copied from one location to the
    other is identified using the '--spec-yaml' argument."""
    # TODO: This sub-command should go away once #11117 is merged

    if not args.spec_yaml:
        tty.msg('No spec yaml provided, exiting.')
        sys.exit(1)

    if not args.base_dir:
        tty.msg('No base directory provided, exiting.')
        sys.exit(1)

    if not args.destination_url:
        tty.msg('No destination mirror url provided, exiting.')
        sys.exit(1)

    dest_url = args.destination_url

    if dest_url[0:7] != 'file://' and dest_url[0] != '/':
        tty.msg('Only urls beginning with "file://" or "/" are supported ' +
                'by buildcache copy.')
        sys.exit(1)

    try:
        with open(args.spec_yaml, 'r') as fd:
            spec = Spec.from_yaml(fd.read())
    except Exception as e:
        tty.debug(e)
        tty.error('Unable to concretize spec from yaml {0}'.format(
            args.spec_yaml))
        sys.exit(1)

    dest_root_path = dest_url
    if dest_url[0:7] == 'file://':
        dest_root_path = dest_url[7:]

    build_cache_dir = bindist.build_cache_relative_path()

    tarball_rel_path = os.path.join(
        build_cache_dir, bindist.tarball_path_name(spec, '.spack'))
    tarball_src_path = os.path.join(args.base_dir, tarball_rel_path)
    tarball_dest_path = os.path.join(dest_root_path, tarball_rel_path)

    specfile_rel_path = os.path.join(
        build_cache_dir, bindist.tarball_name(spec, '.spec.yaml'))
    specfile_src_path = os.path.join(args.base_dir, specfile_rel_path)
    specfile_dest_path = os.path.join(dest_root_path, specfile_rel_path)

    cdashidfile_rel_path = os.path.join(
        build_cache_dir, bindist.tarball_name(spec, '.cdashid'))
    cdashid_src_path = os.path.join(args.base_dir, cdashidfile_rel_path)
    cdashid_dest_path = os.path.join(dest_root_path, cdashidfile_rel_path)

    # Make sure directory structure exists before attempting to copy
    os.makedirs(os.path.dirname(tarball_dest_path))

    # Now copy the specfile and tarball files to the destination mirror
    tty.msg('Copying {0}'.format(tarball_rel_path))
    shutil.copyfile(tarball_src_path, tarball_dest_path)

    tty.msg('Copying {0}'.format(specfile_rel_path))
    shutil.copyfile(specfile_src_path, specfile_dest_path)

    # Copy the cdashid file (if exists) to the destination mirror
    if os.path.exists(cdashid_src_path):
        tty.msg('Copying {0}'.format(cdashidfile_rel_path))
        shutil.copyfile(cdashid_src_path, cdashid_dest_path)


def buildcache(parser, args):
    if args.func:
        args.func(args)
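
An illustrative invocation of the new sub-command (all paths and urls hypothetical):

    spack buildcache copy --base-dir /data/mirror --spec-yaml ./readline.spec.yaml --destination-url file:///tmp/mirror-copy

Per the check above, only 'file://' urls and plain paths are accepted as destinations.
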
@@ -3,9 +3,10 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import base64
import json
import zlib

from jsonschema import validate, ValidationError
from six import iteritems
from six.moves.urllib.error import HTTPError, URLError
from six.moves.urllib.parse import urlencode
@@ -14,10 +15,11 @@
import llnl.util.tty as tty

import spack.environment as ev
import spack.compilers as compilers
from spack.dependency import all_deptypes
from spack.error import SpackError
import spack.hash_types as ht
from spack.spec import Spec
from spack.schema.specs_deps import schema as specs_deps_schema
import spack.util.spack_yaml as syaml

description = "generate release build set as .gitlab-ci.yml"
@@ -26,18 +28,10 @@


def setup_parser(subparser):
    subparser.add_argument(
        '-f', '--force', action='store_true', default=False,
        help="Force re-concretization of environment first")

    subparser.add_argument(
        '-o', '--output-file', default=".gitlab-ci.yml",
        help="path to output file to write")

    subparser.add_argument(
        '-k', '--signing-key', default=None,
        help="hash of gpg key to use for package signing")

    subparser.add_argument(
        '-p', '--print-summary', action='store_true', default=False,
        help="Print summary of staged jobs to standard output")
@@ -54,7 +48,9 @@ def _create_buildgroup(opener, headers, url, project, group_name, group_type):
        "type": group_type
    }

    request = Request(url, data=json.dumps(data), headers=headers)
    enc_data = json.dumps(data).encode('utf-8')

    request = Request(url, data=enc_data, headers=headers)

    response = opener.open(request)
    response_code = response.getcode()
@@ -103,7 +99,9 @@ def populate_buildgroup(job_names, group_name, project, site,
        } for name in job_names]
    }

    request = Request(url, data=json.dumps(data), headers=headers)
    enc_data = json.dumps(data).encode('utf-8')

    request = Request(url, data=enc_data, headers=headers)
    request.get_method = lambda: 'PUT'

    response = opener.open(request)
@@ -115,9 +113,43 @@ def populate_buildgroup(job_names, group_name, project, site,
        raise SpackError(msg)

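In both hunks the JSON payload is now encoded to bytes before building the Request: Python 3's urllib rejects str request bodies, so json.dumps(data).encode('utf-8') is needed where Python 2 tolerated the bare string.
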
def get_job_name(spec, osarch, build_group):
    return '{0} {1} {2} {3} {4}'.format(
        spec.name, spec.version, spec.compiler, osarch, build_group)
def is_main_phase(phase_name):
    return True if phase_name == 'specs' else False


def get_job_name(phase, strip_compiler, spec, osarch, build_group):
    item_idx = 0
    format_str = ''
    format_args = []

    if phase:
        format_str += '({{{0}}})'.format(item_idx)
        format_args.append(phase)
        item_idx += 1

    format_str += ' {{{0}}}'.format(item_idx)
    format_args.append(spec.name)
    item_idx += 1

    format_str += ' {{{0}}}'.format(item_idx)
    format_args.append(spec.version)
    item_idx += 1

    if is_main_phase(phase) is True or strip_compiler is False:
        format_str += ' {{{0}}}'.format(item_idx)
        format_args.append(spec.compiler)
        item_idx += 1

    format_str += ' {{{0}}}'.format(item_idx)
    format_args.append(osarch)
    item_idx += 1

    if build_group:
        format_str += ' {{{0}}}'.format(item_idx)
        format_args.append(build_group)
        item_idx += 1

    return format_str.format(*format_args)
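
For orientation: a compiler-agnostic bootstrap job for gcc@6.5.0 on linux-centos7-x86_64 in build group 'Release Testing' gets a name like '(compiler-pkgs) gcc 6.5.0 linux-centos7-x86_64 Release Testing' (the compiler component is stripped), while main-phase ('specs') jobs keep the compiler in the name.
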

def get_cdash_build_name(spec, build_group):
@@ -137,6 +169,17 @@ def get_spec_string(spec):
    return spec.format(''.join(format_elements))


def format_root_spec(spec, main_phase, strip_compiler):
    if main_phase is False and strip_compiler is True:
        return '{0}@{1} arch={2}'.format(
            spec.name, spec.version, spec.architecture)
    else:
        spec_yaml = spec.to_yaml(hash=ht.build_hash).encode('utf-8')
        return str(base64.b64encode(zlib.compress(spec_yaml)).decode('utf-8'))
        # return '{0}@{1}%{2} arch={3}'.format(
        #     spec.name, spec.version, spec.compiler, spec.architecture)
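
A sketch of the consuming side, for orientation only; the helper name is hypothetical and not part of this commit:

    import base64
    import zlib

    from spack.spec import Spec

    def decode_root_spec(encoded):
        # Invert the base64 + zlib encoding applied by format_root_spec
        yaml_text = zlib.decompress(base64.b64decode(encoded)).decode('utf-8')
        return Spec.from_yaml(yaml_text)
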
def spec_deps_key_label(s):
    return s.dag_hash(), "%s/%s" % (s.name, s.dag_hash(7))

@@ -152,14 +195,6 @@ def _add_dependency(spec_label, dep_label, deps):
def get_spec_dependencies(specs, deps, spec_labels):
    spec_deps_obj = compute_spec_deps(specs)

    try:
        validate(spec_deps_obj, specs_deps_schema)
    except ValidationError as val_err:
        tty.error('Ill-formed specs dependencies JSON object')
        tty.error(spec_deps_obj)
        tty.debug(val_err)
        return

    if spec_deps_obj:
        dependencies = spec_deps_obj['dependencies']
        specs = spec_deps_obj['specs']
@@ -247,19 +282,19 @@ def print_staging_summary(spec_labels, dependencies, stages):
    if not stages:
        return

    tty.msg('Staging summary:')
    tty.msg('  Staging summary:')
    stage_index = 0
    for stage in stages:
        tty.msg('  stage {0} ({1} jobs):'.format(stage_index, len(stage)))
        tty.msg('    stage {0} ({1} jobs):'.format(stage_index, len(stage)))

        for job in sorted(stage):
            s = spec_labels[job]['spec']
            tty.msg('    {0} -> {1}'.format(job, get_spec_string(s)))
            tty.msg('      {0} -> {1}'.format(job, get_spec_string(s)))

        stage_index += 1


def compute_spec_deps(spec_list, stream_like=None):
def compute_spec_deps(spec_list):
    """
    Computes all the dependencies for the spec(s) and generates a JSON
    object which provides both a list of unique spec names as well as a
@@ -311,10 +346,6 @@ def compute_spec_deps(spec_list, stream_like=None):
        ]
    }

    The object can be optionally written out to some stream. This is
    useful, for example, when we need to concretize and generate the
    dependencies of a spec in a specific docker container.

    """
    deptype = all_deptypes
    spec_labels = {}
@@ -331,7 +362,8 @@ def append_dep(s, d):
    for spec in spec_list:
        spec.concretize()

        root_spec = get_spec_string(spec)
        # root_spec = get_spec_string(spec)
        root_spec = spec

        rkey, rlabel = spec_deps_key_label(spec)

@@ -359,9 +391,6 @@ def append_dep(s, d):
        'dependencies': dependencies,
    }

    if stream_like:
        stream_like.write(json.dumps(deps_json_obj))

    return deps_json_obj

@@ -379,7 +408,6 @@ def find_matching_config(spec, ci_mappings):

def release_jobs(parser, args):
    env = ev.get_env(args, 'release-jobs', required=True)
    env.concretize(force=args.force)

    # FIXME: What's the difference between one that opens with 'spack'
    # and one that opens with 'env'? This will only handle the former.
@@ -390,122 +418,219 @@ def release_jobs(parser, args):

    ci_mappings = yaml_root['gitlab-ci']['mappings']

    ci_cdash = yaml_root['cdash']
    build_group = ci_cdash['build-group']
    cdash_url = ci_cdash['url']
    cdash_project = ci_cdash['project']
    proj_enc = urlencode({'project': cdash_project})
    eq_idx = proj_enc.find('=') + 1
    cdash_project_enc = proj_enc[eq_idx:]
    cdash_site = ci_cdash['site']
    build_group = None
    enable_cdash_reporting = False
    cdash_auth_token = None

    if args.cdash_credentials:
        with open(args.cdash_credentials) as fd:
            cdash_auth_token = fd.read()
            cdash_auth_token = cdash_auth_token.strip()
    if 'cdash' in yaml_root:
        enable_cdash_reporting = True
        ci_cdash = yaml_root['cdash']
        build_group = ci_cdash['build-group']
        cdash_url = ci_cdash['url']
        cdash_project = ci_cdash['project']
        proj_enc = urlencode({'project': cdash_project})
        eq_idx = proj_enc.find('=') + 1
        cdash_project_enc = proj_enc[eq_idx:]
        cdash_site = ci_cdash['site']

        if args.cdash_credentials:
            with open(args.cdash_credentials) as fd:
                cdash_auth_token = fd.read()
                cdash_auth_token = cdash_auth_token.strip()

    ci_mirrors = yaml_root['mirrors']
    mirror_urls = ci_mirrors.values()
    mirror_urls = [url for url in ci_mirrors.values()]

    spec_labels, dependencies, stages = stage_spec_jobs(env.all_specs())
    bootstrap_specs = []
    phases = []
    if 'bootstrap' in yaml_root['gitlab-ci']:
        for phase in yaml_root['gitlab-ci']['bootstrap']:
            try:
                phase_name = phase.get('name')
                strip_compilers = phase.get('compiler-agnostic')
            except AttributeError:
                phase_name = phase
                strip_compilers = False
            phases.append({
                'name': phase_name,
                'strip-compilers': strip_compilers,
            })

    if not stages:
        tty.msg('No jobs staged, exiting.')
        return
            for bs in env.spec_lists[phase_name]:
                bootstrap_specs.append({
                    'spec': bs,
                    'phase-name': phase_name,
                    'strip-compilers': strip_compilers,
                })

    phases.append({
        'name': 'specs',
        'strip-compilers': False,
    })

    staged_phases = {}
    for phase in phases:
        phase_name = phase['name']
        staged_phases[phase_name] = stage_spec_jobs(env.spec_lists[phase_name])

    if args.print_summary:
        print_staging_summary(spec_labels, dependencies, stages)
        for phase in phases:
            phase_name = phase['name']
            tty.msg('Stages for phase "{0}"'.format(phase_name))
            phase_stages = staged_phases[phase_name]
            print_staging_summary(*phase_stages)

    all_job_names = []
    output_object = {}
    job_count = 0
    job_id = 0
    stage_id = 0

    stage_names = ['stage-{0}'.format(i) for i in range(len(stages))]
    stage = 0
    stage_names = []

    for stage_jobs in stages:
        stage_name = stage_names[stage]
    for phase in phases:
        phase_name = phase['name']
        strip_compilers = phase['strip-compilers']

        for spec_label in stage_jobs:
            release_spec = spec_labels[spec_label]['spec']
            root_spec = spec_labels[spec_label]['rootSpec']
        main_phase = is_main_phase(phase_name)
        spec_labels, dependencies, stages = staged_phases[phase_name]

            runner_attribs = find_matching_config(release_spec, ci_mappings)
        for stage_jobs in stages:
            stage_name = 'stage-{0}'.format(stage_id)
            stage_names.append(stage_name)
            stage_id += 1

            if not runner_attribs:
                tty.warn('No match found for {0}, skipping it'.format(
                    release_spec))
                continue
            for spec_label in stage_jobs:
                release_spec = spec_labels[spec_label]['spec']
                root_spec = spec_labels[spec_label]['rootSpec']

            tags = [tag for tag in runner_attribs['tags']]
                runner_attribs = find_matching_config(root_spec, ci_mappings)

            variables = {}
            if 'variables' in runner_attribs:
                variables.update(runner_attribs['variables'])
                if not runner_attribs:
                    tty.warn('No match found for {0}, skipping it'.format(
                        release_spec))
                    continue

            build_image = None
            if 'image' in runner_attribs:
                build_image = runner_attribs['image']
                tags = [tag for tag in runner_attribs['tags']]

            osname = str(release_spec.architecture)
            job_name = get_job_name(release_spec, osname, build_group)
            cdash_build_name = get_cdash_build_name(release_spec, build_group)
                variables = {}
                if 'variables' in runner_attribs:
                    variables.update(runner_attribs['variables'])

            all_job_names.append(cdash_build_name)
                image_name = None
                image_entry = None
                if 'image' in runner_attribs:
                    build_image = runner_attribs['image']
                    try:
                        image_name = build_image.get('name')
                        entrypoint = build_image.get('entrypoint')
                        image_entry = [p for p in entrypoint]
                    except AttributeError:
                        image_name = build_image

            job_scripts = ['./bin/rebuild-package.sh']
                osname = str(release_spec.architecture)
                job_name = get_job_name(phase_name, strip_compilers,
                                        release_spec, osname, build_group)

            job_dependencies = []
            if spec_label in dependencies:
                job_dependencies = (
                    [get_job_name(spec_labels[d]['spec'], osname, build_group)
                     for d in dependencies[spec_label]])
                job_scripts = ['./bin/rebuild-package.sh']

            job_variables = {
                'MIRROR_URL': mirror_urls[0],
                'CDASH_BASE_URL': cdash_url,
                'CDASH_PROJECT': cdash_project,
                'CDASH_PROJECT_ENC': cdash_project_enc,
                'CDASH_BUILD_NAME': cdash_build_name,
                'DEPENDENCIES': ';'.join(job_dependencies),
                'ROOT_SPEC': str(root_spec),
            }
                compiler_action = 'NONE'
                if len(phases) > 1:
                    compiler_action = 'FIND_ANY'
                    if is_main_phase(phase_name):
                        compiler_action = 'INSTALL_MISSING'

            if args.signing_key:
                job_variables['SIGN_KEY_HASH'] = args.signing_key
                job_vars = {
                    'SPACK_MIRROR_URL': mirror_urls[0],
                    'SPACK_ROOT_SPEC': format_root_spec(
                        root_spec, main_phase, strip_compilers),
                    'SPACK_JOB_SPEC_PKG_NAME': release_spec.name,
                    'SPACK_COMPILER_ACTION': compiler_action,
                }

            variables.update(job_variables)
                job_dependencies = []
                if spec_label in dependencies:
                    job_dependencies = (
                        [get_job_name(phase_name, strip_compilers,
                                      spec_labels[dep_label]['spec'],
                                      osname, build_group)
                         for dep_label in dependencies[spec_label]])

            job_object = {
                'stage': stage_name,
                'variables': variables,
                'script': job_scripts,
                'artifacts': {
                    'paths': [
                        'local_mirror/build_cache',
                        'jobs_scratch_dir',
                        'cdash_report',
                    ],
                    'when': 'always',
                },
                'dependencies': job_dependencies,
                'tags': tags,
            }
                # This next section helps gitlab make sure the right
                # bootstrapped compiler exists in the artifacts buildcache by
                # creating an artificial dependency between this spec and its
                # compiler. So, if we are in the main phase, and if the
                # compiler we are supposed to use is listed in any of the
                # bootstrap spec lists, then we will add one more dependency to
                # "job_dependencies" (that compiler).
                if is_main_phase(phase_name):
                    compiler_pkg_spec = compilers.pkg_spec_for_compiler(
                        release_spec.compiler)
                    for bs in bootstrap_specs:
                        bs_arch = bs['spec'].architecture
                        if (bs['spec'].satisfies(compiler_pkg_spec) and
                            bs_arch == release_spec.architecture):
                            c_job_name = get_job_name(bs['phase-name'],
                                                      bs['strip-compilers'],
                                                      bs['spec'],
                                                      str(bs_arch),
                                                      build_group)
                            job_dependencies.append(c_job_name)

            if build_image:
                job_object['image'] = build_image
                if enable_cdash_reporting:
                    cdash_build_name = get_cdash_build_name(
                        release_spec, build_group)
                    all_job_names.append(cdash_build_name)

            output_object[job_name] = job_object
            job_count += 1
                    related_builds = []  # Used for relating CDash builds
                    if spec_label in dependencies:
                        related_builds = (
                            [spec_labels[d]['spec'].name
                             for d in dependencies[spec_label]])

        stage += 1
                    job_vars['SPACK_CDASH_BASE_URL'] = cdash_url
                    job_vars['SPACK_CDASH_PROJECT'] = cdash_project
                    job_vars['SPACK_CDASH_PROJECT_ENC'] = cdash_project_enc
                    job_vars['SPACK_CDASH_BUILD_NAME'] = cdash_build_name
                    job_vars['SPACK_CDASH_SITE'] = cdash_site
                    job_vars['SPACK_RELATED_BUILDS'] = ';'.join(related_builds)
                    job_vars['SPACK_JOB_SPEC_BUILDGROUP'] = build_group

                job_vars['SPACK_ENABLE_CDASH'] = str(enable_cdash_reporting)

                variables.update(job_vars)

                job_object = {
                    'stage': stage_name,
                    'variables': variables,
                    'script': job_scripts,
                    'tags': tags,
                    'artifacts': {
                        'paths': [
                            'jobs_scratch_dir',
                            'cdash_report',
                            'local_mirror/build_cache',
                        ],
                        'when': 'always',
                    },
                    'dependencies': job_dependencies,
                }

                if image_name:
                    job_object['image'] = image_name
                    if image_entry is not None:
                        job_object['image'] = {
                            'name': image_name,
                            'entrypoint': image_entry,
                        }

                output_object[job_name] = job_object
                job_id += 1

    tty.msg('{0} build jobs generated in {1} stages'.format(
        job_count, len(stages)))
        job_id, stage_id))

    # Use "all_job_names" to populate the build group for this set
    if cdash_auth_token:
    if enable_cdash_reporting and cdash_auth_token:
        try:
            populate_buildgroup(all_job_names, build_group, cdash_project,
                                cdash_site, cdash_auth_token, cdash_url)
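
For orientation, a single generated entry in the output .gitlab-ci.yml would look roughly like the following (all names and values illustrative, not taken from a real run); note the artificial dependency on the bootstrapped compiler's job described in the comment above:

    (specs) readline 7.0 gcc@6.5.0 linux-centos7-x86_64 Release Testing:
      stage: stage-2
      script:
        - ./bin/rebuild-package.sh
      tags:
        - spack-k8s
      image: spack/centos:7
      variables:
        SPACK_MIRROR_URL: https://mirror.spack.io
        SPACK_ROOT_SPEC: <base64/zlib-encoded concrete spec yaml>
        SPACK_JOB_SPEC_PKG_NAME: readline
        SPACK_COMPILER_ACTION: INSTALL_MISSING
        SPACK_ENABLE_CDASH: 'True'
      artifacts:
        paths:
          - jobs_scratch_dir
          - cdash_report
          - local_mirror/build_cache
        when: always
      dependencies:
        - '(compiler-pkgs) gcc 6.5.0 linux-centos7-x86_64 Release Testing'
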
@@ -521,7 +646,7 @@ def release_jobs(parser, args):
        'variables': {
            'MIRROR_URL': mirror_urls[0],
        },
        'image': 'scottwittenburg/spack_ci_generator_alpine',  # just needs some basic python image
        'image': 'scottwittenburg/spack_ci_generator_alpine',
        'script': './bin/rebuild-index.sh',
        'tags': ['spack-k8s']  # may want a runner to handle this
    }

@@ -17,42 +17,77 @@
    'additionalProperties': False,
    'required': ['mappings'],
    'patternProperties': {
        r'mappings': {
        'bootstrap': {
            'type': 'array',
            'default': {},
            'additionalProperties': False,
            'patternProperties': {
                r'[\w\d\-_\.]+': {
                    'type': 'object',
                    'additionalProperties': False,
                    'required': ['match', 'runner-attributes'],
                    'properties': {
                        'match': {
                            'type': 'array',
                            'default': [],
                            'items': {
            'items': {
                'anyOf': [
                    {
                        'type': 'string',
                    }, {
                        'type': 'object',
                        'additionalProperties': False,
                        'required': ['name'],
                        'properties': {
                            'name': {
                                'type': 'string',
                            },
                            'compiler-agnostic': {
                                'type': 'boolean',
                                'default': False,
                            },
                        },
                        'runner-attributes': {
                            'type': 'object',
                            'additionalProperties': True,
                            'required': ['tags'],
                            'properties': {
                                'image': {'type': 'string'},
                                'tags': {
                                    'type': 'array',
                                    'default': [],
                                    'items': {'type': 'string'}
                                },
                                'variables': {
                                    'type': 'object',
                                    'default': {},
                                    'patternProperties': {
                                        r'[\w\d\-_\.]+': {
                                            'type': 'string',
                                        },
                ],
            },
        },
        'mappings': {
            'type': 'array',
            'items': {
                'type': 'object',
                'additionalProperties': False,
                'required': ['match', 'runner-attributes'],
                'properties': {
                    'match': {
                        'type': 'array',
                        'items': {
                            'type': 'string',
                        },
                    },
                    'runner-attributes': {
                        'type': 'object',
                        'additionalProperties': True,
                        'required': ['tags'],
                        'properties': {
                            'image': {
                                'oneOf': [
                                    {
                                        'type': 'string'
                                    }, {
                                        'type': 'object',
                                        'properties': {
                                            'name': {'type': 'string'},
                                            'entrypoint': {
                                                'type': 'array',
                                                'items': {
                                                    'type': 'string',
                                                },
                                            },
                                        },
                                    },
                                ],
                            },
                            'tags': {
                                'type': 'array',
                                'default': [],
                                'items': {'type': 'string'}
                            },
                            'variables': {
                                'type': 'object',
                                'default': {},
                                'patternProperties': {
                                    r'[\w\d\-_\.]+': {
                                        'type': 'string',
                                    },
                                },
                            },
                        },
@@ -1,48 +0,0 @@
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""Schema for expressing dependencies of a set of specs in a JSON file

.. literalinclude:: _spack_root/lib/spack/spack/schema/specs_deps.py
   :lines: 32-
"""


schema = {
    '$schema': 'http://json-schema.org/schema#',
    'title': 'Spack schema for the dependencies of a set of specs',
    'type': 'object',
    'additionalProperties': False,
    'required': ['specs'],
    'properties': {
        r'dependencies': {
            'type': 'array',
            'default': [],
            'items': {
                'type': 'object',
                'additionalProperties': False,
                'required': ['depends', 'spec'],
                'properties': {
                    r'depends': {'type': 'string'},
                    r'spec': {'type': 'string'},
                },
            },
        },
        r'specs': {
            'type': 'array',
            'default': [],
            'items': {
                'type': 'object',
                'additionalProperties': False,
                'required': ['root_spec', 'spec', 'label'],
                'properties': {
                    r'root_spec': {'type': 'string'},
                    r'spec': {'type': 'string'},
                    r'label': {'type': 'string'},
                }
            },
        },
    },
}
@@ -102,8 +102,7 @@ def test_release_jobs_with_env(tmpdir, mutable_mock_env_path, env_deactivate,
      some-mirror: https://my.fake.mirror
    gitlab-ci:
      mappings:
        - some-runner-mapping:
            match:
        - match:
            - archive-files
          runner-attributes:
            tags: