Remove CombinatorialSpecSet in favor of environments + stacks
commit 5323a5cff9
parent 0b67f30e21
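With this change, release testing no longer reads a standalone `spec-set` YAML file through `CombinatorialSpecSet`. Instead, `spack buildcache check` and `spack release-jobs` take their spec list, along with the `mirrors`, `gitlab-ci`, and `cdash` settings, from the active Spack environment. A minimal `spack.yaml` sketch, modeled on the test added in this commit (the mirror URL, CDash values, and runner tags below are placeholders):

    spack:
      definitions:
        - packages: [archive-files]
      specs:
        - $packages
      mirrors:
        some-mirror: https://my.fake.mirror
      gitlab-ci:
        mappings:
          - some-runner-mapping:
              match:
                - archive-files
              runner-attributes:
                tags:
                  - donotcare
                image: donotcare
      cdash:
        build-group: Not important
        url: https://my.fake.cdash
        project: Not used
        site: Nothing

With such an environment active, `spack release-jobs --output-file .gitlab-ci.yml` generates the CI pipeline, and `spack buildcache check` concretizes the environment to obtain the specs it verifies.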
@@ -1,16 +0,0 @@
-# -------------------------------------------------------------------------
-# This is the default spack release spec set.
-# -------------------------------------------------------------------------
-spec-set:
-  include: []
-  exclude: []
-  matrix:
-  - packages:
-      xsdk:
-        versions: [0.4.0]
-  - compilers:
-      gcc:
-        versions: [5.5.0]
-      clang:
-        versions: [6.0.0, '6.0.0-1ubuntu2']
-  cdash: ["https://spack.io/cdash/submit.php?project=spack"]
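The cross-product this file expressed (xsdk 0.4.0 against gcc 5.5.0 and clang 6.0.0) maps onto an environment stack instead. A hedged sketch, assuming the `definitions`/`matrix` spec-list syntax of Spack stacks; the definition names are illustrative:

    spack:
      definitions:
        - releases: [xsdk@0.4.0]
        - compilers: ['%gcc@5.5.0', '%clang@6.0.0']
      specs:
        - matrix:
            - [$releases]
            - [$compilers]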
@@ -1,21 +0,0 @@
-spec-set:
-  include: [ ape, atompaw, transset]
-  exclude: [binutils,tk]
-  packages:
-    ape:
-      versions: [2.2.1]
-    atompaw:
-      versions: [3.1.0.3, 4.0.0.13]
-    binutils:
-      versions: [2.20.1, 2.25, 2.23.2, 2.24, 2.27, 2.26]
-    tk:
-      versions: [8.6.5, 8.6.3]
-    transset:
-      versions: [1.0.1]
-  compilers:
-    gcc:
-      versions: [4.9, 4.8, 4.7]
-    clang:
-      versions: [3.5, 3.6]
-
-  dashboard: ["https://spack.io/cdash/submit.php?project=spack"]
@@ -20,11 +20,8 @@
 import spack.config
 import spack.repo
 import spack.store
 
 from spack.error import SpecError
-from spack.paths import etc_path
 from spack.spec import Spec, save_dependency_spec_yamls
-from spack.spec_set import CombinatorialSpecSet
-
 
 from spack.cmd import display_specs
@@ -417,10 +414,9 @@ def check_binaries(args):
     if args.spec or args.spec_yaml:
         specs = [get_concrete_spec(args)]
     else:
-        release_specs_path = os.path.join(
-            etc_path, 'spack', 'defaults', 'release.yaml')
-        spec_set = CombinatorialSpecSet.from_file(release_specs_path)
-        specs = [spec for spec in spec_set]
+        env = ev.get_env(args, 'buildcache', required=True)
+        env.concretize()
+        specs = env.all_specs()
 
     if not specs:
         tty.msg('No specs provided, exiting.')
@@ -4,17 +4,18 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
 import json
-import sys
 
 from jsonschema import validate, ValidationError
 from six import iteritems
-from six.moves.urllib.request import build_opener, HTTPHandler, Request
+from six.moves.urllib.error import HTTPError, URLError
 from six.moves.urllib.parse import urlencode
+from six.moves.urllib.request import build_opener, HTTPHandler, Request
 
 import llnl.util.tty as tty
 
 import spack.environment as ev
 from spack.dependency import all_deptypes
+from spack.error import SpackError
 from spack.spec import Spec
 from spack.schema.specs_deps import schema as specs_deps_schema
 import spack.util.spack_yaml as syaml
@@ -42,7 +43,7 @@ def setup_parser(subparser):
         help="Print summary of staged jobs to standard output")
 
     subparser.add_argument(
-        '-c', '--cdash-credentials', default=None,
+        '--cdash-credentials', default=None,
        help="Path to file containing CDash authentication token")
 
 
@@ -59,9 +60,9 @@ def _create_buildgroup(opener, headers, url, project, group_name, group_type):
     response_code = response.getcode()
 
     if response_code != 200 and response_code != 201:
-        print('Creating buildgroup failed (response code = {0}'.format(
-            response_code))
-        return None
+        msg = 'Creating buildgroup failed (response code = {0}'.format(
+            response_code)
+        raise SpackError(msg)
 
     response_text = response.read()
     response_json = json.loads(response_text)
@@ -71,7 +72,7 @@ def _create_buildgroup(opener, headers, url, project, group_name, group_type):
 
 
 def populate_buildgroup(job_names, group_name, project, site,
-                        credentials, cdash_url, exit_on_fail=False):
+                        credentials, cdash_url):
     url = "{0}/api/v1/buildgroup.php".format(cdash_url)
 
     headers = {
@@ -88,8 +89,9 @@ def populate_buildgroup(job_names, group_name, project, site,
                                      'Latest')
 
     if not parent_group_id or not group_id:
-        print('Unable to create or retrieve the build groups')
-        sys.exit(1)
+        msg = 'Failed to create or retrieve buildgroups for {0}'.format(
+            group_name)
+        raise SpackError(msg)
 
     data = {
         'project': project,
@@ -107,10 +109,10 @@ def populate_buildgroup(job_names, group_name, project, site,
     response = opener.open(request)
     response_code = response.getcode()
 
-    if response_code != 200 and exit_on_fail:
-        print('Unexpected response ({0}) when populating buildgroup'.format(
-            response_code))
-        sys.exit(1)
+    if response_code != 200:
+        msg = 'Error response code ({0}) in populate_buildgroup'.format(
+            response_code)
+        raise SpackError(msg)
 
 
 def get_job_name(spec, osarch, build_group):
@@ -173,24 +175,12 @@ def get_spec_dependencies(specs, deps, spec_labels):
 
 
 def stage_spec_jobs(specs):
-    """Take a set of release specs along with a dictionary describing the
-    available docker containers and what compilers they have, and generate
-    a list of "stages", where the jobs in any stage are dependent only on
-    jobs in previous stages. This allows us to maximize build parallelism
-    within the gitlab-ci framework.
+    """Take a set of release specs and generate a list of "stages", where the
+    jobs in any stage are dependent only on jobs in previous stages. This
+    allows us to maximize build parallelism within the gitlab-ci framework.
 
     Arguments:
-        spec_set (CombinatorialSpecSet): Iterable containing all the specs
-            to build.
-        containers (dict): Describes the docker containers available to use
-            for concretizing specs (and also for the gitlab runners to use
-            for building packages). The schema can be found at
-            "lib/spack/spack/schema/os_container_mapping.py"
-        current_system (string): If provided, this indicates not to use the
-            containers for concretizing the release specs, but rather just
-            assume the current system is in the "containers" dictionary. A
-            SpackError will be raised if the current system is not in that
-            dictionary.
+        specs (Iterable): Specs to build
 
     Returns: A tuple of information objects describing the specs, dependencies
         and stages:
@@ -391,6 +381,8 @@ def release_jobs(parser, args):
     env = ev.get_env(args, 'release-jobs', required=True)
     env.concretize(force=args.force)
 
+    # FIXME: What's the difference between one that opens with 'spack'
+    # and one that opens with 'env'? This will only handle the former.
     yaml_root = env.yaml['spack']
 
     if 'gitlab-ci' not in yaml_root:
@@ -514,8 +506,11 @@ def release_jobs(parser, args):
 
     # Use "all_job_names" to populate the build group for this set
     if cdash_auth_token:
-        populate_buildgroup(all_job_names, build_group, cdash_project,
-                            cdash_site, cdash_auth_token, cdash_url)
+        try:
+            populate_buildgroup(all_job_names, build_group, cdash_project,
+                                cdash_site, cdash_auth_token, cdash_url)
+        except (SpackError, HTTPError, URLError) as err:
+            tty.warn('Problem populating buildgroup: {0}'.format(err))
     else:
         tty.warn('Unable to populate buildgroup without CDash credentials')
 
@@ -14,7 +14,6 @@
 properties = {
     'cdash': {
         'type': 'object',
-        'default': {},
         'additionalProperties': False,
         'required': ['build-group', 'url', 'project', 'site'],
         'patternProperties': {
@@ -14,7 +14,6 @@
 properties = {
     'gitlab-ci': {
         'type': 'object',
-        'default': {},
         'additionalProperties': False,
         'required': ['mappings'],
         'patternProperties': {
@@ -25,7 +24,6 @@
         'patternProperties': {
             r'[\w\d\-_\.]+': {
                 'type': 'object',
-                'default': {},
                 'additionalProperties': False,
                 'required': ['match', 'runner-attributes'],
                 'properties': {
@@ -38,7 +36,6 @@
                     },
                     'runner-attributes': {
                         'type': 'object',
-                        'default': {},
                         'additionalProperties': True,
                         'required': ['tags'],
                         'properties': {
@@ -1,111 +0,0 @@
-# Copyright 2013-2018 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-"""Schema for Spack spec-set configuration file.
-
-.. literalinclude:: _spack_root/lib/spack/spack/schema/spec_set.py
-   :lines: 32-
-"""
-
-
-schema = {
-    '$schema': 'http://json-schema.org/schema#',
-    'title': 'Spack test configuration file schema',
-    'definitions': {
-        # used for include/exclude
-        'list_of_specs': {
-            'type': 'array',
-            'items': {'type': 'string'}
-        },
-        # used for compilers and for packages
-        'objects_with_version_list': {
-            'type': 'object',
-            'additionalProperties': False,
-            'patternProperties': {
-                r'\w[\w-]*': {
-                    'type': 'object',
-                    'additionalProperties': False,
-                    'required': ['versions'],
-                    'properties': {
-                        'versions': {
-                            'type': 'array',
-                            'items': {
-                                'oneOf': [
-                                    {'type': 'string'},
-                                    {'type': 'number'},
-                                ],
-                            },
-                        },
-                    },
-                },
-            },
-        },
-        'packages': {
-            'type': 'object',
-            'additionalProperties': False,
-            'properties': {
-                'packages': {
-                    '$ref': '#/definitions/objects_with_version_list'
-                },
-            }
-        },
-        'compilers': {
-            'type': 'object',
-            'additionalProperties': False,
-            'properties': {
-                'compilers': {
-                    '$ref': '#/definitions/objects_with_version_list'
-                },
-            }
-        },
-        'specs': {
-            'type': 'object',
-            'additionalProperties': False,
-            'properties': {
-                'specs': {'$ref': '#/definitions/list_of_specs'},
-            }
-        },
-    },
-    # this is the actual top level object
-    'type': 'object',
-    'additionalProperties': False,
-    'properties': {
-        'spec-set': {
-            'type': 'object',
-            'additionalProperties': False,
-            'required': ['matrix'],
-            'properties': {
-                # top-level settings are keys and need to be unique
-                'include': {'$ref': '#/definitions/list_of_specs'},
-                'exclude': {'$ref': '#/definitions/list_of_specs'},
-                'cdash': {
-                    'oneOf': [
-                        {'type': 'string'},
-                        {
-                            'type': 'array',
-                            'items': {'type': 'string'}
-                        },
-                    ],
-                },
-                'project': {
-                    'type': 'string',
-                },
-                # things under matrix (packages, compilers, etc.) are a
-                # list so that we can potentiall have multiple of them.
-                'matrix': {
-                    'type': 'array',
-                    'items': {
-                        'type': 'object',
-                        'oneOf': [
-                            {'$ref': '#/definitions/specs'},
-                            {'$ref': '#/definitions/packages'},
-                            {'$ref': '#/definitions/compilers'},
-                        ],
-                    },
-                },
-            },
-        },
-    },
-}
@@ -1,188 +0,0 @@
-# Copyright 2013-2018 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-import itertools
-from jsonschema import validate
-
-import llnl.util.tty as tty
-from llnl.util.tty.colify import colify
-
-import spack
-import spack.compilers
-import spack.architecture as sarch
-import spack.schema.spec_set as spec_set_schema
-import spack.util.spack_yaml as syaml
-
-from spack.error import SpackError
-from spack.spec import Spec, ArchSpec
-
-
-class CombinatorialSpecSet:
-    """Set of combinatorial Specs constructed from YAML file."""
-
-    def __init__(self, yaml_like, ignore_invalid=True):
-        """Construct a combinatorial Spec set.
-
-        Args:
-            yaml_like: either raw YAML data as a dict, a file-like object
-                to read the YAML from, or a string containing YAML. In the
-                first case, we assume already-parsed YAML data. In the second
-                two cases, we just run yaml.load() on the data.
-            ignore_invalid (bool): whether to ignore invalid specs when
-                expanding the values of this spec set.
-        """
-        self.ignore_invalid = ignore_invalid
-
-        if isinstance(yaml_like, dict):
-            # if it's raw data, just assign it to self.data
-            self.data = yaml_like
-        else:
-            # otherwise try to load it.
-            self.data = syaml.load(yaml_like)
-
-        # validate against the spec set schema
-        validate(self.data, spec_set_schema.schema)
-
-        # chop off the initial spec-set label after valiation.
-        self.data = self.data['spec-set']
-
-        # initialize these from data.
-        self.cdash = self.data.get('cdash', None)
-        if isinstance(self.cdash, str):
-            self.cdash = [self.cdash]
-        self.project = self.data.get('project', None)
-
-        # _spec_lists is a list of lists of specs, to be combined as a
-        # cartesian product when we iterate over all specs in the set.
-        # it's initialized lazily.
-        self._spec_lists = None
-        self._include = []
-        self._exclude = []
-
-    @staticmethod
-    def from_file(path):
-        try:
-            with open(path, 'r') as fin:
-                specs_yaml = syaml.load(fin.read())
-
-                # For now, turn off ignoring invalid specs, as it prevents
-                # iteration if the specified compilers can't be found.
-                return CombinatorialSpecSet(specs_yaml, ignore_invalid=False)
-        except Exception as e:
-            emsg = e.message
-            if not emsg:
-                emsg = e.problem
-            msg = ('Unable to create CombinatorialSpecSet from file ({0})'
-                   ' due to {1}'.format(path, emsg))
-            raise SpackError(msg)
-
-    def all_package_versions(self):
-        """Get package/version combinations for all spack packages."""
-        for name in spack.repo.all_package_names():
-            pkg = spack.repo.get(name)
-            for v in pkg.versions:
-                yield Spec('{0}@{1}'.format(name, v))
-
-    def _specs(self, data):
-        """Read a list of specs from YAML data"""
-        return [Spec(s) for s in data]
-
-    def _compiler_specs(self, data):
-        """Read compiler specs from YAML data.
-        Example YAML:
-            gcc:
-                versions: [4.4.8, 4.9.3]
-            clang:
-                versions: [3.6.1, 3.7.2, 3.8]
-
-        Optionally, data can be 'all', in which case all compilers for
-        the current platform are returned.
-        """
-        # get usable compilers for current platform.
-        arch = ArchSpec(str(sarch.platform()), 'default_os', 'default_target')
-        available_compilers = [
-            c.spec for c in spack.compilers.compilers_for_arch(arch)]
-
-        # return compilers for this platform if asked for everything.
-        if data == 'all':
-            return [cspec.copy() for cspec in available_compilers]
-
-        # otherwise create specs from the YAML file.
-        cspecs = set([
-            Spec('%{0}@{1}'.format(compiler, version))
-            for compiler in data for version in data[compiler]['versions']])
-
-        # filter out invalid specs if caller said to ignore them.
-        if self.ignore_invalid:
-            missing = [c for c in cspecs if not any(
-                c.compiler.satisfies(comp) for comp in available_compilers)]
-            tty.warn("The following compilers were unavailable:")
-            colify(sorted(m.compiler for m in missing))
-            cspecs -= set(missing)
-
-        return cspecs
-
-    def _package_specs(self, data):
-        """Read package/version specs from YAML data.
-        Example YAML:
-            gmake:
-                versions: [4.0, 4.1, 4.2]
-            qt:
-                versions: [4.8.6, 5.2.1, 5.7.1]
-
-        Optionally, data can be 'all', in which case all packages and
-        versions from the package repository are returned.
-        """
-        if data == 'all':
-            return set(self.all_package_versions())
-
-        return set([
-            Spec('{0}@{1}'.format(name, version))
-            for name in data for version in data[name]['versions']])
-
-    def _get_specs(self, matrix_dict):
-        """Parse specs out of an element in the build matrix."""
-        readers = {
-            'packages': self._package_specs,
-            'compilers': self._compiler_specs,
-            'specs': self._specs
-        }
-
-        key = next(iter(matrix_dict), None)
-        assert key in readers
-        return readers[key](matrix_dict[key])
-
-    def __iter__(self):
-        # read in data from YAML file lazily.
-        if self._spec_lists is None:
-            self._spec_lists = [self._get_specs(spec_list)
-                                for spec_list in self.data['matrix']]
-
-            if 'include' in self.data:
-                self._include = [Spec(s) for s in self.data['include']]
-            if 'exclude' in self.data:
-                self._exclude = [Spec(s) for s in self.data['exclude']]
-
-        for spec_list in itertools.product(*self._spec_lists):
-            # if there is an empty array in spec_lists, we'll get this.
-            if not spec_list:
-                yield spec_list
-                continue
-
-            # merge all the constraints in spec_list with each other
-            spec = spec_list[0].copy()
-            for s in spec_list[1:]:
-                spec.constrain(s)
-
-            # test each spec for include/exclude
-            if (self._include and
-                    not any(spec.satisfies(s) for s in self._include)):
-                continue
-
-            if any(spec.satisfies(s) for s in self._exclude):
-                continue
-
-            # we now know we can include this spec in the set
-            yield spec
@@ -3,37 +3,27 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
-import json
-
-from jsonschema import validate
+import os
+import pytest
 
+import spack
+import spack.environment as ev
 from spack import repo
-from spack.architecture import sys_type
 from spack.cmd.release_jobs import stage_spec_jobs, spec_deps_key_label
 from spack.main import SpackCommand
-from spack.schema.specs_deps import schema as specs_deps_schema
 from spack.spec import Spec
 from spack.test.conftest import MockPackage, MockPackageMultiRepo
 
 
+env = SpackCommand('env')
 release_jobs = SpackCommand('release-jobs')
 
 
-def test_specs_deps(tmpdir, config):
-    """If we ask for the specs dependencies to be written to disk, then make
-    sure we get a file of the correct format."""
-
-    output_path = str(tmpdir.mkdir('json').join('spec_deps.json'))
-    release_jobs('--specs-deps-output', output_path, 'readline')
-
-    deps_object = None
-
-    with open(output_path, 'r') as fd:
-        deps_object = json.loads(fd.read())
-
-    assert (deps_object is not None)
-
-    validate(deps_object, specs_deps_schema)
+@pytest.fixture()
+def env_deactivate():
+    yield
+    spack.environment._active_environment = None
+    os.environ.pop('SPACK_ENV', None)
 
 
 def test_specs_staging(config):
@@ -52,25 +42,6 @@ def test_specs_staging(config):
    and then 'd', 'b', and 'a' to be put in the next three stages, respectively.
 
    """
-    current_system = sys_type()
-
-    config_compilers = config.get_config('compilers')
-    first_compiler = config_compilers[0]
-    compiler_spec = first_compiler['compiler']['spec']
-
-    # Whatever that first compiler in the configuration was, let's make sure
-    # we mock up an entry like we'd find in os-container-mapping.yaml which
-    # has that compiler.
-    mock_containers = {}
-    mock_containers[current_system] = {
-        "image": "dontcare",
-        "compilers": [
-            {
-                "name": compiler_spec,
-            }
-        ],
-    }
-
    default = ('build', 'link')
 
    g = MockPackage('g', [], [])
@@ -84,9 +55,7 @@ def test_specs_staging(config):
    mock_repo = MockPackageMultiRepo([a, b, c, d, e, f, g])
 
    with repo.swap(mock_repo):
-        # Now we'll ask for the root package to be compiled with whatever that
-        # first compiler in the configuration was.
-        spec_a = Spec('a%{0}'.format(compiler_spec))
+        spec_a = Spec('a')
        spec_a.concretize()
 
        spec_a_label = spec_deps_key_label(spec_a)[1]
@@ -97,8 +66,7 @@ def test_specs_staging(config):
        spec_f_label = spec_deps_key_label(spec_a['f'])[1]
        spec_g_label = spec_deps_key_label(spec_a['g'])[1]
 
-        spec_labels, dependencies, stages = stage_spec_jobs(
-            [spec_a], mock_containers, current_system)
+        spec_labels, dependencies, stages = stage_spec_jobs([spec_a])
 
        assert (len(stages) == 4)
 
@@ -116,3 +84,45 @@ def test_specs_staging(config):
 
        assert (len(stages[3]) == 1)
        assert (spec_a_label in stages[3])
+
+
+def test_release_jobs_with_env(tmpdir, mutable_mock_env_path, env_deactivate,
+                               install_mockery, mock_packages):
+    """Make sure we can get a .gitlab-ci.yml from an environment file
+    which has the gitlab-ci, cdash, and mirrors sections."""
+    filename = str(tmpdir.join('spack.yaml'))
+    with open(filename, 'w') as f:
+        f.write("""\
+spack:
+  definitions:
+    - packages: [archive-files]
+  specs:
+    - $packages
+  mirrors:
+    some-mirror: https://my.fake.mirror
+  gitlab-ci:
+    mappings:
+      - some-runner-mapping:
+          match:
+            - archive-files
+          runner-attributes:
+            tags:
+              - donotcare
+            image: donotcare
+  cdash:
+    build-group: Not important
+    url: https://my.fake.cdash
+    project: Not used
+    site: Nothing
+""")
+    with tmpdir.as_cwd():
+        env('create', 'test', './spack.yaml')
+        outputfile = str(tmpdir.join('.gitlab-ci.yml'))
+
+        with ev.read('test'):
+            release_jobs('--output-file', outputfile)
+
+        with open(outputfile) as f:
+            contents = f.read()
+            assert('archive-files' in contents)
+            assert('stages: [stage-0' in contents)
@@ -1,299 +0,0 @@
-# Copyright 2013-2018 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-import pytest
-
-from spack.spec import Spec
-from jsonschema import ValidationError
-from spack.spec_set import CombinatorialSpecSet
-
-
-pytestmark = pytest.mark.usefixtures('config')
-
-
-basic_yaml_file = {
-    'spec-set': {
-        'cdash': 'http://example.com/cdash',
-        'project': 'testproj',
-        'include': ['gmake'],
-        'matrix': [
-            {'packages': {
-                'gmake': {
-                    'versions': ['4.0']
-                }
-            }},
-            {'compilers': {
-                'gcc': {
-                    'versions': ['4.2.1', '6.3.0']
-                }, 'clang': {
-                    'versions': ['8.0', '3.8']
-                }
-            }},
-        ]
-    }
-}
-
-
-def test_spec_set_basic():
-    """The "include" isn't required, but if it is present, we should only
-    see specs mentioned there. Also, if we include cdash and project
-    properties, those should be captured and stored on the resulting
-    CombinatorialSpecSet as attributes."""
-    spec_set = CombinatorialSpecSet(basic_yaml_file, False)
-    specs = list(spec for spec in spec_set)
-    assert len(specs) == 4
-    assert spec_set.cdash == ['http://example.com/cdash']
-    assert spec_set.project == 'testproj'
-
-
-def test_spec_set_no_include():
-    """Make sure that without any exclude or include, we get the full cross-
-    product of specs/versions."""
-    yaml_file = {
-        'spec-set': {
-            'matrix': [
-                {'packages': {
-                    'gmake': {
-                        'versions': ['4.0']
-                    }
-                }},
-                {'compilers': {
-                    'gcc': {
-                        'versions': ['4.2.1', '6.3.0']
-                    }, 'clang': {
-                        'versions': ['8.0', '3.8']
-                    }
-                }},
-            ]
-        }
-    }
-    spec_set = CombinatorialSpecSet(yaml_file, False)
-    specs = list(spec for spec in spec_set)
-    assert len(specs) == 4
-
-
-def test_spec_set_include_exclude_conflict():
-    """Exclude should override include"""
-    yaml_file = {
-        'spec-set': {
-            'include': ['gmake'],
-            'exclude': ['gmake'],
-            'matrix': [
-                {'packages': {
-                    'gmake': {
-                        'versions': ['4.0']
-                    }
-                }},
-                {'compilers': {
-                    'gcc': {
-                        'versions': ['4.2.1', '6.3.0']
-                    }, 'clang': {
-                        'versions': ['8.0', '3.8']
-                    }
-                }},
-            ]
-        }
-    }
-    spec_set = CombinatorialSpecSet(yaml_file, False)
-    specs = list(spec for spec in spec_set)
-    assert len(specs) == 0
-
-
-def test_spec_set_exclude():
-    """The exclude property isn't required, but if it appears, any specs
-    mentioned there should not appear in the output specs"""
-    yaml_file = {
-        'spec-set': {
-            'exclude': ['gmake'],
-            'matrix': [
-                {'packages': {
-                    'gmake': {
-                        'versions': ['4.0']
-                    },
-                    'appres': {
-                        'versions': ['1.0.4']
-                    },
-                    'allinea-reports': {
-                        'versions': ['6.0.4']
-                    }
-                }},
-                {'compilers': {
-                    'gcc': {
-                        'versions': ['4.2.1', '6.3.0']
-                    }, 'clang': {
-                        'versions': ['8.0', '3.8']
-                    }
-                }},
-            ]
-        }
-    }
-    spec_set = CombinatorialSpecSet(yaml_file, False)
-    specs = list(spec for spec in spec_set)
-    assert len(specs) == 8
-
-
-def test_spec_set_include_limited_packages():
-    """If we see the include key, it is a filter and only the specs mentioned
-    there should actually be included."""
-    yaml_file = {
-        'spec-set': {
-            'include': ['gmake'],
-            'matrix': [
-                {'packages': {
-                    'gmake': {
-                        'versions': ['4.0']
-                    },
-                    'appres': {
-                        'versions': ['1.0.4']
-                    },
-                    'allinea-reports': {
-                        'versions': ['6.0.4']
-                    }
-                }},
-                {'compilers': {
-                    'gcc': {
-                        'versions': ['4.2.1', '6.3.0']
-                    }, 'clang': {
-                        'versions': ['8.0', '3.8']
-                    }
-                }},
-            ]
-        }
-    }
-    spec_set = CombinatorialSpecSet(yaml_file, False)
-    specs = list(spec for spec in spec_set)
-    assert len(specs) == 4
-
-
-def test_spec_set_simple_spec_list():
-    """Make sure we can handle the slightly more concise syntax where we
-    include the package name/version together and skip the extra keys in
-    the dictionary."""
-    yaml_file = {
-        'spec-set': {
-            'matrix': [
-                {'specs': [
-                    'gmake@4.0',
-                    'appres@1.0.4',
-                    'allinea-reports@6.0.4'
-                ]},
-            ]
-        }
-    }
-    spec_set = CombinatorialSpecSet(yaml_file, False)
-    specs = list(spec for spec in spec_set)
-    assert len(specs) == 3
-
-
-def test_spec_set_with_specs():
-    """Make sure we only see the specs mentioned in the include"""
-    yaml_file = {
-        'spec-set': {
-            'include': ['gmake', 'appres'],
-            'matrix': [
-                {'specs': [
-                    'gmake@4.0',
-                    'appres@1.0.4',
-                    'allinea-reports@6.0.4'
-                ]},
-                {'compilers': {
-                    'gcc': {
-                        'versions': ['4.2.1', '6.3.0']
-                    }, 'clang': {
-                        'versions': ['8.0', '3.8']
-                    }
-                }},
-            ]
-        }
-    }
-    spec_set = CombinatorialSpecSet(yaml_file, False)
-    specs = list(spec for spec in spec_set)
-    assert len(specs) == 8
-
-
-def test_spec_set_packages_no_matrix():
-    """The matrix property is required, make sure we error out if it is
-    missing"""
-    yaml_file = {
-        'spec-set': {
-            'include': ['gmake'],
-            'packages': {
-                'gmake': {
-                    'versions': ['4.0']
-                },
-                'appres': {
-                    'versions': ['1.0.4']
-                },
-                'allinea-reports': {
-                    'versions': ['6.0.4']
-                }
-            },
-        }
-    }
-    with pytest.raises(ValidationError):
-        CombinatorialSpecSet(yaml_file)
-
-
-def test_spec_set_get_cdash_array():
-    """Make sure we can handle multiple cdash sites in a list"""
-    yaml_file = {
-        'spec-set': {
-            'cdash': ['http://example.com/cdash', 'http://example.com/cdash2'],
-            'project': 'testproj',
-            'matrix': [
-                {'packages': {
-                    'gmake': {'versions': ['4.0']},
-                }},
-                {'compilers': {
-                    'gcc': {'versions': ['4.2.1', '6.3.0']},
-                    'clang': {'versions': ['8.0', '3.8']},
-                }},
-            ]
-        }
-    }
-
-    spec_set = CombinatorialSpecSet(yaml_file)
-    assert spec_set.cdash == [
-        'http://example.com/cdash', 'http://example.com/cdash2']
-    assert spec_set.project == 'testproj'
-
-
-def test_compiler_specs():
-    spec_set = CombinatorialSpecSet(basic_yaml_file, False)
-    compilers = spec_set._compiler_specs({
-        'gcc': {
-            'versions': ['4.2.1', '6.3.0']
-        }, 'clang': {
-            'versions': ['8.0', '3.8']
-        }})
-
-    assert len(list(compilers)) == 4
-    assert Spec('%gcc@4.2.1') in compilers
-    assert Spec('%gcc@6.3.0') in compilers
-    assert Spec('%clang@8.0') in compilers
-    assert Spec('%clang@3.8') in compilers
-
-
-def test_package_specs():
-    spec_set = CombinatorialSpecSet(basic_yaml_file, False)
-
-    packages = spec_set._package_specs({
-        'gmake': {
-            'versions': ['4.0', '5.0']
-        },
-        'appres': {
-            'versions': ['1.0.4']
-        },
-        'allinea-reports': {
-            'versions': ['6.0.1', '6.0.3', '6.0.4']
-        }
-    })
-
-    assert Spec('gmake@4.0') in packages
-    assert Spec('gmake@5.0') in packages
-    assert Spec('appres@1.0.4') in packages
-    assert Spec('allinea-reports@6.0.1') in packages
-    assert Spec('allinea-reports@6.0.3') in packages
-    assert Spec('allinea-reports@6.0.4') in packages