Merge branch 'develop' into features/shared

This commit is contained in:
Carson Woods
2020-06-26 12:19:32 -04:00
191 changed files with 3272 additions and 600 deletions

View File

@@ -627,8 +627,8 @@ output metadata on specs and all dependencies as json:
"target": "x86_64"
},
"compiler": {
"name": "clang",
"version": "10.0.0-apple"
"name": "apple-clang",
"version": "10.0.0"
},
"namespace": "builtin",
"parameters": {

View File

@@ -647,7 +647,7 @@ named list ``compilers`` is ``['%gcc', '%clang', '%intel']`` on
spack:
definitions:
- compilers: ['%gcc', '%clang']
- when: target == 'x86_64'
- when: arch.satisfies('x86_64:')
compilers: ['%intel']
.. note::
@@ -666,8 +666,12 @@ The valid variables for a ``when`` clause are:
#. ``target``. The target string of the default Spack
architecture on the system.
#. ``architecture`` or ``arch``. The full string of the
default Spack architecture on the system.
#. ``architecture`` or ``arch``. A Spack spec satisfying the default Spack
architecture on the system. This supports querying via the ``satisfies``
method, as shown above.
#. ``arch_str``. The architecture string of the default Spack architecture
on the system.
#. ``re``. The standard regex module in Python.

View File

@@ -478,7 +478,7 @@ Fortran.
cxx: /usr/bin/clang++
f77: /path/to/bin/gfortran
fc: /path/to/bin/gfortran
spec: clang@11.0.0-apple
spec: apple-clang@11.0.0
If you used Spack to install GCC, you can get the installation prefix by

View File

@@ -1675,15 +1675,15 @@ can see the patches that would be applied to ``m4``::
Concretized
--------------------------------
m4@1.4.18%clang@9.0.0-apple patches=3877ab548f88597ab2327a2230ee048d2d07ace1062efe81fc92e91b7f39cd00,c0a408fbffb7255fcc75e26bd8edab116fc81d216bfd18b473668b7739a4158e,fc9b61654a3ba1a8d6cd78ce087e7c96366c290bc8d2c299f09828d793b853c8 +sigsegv arch=darwin-highsierra-x86_64
^libsigsegv@2.11%clang@9.0.0-apple arch=darwin-highsierra-x86_64
m4@1.4.18%apple-clang@9.0.0 patches=3877ab548f88597ab2327a2230ee048d2d07ace1062efe81fc92e91b7f39cd00,c0a408fbffb7255fcc75e26bd8edab116fc81d216bfd18b473668b7739a4158e,fc9b61654a3ba1a8d6cd78ce087e7c96366c290bc8d2c299f09828d793b853c8 +sigsegv arch=darwin-highsierra-x86_64
^libsigsegv@2.11%apple-clang@9.0.0 arch=darwin-highsierra-x86_64
You can also see patches that have been applied to installed packages
with ``spack find -v``::
$ spack find -v m4
==> 1 installed package
-- darwin-highsierra-x86_64 / clang@9.0.0-apple -----------------
-- darwin-highsierra-x86_64 / apple-clang@9.0.0 -----------------
m4@1.4.18 patches=3877ab548f88597ab2327a2230ee048d2d07ace1062efe81fc92e91b7f39cd00,c0a408fbffb7255fcc75e26bd8edab116fc81d216bfd18b473668b7739a4158e,fc9b61654a3ba1a8d6cd78ce087e7c96366c290bc8d2c299f09828d793b853c8 +sigsegv
.. _cmd-spack-resource:
@@ -1713,7 +1713,7 @@ wonder where the extra boost patches are coming from::
$ spack spec dealii ^boost@1.68.0 ^hdf5+fortran | grep '\^boost'
^boost@1.68.0
^boost@1.68.0%clang@9.0.0-apple+atomic+chrono~clanglibcpp cxxstd=default +date_time~debug+exception+filesystem+graph~icu+iostreams+locale+log+math~mpi+multithreaded~numpy patches=2ab6c72d03dec6a4ae20220a9dfd5c8c572c5294252155b85c6874d97c323199,b37164268f34f7133cbc9a4066ae98fda08adf51e1172223f6a969909216870f ~pic+program_options~python+random+regex+serialization+shared+signals~singlethreaded+system~taggedlayout+test+thread+timer~versionedlayout+wave arch=darwin-highsierra-x86_64
^boost@1.68.0%apple-clang@9.0.0+atomic+chrono~clanglibcpp cxxstd=default +date_time~debug+exception+filesystem+graph~icu+iostreams+locale+log+math~mpi+multithreaded~numpy patches=2ab6c72d03dec6a4ae20220a9dfd5c8c572c5294252155b85c6874d97c323199,b37164268f34f7133cbc9a4066ae98fda08adf51e1172223f6a969909216870f ~pic+program_options~python+random+regex+serialization+shared+signals~singlethreaded+system~taggedlayout+test+thread+timer~versionedlayout+wave arch=darwin-highsierra-x86_64
$ spack resource show b37164268
b37164268f34f7133cbc9a4066ae98fda08adf51e1172223f6a969909216870f
path: /home/spackuser/src/spack/var/spack/repos/builtin/packages/dealii/boost_1.68.0.patch
@@ -4252,23 +4252,29 @@ Does this in one of two ways:
``spack clean``
^^^^^^^^^^^^^^^
Cleans up all of Spack's temporary and cached files. This can be used to
Cleans up Spack's temporary and cached files. This command can be used to
recover disk space if temporary files from interrupted or failed installs
accumulate in the staging area.
accumulate.
When called with ``--stage`` or without arguments this removes all staged
files.
When called with ``--downloads`` this will clear all resources
:ref:`cached <caching>` during installs.
The ``--downloads`` option removes :ref:`cached <caching>` downloads.
When called with ``--user-cache`` this will remove caches in the user home
directory, including cached virtual indices.
You can force the removal of all install failure tracking markers using the
``--failures`` option. Note that ``spack install`` will automatically clear
relevant failure markings prior to performing the requested installation(s).
Long-lived caches, like the virtual package index, are removed using the
``--misc-cache`` option.
The ``--python-cache`` option removes `.pyc`, `.pyo`, and `__pycache__`
folders.
To remove all of the above, the command can be called with ``--all``.
When called with positional arguments, cleans up temporary files only
for a particular package. If ``fetch``, ``stage``, or ``install``
When called with positional arguments, this command cleans up temporary files
only for a particular package. If ``fetch``, ``stage``, or ``install``
are run again after this, Spack's build process will start from scratch.

View File

@@ -280,16 +280,16 @@ you install it, you can use ``spack spec -N``:
Concretized
--------------------------------
builtin.hdf5@1.10.0-patch1%clang@7.0.2-apple+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=darwin-elcapitan-x86_64
^builtin.openmpi@2.0.1%clang@7.0.2-apple~mxm~pmi~psm~psm2~slurm~sqlite3~thread_multiple~tm~verbs+vt arch=darwin-elcapitan-x86_64
^builtin.hwloc@1.11.4%clang@7.0.2-apple arch=darwin-elcapitan-x86_64
^builtin.libpciaccess@0.13.4%clang@7.0.2-apple arch=darwin-elcapitan-x86_64
^builtin.libtool@2.4.6%clang@7.0.2-apple arch=darwin-elcapitan-x86_64
^builtin.m4@1.4.17%clang@7.0.2-apple+sigsegv arch=darwin-elcapitan-x86_64
^builtin.libsigsegv@2.10%clang@7.0.2-apple arch=darwin-elcapitan-x86_64
^builtin.pkg-config@0.29.1%clang@7.0.2-apple+internal_glib arch=darwin-elcapitan-x86_64
^builtin.util-macros@1.19.0%clang@7.0.2-apple arch=darwin-elcapitan-x86_64
^builtin.zlib@1.2.8%clang@7.0.2-apple+pic arch=darwin-elcapitan-x86_64
builtin.hdf5@1.10.0-patch1%apple-clang@7.0.2+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=darwin-elcapitan-x86_64
^builtin.openmpi@2.0.1%apple-clang@7.0.2~mxm~pmi~psm~psm2~slurm~sqlite3~thread_multiple~tm~verbs+vt arch=darwin-elcapitan-x86_64
^builtin.hwloc@1.11.4%apple-clang@7.0.2 arch=darwin-elcapitan-x86_64
^builtin.libpciaccess@0.13.4%apple-clang@7.0.2 arch=darwin-elcapitan-x86_64
^builtin.libtool@2.4.6%apple-clang@7.0.2 arch=darwin-elcapitan-x86_64
^builtin.m4@1.4.17%apple-clang@7.0.2+sigsegv arch=darwin-elcapitan-x86_64
^builtin.libsigsegv@2.10%apple-clang@7.0.2 arch=darwin-elcapitan-x86_64
^builtin.pkg-config@0.29.1%apple-clang@7.0.2+internal_glib arch=darwin-elcapitan-x86_64
^builtin.util-macros@1.19.0%apple-clang@7.0.2 arch=darwin-elcapitan-x86_64
^builtin.zlib@1.2.8%apple-clang@7.0.2+pic arch=darwin-elcapitan-x86_64
.. warning::

View File

@@ -204,10 +204,22 @@ def optimization_flags(self, compiler, version):
compiler (str): name of the compiler to be used
version (str): version of the compiler to be used
"""
# If we don't have information on compiler return an empty string
if compiler not in self.compilers:
# If we don't have information on compiler at all
# return an empty string
if compiler not in self.family.compilers:
return ''
# If we have information but it stops before this
# microarchitecture, fall back to the best known target
if compiler not in self.compilers:
best_target = [
x for x in self.ancestors if compiler in x.compilers
][0]
msg = ("'{0}' compiler is known to optimize up to the '{1}'"
" microarchitecture in the '{2}' architecture family")
msg = msg.format(compiler, best_target, best_target.family)
raise UnsupportedMicroarchitecture(msg)
# If we have information on this compiler we need to check the
# version being used
compiler_info = self.compilers[compiler]
@@ -219,15 +231,10 @@ def optimization_flags(self, compiler, version):
def satisfies_constraint(entry, version):
min_version, max_version = entry['versions'].split(':')
# Check version suffixes
min_version, min_suffix = version_components(min_version)
max_version, max_suffix = version_components(max_version)
version, suffix = version_components(version)
# If the suffixes are not all equal there's no match
if ((suffix != min_suffix and min_version) or
(suffix != max_suffix and max_version)):
return False
# Extract numeric part of the version
min_version, _ = version_components(min_version)
max_version, _ = version_components(max_version)
version, _ = version_components(version)
# Assume compiler versions fit into semver
tuplify = lambda x: tuple(int(y) for y in x.split('.'))

View File

@@ -61,12 +61,14 @@
"flags": "-march={name} -mtune={name}"
}
],
"clang": [
"apple-clang": [
{
"versions": "0.0.0-apple:",
"versions": ":",
"name": "x86-64",
"flags": "-march={name}"
},
}
],
"clang": [
{
"versions": ":",
"name": "x86-64",

View File

@@ -345,18 +345,18 @@ def compute_spec_deps(spec_list):
],
"specs": [
{
"root_spec": "readline@7.0%clang@9.1.0-apple arch=darwin-...",
"spec": "readline@7.0%clang@9.1.0-apple arch=darwin-highs...",
"root_spec": "readline@7.0%apple-clang@9.1.0 arch=darwin-...",
"spec": "readline@7.0%apple-clang@9.1.0 arch=darwin-highs...",
"label": "readline/ip6aiun"
},
{
"root_spec": "readline@7.0%clang@9.1.0-apple arch=darwin-...",
"spec": "ncurses@6.1%clang@9.1.0-apple arch=darwin-highsi...",
"root_spec": "readline@7.0%apple-clang@9.1.0 arch=darwin-...",
"spec": "ncurses@6.1%apple-clang@9.1.0 arch=darwin-highsi...",
"label": "ncurses/y43rifz"
},
{
"root_spec": "readline@7.0%clang@9.1.0-apple arch=darwin-...",
"spec": "pkgconf@1.5.4%clang@9.1.0-apple arch=darwin-high...",
"root_spec": "readline@7.0%apple-clang@9.1.0 arch=darwin-...",
"spec": "pkgconf@1.5.4%apple-clang@9.1.0 arch=darwin-high...",
"label": "pkgconf/eg355zb"
}
]
@@ -450,7 +450,7 @@ def format_job_needs(phase_name, strip_compilers, dep_jobs,
def generate_gitlab_ci_yaml(env, print_summary, output_file,
custom_spack_repo=None, custom_spack_ref=None,
run_optimizer=False):
run_optimizer=False, use_dependencies=False):
# FIXME: What's the difference between one that opens with 'spack'
# and one that opens with 'env'? This will only handle the former.
with spack.concretize.disable_compiler_existence_check():
@@ -794,6 +794,11 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
import spack.ci_optimization as ci_opt
sorted_output = ci_opt.optimizer(sorted_output)
# TODO(opadron): remove this or refactor
if use_dependencies:
import spack.ci_needs_workaround as cinw
sorted_output = cinw.needs_to_dependencies(sorted_output)
with open(output_file, 'w') as outf:
outf.write(syaml.dump_config(sorted_output, default_flow_style=True))

View File

@@ -0,0 +1,47 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Workaround module: convert gitlab-ci ``needs`` entries into plain
``dependencies`` entries for runners that do not support DAG scheduling."""
import collections

try:
    # Dynamically import to keep vermin from complaining about the
    # Python >= 3.3 ``collections.abc`` module.
    #
    # NOTE: ``__import__('collections.abc')`` returns the *top-level*
    # ``collections`` package, so we must take its ``abc`` attribute to get
    # the real submodule.  Without it, ``Mapping`` would resolve to the
    # deprecated ``collections.Mapping`` alias, which was removed in
    # Python 3.10.
    collections_abc = __import__('collections.abc').abc
except (ImportError, AttributeError):
    # Python 2: the abstract base classes live directly in ``collections``.
    collections_abc = collections


def get_job_name(needs_entry):
    """Return the job name a ``needs`` entry refers to, or ``None``.

    A ``needs`` entry is either a plain job-name string or a mapping such
    as ``{'job': <name>, 'artifacts': <bool>}``.  Mappings with
    ``artifacts`` explicitly disabled, and entries of any other type,
    yield ``None`` so callers can filter them out.
    """
    if isinstance(needs_entry, collections_abc.Mapping):
        if needs_entry.get('artifacts', True):
            return needs_entry.get('job')
        return None
    if isinstance(needs_entry, str):
        return needs_entry
    return None


def convert_job(job_entry):
    """Return ``job_entry`` with its ``needs`` list converted to a
    ``dependencies`` list.

    Non-mapping entries and jobs without a ``needs`` key are returned
    unchanged; otherwise a shallow copy is returned so the caller's
    mapping is never mutated.
    """
    if not isinstance(job_entry, collections_abc.Mapping):
        return job_entry

    needs = job_entry.get('needs')
    if needs is None:
        return job_entry

    # Shallow copy, then swap 'needs' for 'dependencies'.
    new_job = dict(job_entry)
    del new_job['needs']

    new_job['dependencies'] = [
        name for name in (get_job_name(entry) for entry in needs)
        if name is not None
    ]
    return new_job


def needs_to_dependencies(yaml):
    """Rewrite every job of a parsed gitlab-ci document, replacing
    ``needs`` entries with equivalent ``dependencies`` entries."""
    return dict((k, convert_job(v)) for k, v in yaml.items())

View File

@@ -190,10 +190,10 @@ def print_delta(name, old, new, applied=None):
applied = (new <= old)
print('\n'.join((
'{} {}:',
' before: {: 10d}',
' after : {: 10d}',
' delta : {:+10d} ({:=+3d}.{}%)',
'{0} {1}:',
' before: {2: 10d}',
' after : {3: 10d}',
' delta : {4:+10d} ({5:=+3d}.{6}%)',
)).format(
name,
('+' if applied else 'x'),

View File

@@ -338,10 +338,14 @@ def _createtarball(env, spec_yaml, packages, add_spec, add_deps,
elif packages:
packages = packages
elif env:
packages = env.concretized_user_specs
else:
tty.die("build cache file creation requires at least one" +
" installed package argument or else path to a" +
" yaml file containing a spec to install")
" installed package spec, an activate environment," +
" or else a path to a yaml file containing a spec" +
" to install")
pkgs = set(packages)
specs = set()

View File

@@ -55,10 +55,14 @@ def setup_parser(subparser):
"should be checked out as a step in each generated job. " +
"This argument is ignored if no --spack-repo is provided.")
generate.add_argument(
'--optimize', action='store_true',
'--optimize', action='store_true', default=False,
help="(Experimental) run the generated document through a series of "
"optimization passes designed to reduce the size of the "
"generated file.")
generate.add_argument(
'--dependencies', action='store_true', default=False,
help="(Experimental) disable DAG scheduling; use "
' "plain" dependencies.')
generate.set_defaults(func=ci_generate)
# Check a spec against mirror. Rebuild, create buildcache and push to
@@ -81,6 +85,7 @@ def ci_generate(args):
spack_repo = args.spack_repo
spack_ref = args.spack_ref
run_optimizer = args.optimize
use_dependencies = args.dependencies
if not output_file:
gen_ci_dir = os.getcwd()
@@ -93,7 +98,8 @@ def ci_generate(args):
# Generate the jobs
spack_ci.generate_gitlab_ci_yaml(
env, True, output_file, spack_repo, spack_ref,
run_optimizer=run_optimizer)
run_optimizer=run_optimizer,
use_dependencies=use_dependencies)
if copy_yaml_to:
copy_to_dir = os.path.dirname(copy_yaml_to)

View File

@@ -23,9 +23,9 @@
class AllClean(argparse.Action):
"""Activates flags -s -d -m and -p simultaneously"""
"""Activates flags -s -d -f -m and -p simultaneously"""
def __call__(self, parser, namespace, values, option_string=None):
parser.parse_args(['-sdmp'], namespace=namespace)
parser.parse_args(['-sdfmp'], namespace=namespace)
def setup_parser(subparser):
@@ -35,6 +35,9 @@ def setup_parser(subparser):
subparser.add_argument(
'-d', '--downloads', action='store_true',
help="remove cached downloads")
subparser.add_argument(
'-f', '--failures', action='store_true',
help="force removal of all install failure tracking markers")
subparser.add_argument(
'-m', '--misc-cache', action='store_true',
help="remove long-lived caches, like the virtual package index")
@@ -42,15 +45,15 @@ def setup_parser(subparser):
'-p', '--python-cache', action='store_true',
help="remove .pyc, .pyo files and __pycache__ folders")
subparser.add_argument(
'-a', '--all', action=AllClean, help="equivalent to -sdmp", nargs=0
'-a', '--all', action=AllClean, help="equivalent to -sdfmp", nargs=0
)
arguments.add_common_arguments(subparser, ['specs'])
def clean(parser, args):
# If nothing was set, activate the default
if not any([args.specs, args.stage, args.downloads, args.misc_cache,
args.python_cache]):
if not any([args.specs, args.stage, args.downloads, args.failures,
args.misc_cache, args.python_cache]):
args.stage = True
# Then do the cleaning falling through the cases
@@ -70,6 +73,10 @@ def clean(parser, args):
tty.msg('Removing cached downloads')
spack.caches.fetch_cache.destroy()
if args.failures:
tty.msg('Removing install failure marks')
spack.installer.clear_failures()
if args.misc_cache:
tty.msg('Removing cached information on repositories')
spack.caches.misc_cache.destroy()

View File

@@ -5,12 +5,14 @@
from __future__ import print_function
import os
import re
import llnl.util.tty as tty
import spack.config
import spack.schema.env
import spack.environment as ev
import spack.util.spack_yaml as syaml
from spack.util.editor import editor
description = "get and set configuration options"
@@ -58,24 +60,48 @@ def setup_parser(subparser):
sp.add_parser('list', help='list configuration sections')
add_parser = sp.add_parser('add', help='add configuration parameters')
add_parser.add_argument(
'path', nargs='?',
help="colon-separated path to config that should be added,"
" e.g. 'config:default:true'")
add_parser.add_argument(
'-f', '--file',
help="file from which to set all config values"
)
remove_parser = sp.add_parser('remove', aliases=['rm'],
help='remove configuration parameters')
remove_parser.add_argument(
'path',
help="colon-separated path to config that should be removed,"
" e.g. 'config:default:true'")
# Make the add parser available later
setup_parser.add_parser = add_parser
def _get_scope_and_section(args):
"""Extract config scope and section from arguments."""
scope = args.scope
section = args.section
section = getattr(args, 'section', None)
path = getattr(args, 'path', None)
# w/no args and an active environment, point to env manifest
if not args.section:
if not section:
env = ev.get_env(args, 'config edit')
if env:
scope = env.env_file_config_scope_name()
# set scope defaults
elif not args.scope:
if section == 'compilers':
scope = spack.config.default_modify_scope()
else:
scope = 'user'
elif not scope:
scope = spack.config.default_modify_scope(section)
# special handling for commands that take value instead of section
if path:
section = path[:path.find(':')] if ':' in path else path
if not scope:
scope = spack.config.default_modify_scope(section)
return scope, section
@@ -135,11 +161,126 @@ def config_list(args):
print(' '.join(list(spack.config.section_schemas)))
def set_config(args, section, new, scope):
    """Write ``new`` into ``section`` of ``scope``.

    Environment scopes (scope names beginning with ``env``) are written
    through the active environment's manifest; every other scope goes
    through the regular configuration machinery.
    """
    if not re.match(r'env.*', scope):
        spack.config.set(section, new, scope=scope)
    else:
        active_env = ev.get_env(args, 'config add')
        active_env.set_config(section, new)
def config_add(args):
    """Add the given configuration to the specified config scope

    This is a stateful operation that edits the config files.

    Two input forms are supported (at least one is required):

    * ``args.file`` -- a YAML file whose recognized sections are merged
      wholesale into the target scope.
    * ``args.path`` -- a colon-separated path such as
      ``config:default:true`` whose final component is the value to set.
    """
    if not (args.file or args.path):
        tty.error("No changes requested. Specify a file or value.")
        setup_parser.add_parser.print_help()
        # NOTE(review): uses the ``site`` builtin ``exit`` rather than
        # ``sys.exit`` -- equivalent here, but sys.exit is conventional.
        exit(1)

    scope, section = _get_scope_and_section(args)

    # Updates from file
    if args.file:
        # Get file as config dict
        data = spack.config.read_config_file(args.file)
        # Unwrap an environment manifest ('spack:'/'env:' top key) into
        # its plain configuration dict.
        if any(k in data for k in spack.schema.env.keys):
            data = ev.config_dict(data)

        # update all sections from config dict
        # We have to iterate on keys to keep overrides from the file
        # NOTE: this deliberately rebinds ``section`` on each iteration;
        # the value returned by _get_scope_and_section is discarded here.
        for section in data.keys():
            if section in spack.config.section_schemas.keys():
                # Special handling for compiler scope difference
                # Has to be handled after we choose a section
                if scope is None:
                    scope = spack.config.default_modify_scope(section)
                value = data[section]
                existing = spack.config.get(section, scope=scope)
                new = spack.config.merge_yaml(existing, value)
                set_config(args, section, new, scope)

    if args.path:
        components = spack.config.process_config_path(args.path)

        # Walk the path components from the root, tracking whether config
        # already has a value at each level.
        has_existing_value = True
        path = ''
        override = False
        for idx, name in enumerate(components[:-1]):
            # First handle double colons in constructing path
            # ('name::' components carry an ``override`` attribute).
            colon = '::' if override else ':' if path else ''
            path += colon + name
            if getattr(name, 'override', False):
                override = True
            else:
                override = False

            # Test whether there is an existing value at this level
            existing = spack.config.get(path, scope=scope)

            if existing is None:
                has_existing_value = False
                # We've nested further than existing config, so we need the
                # type information for validation to know how to handle bare
                # values appended to lists.
                existing = spack.config.get_valid_type(path)

                # construct value from this point down
                # (wrap the leaf value in nested single-key dicts for each
                # remaining intermediate component).
                value = syaml.load_config(components[-1])
                for component in reversed(components[idx + 1:-1]):
                    value = {component: value}
                break

        if has_existing_value:
            # The whole path already exists: split off the final value and
            # re-read the existing entry it will be merged into.
            path, _, value = args.path.rpartition(':')
            value = syaml.load_config(value)
            existing = spack.config.get(path, scope=scope)

        # append values to lists
        if isinstance(existing, list) and not isinstance(value, list):
            value = [value]

        # merge value into existing
        new = spack.config.merge_yaml(existing, value)
        set_config(args, path, new, scope)
def config_remove(args):
    """Remove the given configuration from the specified config scope

    This is a stateful operation that edits the config files.

    ``args.path`` is a colon-separated path whose final component names
    the value (list element or dict key) to remove.
    """
    scope, _ = _get_scope_and_section(args)

    path, _, value = args.path.rpartition(':')
    existing = spack.config.get(path, scope=scope)

    if not isinstance(existing, (list, dict)):
        # The path pointed at a leaf scalar; back up one level so the
        # final component becomes the key/element to remove.
        path, _, value = path.rpartition(':')
        existing = spack.config.get(path, scope=scope)

    # NOTE(review): other code paths in this command use
    # ``syaml.load_config`` -- confirm ``syaml.load`` is intended here.
    value = syaml.load(value)

    if isinstance(existing, list):
        # Remove one or many elements from the list.
        values = value if isinstance(value, list) else [value]
        for v in values:
            existing.remove(v)
    elif isinstance(existing, dict):
        existing.pop(value, None)
    else:
        # This should be impossible to reach
        raise spack.config.ConfigError('Config has nested non-dict values')

    set_config(args, path, existing, scope)
def config(parser, args):
action = {
'get': config_get,
'blame': config_blame,
'edit': config_edit,
'list': config_list,
}
action = {'get': config_get,
'blame': config_blame,
'edit': config_edit,
'list': config_list,
'add': config_add,
'rm': config_remove,
'remove': config_remove}
action[args.config_command](args)

View File

@@ -186,7 +186,7 @@ def _update_pkg_config(pkg_to_entries, not_buildable):
cfg_scope = spack.config.default_modify_scope()
pkgs_cfg = spack.config.get('packages', scope=cfg_scope)
spack.config._merge_yaml(pkgs_cfg, pkg_to_cfg)
spack.config.merge_yaml(pkgs_cfg, pkg_to_cfg)
spack.config.set('packages', pkgs_cfg, scope=cfg_scope)

View File

@@ -245,7 +245,9 @@ def supported_compilers():
See available_compilers() to get a list of all the available
versions of supported compilers.
"""
return sorted(name for name in
# Hack to be able to call the compiler `apple-clang` while still
# using a valid python name for the module
return sorted(name if name != 'apple_clang' else 'apple-clang' for name in
llnl.util.lang.list_modules(spack.paths.compilers_path))
@@ -469,7 +471,13 @@ def class_for_compiler_name(compiler_name):
"""Given a compiler module name, get the corresponding Compiler class."""
assert(supported(compiler_name))
file_path = os.path.join(spack.paths.compilers_path, compiler_name + ".py")
# Hack to be able to call the compiler `apple-clang` while still
# using a valid python name for the module
module_name = compiler_name
if compiler_name == 'apple-clang':
module_name = compiler_name.replace('-', '_')
file_path = os.path.join(spack.paths.compilers_path, module_name + ".py")
compiler_mod = simp.load_source(_imported_compilers_module, file_path)
cls = getattr(compiler_mod, mod_to_class(compiler_name))
@@ -662,7 +670,7 @@ def _default(cmp_id, paths):
operating_system, compiler_name, version = cmp_id
compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
spec = spack.spec.CompilerSpec(compiler_cls.name, version)
paths = [paths.get(l, None) for l in ('cc', 'cxx', 'f77', 'fc')]
paths = [paths.get(x, None) for x in ('cc', 'cxx', 'f77', 'fc')]
target = cpu.host()
compiler = compiler_cls(
spec, operating_system, str(target.family), paths
@@ -716,6 +724,8 @@ def name_matches(name, name_list):
toolchains.add(compiler_cls.__name__)
if len(toolchains) > 1:
if toolchains == set(['Clang', 'AppleClang']):
return False
tty.debug("[TOOLCHAINS] {0}".format(toolchains))
return True

View File

@@ -0,0 +1,165 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os.path
import re
import shutil
import llnl.util.tty as tty
import llnl.util.lang
import spack.compiler
import spack.compilers.clang
import spack.util.executable
import spack.version
class AppleClang(spack.compilers.clang.Clang):
    """Compiler class for Apple's Xcode/Command Line Tools clang.

    Split out of the generic ``Clang`` class so that Apple's independent
    version numbering and Xcode-specific behaviors (OpenMP flag, standard
    flags, DEVELOPER_DIR setup) live in one place.
    """

    # Apple clang ships libomp support but needs the preprocessor flag
    # spelled out explicitly.
    openmp_flag = "-Xpreprocessor -fopenmp"

    @classmethod
    @llnl.util.lang.memoized
    def extract_version_from_output(cls, output):
        """Return the Apple compiler version parsed from ``--version``
        output, or the string ``'unknown'`` when no match is found."""
        ver = 'unknown'
        match = re.search(
            # Apple's LLVM compiler has its own versions, so suffix them.
            # NOTE(review): no re.MULTILINE, so '^' only matches at the
            # very start of the output -- assumes the version banner is
            # the first line; confirm against xcrun clang --version.
            r'^Apple (?:LLVM|clang) version ([^ )]+)', output
        )
        if match:
            ver = match.group(match.lastindex)
        return ver

    @property
    def cxx11_flag(self):
        # Adapted from CMake's AppleClang-CXX rules
        # Spack's AppleClang detection only valid from Xcode >= 4.6
        if self.version < spack.version.ver('4.0.0'):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C++11 standard", "cxx11_flag", "Xcode < 4.0.0"
            )
        return "-std=c++11"

    @property
    def cxx14_flag(self):
        # Adapted from CMake's rules for AppleClang
        if self.version < spack.version.ver('5.1.0'):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C++14 standard", "cxx14_flag", "Xcode < 5.1.0"
            )
        elif self.version < spack.version.ver('6.1.0'):
            return "-std=c++1y"
        return "-std=c++14"

    @property
    def cxx17_flag(self):
        # Adapted from CMake's rules for AppleClang
        if self.version < spack.version.ver('6.1.0'):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C++17 standard", "cxx17_flag", "Xcode < 6.1.0"
            )
        return "-std=c++1z"

    def setup_custom_environment(self, pkg, env):
        """Set the DEVELOPER_DIR environment for the Xcode toolchain.

        On macOS, not all buildsystems support querying CC and CXX for the
        compilers to use and instead query the Xcode toolchain for what
        compiler to run. This side-steps the spack wrappers. In order to inject
        spack into this setup, we need to copy (a subset of) Xcode.app and
        replace the compiler executables with symlinks to the spack wrapper.
        Currently, the stage is used to store the Xcode.app copies. We then set
        the 'DEVELOPER_DIR' environment variables to cause the xcrun and
        related tools to use this Xcode.app.
        """
        super(AppleClang, self).setup_custom_environment(pkg, env)

        if not pkg.use_xcode:
            # if we do it for all packages, we get into big troubles with MPI:
            # filter_compilers(self) will use mockup XCode compilers on macOS
            # with Clang. Those point to Spack's compiler wrappers and
            # consequently render MPI non-functional outside of Spack.
            return

        # Use special XCode versions of compiler wrappers when using XCode
        # Overwrites build_environment's setting of SPACK_CC and SPACK_CXX
        xcrun = spack.util.executable.Executable('xcrun')
        xcode_clang = xcrun('-f', 'clang', output=str).strip()
        xcode_clangpp = xcrun('-f', 'clang++', output=str).strip()
        env.set('SPACK_CC', xcode_clang, force=True)
        env.set('SPACK_CXX', xcode_clangpp, force=True)

        xcode_select = spack.util.executable.Executable('xcode-select')

        # Get the path of the active developer directory
        real_root = xcode_select('--print-path', output=str).strip()

        # The path name can be used to determine whether the full Xcode suite
        # or just the command-line tools are installed
        if real_root.endswith('Developer'):
            # The full Xcode suite is installed
            pass
        else:
            if real_root.endswith('CommandLineTools'):
                # Only the command-line tools are installed
                msg = 'It appears that you have the Xcode command-line tools '
                msg += 'but not the full Xcode suite installed.\n'
            else:
                # Xcode is not installed
                msg = 'It appears that you do not have Xcode installed.\n'

            msg += 'In order to use Spack to build the requested application, '
            msg += 'you need the full Xcode suite. It can be installed '
            msg += 'through the App Store. Make sure you launch the '
            msg += 'application and accept the license agreement.\n'

            raise OSError(msg)

        # Climb from <Xcode.app>/Contents/Developer up to <Xcode.app>.
        real_root = os.path.dirname(os.path.dirname(real_root))
        # NOTE(review): this relies on ``spack.stage`` and ``spack.paths``
        # (below) being importable as attributes of the already-imported
        # ``spack`` package; neither is imported directly in this module's
        # visible import list -- confirm they are imported transitively.
        developer_root = os.path.join(spack.stage.get_stage_root(),
                                      'xcode-select',
                                      self.name,
                                      str(self.version))
        xcode_link = os.path.join(developer_root, 'Xcode.app')

        if not os.path.exists(developer_root):
            tty.warn('Copying Xcode from %s to %s in order to add spack '
                     'wrappers to it. Please do not interrupt.'
                     % (real_root, developer_root))

            # We need to make a new Xcode.app instance, but with symlinks to
            # the spack wrappers for the compilers it ships. This is necessary
            # because some projects insist on just asking xcrun and related
            # tools where the compiler runs. These tools are very hard to trick
            # as they do realpath and end up ignoring the symlinks in a
            # "softer" tree of nothing but symlinks in the right places.
            shutil.copytree(
                real_root, developer_root, symlinks=True,
                ignore=shutil.ignore_patterns(
                    'AppleTV*.platform', 'Watch*.platform', 'iPhone*.platform',
                    'Documentation', 'swift*'
                ))

            # Replace the shipped compiler binaries in both toolchain
            # locations with symlinks to Spack's 'cc' wrapper.
            real_dirs = [
                'Toolchains/XcodeDefault.xctoolchain/usr/bin',
                'usr/bin',
            ]

            bins = ['c++', 'c89', 'c99', 'cc', 'clang', 'clang++', 'cpp']

            for real_dir in real_dirs:
                dev_dir = os.path.join(developer_root,
                                       'Contents',
                                       'Developer',
                                       real_dir)
                for fname in os.listdir(dev_dir):
                    if fname in bins:
                        os.unlink(os.path.join(dev_dir, fname))
                        os.symlink(
                            os.path.join(spack.paths.build_env_path, 'cc'),
                            os.path.join(dev_dir, fname))

            os.symlink(developer_root, xcode_link)

        env.set('DEVELOPER_DIR', xcode_link)

View File

@@ -32,9 +32,14 @@ class Cce(Compiler):
'f77': 'cce/ftn',
'fc': 'cce/ftn'}
@property
def is_clang_based(self):
version = self.version
return version >= ver('9.0') and 'classic' not in str(version)
@property
def version_argument(self):
if self.version >= ver('9.0'):
if self.is_clang_based:
return '--version'
return '-V'
@@ -50,19 +55,19 @@ def debug_flags(self):
@property
def openmp_flag(self):
if self.version >= ver('9.0'):
if self.is_clang_based:
return '-fopenmp'
return "-h omp"
@property
def cxx11_flag(self):
if self.version >= ver('9.0'):
if self.is_clang_based:
return '-std=c++11'
return "-h std=c++11"
@property
def c99_flag(self):
if self.version >= ver('9.0'):
if self.is_clang_based:
return '-std=c99'
elif self.version >= ver('8.4'):
return '-h std=c99,noconform,gnu'
@@ -75,7 +80,7 @@ def c99_flag(self):
@property
def c11_flag(self):
if self.version >= ver('9.0'):
if self.is_clang_based:
return '-std=c11'
elif self.version >= ver('8.5'):
return '-h std=c11,noconform,gnu'

View File

@@ -4,17 +4,11 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import re
import os
import sys
from shutil import copytree, ignore_patterns
import llnl.util.lang
import llnl.util.tty as tty
import spack.paths
import spack.stage
from spack.compiler import Compiler, UnsupportedCompilerFlag
from spack.util.executable import Executable
from spack.version import ver
@@ -88,88 +82,41 @@ def link_paths(self):
return link_paths
@property
def is_apple(self):
ver_string = str(self.version)
return ver_string.endswith('-apple')
@property
def verbose_flag(self):
return "-v"
@property
def openmp_flag(self):
if self.is_apple:
return "-Xpreprocessor -fopenmp"
else:
return "-fopenmp"
openmp_flag = "-fopenmp"
@property
def cxx11_flag(self):
if self.is_apple:
# Adapted from CMake's AppleClang-CXX rules
# Spack's AppleClang detection only valid from Xcode >= 4.6
if self.version < ver('4.0.0'):
raise UnsupportedCompilerFlag(self,
"the C++11 standard",
"cxx11_flag",
"Xcode < 4.0.0")
else:
return "-std=c++11"
else:
if self.version < ver('3.3'):
raise UnsupportedCompilerFlag(self,
"the C++11 standard",
"cxx11_flag",
"< 3.3")
else:
return "-std=c++11"
if self.version < ver('3.3'):
raise UnsupportedCompilerFlag(
self, "the C++11 standard", "cxx11_flag", "< 3.3"
)
return "-std=c++11"
@property
def cxx14_flag(self):
if self.is_apple:
# Adapted from CMake's rules for AppleClang
if self.version < ver('5.1.0'):
raise UnsupportedCompilerFlag(self,
"the C++14 standard",
"cxx14_flag",
"Xcode < 5.1.0")
elif self.version < ver('6.1.0'):
return "-std=c++1y"
else:
return "-std=c++14"
else:
if self.version < ver('3.4'):
raise UnsupportedCompilerFlag(self,
"the C++14 standard",
"cxx14_flag",
"< 3.5")
elif self.version < ver('3.5'):
return "-std=c++1y"
else:
return "-std=c++14"
if self.version < ver('3.4'):
raise UnsupportedCompilerFlag(
self, "the C++14 standard", "cxx14_flag", "< 3.5"
)
elif self.version < ver('3.5'):
return "-std=c++1y"
return "-std=c++14"
@property
def cxx17_flag(self):
if self.is_apple:
# Adapted from CMake's rules for AppleClang
if self.version < ver('6.1.0'):
raise UnsupportedCompilerFlag(self,
"the C++17 standard",
"cxx17_flag",
"Xcode < 6.1.0")
else:
return "-std=c++1z"
else:
if self.version < ver('3.5'):
raise UnsupportedCompilerFlag(self,
"the C++17 standard",
"cxx17_flag",
"< 3.5")
elif self.version < ver('5.0'):
return "-std=c++1z"
else:
return "-std=c++17"
if self.version < ver('3.5'):
raise UnsupportedCompilerFlag(
self, "the C++17 standard", "cxx17_flag", "< 3.5"
)
elif self.version < ver('5.0'):
return "-std=c++1z"
return "-std=c++17"
@property
def c99_flag(self):
@@ -207,9 +154,10 @@ def fc_pic_flag(self):
@llnl.util.lang.memoized
def extract_version_from_output(cls, output):
ver = 'unknown'
if 'Apple' in output:
return ver
match = re.search(
# Apple's LLVM compiler has its own versions, so suffix them.
r'^Apple (?:LLVM|clang) version ([^ )]+)|'
# Normal clang compiler versions are left as-is
r'clang version ([^ )]+)-svn[~.\w\d-]*|'
# Don't include hyphenated patch numbers in the version
@@ -219,8 +167,7 @@ def extract_version_from_output(cls, output):
output
)
if match:
suffix = '-apple' if match.lastindex == 1 else ''
ver = match.group(match.lastindex) + suffix
ver = match.group(match.lastindex)
return ver
@classmethod
@@ -235,107 +182,3 @@ def fc_version(cls, fc):
@classmethod
def f77_version(cls, f77):
return cls.fc_version(f77)
def setup_custom_environment(self, pkg, env):
"""Set the DEVELOPER_DIR environment for the Xcode toolchain.
On macOS, not all buildsystems support querying CC and CXX for the
compilers to use and instead query the Xcode toolchain for what
compiler to run. This side-steps the spack wrappers. In order to inject
spack into this setup, we need to copy (a subset of) Xcode.app and
replace the compiler executables with symlinks to the spack wrapper.
Currently, the stage is used to store the Xcode.app copies. We then set
the 'DEVELOPER_DIR' environment variables to cause the xcrun and
related tools to use this Xcode.app.
"""
super(Clang, self).setup_custom_environment(pkg, env)
if not self.is_apple or not pkg.use_xcode:
# if we do it for all packages, we get into big trouble with MPI:
# filter_compilers(self) will use mockup XCode compilers on macOS
# with Clang. Those point to Spack's compiler wrappers and
# consequently render MPI non-functional outside of Spack.
return
# Use special XCode versions of compiler wrappers when using XCode
# Overwrites build_environment's setting of SPACK_CC and SPACK_CXX
xcrun = Executable('xcrun')
xcode_clang = xcrun('-f', 'clang', output=str).strip()
xcode_clangpp = xcrun('-f', 'clang++', output=str).strip()
env.set('SPACK_CC', xcode_clang, force=True)
env.set('SPACK_CXX', xcode_clangpp, force=True)
xcode_select = Executable('xcode-select')
# Get the path of the active developer directory
real_root = xcode_select('--print-path', output=str).strip()
# The path name can be used to determine whether the full Xcode suite
# or just the command-line tools are installed
if real_root.endswith('Developer'):
# The full Xcode suite is installed
pass
else:
if real_root.endswith('CommandLineTools'):
# Only the command-line tools are installed
msg = 'It appears that you have the Xcode command-line tools '
msg += 'but not the full Xcode suite installed.\n'
else:
# Xcode is not installed
msg = 'It appears that you do not have Xcode installed.\n'
msg += 'In order to use Spack to build the requested application, '
msg += 'you need the full Xcode suite. It can be installed '
msg += 'through the App Store. Make sure you launch the '
msg += 'application and accept the license agreement.\n'
raise OSError(msg)
real_root = os.path.dirname(os.path.dirname(real_root))
developer_root = os.path.join(spack.stage.get_stage_root(),
'xcode-select',
self.name,
str(self.version))
xcode_link = os.path.join(developer_root, 'Xcode.app')
if not os.path.exists(developer_root):
tty.warn('Copying Xcode from %s to %s in order to add spack '
'wrappers to it. Please do not interrupt.'
% (real_root, developer_root))
# We need to make a new Xcode.app instance, but with symlinks to
# the spack wrappers for the compilers it ships. This is necessary
# because some projects insist on just asking xcrun and related
# tools where the compiler runs. These tools are very hard to trick
# as they do realpath and end up ignoring the symlinks in a
# "softer" tree of nothing but symlinks in the right places.
copytree(real_root, developer_root, symlinks=True,
ignore=ignore_patterns('AppleTV*.platform',
'Watch*.platform',
'iPhone*.platform',
'Documentation',
'swift*'))
real_dirs = [
'Toolchains/XcodeDefault.xctoolchain/usr/bin',
'usr/bin',
]
bins = ['c++', 'c89', 'c99', 'cc', 'clang', 'clang++', 'cpp']
for real_dir in real_dirs:
dev_dir = os.path.join(developer_root,
'Contents',
'Developer',
real_dir)
for fname in os.listdir(dev_dir):
if fname in bins:
os.unlink(os.path.join(dev_dir, fname))
os.symlink(
os.path.join(spack.paths.build_env_path, 'cc'),
os.path.join(dev_dir, fname))
os.symlink(developer_root, xcode_link)
env.set('DEVELOPER_DIR', xcode_link)

View File

@@ -56,12 +56,12 @@
import spack.schema.modules
import spack.schema.config
import spack.schema.upstreams
import spack.schema.env
from spack.error import SpackError
# Hacked yaml for configuration files preserves line numbers.
import spack.util.spack_yaml as syaml
#: Dict from section names -> schema for that section
section_schemas = {
'compilers': spack.schema.compilers.schema,
@@ -70,9 +70,15 @@
'packages': spack.schema.packages.schema,
'modules': spack.schema.modules.schema,
'config': spack.schema.config.schema,
'upstreams': spack.schema.upstreams.schema
'upstreams': spack.schema.upstreams.schema,
}
# Same as above, but including keys for environments
# this allows us to unify config reading between configs and environments
all_schemas = copy.deepcopy(section_schemas)
all_schemas.update(dict((key, spack.schema.env.schema)
for key in spack.schema.env.keys))
#: Builtin paths to configuration files in Spack
configuration_paths = (
# Default configuration scope is the lowest-level scope. These are
@@ -142,19 +148,21 @@ def get_section(self, section):
if section not in self.sections:
path = self.get_section_filename(section)
schema = section_schemas[section]
data = _read_config_file(path, schema)
data = read_config_file(path, schema)
self.sections[section] = data
return self.sections[section]
def write_section(self, section):
filename = self.get_section_filename(section)
data = self.get_section(section)
validate(data, section_schemas[section])
# We copy data here to avoid adding defaults at write time
validate_data = copy.deepcopy(data)
validate(validate_data, section_schemas[section])
try:
mkdirp(self.path)
with open(filename, 'w') as f:
validate(data, section_schemas[section])
syaml.dump_config(data, stream=f, default_flow_style=False)
except (yaml.YAMLError, IOError) as e:
raise ConfigFileError(
@@ -217,7 +225,7 @@ def get_section(self, section):
# }
# }
if self._raw_data is None:
self._raw_data = _read_config_file(self.path, self.schema)
self._raw_data = read_config_file(self.path, self.schema)
if self._raw_data is None:
return None
@@ -376,6 +384,16 @@ def highest_precedence_scope(self):
"""Non-internal scope with highest precedence."""
return next(reversed(self.file_scopes), None)
def highest_precedence_non_platform_scope(self):
"""Non-internal non-platform scope with highest precedence
Platform-specific scopes are of the form scope/platform"""
generator = reversed(self.file_scopes)
highest = next(generator, None)
while highest and '/' in highest.name:
highest = next(generator, None)
return highest
def matching_scopes(self, reg_expr):
"""
List of all scopes whose names match the provided regular expression.
@@ -435,8 +453,21 @@ def update_config(self, section, update_data, scope=None):
_validate_section_name(section) # validate section name
scope = self._validate_scope(scope) # get ConfigScope object
# manually preserve comments
need_comment_copy = (section in scope.sections and
scope.sections[section] is not None)
if need_comment_copy:
comments = getattr(scope.sections[section][section],
yaml.comments.Comment.attrib,
None)
# read only the requested section's data.
scope.sections[section] = {section: update_data}
scope.sections[section] = syaml.syaml_dict({section: update_data})
if need_comment_copy and comments:
setattr(scope.sections[section][section],
yaml.comments.Comment.attrib,
comments)
scope.write_section(section)
def get_config(self, section, scope=None):
@@ -483,14 +514,17 @@ def get_config(self, section, scope=None):
if section not in data:
continue
merged_section = _merge_yaml(merged_section, data)
merged_section = merge_yaml(merged_section, data)
# no config files -- empty config.
if section not in merged_section:
return {}
return syaml.syaml_dict()
# take the top key off before returning.
return merged_section[section]
ret = merged_section[section]
if isinstance(ret, dict):
ret = syaml.syaml_dict(ret)
return ret
def get(self, path, default=None, scope=None):
"""Get a config section or a single value from one.
@@ -506,14 +540,12 @@ def get(self, path, default=None, scope=None):
We use ``:`` as the separator, like YAML objects.
"""
# TODO: Currently only handles maps. Think about lists if neded.
section, _, rest = path.partition(':')
# TODO: Currently only handles maps. Think about lists if needed.
parts = process_config_path(path)
section = parts.pop(0)
value = self.get_config(section, scope=scope)
if not rest:
return value
parts = rest.split(':')
while parts:
key = parts.pop(0)
value = value.get(key, default)
@@ -525,21 +557,40 @@ def set(self, path, value, scope=None):
Accepts the path syntax described in ``get()``.
"""
parts = _process_config_path(path)
if ':' not in path:
# handle bare section name as path
self.update_config(path, value, scope=scope)
return
parts = process_config_path(path)
section = parts.pop(0)
if not parts:
self.update_config(section, value, scope=scope)
else:
section_data = self.get_config(section, scope=scope)
section_data = self.get_config(section, scope=scope)
data = section_data
while len(parts) > 1:
key = parts.pop(0)
data = data[key]
data[parts[0]] = value
data = section_data
while len(parts) > 1:
key = parts.pop(0)
self.update_config(section, section_data, scope=scope)
if _override(key):
new = type(data[key])()
del data[key]
else:
new = data[key]
if isinstance(new, dict):
# Make it an ordered dict
new = syaml.syaml_dict(new)
# reattach to parent object
data[key] = new
data = new
if _override(parts[0]):
data.pop(parts[0], None)
# update new value
data[parts[0]] = value
self.update_config(section, section_data, scope=scope)
def __iter__(self):
"""Iterate over scopes in this configuration."""
@@ -692,26 +743,53 @@ def _validate_section_name(section):
% (section, " ".join(section_schemas.keys())))
def validate(data, schema, set_defaults=True):
def validate(data, schema, filename=None):
"""Validate data read in from a Spack YAML file.
Arguments:
data (dict or list): data read from a Spack YAML file
schema (dict or list): jsonschema to validate data
set_defaults (bool): whether to set defaults based on the schema
This leverages the line information (start_mark, end_mark) stored
on Spack YAML structures.
"""
import jsonschema
# validate a copy to avoid adding defaults
# This allows us to round-trip data without adding to it.
test_data = copy.deepcopy(data)
if isinstance(test_data, yaml.comments.CommentedMap):
# HACK to fully copy ruamel CommentedMap that doesn't provide copy
# method. Especially necessary for environments
setattr(test_data,
yaml.comments.Comment.attrib,
getattr(data,
yaml.comments.Comment.attrib,
yaml.comments.Comment()))
try:
spack.schema.Validator(schema).validate(data)
spack.schema.Validator(schema).validate(test_data)
except jsonschema.ValidationError as e:
raise ConfigFormatError(e, data)
if hasattr(e.instance, 'lc'):
line_number = e.instance.lc.line + 1
else:
line_number = None
raise ConfigFormatError(e, data, filename, line_number)
# return the validated data so that we can access the raw data
# mostly relevant for environments
return test_data
def _read_config_file(filename, schema):
"""Read a YAML configuration file."""
def read_config_file(filename, schema=None):
"""Read a YAML configuration file.
User can provide a schema for validation. If no schema is provided,
we will infer the schema from the top-level key."""
# Dev: Inferring schema and allowing it to be provided directly allows us
# to preserve flexibility in calling convention (don't need to provide
# schema when it's not necessary) while allowing us to validate against a
# known schema when the top-level key could be incorrect.
# Ignore nonexisting files.
if not os.path.exists(filename):
return None
@@ -729,9 +807,16 @@ def _read_config_file(filename, schema):
data = syaml.load_config(f)
if data:
if not schema:
key = next(iter(data))
schema = all_schemas[key]
validate(data, schema)
return data
except StopIteration:
raise ConfigFileError(
"Config file is empty or is not a valid YAML dict: %s" % filename)
except MarkedYAMLError as e:
raise ConfigFileError(
"Error parsing yaml%s: %s" % (str(e.context_mark), e.problem))
@@ -772,13 +857,40 @@ def _mark_internal(data, name):
return d
def _merge_yaml(dest, source):
def get_valid_type(path):
"""Returns an instance of a type that will pass validation for path.
The instance is created by calling the constructor with no arguments.
If multiple types will satisfy validation for data at the configuration
path given, the priority order is ``list``, ``dict``, ``str``, ``bool``,
``int``, ``float``.
"""
components = process_config_path(path)
section = components[0]
for type in (list, syaml.syaml_dict, str, bool, int, float):
try:
ret = type()
test_data = ret
for component in reversed(components):
test_data = {component: test_data}
validate(test_data, section_schemas[section])
return ret
except (ConfigFormatError, AttributeError):
# This type won't validate, try the next one
# We also catch AttributeError because the undefined dict ordering
# in Python 3.5 can cause the validator to raise an AttributeError
# instead of a ConfigFormatError.
pass
raise ConfigError("Cannot determine valid type for path '%s'." % path)
def merge_yaml(dest, source):
"""Merges source into dest; entries in source take precedence over dest.
This routine may modify dest and should be assigned to dest, in
case dest was None to begin with, e.g.:
dest = _merge_yaml(dest, source)
dest = merge_yaml(dest, source)
Config file authors can optionally end any attribute in a dict
with `::` instead of `:`, and the key will override that of the
@@ -793,6 +905,7 @@ def they_are(t):
# Source list is prepended (for precedence)
if they_are(list):
# Make sure to copy ruamel comments
dest[:] = source + [x for x in dest if x not in source]
return dest
@@ -805,9 +918,10 @@ def they_are(t):
if _override(sk) or sk not in dest:
# if sk ended with ::, or if it's new, completely override
dest[sk] = copy.copy(sv)
# copy ruamel comments manually
else:
# otherwise, merge the YAML
dest[sk] = _merge_yaml(dest[sk], source[sk])
dest[sk] = merge_yaml(dest[sk], source[sk])
# this seems unintuitive, but see below. We need this because
# Python dicts do not overwrite keys on insert, and we want
@@ -837,7 +951,7 @@ def they_are(t):
# Process a path argument to config.set() that may contain overrides ('::' or
# trailing ':')
#
def _process_config_path(path):
def process_config_path(path):
result = []
if path.startswith(':'):
raise syaml.SpackYAMLError("Illegal leading `:' in path `{0}'".
@@ -861,13 +975,20 @@ def _process_config_path(path):
#
# Settings for commands that modify configuration
#
def default_modify_scope():
def default_modify_scope(section='config'):
"""Return the config scope that commands should modify by default.
Commands that modify configuration by default modify the *highest*
priority scope.
Arguments:
section (str): Section for which to get the default scope.
If this is not 'compilers', a general (non-platform) scope is used.
"""
return spack.config.config.highest_precedence_scope().name
if section == 'compilers':
return spack.config.config.highest_precedence_scope().name
else:
return spack.config.config.highest_precedence_non_platform_scope().name
def default_list_scope():
@@ -894,17 +1015,17 @@ class ConfigFormatError(ConfigError):
"""Raised when a configuration format does not match its schema."""
def __init__(self, validation_error, data, filename=None, line=None):
# spack yaml has its own file/line marks -- try to find them
# we prioritize these over the inputs
mark = self._get_mark(validation_error, data)
if mark:
filename = mark.name
line = mark.line + 1
self.filename = filename # record this for ruamel.yaml
# construct location
location = '<unknown file>'
# spack yaml has its own file/line marks -- try to find them
if not filename and not line:
mark = self._get_mark(validation_error, data)
if mark:
filename = mark.name
line = mark.line + 1
if filename:
location = '%s' % filename
if line is not None:

View File

@@ -23,6 +23,7 @@
import contextlib
import datetime
import os
import six
import socket
import sys
import time
@@ -33,14 +34,14 @@
_use_uuid = False
pass
import llnl.util.filesystem as fs
import llnl.util.tty as tty
import six
import spack.repo
import spack.spec
import spack.store
import spack.util.lock as lk
import spack.util.spack_json as sjson
from llnl.util.filesystem import mkdirp
from spack.directory_layout import DirectoryLayoutError
from spack.error import SpackError
from spack.filesystem_view import YamlFilesystemView
@@ -316,10 +317,10 @@ def __init__(self, root, db_dir=None, upstream_dbs=None,
# Create needed directories and files
if not os.path.exists(self._db_dir):
mkdirp(self._db_dir)
fs.mkdirp(self._db_dir)
if not os.path.exists(self._failure_dir) and not is_upstream:
mkdirp(self._failure_dir)
fs.mkdirp(self._failure_dir)
self.is_upstream = is_upstream
self.last_seen_verifier = ''
@@ -392,6 +393,23 @@ def _failed_spec_path(self, spec):
return os.path.join(self._failure_dir,
'{0}-{1}'.format(spec.name, spec.full_hash()))
def clear_all_failures(self):
"""Force remove install failure tracking files."""
tty.debug('Releasing prefix failure locks')
for pkg_id in list(self._prefix_failures.keys()):
lock = self._prefix_failures.pop(pkg_id, None)
if lock:
lock.release_write()
# Remove all failure markings (aka files)
tty.debug('Removing prefix failure tracking files')
for fail_mark in os.listdir(self._failure_dir):
try:
os.remove(os.path.join(self._failure_dir, fail_mark))
except OSError as exc:
tty.warn('Unable to remove failure marking file {0}: {1}'
.format(fail_mark, str(exc)))
def clear_failure(self, spec, force=False):
"""
Remove any persistent and cached failure tracking for the spec.

View File

@@ -80,9 +80,6 @@
#: version of the lockfile format. Must increase monotonically.
lockfile_format_version = 2
#: legal first keys in the spack.yaml manifest file
env_schema_keys = ('spack', 'env')
# Magic names
# The name of the standalone spec list in the manifest yaml
user_speclist_name = 'specs'
@@ -366,7 +363,7 @@ def create(name, init_file=None, with_view=None):
def config_dict(yaml_data):
"""Get the configuration scope section out of an spack.yaml"""
key = spack.config.first_existing(yaml_data, env_schema_keys)
key = spack.config.first_existing(yaml_data, spack.schema.env.keys)
return yaml_data[key]
@@ -392,42 +389,19 @@ def all_environments():
yield read(name)
def validate(data, filename=None):
# validating changes data by adding defaults. Return validated data
validate_data = copy.deepcopy(data)
# HACK to fully copy ruamel CommentedMap that doesn't provide copy method
import ruamel.yaml as yaml
setattr(
validate_data,
yaml.comments.Comment.attrib,
getattr(data, yaml.comments.Comment.attrib, yaml.comments.Comment())
)
import jsonschema
try:
spack.schema.Validator(spack.schema.env.schema).validate(validate_data)
except jsonschema.ValidationError as e:
if hasattr(e.instance, 'lc'):
line_number = e.instance.lc.line + 1
else:
line_number = None
raise spack.config.ConfigFormatError(
e, data, filename, line_number)
return validate_data
def _read_yaml(str_or_file):
"""Read YAML from a file for round-trip parsing."""
data = syaml.load_config(str_or_file)
filename = getattr(str_or_file, 'name', None)
default_data = validate(data, filename)
default_data = spack.config.validate(
data, spack.schema.env.schema, filename)
return (data, default_data)
def _write_yaml(data, str_or_file):
"""Write YAML to a file preserving comments and dict order."""
filename = getattr(str_or_file, 'name', None)
validate(data, filename)
spack.config.validate(data, spack.schema.env.schema, filename)
syaml.dump_config(data, str_or_file, default_flow_style=False)
@@ -435,12 +409,14 @@ def _eval_conditional(string):
"""Evaluate conditional definitions using restricted variable scope."""
arch = architecture.Arch(
architecture.platform(), 'default_os', 'default_target')
arch_spec = spack.spec.Spec('arch=%s' % arch)
valid_variables = {
'target': str(arch.target),
'os': str(arch.os),
'platform': str(arch.platform),
'arch': str(arch),
'architecture': str(arch),
'arch': arch_spec,
'architecture': arch_spec,
'arch_str': str(arch),
're': re,
'env': os.environ,
'hostname': socket.gethostname()
@@ -815,12 +791,21 @@ def env_file_config_scope(self):
return spack.config.SingleFileScope(config_name,
self.manifest_path,
spack.schema.env.schema,
[env_schema_keys])
[spack.schema.env.keys])
def config_scopes(self):
"""A list of all configuration scopes for this environment."""
return self.included_config_scopes() + [self.env_file_config_scope()]
def set_config(self, path, value):
"""Set configuration for this environment"""
yaml = config_dict(self.yaml)
keys = spack.config.process_config_path(path)
for key in keys[:-1]:
yaml = yaml[key]
yaml[keys[-1]] = value
self.write()
def destroy(self):
"""Remove this environment from Spack entirely."""
shutil.rmtree(self.path)
@@ -907,13 +892,23 @@ def remove(self, query_spec, list_name=user_speclist_name, force=False):
old_specs = set(self.user_specs)
for spec in matches:
if spec in list_to_change:
list_to_change.remove(spec)
self.update_stale_references(list_name)
try:
list_to_change.remove(spec)
self.update_stale_references(list_name)
new_specs = set(self.user_specs)
except spack.spec_list.SpecListError:
# define new specs list
new_specs = set(self.user_specs)
msg = "Spec '%s' is part of a spec matrix and " % spec
msg += "cannot be removed from list '%s'." % list_to_change
if force:
msg += " It will be removed from the concrete specs."
# Mock new specs so we can remove this spec from
# concrete spec lists
new_specs.remove(spec)
tty.warn(msg)
# If force, update stale concretized specs
# Only check specs removed by this operation
new_specs = set(self.user_specs)
for spec in old_specs - new_specs:
if force and spec in self.concretized_user_specs:
i = self.concretized_user_specs.index(spec)
@@ -1501,8 +1496,12 @@ def write(self, regenerate_views=True):
del yaml_dict[key]
# if all that worked, write out the manifest file at the top level
# Only actually write if it has changed or was never written
changed = self.yaml != self.raw_yaml
# (we used to check whether the yaml had changed and not write it out
# if it hadn't. We can't do that anymore because the only thing that
# changed could be the "override" attribute on a config dict,
# which would not show up in even a string comparison between the two
# keys).
changed = not yaml_equivalent(self.yaml, self.raw_yaml)
written = os.path.exists(self.manifest_path)
if changed or not written:
self.raw_yaml = copy.deepcopy(self.yaml)
@@ -1529,6 +1528,39 @@ def __exit__(self, exc_type, exc_val, exc_tb):
activate(self._previous_active)
def yaml_equivalent(first, second):
"""Returns whether two spack yaml items are equivalent, including overrides
"""
if isinstance(first, dict):
return isinstance(second, dict) and _equiv_dict(first, second)
elif isinstance(first, list):
return isinstance(second, list) and _equiv_list(first, second)
else: # it's a string
return isinstance(second, six.string_types) and first == second
def _equiv_list(first, second):
"""Returns whether two spack yaml lists are equivalent, including overrides
"""
if len(first) != len(second):
return False
return all(yaml_equivalent(f, s) for f, s in zip(first, second))
def _equiv_dict(first, second):
"""Returns whether two spack yaml dicts are equivalent, including overrides
"""
if len(first) != len(second):
return False
same_values = all(yaml_equivalent(fv, sv)
for fv, sv in zip(first.values(), second.values()))
same_keys_with_same_overrides = all(
fk == sk and getattr(fk, 'override', False) == getattr(sk, 'override',
False)
for fk, sk in zip(first.keys(), second.keys()))
return same_values and same_keys_with_same_overrides
def display_specs(concretized_specs):
"""Displays the list of specs returned by `Environment.concretize()`.

View File

@@ -373,6 +373,13 @@ def _update_explicit_entry_in_db(pkg, rec, explicit):
rec.explicit = True
def clear_failures():
"""
Remove all failure tracking markers for the Spack instance.
"""
spack.store.db.clear_all_failures()
def dump_packages(spec, path):
"""
Dump all package information for a spec and its dependencies.
@@ -835,7 +842,7 @@ def _cleanup_failed(self, pkg_id):
"""
lock = self.failed.get(pkg_id, None)
if lock is not None:
err = "{0} exception when removing failure mark for {1}: {2}"
err = "{0} exception when removing failure tracking for {1}: {2}"
msg = 'Removing failure mark on {0}'
try:
tty.verbose(msg.format(pkg_id))

View File

@@ -142,7 +142,9 @@ def detect_version(self, detect_version_args):
compiler_name = detect_version_args.id.compiler_name
compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
output = modulecmd('avail', compiler_cls.PrgEnv_compiler)
version_regex = r'(%s)/([\d\.]+[\d])' % compiler_cls.PrgEnv_compiler
version_regex = r'({0})/([\d\.]+[\d]-?[\w]*)'.format(
compiler_cls.PrgEnv_compiler
)
matches = re.findall(version_regex, output)
version = tuple(version for _, version in matches)
compiler_id = detect_version_args.id

View File

@@ -1027,6 +1027,11 @@ def is_activated(self, view):
if not self.is_extension:
raise ValueError(
"is_activated called on package that is not an extension.")
if self.extendee_spec.package.installed_upstream:
# If this extends an upstream package, it cannot be activated for
# it. This bypasses construction of the extension map, which can
# fail when run in the context of a downstream Spack instance
return False
extensions_layout = view.extensions_layout
exts = extensions_layout.extension_map(self.extendee_spec)
return (self.name in exts) and (exts[self.name] == self.spec)

View File

@@ -13,6 +13,8 @@
import spack.schema.merged
import spack.schema.projections
#: legal first keys in the schema
keys = ('spack', 'env')
spec_list_schema = {
'type': 'array',

View File

@@ -511,8 +511,17 @@ def __init__(self, *args):
raise TypeError(
"__init__ takes 1 or 2 arguments. (%d given)" % nargs)
def _add_version(self, version):
self.versions.add(version)
def _add_versions(self, version_list):
# If it already has a non-trivial version list, this is an error
if self.versions and self.versions != vn.VersionList(':'):
# Note: This may be impossible to reach by the current parser
# Keeping it in case the implementation changes.
raise MultipleVersionError(
'A spec cannot contain multiple version signifiers.'
' Use a version list instead.')
self.versions = vn.VersionList()
for version in version_list:
self.versions.add(version)
def _autospec(self, compiler_spec_like):
if isinstance(compiler_spec_like, CompilerSpec):
@@ -1050,9 +1059,16 @@ def dependents_dict(self, deptype='all'):
#
# Private routines here are called by the parser when building a spec.
#
def _add_version(self, version):
def _add_versions(self, version_list):
"""Called by the parser to add an allowable version."""
self.versions.add(version)
# If it already has a non-trivial version list, this is an error
if self.versions and self.versions != vn.VersionList(':'):
raise MultipleVersionError(
'A spec cannot contain multiple version signifiers.'
' Use a version list instead.')
self.versions = vn.VersionList()
for version in version_list:
self.versions.add(version)
def _add_flag(self, name, value):
"""Called by the parser to add a known flag.
@@ -1448,8 +1464,8 @@ def to_node_dict(self, hash=ht.dag_hash):
'target': 'x86_64',
},
'compiler': {
'name': 'clang',
'version': '10.0.0-apple',
'name': 'apple-clang',
'version': '10.0.0',
},
'namespace': 'builtin',
'parameters': {
@@ -1554,8 +1570,8 @@ def to_dict(self, hash=ht.dag_hash):
'target': 'x86_64',
},
'compiler': {
'name': 'clang',
'version': '10.0.0-apple',
'name': 'apple-clang',
'version': '10.0.0',
},
'namespace': 'builtin',
'parameters': {
@@ -4157,9 +4173,7 @@ def spec(self, name):
while self.next:
if self.accept(AT):
vlist = self.version_list()
spec.versions = vn.VersionList()
for version in vlist:
spec._add_version(version)
spec._add_versions(vlist)
elif self.accept(ON):
name = self.variant()
@@ -4251,8 +4265,7 @@ def compiler(self):
compiler.versions = vn.VersionList()
if self.accept(AT):
vlist = self.version_list()
for version in vlist:
compiler._add_version(version)
compiler._add_versions(vlist)
else:
compiler.versions = vn.VersionList(':')
return compiler
@@ -4325,6 +4338,10 @@ class DuplicateDependencyError(spack.error.SpecError):
"""Raised when the same dependency occurs in a spec twice."""
class MultipleVersionError(spack.error.SpecError):
"""Raised when version constraints occur in a spec twice."""
class DuplicateCompilerSpecError(spack.error.SpecError):
"""Raised when the same compiler occurs in a spec twice."""

View File

@@ -176,8 +176,8 @@ def test_arch_spec_container_semantic(item, architecture_str):
# Check mixed toolchains
('clang@8.0.0', 'broadwell', ''),
('clang@3.5', 'x86_64', '-march=x86-64 -mtune=generic'),
# Check clang compilers with 'apple' suffix
('clang@9.1.0-apple', 'x86_64', '-march=x86-64')
# Check Apple's Clang compilers
('apple-clang@9.1.0', 'x86_64', '-march=x86-64')
])
@pytest.mark.filterwarnings("ignore:microarchitecture specific")
def test_optimization_flags(
@@ -200,7 +200,7 @@ def test_optimization_flags(
'-march=icelake-client -mtune=icelake-client'),
# Check that the special case for Apple's clang is treated correctly
# i.e. it won't try to detect the version again
(spack.spec.CompilerSpec('clang@9.1.0-apple'), None, 'x86_64',
(spack.spec.CompilerSpec('apple-clang@9.1.0'), None, 'x86_64',
'-march=x86-64'),
])
def test_optimization_flags_with_custom_versions(

View File

@@ -15,6 +15,17 @@
import spack.util.web as web_util
import spack.util.gpg
import spack.ci_optimization as ci_opt
import spack.ci_needs_workaround as cinw
import spack.util.spack_yaml as syaml
import itertools as it
import collections
try:
# dynamically import to keep vermin from complaining
collections_abc = __import__('collections.abc')
except ImportError:
collections_abc = collections
@pytest.fixture
def tmp_scope():
@@ -166,3 +177,162 @@ def test_read_write_cdash_ids(config, tmp_scope, tmpdir, mock_packages):
read_cdashid = ci.read_cdashid_from_mirror(mock_spec, mirror_url)
assert(str(read_cdashid) == orig_cdashid)
def test_ci_workarounds():
    """End-to-end check of the GitLab-CI yaml optimization/workaround passes.

    Builds a synthetic GitLab-CI yaml object for every combination of the
    three knobs under test, then asserts that running ``ci_opt.optimizer``
    and/or ``cinw.needs_to_dependencies`` over the unoptimized object yields
    exactly the object that would have been generated directly with those
    options enabled.
    """
    # Oversized placeholder values: root spec strings are long in practice,
    # and a git ref is always 40 hex chars.
    fake_root_spec = 'x' * 544
    fake_spack_ref = 'x' * 40

    common_variables = {
        'SPACK_COMPILER_ACTION': 'NONE',
        'SPACK_IS_PR_PIPELINE': 'False',
    }

    common_script = ['spack ci rebuild']

    common_before_script = [
        'git clone "https://github.com/spack/spack"',
        ' && '.join((
            'pushd ./spack',
            'git checkout "{ref}"'.format(ref=fake_spack_ref),
            'popd')),
        '. "./spack/share/spack/setup-env.sh"'
    ]

    def make_build_job(name, deps, stage, use_artifact_buildcache, optimize,
                       use_dependencies):
        # Build one package-build job entry; the exact layout depends on the
        # three flags under test (optimized jobs reference the factored-out
        # '.cN' anchors instead of repeating the common fields).
        variables = common_variables.copy()
        variables['SPACK_JOB_SPEC_PKG_NAME'] = name

        result = {
            'stage': stage,
            'tags': ['tag-0', 'tag-1'],
            'artifacts': {
                'paths': [
                    'jobs_scratch_dir',
                    'cdash_report',
                    name + '.spec.yaml',
                    name + '.cdashid',
                    name
                ],
                'when': 'always'
            },
            'retry': {'max': 2, 'when': ['always']},
            'after_script': ['rm -rf "./spack"'],
            'image': {'name': 'spack/centos7', 'entrypoint': ['']},
        }

        if optimize:
            result['extends'] = ['.c0', '.c1', '.c2']
        else:
            variables['SPACK_ROOT_SPEC'] = fake_root_spec
            result['script'] = common_script
            result['before_script'] = common_before_script

        result['variables'] = variables

        if use_dependencies:
            # 'dependencies' only lists deps when artifacts carry the
            # buildcache; otherwise it is intentionally empty.
            result['dependencies'] = (
                list(deps) if use_artifact_buildcache
                else [])
        else:
            result['needs'] = [
                {'job': dep, 'artifacts': use_artifact_buildcache}
                for dep in deps]

        return {name: result}

    def make_rebuild_index_job(
            use_artifact_buildcache, optimize, use_dependencies):
        # The final pipeline job that refreshes the buildcache index.
        result = {
            'stage': 'stage-rebuild-index',
            'script': 'spack buildcache update-index -d s3://mirror',
            'tags': ['tag-0', 'tag-1'],
            'image': {'name': 'spack/centos7', 'entrypoint': ['']},
            'after_script': ['rm -rf "./spack"'],
        }

        if optimize:
            result['extends'] = '.c1'
        else:
            result['before_script'] = common_before_script

        return {'rebuild-index': result}

    def make_factored_jobs(optimize):
        # The '.cN' anchor jobs that the optimizer factors common fields into.
        return {
            '.c0': {'script': common_script},
            '.c1': {'before_script': common_before_script},
            '.c2': {'variables': {'SPACK_ROOT_SPEC': fake_root_spec}}
        } if optimize else {}

    def make_yaml_obj(use_artifact_buildcache, optimize, use_dependencies):
        # Assemble a full three-package pipeline (a <- b <- c) plus the
        # rebuild-index job and any factored anchors.
        result = {}

        result.update(make_build_job(
            'pkg-a', [], 'stage-0', use_artifact_buildcache, optimize,
            use_dependencies))

        result.update(make_build_job(
            'pkg-b', ['pkg-a'], 'stage-1', use_artifact_buildcache, optimize,
            use_dependencies))

        result.update(make_build_job(
            'pkg-c', ['pkg-a', 'pkg-b'], 'stage-2', use_artifact_buildcache,
            optimize, use_dependencies))

        result.update(make_rebuild_index_job(
            use_artifact_buildcache, optimize, use_dependencies))

        result.update(make_factored_jobs(optimize))

        return result

    def sort_yaml_obj(obj):
        # Recursively sort mappings and sequences so that comparison is
        # insensitive to key/element ordering.
        if isinstance(obj, collections_abc.Mapping):
            result = syaml.syaml_dict()
            for k in sorted(obj.keys(), key=str):
                result[k] = sort_yaml_obj(obj[k])
            return result

        if (isinstance(obj, collections_abc.Sequence) and
                not isinstance(obj, str)):
            return syaml.syaml_list(sorted(
                (sort_yaml_obj(x) for x in obj), key=str))

        return obj

    # test every combination of:
    #     use artifact buildcache: true or false
    #     run optimization pass: true or false
    #     convert needs to dependencies: true or false
    for use_ab in (False, True):
        original = make_yaml_obj(
            use_artifact_buildcache=use_ab,
            optimize=False,
            use_dependencies=False)

        for opt, deps in it.product(*(((False, True),) * 2)):
            # neither optimizing nor converting needs->dependencies
            if not (opt or deps):
                # therefore, nothing to test
                continue

            predicted = make_yaml_obj(
                use_artifact_buildcache=use_ab,
                optimize=opt,
                use_dependencies=deps)

            actual = original.copy()
            if opt:
                actual = ci_opt.optimizer(actual)
            if deps:
                actual = cinw.needs_to_dependencies(actual)

            # Compare canonicalized yaml dumps so dict/list ordering
            # differences do not cause false failures.
            predicted = syaml.dump_config(
                sort_yaml_obj(predicted), default_flow_style=True)
            actual = syaml.dump_config(
                sort_yaml_obj(actual), default_flow_style=True)

            assert(predicted == actual)

View File

@@ -5,14 +5,19 @@
import errno
import platform
import os
import pytest
import spack.main
import spack.binary_distribution
import spack.environment as ev
from spack.spec import Spec
buildcache = spack.main.SpackCommand('buildcache')
install = spack.main.SpackCommand('install')
env = spack.main.SpackCommand('env')
add = spack.main.SpackCommand('add')
@pytest.fixture()
@@ -45,6 +50,52 @@ def test_buildcache_list_duplicates(mock_get_specs, capsys):
assert output.count('mpileaks') == 3
def tests_buildcache_create(
        install_mockery, mock_fetch, monkeypatch, tmpdir):
    """Ensure that buildcache create creates output files."""
    pkg = 'trivial-install-test-package'
    install(pkg)

    buildcache('create', '-d', str(tmpdir), '--unsigned', pkg)

    # The cache directory must contain both the binary tarball and the
    # spec.yaml metadata file for the concretized spec.
    spec = Spec(pkg).concretized()
    tarball_path = spack.binary_distribution.tarball_path_name(spec, '.spack')
    tarball = spack.binary_distribution.tarball_name(spec, '.spec.yaml')
    assert os.path.exists(
        os.path.join(str(tmpdir), 'build_cache', tarball_path))
    assert os.path.exists(
        os.path.join(str(tmpdir), 'build_cache', tarball))
def tests_buildcache_create_env(
        install_mockery, mock_fetch, monkeypatch,
        tmpdir, mutable_mock_env_path):
    """Ensure that buildcache create creates output files from env."""
    pkg = 'trivial-install-test-package'

    # Install the package inside an active environment, then run
    # `buildcache create` with no explicit spec: it should pick up the
    # environment's installed specs.
    env('create', 'test')
    with ev.read('test'):
        add(pkg)
        install()

        buildcache('create', '-d', str(tmpdir), '--unsigned')

    spec = Spec(pkg).concretized()
    tarball_path = spack.binary_distribution.tarball_path_name(spec, '.spack')
    tarball = spack.binary_distribution.tarball_name(spec, '.spec.yaml')
    assert os.path.exists(
        os.path.join(str(tmpdir), 'build_cache', tarball_path))
    assert os.path.exists(
        os.path.join(str(tmpdir), 'build_cache', tarball))
def test_buildcache_create_fails_on_noargs(tmpdir):
    """Ensure that buildcache create fails when given no args or
    environment."""
    with pytest.raises(spack.main.SpackCommandError):
        buildcache('create', '-d', str(tmpdir), '--unsigned')
def test_buildcache_create_fail_on_perm_denied(
install_mockery, mock_fetch, monkeypatch, tmpdir):
"""Ensure that buildcache create fails on permission denied error."""

View File

@@ -28,18 +28,21 @@ def __call__(self, *args, **kwargs):
spack.caches.fetch_cache, 'destroy', Counter(), raising=False)
monkeypatch.setattr(
spack.caches.misc_cache, 'destroy', Counter())
monkeypatch.setattr(
spack.installer, 'clear_failures', Counter())
@pytest.mark.usefixtures(
'mock_packages', 'config', 'mock_calls_for_clean'
)
@pytest.mark.parametrize('command_line,counters', [
('mpileaks', [1, 0, 0, 0]),
('-s', [0, 1, 0, 0]),
('-sd', [0, 1, 1, 0]),
('-m', [0, 0, 0, 1]),
('-a', [0, 1, 1, 1]),
('', [0, 0, 0, 0]),
('mpileaks', [1, 0, 0, 0, 0]),
('-s', [0, 1, 0, 0, 0]),
('-sd', [0, 1, 1, 0, 0]),
('-m', [0, 0, 0, 1, 0]),
('-f', [0, 0, 0, 0, 1]),
('-a', [0, 1, 1, 1, 1]),
('', [0, 0, 0, 0, 0]),
])
def test_function_calls(command_line, counters):
@@ -52,3 +55,4 @@ def test_function_calls(command_line, counters):
assert spack.stage.purge.call_count == counters[1]
assert spack.caches.fetch_cache.destroy.call_count == counters[2]
assert spack.caches.misc_cache.destroy.call_count == counters[3]
assert spack.installer.clear_failures.call_count == counters[4]

View File

@@ -2,7 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import pytest
import os
from llnl.util.filesystem import mkdirp
@@ -12,6 +12,7 @@
from spack.main import SpackCommand
config = SpackCommand('config')
env = SpackCommand('env')
def test_get_config_scope(mock_low_high_config):
@@ -46,7 +47,7 @@ def test_get_config_scope_merged(mock_low_high_config):
def test_config_edit():
"""Ensure `spack config edit` edits the right paths."""
dms = spack.config.default_modify_scope()
dms = spack.config.default_modify_scope('compilers')
dms_path = spack.config.config.scopes[dms].path
user_path = spack.config.config.scopes['user'].path
@@ -97,3 +98,308 @@ def test_config_list():
output = config('list')
assert 'compilers' in output
assert 'packages' in output
def test_config_add(mutable_empty_config):
    """`spack config add` of a scalar value creates the key in the scope."""
    config('add', 'config:dirty:true')
    output = config('get', 'config')

    assert output == """config:
  dirty: true
"""
def test_config_add_list(mutable_empty_config):
    """Repeated adds to a list-valued key prepend (most recent first)."""
    config('add', 'config:template_dirs:test1')
    config('add', 'config:template_dirs:[test2]')
    config('add', 'config:template_dirs:test3')
    output = config('get', 'config')

    assert output == """config:
  template_dirs:
  - test3
  - test2
  - test1
"""
def test_config_add_override(mutable_empty_config):
    """The `config::` section-override syntax replaces lower-scope values."""
    config('--scope', 'site', 'add', 'config:template_dirs:test1')
    config('add', 'config:template_dirs:[test2]')
    output = config('get', 'config')

    # Without override, user-scope and site-scope entries are merged.
    assert output == """config:
  template_dirs:
  - test2
  - test1
"""

    # The double-colon on the section overrides instead of merging.
    config('add', 'config::template_dirs:[test2]')
    output = config('get', 'config')

    assert output == """config:
  template_dirs:
  - test2
"""
def test_config_add_override_leaf(mutable_empty_config):
    """Override syntax on a leaf key (`key::`) replaces just that key."""
    config('--scope', 'site', 'add', 'config:template_dirs:test1')
    config('add', 'config:template_dirs:[test2]')
    output = config('get', 'config')

    # Without override, scopes merge.
    assert output == """config:
  template_dirs:
  - test2
  - test1
"""

    # Leaf-level override: the stored key literally keeps the trailing colon.
    config('add', 'config:template_dirs::[test2]')
    output = config('get', 'config')

    assert output == """config:
  'template_dirs:':
  - test2
"""
def test_config_add_update_dict(mutable_empty_config):
    """Adding a second key under an existing dict merges, not replaces."""
    config('add', 'packages:all:compiler:[gcc]')
    config('add', 'packages:all:version:1.0.0')
    output = config('get', 'packages')

    expected = """packages:
  all:
    compiler: [gcc]
    version:
    - 1.0.0
"""
    assert output == expected
def test_config_add_ordered_dict(mutable_empty_config):
    """Entries of an ordered section (mirrors) keep insertion order."""
    config('add', 'mirrors:first:/path/to/first')
    config('add', 'mirrors:second:/path/to/second')
    output = config('get', 'mirrors')

    assert output == """mirrors:
  first: /path/to/first
  second: /path/to/second
"""
def test_config_add_invalid_fails(mutable_empty_config):
    """Schema-invalid additions raise instead of corrupting the config."""
    config('add', 'packages:all:variants:+debug')
    with pytest.raises(
            (spack.config.ConfigFormatError, AttributeError)
    ):
        config('add', 'packages:all:True')
def test_config_add_from_file(mutable_empty_config, tmpdir):
    """`config add -f <file>` merges a spack.yaml file into the config."""
    contents = """spack:
  config:
    dirty: true
"""

    file = str(tmpdir.join('spack.yaml'))
    with open(file, 'w') as f:
        f.write(contents)
    config('add', '-f', file)
    output = config('get', 'config')

    assert output == """config:
  dirty: true
"""
def test_config_add_from_file_multiple(mutable_empty_config, tmpdir):
    """A file with several keys under one section adds all of them."""
    contents = """spack:
  config:
    dirty: true
    template_dirs: [test1]
"""

    file = str(tmpdir.join('spack.yaml'))
    with open(file, 'w') as f:
        f.write(contents)
    config('add', '-f', file)
    output = config('get', 'config')

    assert output == """config:
  dirty: true
  template_dirs: [test1]
"""
def test_config_add_override_from_file(mutable_empty_config, tmpdir):
    """Section-override syntax (`config::`) in a file replaces lower scopes."""
    config('--scope', 'site', 'add', 'config:template_dirs:test1')
    contents = """spack:
  config::
    template_dirs: [test2]
"""

    file = str(tmpdir.join('spack.yaml'))
    with open(file, 'w') as f:
        f.write(contents)
    config('add', '-f', file)
    output = config('get', 'config')

    # The site-scope 'test1' entry is overridden away, not merged.
    assert output == """config:
  template_dirs: [test2]
"""
def test_config_add_override_leaf_from_file(mutable_empty_config, tmpdir):
    """Leaf-override syntax (`key::`) in a file replaces just that key."""
    config('--scope', 'site', 'add', 'config:template_dirs:test1')
    contents = """spack:
  config:
    template_dirs:: [test2]
"""

    file = str(tmpdir.join('spack.yaml'))
    with open(file, 'w') as f:
        f.write(contents)
    config('add', '-f', file)
    output = config('get', 'config')

    # The stored key literally keeps the trailing colon marker.
    assert output == """config:
  'template_dirs:': [test2]
"""
def test_config_add_update_dict_from_file(mutable_empty_config, tmpdir):
    """Adding from a file merges with keys already set on the command line."""
    config('add', 'packages:all:compiler:[gcc]')

    # contents to add to file
    contents = """spack:
  packages:
    all:
      version:
      - 1.0.0
"""

    # create temp file and add it to config
    file = str(tmpdir.join('spack.yaml'))
    with open(file, 'w') as f:
        f.write(contents)
    config('add', '-f', file)

    # get results
    output = config('get', 'packages')

    # 'compiler' from the earlier add and 'version' from the file coexist.
    expected = """packages:
  all:
    compiler: [gcc]
    version:
    - 1.0.0
"""
    assert output == expected
def test_config_add_invalid_file_fails(tmpdir):
    """A schema-invalid file is rejected with ConfigFormatError."""
    # contents to add to file
    # invalid because version requires a list
    contents = """spack:
  packages:
    all:
      version: 1.0.0
"""

    # create temp file and add it to config
    file = str(tmpdir.join('spack.yaml'))
    with open(file, 'w') as f:
        f.write(contents)

    with pytest.raises(
            (spack.config.ConfigFormatError)
    ):
        config('add', '-f', file)
def test_config_remove_value(mutable_empty_config):
    """`config remove key:value` deletes a previously added value."""
    config('add', 'config:dirty:true')
    config('remove', 'config:dirty:true')
    output = config('get', 'config')

    assert output == """config: {}
"""
def test_config_remove_alias_rm(mutable_empty_config):
    """`config rm` behaves identically to `config remove`."""
    config('add', 'config:dirty:true')
    config('rm', 'config:dirty:true')
    output = config('get', 'config')

    assert output == """config: {}
"""
def test_config_remove_dict(mutable_empty_config):
    """Removing a key (without a value) deletes the whole entry."""
    config('add', 'config:dirty:true')
    config('rm', 'config:dirty')
    output = config('get', 'config')

    assert output == """config: {}
"""
def test_remove_from_list(mutable_empty_config):
    """Removing one element from a list keeps the other elements."""
    config('add', 'config:template_dirs:test1')
    config('add', 'config:template_dirs:[test2]')
    config('add', 'config:template_dirs:test3')
    config('remove', 'config:template_dirs:test2')
    output = config('get', 'config')

    assert output == """config:
  template_dirs:
  - test3
  - test1
"""
def test_remove_list(mutable_empty_config):
    """Removing with list syntax (`[test2]`) works like scalar removal."""
    config('add', 'config:template_dirs:test1')
    config('add', 'config:template_dirs:[test2]')
    config('add', 'config:template_dirs:test3')
    config('remove', 'config:template_dirs:[test2]')
    output = config('get', 'config')

    assert output == """config:
  template_dirs:
  - test3
  - test1
"""
def test_config_add_to_env(mutable_empty_config, mutable_mock_env_path):
    """With an active environment, `config add` writes into its manifest."""
    env = ev.create('test')
    with env:
        # NOTE(review): 'get' is assumed to run inside the active environment
        # so it reports the manifest contents — confirm against the command.
        config('add', 'config:dirty:true')
        output = config('get')

    expected = ev.default_manifest_yaml
    expected += """  config:
    dirty: true
"""
    assert output == expected
def test_config_remove_from_env(mutable_empty_config, mutable_mock_env_path):
    """With an active environment, `config rm` edits the env manifest."""
    env('create', 'test')

    with ev.read('test'):
        config('add', 'config:dirty:true')

    with ev.read('test'):
        config('rm', 'config:dirty')
        output = config('get')

    # After removal the manifest carries an empty config section.
    expected = ev.default_manifest_yaml
    expected += """  config: {}
"""
    assert output == expected

View File

@@ -19,7 +19,6 @@
from spack.main import SpackCommand
from spack.stage import stage_prefix
from spack.spec_list import SpecListError
from spack.util.mock_package import MockPackageMultiRepo
import spack.util.spack_json as sjson
from spack.util.path import substitute_path_variables
@@ -1234,7 +1233,7 @@ def test_stack_yaml_remove_from_list_force(tmpdir):
assert Spec('callpath ^mpich') in test.user_specs
def test_stack_yaml_attempt_remove_from_matrix(tmpdir):
def test_stack_yaml_remove_from_matrix_no_effect(tmpdir):
filename = str(tmpdir.join('spack.yaml'))
with open(filename, 'w') as f:
f.write("""\
@@ -1249,9 +1248,45 @@ def test_stack_yaml_attempt_remove_from_matrix(tmpdir):
""")
with tmpdir.as_cwd():
env('create', 'test', './spack.yaml')
with pytest.raises(SpecListError):
with ev.read('test'):
remove('-l', 'packages', 'mpileaks')
with ev.read('test') as e:
before = e.user_specs.specs
remove('-l', 'packages', 'mpileaks')
after = e.user_specs.specs
assert before == after
def test_stack_yaml_force_remove_from_matrix(tmpdir):
    """`spack remove -f` on a matrix-generated spec removes it from the
    concretized specs while leaving the user-defined matrix untouched."""
    filename = str(tmpdir.join('spack.yaml'))
    with open(filename, 'w') as f:
        f.write("""\
env:
  definitions:
    - packages:
        - matrix:
            - [mpileaks, callpath]
            - [target=be]
  specs:
    - $packages
""")
    with tmpdir.as_cwd():
        env('create', 'test', './spack.yaml')
        with ev.read('test') as e:
            concretize()

            before_user = e.user_specs.specs
            before_conc = e.concretized_user_specs

            remove('-f', '-l', 'packages', 'mpileaks')

            after_user = e.user_specs.specs
            after_conc = e.concretized_user_specs

            # The matrix definition itself is unchanged...
            assert before_user == after_user

            # ...but the concretized mpileaks spec is gone.
            mpileaks_spec = Spec('mpileaks target=be')
            assert mpileaks_spec in before_conc
            assert mpileaks_spec not in after_conc
def test_stack_concretize_extraneous_deps(tmpdir, config, mock_packages):
@@ -1429,6 +1464,29 @@ def test_stack_definition_conditional_with_variable(tmpdir):
assert Spec('callpath') in test.user_specs
def test_stack_definition_conditional_with_satisfaction(tmpdir):
    """`when:` clauses may query the arch spec via `arch.satisfies(...)`;
    only definitions whose clause is satisfied contribute user specs."""
    filename = str(tmpdir.join('spack.yaml'))
    with open(filename, 'w') as f:
        f.write("""\
env:
  definitions:
    - packages: [libelf, mpileaks]
      when: arch.satisfies('platform=foo') # will be "test" when testing
    - packages: [callpath]
      when: arch.satisfies('platform=test')
  specs:
    - $packages
""")
    with tmpdir.as_cwd():
        env('create', 'test', './spack.yaml')

        test = ev.read('test')

        # Only the 'platform=test' definition is satisfied under the
        # test platform, so only callpath is present.
        assert Spec('libelf') not in test.user_specs
        assert Spec('mpileaks') not in test.user_specs
        assert Spec('callpath') in test.user_specs
def test_stack_definition_complex_conditional(tmpdir):
filename = str(tmpdir.join('spack.yaml'))
with open(filename, 'w') as f:

View File

@@ -650,17 +650,17 @@ def test_build_warning_output(tmpdir, mock_fetch, install_mockery, capfd):
assert 'foo.c:89: warning: some weird warning!' in msg
def test_cache_only_fails(tmpdir, mock_fetch, install_mockery, capfd):
msg = ''
with capfd.disabled():
try:
install('--cache-only', 'libdwarf')
except spack.installer.InstallError as e:
msg = str(e)
def test_cache_only_fails(tmpdir, mock_fetch, install_mockery):
# libelf from cache fails to install, which automatically removes the
# the libdwarf build task and flags the package as failed to install.
err_msg = 'Installation of libdwarf failed'
with pytest.raises(spack.installer.InstallError, match=err_msg):
install('--cache-only', 'libdwarf')
# libelf from cache failed to install, which automatically removed the
# the libdwarf build task and flagged the package as failed to install.
assert 'Installation of libdwarf failed' in msg
# Check that failure prefix locks are still cached
failure_lock_prefixes = ','.join(spack.store.db._prefix_failures.keys())
assert 'libelf' in failure_lock_prefixes
assert 'libdwarf' in failure_lock_prefixes
def test_install_only_dependencies(tmpdir, mock_fetch, install_mockery):

View File

@@ -7,6 +7,7 @@
import sys
import os
import shutil
from copy import copy
from six import iteritems
@@ -16,6 +17,8 @@
import spack.spec
import spack.compiler
import spack.compilers as compilers
import spack.spec
import spack.util.environment
from spack.compiler import Compiler
from spack.util.executable import ProcessError
@@ -130,7 +133,7 @@ def test_compiler_flags_from_config_are_grouped():
# Utility function to test most flags.
default_compiler_entry = {
'spec': 'clang@2.0.0-apple',
'spec': 'apple-clang@2.0.0',
'operating_system': 'foo-os',
'paths': {
'cc': 'cc-path',
@@ -355,12 +358,20 @@ def test_arm_flags():
def test_cce_flags():
supported_flag_test("version_argument", "--version", "cce@9.0.1")
supported_flag_test("version_argument", "-V", "cce@9.0.1-classic")
supported_flag_test("openmp_flag", "-fopenmp", "cce@9.0.1")
supported_flag_test("openmp_flag", "-h omp", "cce@9.0.1-classic")
supported_flag_test("openmp_flag", "-h omp", "cce@1.0")
supported_flag_test("cxx11_flag", "-std=c++11", "cce@9.0.1")
supported_flag_test("cxx11_flag", "-h std=c++11", "cce@9.0.1-classic")
supported_flag_test("cxx11_flag", "-h std=c++11", "cce@1.0")
unsupported_flag_test("c99_flag", "cce@8.0")
supported_flag_test("c99_flag", "-std=c99", "cce@9.0.1")
supported_flag_test("c99_flag", "-h c99,noconform,gnu", "cce@8.1")
supported_flag_test("c99_flag", "-h std=c99,noconform,gnu", "cce@8.4")
unsupported_flag_test("c11_flag", "cce@8.4")
supported_flag_test("c11_flag", "-std=c11", "cce@9.0.1")
supported_flag_test("c11_flag", "-h std=c11,noconform,gnu", "cce@8.5")
supported_flag_test("cc_pic_flag", "-h PIC", "cce@1.0")
supported_flag_test("cxx_pic_flag", "-h PIC", "cce@1.0")
@@ -370,26 +381,27 @@ def test_cce_flags():
'cce@1.0')
def test_clang_flags():
# Apple Clang.
def test_apple_clang_flags():
supported_flag_test(
"openmp_flag", "-Xpreprocessor -fopenmp", "clang@2.0.0-apple")
unsupported_flag_test("cxx11_flag", "clang@2.0.0-apple")
supported_flag_test("cxx11_flag", "-std=c++11", "clang@4.0.0-apple")
unsupported_flag_test("cxx14_flag", "clang@5.0.0-apple")
supported_flag_test("cxx14_flag", "-std=c++1y", "clang@5.1.0-apple")
supported_flag_test("cxx14_flag", "-std=c++14", "clang@6.1.0-apple")
unsupported_flag_test("cxx17_flag", "clang@6.0.0-apple")
supported_flag_test("cxx17_flag", "-std=c++1z", "clang@6.1.0-apple")
supported_flag_test("c99_flag", "-std=c99", "clang@6.1.0-apple")
unsupported_flag_test("c11_flag", "clang@6.0.0-apple")
supported_flag_test("c11_flag", "-std=c11", "clang@6.1.0-apple")
supported_flag_test("cc_pic_flag", "-fPIC", "clang@2.0.0-apple")
supported_flag_test("cxx_pic_flag", "-fPIC", "clang@2.0.0-apple")
supported_flag_test("f77_pic_flag", "-fPIC", "clang@2.0.0-apple")
supported_flag_test("fc_pic_flag", "-fPIC", "clang@2.0.0-apple")
"openmp_flag", "-Xpreprocessor -fopenmp", "apple-clang@2.0.0"
)
unsupported_flag_test("cxx11_flag", "apple-clang@2.0.0")
supported_flag_test("cxx11_flag", "-std=c++11", "apple-clang@4.0.0")
unsupported_flag_test("cxx14_flag", "apple-clang@5.0.0")
supported_flag_test("cxx14_flag", "-std=c++1y", "apple-clang@5.1.0")
supported_flag_test("cxx14_flag", "-std=c++14", "apple-clang@6.1.0")
unsupported_flag_test("cxx17_flag", "apple-clang@6.0.0")
supported_flag_test("cxx17_flag", "-std=c++1z", "apple-clang@6.1.0")
supported_flag_test("c99_flag", "-std=c99", "apple-clang@6.1.0")
unsupported_flag_test("c11_flag", "apple-clang@6.0.0")
supported_flag_test("c11_flag", "-std=c11", "apple-clang@6.1.0")
supported_flag_test("cc_pic_flag", "-fPIC", "apple-clang@2.0.0")
supported_flag_test("cxx_pic_flag", "-fPIC", "apple-clang@2.0.0")
supported_flag_test("f77_pic_flag", "-fPIC", "apple-clang@2.0.0")
supported_flag_test("fc_pic_flag", "-fPIC", "apple-clang@2.0.0")
# non-Apple Clang.
def test_clang_flags():
supported_flag_test("version_argument", "--version", "clang@foo.bar")
supported_flag_test("openmp_flag", "-fopenmp", "clang@3.3")
unsupported_flag_test("cxx11_flag", "clang@3.2")
@@ -713,3 +725,101 @@ def _call(*args, **kwargs):
except ProcessError:
# Confirm environment does not change after failed call
assert 'SPACK_TEST_CMP_ON' not in os.environ
def test_apple_clang_setup_environment(mock_executable, monkeypatch):
    """Test a code path that is taken only if the package uses
    Xcode on MacOS.
    """
    class MockPackage(object):
        use_xcode = False

    apple_clang_cls = spack.compilers.class_for_compiler_name('apple-clang')
    compiler = apple_clang_cls(
        spack.spec.CompilerSpec('apple-clang@11.0.0'), 'catalina', 'x86_64', [
            '/usr/bin/clang', '/usr/bin/clang++', None, None
        ]
    )
    env = spack.util.environment.EnvironmentModifications()

    # Check a package that doesn't use xcode and ensure we don't add changes
    # to the environment
    pkg = MockPackage()
    compiler.setup_custom_environment(pkg, env)
    assert not env

    # Prepare mock executables to fake the Xcode environment
    xcrun = mock_executable('xcrun', """
if [[ "$2" == "clang" ]] ; then
  echo "/Library/Developer/CommandLineTools/usr/bin/clang"
fi
if [[ "$2" == "clang++" ]] ; then
  echo "/Library/Developer/CommandLineTools/usr/bin/clang++"
fi
""")
    mock_executable('xcode-select', """
echo "/Library/Developer"
""")
    bin_dir = os.path.dirname(xcrun)
    monkeypatch.setenv('PATH', bin_dir, prepend=os.pathsep)

    def noop(*args, **kwargs):
        pass

    real_listdir = os.listdir

    def _listdir(path):
        # Tolerate directories the fake Xcode tree doesn't actually have.
        if not os.path.exists(path):
            return []
        return real_listdir(path)

    # Set a few operations to noop
    monkeypatch.setattr(shutil, 'copytree', noop)
    monkeypatch.setattr(os, 'unlink', noop)
    monkeypatch.setattr(os, 'symlink', noop)
    monkeypatch.setattr(os, 'listdir', _listdir)

    # Qt is so far the only package that uses this code path, change
    # introduced in https://github.com/spack/spack/pull/1832
    pkg.use_xcode = True
    compiler.setup_custom_environment(pkg, env)

    # The Xcode path adds exactly these three environment modifications.
    assert len(env) == 3
    assert env.env_modifications[0].name == 'SPACK_CC'
    assert env.env_modifications[1].name == 'SPACK_CXX'
    assert env.env_modifications[2].name == 'DEVELOPER_DIR'
@pytest.mark.parametrize('xcode_select_output', [
    '', '/Library/Developer/CommandLineTools'
])
def test_xcode_not_available(
        xcode_select_output, mock_executable, monkeypatch
):
    """When xcode-select reports no (or only CommandLineTools) developer
    directory, setting up the Xcode environment must raise OSError."""
    # Prepare mock executables to fake the Xcode environment
    xcrun = mock_executable('xcrun', """
if [[ "$2" == "clang" ]] ; then
  echo "/Library/Developer/CommandLineTools/usr/bin/clang"
fi
if [[ "$2" == "clang++" ]] ; then
  echo "/Library/Developer/CommandLineTools/usr/bin/clang++"
fi
""")
    mock_executable('xcode-select', """
echo "{0}"
""".format(xcode_select_output))
    bin_dir = os.path.dirname(xcrun)
    monkeypatch.setenv('PATH', bin_dir, prepend=os.pathsep)

    # Prepare compiler
    apple_clang_cls = spack.compilers.class_for_compiler_name('apple-clang')
    compiler = apple_clang_cls(
        spack.spec.CompilerSpec('apple-clang@11.0.0'), 'catalina', 'x86_64', [
            '/usr/bin/clang', '/usr/bin/clang++', None, None
        ]
    )
    env = spack.util.environment.EnvironmentModifications()

    class MockPackage(object):
        use_xcode = True

    pkg = MockPackage()
    with pytest.raises(OSError):
        compiler.setup_custom_environment(pkg, env)

View File

@@ -53,11 +53,22 @@ def test_cce_version_detection(version_str, expected_version):
'Target: x86_64-apple-darwin18.7.0\n'
'Thread model: posix\n'
'InstalledDir: /Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain/usr/bin\n', # noqa
'11.0.0-apple'),
'11.0.0'),
('Apple LLVM version 7.0.2 (clang-700.1.81)\n'
'Target: x86_64-apple-darwin15.2.0\n'
'Thread model: posix\n', '7.0.2-apple'),
# Other platforms
'Thread model: posix\n', '7.0.2'),
])
def test_apple_clang_version_detection(
version_str, expected_version
):
cls = spack.compilers.class_for_compiler_name('apple-clang')
version = cls.extract_version_from_output(version_str)
assert version == expected_version
@pytest.mark.regression('10191')
@pytest.mark.parametrize('version_str,expected_version', [
# LLVM Clang
('clang version 6.0.1-svn334776-1~exp1~20181018152737.116 (branches/release_60)\n' # noqa
'Target: x86_64-pc-linux-gnu\n'
'Thread model: posix\n'

View File

@@ -608,7 +608,7 @@ def test_noversion_pkg(self, spec):
('mpileaks%gcc@4.8', 'haswell'),
('mpileaks%gcc@5.3.0', 'broadwell'),
# Apple's clang always falls back to x86-64 for now
('mpileaks%clang@9.1.0-apple', 'x86_64')
('mpileaks%apple-clang@9.1.0', 'x86_64')
])
@pytest.mark.regression('13361')
def test_adjusting_default_target_based_on_compiler(

View File

@@ -576,7 +576,7 @@ def get_config_error(filename, schema, yaml_string):
# parse and return error, or fail.
try:
spack.config._read_config_file(filename, schema)
spack.config.read_config_file(filename, schema)
except spack.config.ConfigFormatError as e:
return e
else:

View File

@@ -374,6 +374,19 @@ def linux_os():
return LinuxOS(name=name, version=version)
@pytest.fixture(scope='session')
def default_config():
    """Isolates the default configuration from the user configs.

    This ensures we can test the real default configuration without having
    tests fail when the user overrides the defaults that we test against."""
    defaults_path = os.path.join(spack.paths.etc_path, 'spack', 'defaults')
    defaults_scope = spack.config.ConfigScope('defaults', defaults_path)
    defaults_config = spack.config.Configuration(defaults_scope)
    # Swap in a Configuration built only from the shipped defaults scope
    # for the duration of the test.
    with use_configuration(defaults_config):
        yield defaults_config
@pytest.fixture(scope='session')
def configuration_dir(tmpdir_factory, linux_os):
"""Copies mock configuration files in a temporary directory. Returns the
@@ -436,6 +449,19 @@ def mutable_config(tmpdir_factory, configuration_dir):
yield cfg
@pytest.fixture(scope='function')
def mutable_empty_config(tmpdir_factory, configuration_dir):
    """Empty configuration that can be modified by the tests."""
    mutable_dir = tmpdir_factory.mktemp('mutable_config').join('tmp')
    # Fresh site/system/user scopes rooted in a per-test temp directory.
    cfg = spack.config.Configuration(
        *[spack.config.ConfigScope(name, str(mutable_dir.join(name)))
          for name in ['site', 'system', 'user']])
    with use_configuration(cfg):
        yield cfg
@pytest.fixture()
def mock_low_high_config(tmpdir):
"""Mocks two configuration scopes: 'low' and 'high'."""
@@ -601,6 +627,16 @@ def install_mockery(tmpdir, config, mock_packages, monkeypatch):
tmpdir.join('opt').remove()
spack.store.store = real_store
# Also wipe out any cached prefix failure locks (associated with
# the session-scoped mock archive).
for pkg_id in list(spack.store.db._prefix_failures.keys()):
lock = spack.store.db._prefix_failures.pop(pkg_id, None)
if lock:
try:
lock.release_write()
except Exception:
pass
@pytest.fixture(scope='function')
def install_mockery_mutable_config(
@@ -1109,7 +1145,7 @@ def mock_executable(tmpdir):
import jinja2
def _factory(name, output, subdir=('bin',)):
f = tmpdir.mkdir(*subdir).join(name)
f = tmpdir.ensure(*subdir, dir=True).join(name)
t = jinja2.Template('#!/bin/bash\n{{ output }}\n')
f.write(t.render(output=output))
f.chmod(0o755)

View File

@@ -9,8 +9,8 @@
containerize = spack.main.SpackCommand('containerize')
def test_command(configuration_dir, capsys):
def test_command(default_config, container_config_dir, capsys):
with capsys.disabled():
with fs.working_dir(configuration_dir):
with fs.working_dir(container_config_dir):
output = containerize()
assert 'FROM spack/ubuntu-bionic' in output

View File

@@ -39,5 +39,5 @@ def dumper(configuration):
@pytest.fixture()
def configuration_dir(minimal_configuration, config_dumper):
def container_config_dir(minimal_configuration, config_dumper):
return config_dumper(minimal_configuration)

View File

@@ -42,7 +42,7 @@ def test_packages(minimal_configuration):
assert p.list == pkgs
def test_ensure_render_works(minimal_configuration):
def test_ensure_render_works(minimal_configuration, default_config):
# Here we just want to ensure that nothing is raised
writer = writers.create(minimal_configuration)
writer()

View File

@@ -13,7 +13,7 @@ def singularity_configuration(minimal_configuration):
return minimal_configuration
def test_ensure_render_works(singularity_configuration):
def test_ensure_render_works(default_config, singularity_configuration):
container_config = singularity_configuration['spack']['container']
assert container_config['format'] == 'singularity'
# Here we just want to ensure that nothing is raised

View File

@@ -127,7 +127,7 @@ compilers:
cxxflags: -O3
modules: 'None'
- compiler:
spec: clang@9.1.0-apple
spec: apple-clang@9.1.0
operating_system: elcapitan
paths:
cc: /path/to/clang

View File

@@ -462,6 +462,58 @@ def _repoerr(repo, name):
assert "Couldn't copy in provenance for cmake" in out
def test_clear_failures_success(install_mockery):
    """Test the clear_failures happy path."""

    # Set up a test prefix failure lock
    lock = lk.Lock(spack.store.db.prefix_fail_path, start=1, length=1,
                   default_timeout=1e-9, desc='test')
    try:
        lock.acquire_write()
    except lk.LockTimeoutError:
        # Best-effort: a timeout here only means the lock byte is already
        # held, which doesn't invalidate the scenario being tested.
        tty.warn('Failed to write lock the test install failure')
    spack.store.db._prefix_failures['test'] = lock

    # Set up a fake failure mark (or file)
    fs.touch(os.path.join(spack.store.db._failure_dir, 'test'))

    # Now clear failure tracking
    inst.clear_failures()

    # Ensure there are no cached failure locks or failure marks
    assert len(spack.store.db._prefix_failures) == 0
    assert len(os.listdir(spack.store.db._failure_dir)) == 0

    # Ensure the core directory and failure lock file still exist
    assert os.path.isdir(spack.store.db._failure_dir)
    assert os.path.isfile(spack.store.db.prefix_fail_path)
def test_clear_failures_errs(install_mockery, monkeypatch, capsys):
    """Test the clear_failures exception paths."""
    orig_fn = os.remove
    err_msg = 'Mock os remove'

    def _raise_except(path):
        # Force removal of the failure mark to fail.
        raise OSError(err_msg)

    # Set up a fake failure mark (or file)
    fs.touch(os.path.join(spack.store.db._failure_dir, 'test'))

    monkeypatch.setattr(os, 'remove', _raise_except)

    # Clear failure tracking
    inst.clear_failures()

    # Ensure expected warning generated
    out = str(capsys.readouterr()[1])
    assert 'Unable to remove failure' in out
    assert err_msg in out

    # Restore remove for teardown
    monkeypatch.setattr(os, 'remove', orig_fn)
def test_check_deps_status_install_failure(install_mockery, monkeypatch):
spec, installer = create_installer('a')
@@ -669,7 +721,7 @@ def _raise_except(lock):
installer._cleanup_failed(pkg_id)
out = str(capsys.readouterr()[1])
assert 'exception when removing failure mark' in out
assert 'exception when removing failure tracking' in out
assert msg in out

View File

@@ -20,6 +20,7 @@
from spack.spec import DuplicateArchitectureError
from spack.spec import DuplicateDependencyError, DuplicateCompilerSpecError
from spack.spec import SpecFilenameError, NoSuchSpecFileError
from spack.spec import MultipleVersionError
from spack.variant import DuplicateVariantError
@@ -149,6 +150,9 @@ def test_simple_dependence(self):
self.check_parse("openmpi ^hwloc ^libunwind",
"openmpi^hwloc^libunwind")
def test_version_after_compiler(self):
    # A version given after a compiler clause attaches to the package,
    # producing the same spec as the canonical ordering.
    self.check_parse('foo@2.0%bar@1.0', 'foo %bar@1.0 @2.0')
def test_dependencies_with_versions(self):
self.check_parse("openmpi ^hwloc@1.2e6")
self.check_parse("openmpi ^hwloc@1.2e6:")
@@ -432,6 +436,17 @@ def test_duplicate_variant(self):
]
self._check_raises(DuplicateVariantError, duplicates)
def test_multiple_versions(self):
    """Specs carrying more than one version clause must be rejected."""
    multiples = [
        'x@1.2@2.3',
        'x@1.2:2.3@1.4',
        'x@1.2@2.3:2.4',
        'x@1.2@2.3,2.4',
        'x@1.2 +foo~bar @2.3',
        'x@1.2%y@1.2@2.3:2.4',
    ]
    self._check_raises(MultipleVersionError, multiples)
def test_duplicate_dependency(self):
self._check_raises(DuplicateDependencyError, ["x ^y ^y"])

View File

@@ -408,3 +408,32 @@ def test_perl_activation_view(tmpdir, perl_and_extension_dirs,
assert not os.path.exists(os.path.join(perl_prefix, 'bin/perl-ext-tool'))
assert os.path.exists(os.path.join(view_dir, 'bin/perl-ext-tool'))
def test_is_activated_upstream_extendee(tmpdir, builtin_and_mock_packages,
                                        monkeypatch):
    """When an extendee is installed upstream, make sure that the extension
    spec is never considered to be globally activated for it.
    """
    extendee_spec = spack.spec.Spec('python')
    extendee_spec._concrete = True

    python_name = 'python'
    tmpdir.ensure(python_name, dir=True)

    python_prefix = str(tmpdir.join(python_name))
    # Set the prefix on the package's spec reference because that is a copy of
    # the original spec
    extendee_spec.package.spec.prefix = python_prefix
    monkeypatch.setattr(extendee_spec.package.__class__,
                        'installed_upstream', True)

    ext_name = 'py-extension1'
    tmpdir.ensure(ext_name, dir=True)
    ext_pkg = create_ext_pkg(
        ext_name, str(tmpdir.join(ext_name)), extendee_spec, monkeypatch)

    # The view should not be checked at all if the extendee is installed
    # upstream, so use 'None' here
    mock_view = None
    assert not ext_pkg.is_activated(mock_view)

View File

@@ -782,6 +782,9 @@ def __reversed__(self):
def __len__(self):
    # Length delegates to the underlying version collection.
    return len(self.versions)
def __bool__(self):
    # Truthy iff self.versions is non-empty (explicit so Python 3 does
    # not fall back to __len__ semantics implicitly).
    return bool(self.versions)
@coerced
def __eq__(self, other):
return other is not None and self.versions == other.versions