packages.yaml: allow virtuals to specify buildable: false (#14934)

Currently, to force Spack to use an external MPI, you have to specify `buildable: False`
for every MPI provider known to Spack in your packages.yaml file. This is both tedious and
fragile, since new MPI providers can be added upstream and break your workflow the next
time you do a git pull.

This PR allows you to specify an entire virtual dependency as non-buildable, and
specify particular implementations to be built:

```
packages:
all:
    providers:
        mpi: [mpich]
mpi:
    buildable: false
    paths:
        mpich@3.2 %gcc@7.3.0: /usr/packages/mpich-3.2-gcc-7.3.0
```
will force all Spack builds to use the specified `mpich` install.
This commit is contained in:
Greg Becker 2020-03-31 16:09:08 -07:00 committed by GitHub
parent 8748160984
commit 336191c251
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
9 changed files with 155 additions and 153 deletions

View File

@ -124,6 +124,39 @@ The ``buildable`` does not need to be paired with external packages.
It could also be used alone to forbid packages that may be
buggy or otherwise undesirable.
Virtual packages in Spack can also be specified as not buildable, and
external implementations can be provided. In the example above,
OpenMPI is configured as not buildable, but Spack will often prefer
other MPI implementations over the externally available OpenMPI. Rather
than marking every MPI provider as not buildable individually, it is
more convenient to mark the virtual package itself:
.. code-block:: yaml
packages:
mpi:
buildable: False
openmpi:
paths:
openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64: /opt/openmpi-1.4.3
openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug: /opt/openmpi-1.4.3-debug
openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64: /opt/openmpi-1.6.5-intel
Implementations can also be listed immediately under the virtual they provide:
.. code-block:: yaml
packages:
mpi:
buildable: False
openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64: /opt/openmpi-1.4.3
openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug: /opt/openmpi-1.4.3-debug
openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64: /opt/openmpi-1.6.5-intel
mpich@3.3 %clang@9.0.0 arch=linux-debian7-x86_64: /opt/mpich-3.3-intel
Spack can then use any of the listed external implementations of MPI
to satisfy a dependency, and will choose depending on the compiler and
architecture.
.. _concretization-preferences:

View File

@ -1035,6 +1035,14 @@ def provides(self, vpkg_name):
for s, constraints in self.provided.items() if s.name == vpkg_name
)
@property
def virtuals_provided(self):
    """Return the virtual specs whose constraints are satisfied by
    this package's own spec."""
    matching = []
    for vspec, constraints in self.provided.items():
        # A virtual counts as provided if this spec meets at least
        # one of the constraints registered for it.
        for constraint in constraints:
            if self.spec.satisfies(constraint):
                matching.append(vspec)
                break
    return matching
@property
def installed(self):
"""Installation status of a package.

View File

@ -6,7 +6,6 @@
import stat
from six import string_types
from six import iteritems
import spack.repo
import spack.error
@ -23,27 +22,6 @@ def _spec_type(component):
return _lesser_spec_types.get(component, spack.spec.Spec)
def get_packages_config():
"""Return the 'packages' configuration, rejecting virtual package entries."""
config = spack.config.get('packages')
# Get a list of virtuals from packages.yaml. Note that because we
# check spack.repo, this collects virtuals that are actually provided
# by something, not just packages/names that don't exist.
# So, this won't include, e.g., 'all'.
# NOTE(review): assumes keys are syaml strings carrying their source
# position in `_start_mark` (used for the error message below).
virtuals = [(pkg_name, pkg_name._start_mark) for pkg_name in config
if spack.repo.path.is_virtual(pkg_name)]
# die if there are virtuals in `packages.yaml`
if virtuals:
errors = ["%s: %s" % (line_info, name) for name, line_info in virtuals]
raise VirtualInPackagesYAMLError(
"packages.yaml entries cannot be virtual packages:",
'\n'.join(errors))
return config
class PackagePrefs(object):
"""Defines the sort order for a set of specs.
@ -116,7 +94,7 @@ def order_for_package(cls, pkgname, component, vpkg=None, all=True):
pkglist.append('all')
for pkg in pkglist:
pkg_entry = get_packages_config().get(pkg)
pkg_entry = spack.config.get('packages').get(pkg)
if not pkg_entry:
continue
@ -160,7 +138,8 @@ def has_preferred_targets(cls, pkg_name):
def preferred_variants(cls, pkg_name):
"""Return a VariantMap of preferred variants/values for a spec."""
for pkg in (pkg_name, 'all'):
variants = get_packages_config().get(pkg, {}).get('variants', '')
variants = spack.config.get('packages').get(pkg, {}).get(
'variants', '')
if variants:
break
@ -181,33 +160,29 @@ def spec_externals(spec):
# break circular import.
from spack.util.module_cmd import get_path_from_module # NOQA: ignore=F401
allpkgs = get_packages_config()
name = spec.name
allpkgs = spack.config.get('packages')
names = set([spec.name])
names |= set(vspec.name for vspec in spec.package.virtuals_provided)
external_specs = []
pkg_paths = allpkgs.get(name, {}).get('paths', None)
pkg_modules = allpkgs.get(name, {}).get('modules', None)
if (not pkg_paths) and (not pkg_modules):
return []
for external_spec, path in iteritems(pkg_paths):
if not path:
# skip entries without paths (avoid creating extra Specs)
for name in names:
pkg_config = allpkgs.get(name, {})
pkg_paths = pkg_config.get('paths', {})
pkg_modules = pkg_config.get('modules', {})
if (not pkg_paths) and (not pkg_modules):
continue
external_spec = spack.spec.Spec(external_spec,
external_path=canonicalize_path(path))
if external_spec.satisfies(spec):
external_specs.append(external_spec)
for external_spec, path in pkg_paths.items():
external_spec = spack.spec.Spec(
external_spec, external_path=canonicalize_path(path))
if external_spec.satisfies(spec):
external_specs.append(external_spec)
for external_spec, module in iteritems(pkg_modules):
if not module:
continue
external_spec = spack.spec.Spec(
external_spec, external_module=module)
if external_spec.satisfies(spec):
external_specs.append(external_spec)
for external_spec, module in pkg_modules.items():
external_spec = spack.spec.Spec(
external_spec, external_module=module)
if external_spec.satisfies(spec):
external_specs.append(external_spec)
# defensively copy returned specs
return [s.copy() for s in external_specs]
@ -215,12 +190,11 @@ def spec_externals(spec):
def is_spec_buildable(spec):
"""Return true if the spec pkgspec is configured as buildable"""
allpkgs = get_packages_config()
if spec.name not in allpkgs:
return True
if 'buildable' not in allpkgs[spec.name]:
return True
return allpkgs[spec.name]['buildable']
allpkgs = spack.config.get('packages')
do_not_build = [name for name, entry in allpkgs.items()
if not entry.get('buildable', True)]
return not (spec.name in do_not_build or
any(spec.package.provides(name) for name in do_not_build))
def get_package_dir_permissions(spec):

View File

@ -185,34 +185,6 @@ def test_develop(self):
spec.concretize()
assert spec.version == Version('0.2.15.develop')
def test_no_virtuals_in_packages_yaml(self):
"""Verify that virtuals are not allowed in packages.yaml."""
# set up a packages.yaml file with a vdep as a key. We use
# syaml.load_config here to make sure source lines in the config are
# attached to parsed strings, as the error message uses them.
conf = syaml.load_config("""\
mpi:
paths:
mpi-with-lapack@2.1: /path/to/lapack
""")
# install the bad config in the 'concretize' scope so it is picked up
spack.config.set('packages', conf, scope='concretize')
# now when we get the packages.yaml config, there should be an error
with pytest.raises(spack.package_prefs.VirtualInPackagesYAMLError):
spack.package_prefs.get_packages_config()
def test_all_is_not_a_virtual(self):
"""Verify that `all` is allowed in packages.yaml."""
# 'all' is a config wildcard, not a real (or virtual) package name,
# so it must not trip the virtual-package check.
conf = syaml.load_config("""\
all:
variants: [+mpi]
""")
spack.config.set('packages', conf, scope='concretize')
# should be no error for 'all':
spack.package_prefs.get_packages_config()
def test_external_mpi(self):
# make sure this doesn't give us an external first.
spec = Spec('mpi')
@ -236,6 +208,37 @@ def test_external_mpi(self):
spec.concretize()
assert spec['mpich'].external_path == '/dummy/path'
def test_external_module(self, monkeypatch):
"""Test that packages can find externals specified by module.

The specific code for parsing the module is tested elsewhere.
This just tests that the preference is accounted for."""
# make sure this doesn't give us an external first.
def mock_module(cmd, module):
return 'prepend-path PATH /dummy/path'
monkeypatch.setattr(spack.util.module_cmd, 'module', mock_module)
spec = Spec('mpi')
spec.concretize()
assert not spec['mpi'].external
# load config marking the 'mpi' virtual non-buildable with an
# external mpich provided via a module
conf = syaml.load_config("""\
all:
providers:
mpi: [mpich]
mpi:
buildable: false
modules:
mpich@3.0.4: dummy
""")
spack.config.set('packages', conf, scope='concretize')
# ensure that once config is in place, external is used; the path is
# derived from the mocked module's 'prepend-path PATH' line above
spec = Spec('mpi')
spec.concretize()
assert spec['mpich'].external_path == '/dummy/path'
def test_config_permissions_from_all(self, configure_permissions):
# Although these aren't strictly about concretization, they are
# configured in the same file and therefore convenient to test here.

View File

@ -1040,6 +1040,13 @@ def __init__(self, name, dependencies, dependency_types, conditions=None,
self.conflicts = {}
self.patches = {}
def provides(self, vname):
# True if this mock package lists ``vname`` among its provided virtuals.
return vname in self.provided
@property
def virtuals_provided(self):
# Names of all virtuals this mock package provides.
# NOTE(review): unpacking ``for v, c in self.provided`` assumes
# iterating self.provided yields (vspec, constraint) pairs; if it is
# a dict this should be ``self.provided.items()`` — TODO confirm.
return [v.name for v, c in self.provided]
class MockPackageMultiRepo(object):
def __init__(self, packages):

View File

@ -52,52 +52,52 @@ def check_mirror():
mirror_root = os.path.join(stage.path, 'test-mirror')
# register mirror with spack config
mirrors = {'spack-mirror-test': 'file://' + mirror_root}
spack.config.set('mirrors', mirrors)
with spack.config.override('config:checksum', False):
specs = [Spec(x).concretized() for x in repos]
spack.mirror.create(mirror_root, specs)
# Stage directory exists
assert os.path.isdir(mirror_root)
for spec in specs:
fetcher = spec.package.fetcher[0]
per_package_ref = os.path.join(
spec.name, '-'.join([spec.name, str(spec.version)]))
mirror_paths = spack.mirror.mirror_archive_paths(
fetcher,
per_package_ref)
expected_path = os.path.join(
mirror_root, mirror_paths.storage_path)
assert os.path.exists(expected_path)
# Now try to fetch each package.
for name, mock_repo in repos.items():
spec = Spec(name).concretized()
pkg = spec.package
with spack.config.override('mirrors', mirrors):
with spack.config.override('config:checksum', False):
with pkg.stage:
pkg.do_stage(mirror_only=True)
specs = [Spec(x).concretized() for x in repos]
spack.mirror.create(mirror_root, specs)
# Compare the original repo with the expanded archive
original_path = mock_repo.path
if 'svn' in name:
# have to check out the svn repo to compare.
original_path = os.path.join(
mock_repo.path, 'checked_out')
# Stage directory exists
assert os.path.isdir(mirror_root)
svn = which('svn', required=True)
svn('checkout', mock_repo.url, original_path)
for spec in specs:
fetcher = spec.package.fetcher[0]
per_package_ref = os.path.join(
spec.name, '-'.join([spec.name, str(spec.version)]))
mirror_paths = spack.mirror.mirror_archive_paths(
fetcher,
per_package_ref)
expected_path = os.path.join(
mirror_root, mirror_paths.storage_path)
assert os.path.exists(expected_path)
dcmp = filecmp.dircmp(
original_path, pkg.stage.source_path)
# Now try to fetch each package.
for name, mock_repo in repos.items():
spec = Spec(name).concretized()
pkg = spec.package
# make sure there are no new files in the expanded
# tarball
assert not dcmp.right_only
# and that all original files are present.
assert all(l in exclude for l in dcmp.left_only)
with spack.config.override('config:checksum', False):
with pkg.stage:
pkg.do_stage(mirror_only=True)
# Compare the original repo with the expanded archive
original_path = mock_repo.path
if 'svn' in name:
# have to check out the svn repo to compare.
original_path = os.path.join(
mock_repo.path, 'checked_out')
svn = which('svn', required=True)
svn('checkout', mock_repo.url, original_path)
dcmp = filecmp.dircmp(
original_path, pkg.stage.source_path)
# make sure there are no new files in the expanded
# tarball
assert not dcmp.right_only
# and that all original files are present.
assert all(l in exclude for l in dcmp.left_only)
def test_url_mirror(mock_archive):

View File

@ -20,28 +20,11 @@
'setenv LDFLAGS -L/path/to/lib',
'prepend-path PATH /path/to/bin']
@pytest.fixture
def module_function_test_mode():
"""Enable module_cmd's test mode for a test, restoring it afterwards."""
old_mode = spack.util.module_cmd._test_mode
spack.util.module_cmd._test_mode = True
yield
spack.util.module_cmd._test_mode = old_mode
_test_template = "'. %s 2>&1' % args[1]"
@pytest.fixture
def save_module_func():
"""Restore the original module() function after the test replaces it."""
old_func = spack.util.module_cmd.module
yield
spack.util.module_cmd.module = old_func
def test_module_function_change_env(tmpdir, working_env,
module_function_test_mode):
def test_module_function_change_env(tmpdir, working_env, monkeypatch):
monkeypatch.setattr(spack.util.module_cmd, '_cmd_template', _test_template)
src_file = str(tmpdir.join('src_me'))
with open(src_file, 'w') as f:
f.write('export TEST_MODULE_ENV_VAR=TEST_SUCCESS\n')
@ -53,7 +36,8 @@ def test_module_function_change_env(tmpdir, working_env,
assert os.environ['NOT_AFFECTED'] == "NOT_AFFECTED"
def test_module_function_no_change(tmpdir, module_function_test_mode):
def test_module_function_no_change(tmpdir, monkeypatch):
monkeypatch.setattr(spack.util.module_cmd, '_cmd_template', _test_template)
src_file = str(tmpdir.join('src_me'))
with open(src_file, 'w') as f:
f.write('echo TEST_MODULE_FUNCTION_PRINT')
@ -65,11 +49,11 @@ def test_module_function_no_change(tmpdir, module_function_test_mode):
assert os.environ == old_env
def test_get_path_from_module_faked(save_module_func):
def test_get_path_from_module_faked(monkeypatch):
for line in test_module_lines:
def fake_module(*args):
return line
spack.util.module_cmd.module = fake_module
monkeypatch.setattr(spack.util.module_cmd, 'module', fake_module)
path = get_path_from_module('mod')
assert path == '/path/to'

View File

@ -387,7 +387,6 @@ def test_unsatisfiable_architecture(self, set_dependency):
with pytest.raises(spack.spec.UnsatisfiableArchitectureSpecError):
spec.normalize()
@pytest.mark.usefixtures('config')
def test_invalid_dep(self):
spec = Spec('libelf ^mpich')
with pytest.raises(spack.spec.InvalidDependencyError):
@ -602,7 +601,6 @@ def test_copy_normalized(self):
copy_ids = set(id(s) for s in copy.traverse())
assert not orig_ids.intersection(copy_ids)
@pytest.mark.usefixtures('config')
def test_copy_concretized(self):
orig = Spec('mpileaks')
orig.concretize()

View File

@ -19,16 +19,11 @@
# If we need another option that changes the environment, add it here.
module_change_commands = ['load', 'swap', 'unload', 'purge', 'use', 'unuse']
py_cmd = "'import os;import json;print(json.dumps(dict(os.environ)))'"
# This is just to enable testing. I hate it but we can't find a better way
_test_mode = False
_cmd_template = "'module ' + ' '.join(args) + ' 2>&1'"
def module(*args):
module_cmd = 'module ' + ' '.join(args) + ' 2>&1'
if _test_mode:
tty.warn('module function operating in test mode')
module_cmd = ". %s 2>&1" % args[1]
module_cmd = eval(_cmd_template) # So we can monkeypatch for testing
if args[0] in module_change_commands:
# Do the module manipulation, then output the environment in JSON
# and read the JSON back in the parent process to update os.environ