Environments: specify packages for developer builds (#15256)

* allow environments to specify dev-build packages

* spack develop and spack undevelop commands

* never pull dev-build packages from the binary cache

* reinstall dev_specs when code has changed; reinstall dependents too

* preserve dev info paths and versions in concretization as special variant

* move install overwrite transaction into installer

* move dev-build argument handling to package.do_install

Now that specs are dev-aware, package.do_install can add the
necessary args (keep_stage=True, use_cache=False) for dev
builds. This simplifies the driving logic in cmd and env._install.

* allow 'any' as wildcard for variants

* spec: allow anonymous dependencies

raise an error when constraining by or normalizing an anonymous dep
refactor concretize_develop to remove dev_build variant
refactor tests to check for ^dev_path=any instead of +dev_build

* fix variant class hierarchy
Greg Becker 2020-10-15 17:23:16 -07:00 committed by GitHub
parent 2ed39dfd8e
commit 7a6268593c
27 changed files with 1122 additions and 84 deletions

View File

@ -47,6 +47,7 @@
'install_tree', 'install_tree',
'is_exe', 'is_exe',
'join_path', 'join_path',
'last_modification_time_recursive',
'mkdirp', 'mkdirp',
'partition_path', 'partition_path',
'prefixes', 'prefixes',
@ -920,6 +921,15 @@ def set_executable(path):
os.chmod(path, mode) os.chmod(path, mode)
def last_modification_time_recursive(path):
path = os.path.abspath(path)
times = [os.stat(path).st_mtime]
times.extend(os.stat(os.path.join(root, name)).st_mtime
for root, dirs, files in os.walk(path)
for name in dirs + files)
return max(times)
def remove_empty_directories(root): def remove_empty_directories(root):
"""Ascend up from the leaves accessible from `root` and remove empty """Ascend up from the leaves accessible from `root` and remove empty
directories. directories.
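
The environment machinery later in this commit uses this helper to decide whether a dev build's source tree has changed since it was installed. A minimal sketch of the call, assuming an arbitrary source directory (the path is a placeholder):

```python
import llnl.util.filesystem as fs

# Placeholder path to a developer's source tree.
src = '/path/to/dev/source'

# Newest st_mtime found anywhere under `src`, including `src` itself.
src_mtime = fs.last_modification_time_recursive(src)

# Environment._spec_needs_overwrite (below) compares this value against the
# database record's installation_time to decide whether to rebuild.
print(src_mtime)
```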

View File

@ -12,7 +12,6 @@
import spack.cmd import spack.cmd
import spack.cmd.common.arguments as arguments import spack.cmd.common.arguments as arguments
import spack.repo import spack.repo
from spack.stage import DIYStage
description = "developer build: build from code in current working directory" description = "developer build: build from code in current working directory"
section = "build" section = "build"
@ -72,6 +71,14 @@ def dev_build(self, args):
"spack dev-build spec must have a single, concrete version. " "spack dev-build spec must have a single, concrete version. "
"Did you forget a package version number?") "Did you forget a package version number?")
source_path = args.source_path
if source_path is None:
source_path = os.getcwd()
source_path = os.path.abspath(source_path)
# Forces the build to run out of the source directory.
spec.constrain('dev_path=%s' % source_path)
spec.concretize() spec.concretize()
package = spack.repo.get(spec) package = spack.repo.get(spec)
@ -80,14 +87,6 @@ def dev_build(self, args):
tty.msg("Uninstall or try adding a version suffix for this dev build.") tty.msg("Uninstall or try adding a version suffix for this dev build.")
sys.exit(1) sys.exit(1)
source_path = args.source_path
if source_path is None:
source_path = os.getcwd()
source_path = os.path.abspath(source_path)
# Forces the build to run out of the current directory.
package.stage = DIYStage(source_path)
# disable checksumming if requested # disable checksumming if requested
if args.no_checksum: if args.no_checksum:
spack.config.set('config:checksum', False, scope='command_line') spack.config.set('config:checksum', False, scope='command_line')
@ -97,7 +96,6 @@ def dev_build(self, args):
keep_prefix=args.keep_prefix, keep_prefix=args.keep_prefix,
install_deps=not args.ignore_deps, install_deps=not args.ignore_deps,
verbose=not args.quiet, verbose=not args.quiet,
keep_stage=True, # don't remove source dir for dev build.
dirty=args.dirty, dirty=args.dirty,
stop_before=args.before, stop_before=args.before,
stop_at=args.until) stop_at=args.until)

View File

@ -0,0 +1,102 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import shutil
import llnl.util.tty as tty
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
from spack.error import SpackError
description = "add a spec to an environment's dev-build information"
section = "environments"
level = "long"
def setup_parser(subparser):
subparser.add_argument(
'-p', '--path', help='Source location of package')
clone_group = subparser.add_mutually_exclusive_group()
clone_group.add_argument(
'--no-clone', action='store_false', dest='clone', default=None,
help='Do not clone. The package already exists at the source path')
clone_group.add_argument(
'--clone', action='store_true', dest='clone', default=None,
help='Clone the package even if the path already exists')
subparser.add_argument(
'-f', '--force',
help='Remove any files or directories that block cloning source code')
arguments.add_common_arguments(subparser, ['spec'])
def develop(parser, args):
env = ev.get_env(args, 'develop', required=True)
if not args.spec:
if args.clone is False:
raise SpackError("No spec provided to spack develop command")
# download all dev specs
for name, entry in env.dev_specs.items():
path = entry.get('path', name)
abspath = path if os.path.isabs(path) else os.path.join(
env.path, path)
if os.path.exists(abspath):
msg = "Skipping developer download of %s" % entry['spec']
msg += " because its path already exists."
tty.msg(msg)
continue
stage = spack.spec.Spec(entry['spec']).package.stage
stage.steal_source(abspath)
if not env.dev_specs:
tty.warn("No develop specs to download")
return
specs = spack.cmd.parse_specs(args.spec)
if len(specs) > 1:
raise SpackError("spack develop requires at most one named spec")
spec = specs[0]
if not spec.versions.concrete:
raise SpackError("Packages to develop must have a concrete version")
# default path is relative path to spec.name
path = args.path or spec.name
# get absolute path to check
abspath = path
if not os.path.isabs(abspath):
abspath = os.path.join(env.path, path)
# clone default: only if the path doesn't exist
clone = args.clone
if clone is None:
clone = not os.path.exists(abspath)
if not clone and not os.path.exists(abspath):
raise SpackError("Provided path %s does not exist" % abspath)
if clone and os.path.exists(abspath):
if args.force:
shutil.rmtree(abspath)
else:
msg = "Path %s already exists and cannot be cloned to." % abspath
msg += " Use `spack develop -f` to overwrite."
raise SpackError(msg)
with env.write_transaction():
changed = env.develop(spec, path, clone)
if changed:
env.write()
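
A hedged usage sketch of the new command, mirroring the command tests added later in this commit; the package name, version, and path are examples only:

```python
import spack.environment as ev
from spack.main import SpackCommand

develop = SpackCommand('develop')
env = SpackCommand('env')

# Create an environment and register mpich@1.0 for development without
# cloning, assuming the source already sits at the given path.
env('create', 'test')
with ev.read('test'):
    develop('--no-clone', '-p', '/path/to/mpich/source', 'mpich@1.0')
```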

View File

@ -392,13 +392,8 @@ def install(parser, args, **kwargs):
if not answer: if not answer:
tty.die('Reinstallation aborted.') tty.die('Reinstallation aborted.')
for abstract, concrete in zip(abstract_specs, specs):
if concrete in installed:
with fs.replace_directory_transaction(concrete.prefix):
install_spec(args, kwargs, abstract, concrete)
else:
install_spec(args, kwargs, abstract, concrete)
else:
for abstract, concrete in zip(abstract_specs, specs):
install_spec(args, kwargs, abstract, concrete)
# overwrite all concrete explicit specs from this build
kwargs['overwrite'] = [spec.dag_hash() for spec in specs]
for abstract, concrete in zip(abstract_specs, specs):
install_spec(args, kwargs, abstract, concrete)

View File

@ -0,0 +1,40 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import llnl.util.tty as tty
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
description = 'remove specs from an environment'
section = "environments"
level = "long"
def setup_parser(subparser):
subparser.add_argument(
'-a', '--all', action='store_true',
help="remove all specs from (clear) the environment")
arguments.add_common_arguments(subparser, ['specs'])
def undevelop(parser, args):
env = ev.get_env(args, 'undevelop', required=True)
if args.all:
specs = env.dev_specs.keys()
else:
specs = spack.cmd.parse_specs(args.specs)
with env.write_transaction():
changed = False
for spec in specs:
tty.msg('Removing %s from environment %s development specs'
% (spec, env.name))
changed |= env.undevelop(spec)
if changed:
env.write()
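
The inverse operation, sketched the same way and assuming the environment from the previous example:

```python
import spack.environment as ev
from spack.main import SpackCommand

undevelop = SpackCommand('undevelop')

with ev.read('test'):
    undevelop('mpich')   # drop a single entry from the develop section
    # undevelop('-a')    # or clear the develop section entirely
```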

View File

@ -36,6 +36,7 @@
import spack.architecture import spack.architecture
import spack.error import spack.error
import spack.tengine import spack.tengine
import spack.variant as vt
from spack.config import config from spack.config import config
from spack.version import ver, Version, VersionList, VersionRange from spack.version import ver, Version, VersionList, VersionRange
from spack.package_prefs import PackagePrefs, spec_externals, is_spec_buildable from spack.package_prefs import PackagePrefs, spec_externals, is_spec_buildable
@ -61,6 +62,29 @@ def __init__(self, abstract_spec=None):
self.abstract_spec = abstract_spec self.abstract_spec = abstract_spec
self._adjust_target_answer_generator = None self._adjust_target_answer_generator = None
def concretize_develop(self, spec):
"""
Add ``dev_path=*`` variant to packages built from local source.
"""
env = spack.environment.get_env(None, None)
dev_info = env.dev_specs.get(spec.name, {}) if env else {}
if not dev_info:
return False
path = dev_info['path']
path = path if os.path.isabs(path) else os.path.join(
env.path, path)
if 'dev_path' in spec.variants:
assert spec.variants['dev_path'].value == path
changed = False
else:
spec.variants.setdefault(
'dev_path', vt.SingleValuedVariant('dev_path', path))
changed = True
changed |= spec.constrain(dev_info['spec'])
return changed
def _valid_virtuals_and_externals(self, spec): def _valid_virtuals_and_externals(self, spec):
"""Returns a list of candidate virtual dep providers and external """Returns a list of candidate virtual dep providers and external
packages that coiuld be used to concretize a spec. packages that coiuld be used to concretize a spec.
@ -328,6 +352,18 @@ def concretize_variants(self, spec):
preferred_variants = PackagePrefs.preferred_variants(spec.name) preferred_variants = PackagePrefs.preferred_variants(spec.name)
pkg_cls = spec.package_class pkg_cls = spec.package_class
for name, variant in pkg_cls.variants.items(): for name, variant in pkg_cls.variants.items():
any_set = False
var = spec.variants.get(name, None)
if var and 'any' in var:
# remove 'any' variant before concretizing
# 'any' cannot be combined with other variables in a
# multivalue variant, a concrete variant cannot have the value
# 'any', and 'any' does not constrain a variant except to
# preclude the values 'none' and None. We track `any_set` to
# avoid replacing 'any' with None, and remove it to continue
# concretization.
spec.variants.pop(name)
any_set = True
if name not in spec.variants: if name not in spec.variants:
changed = True changed = True
if name in preferred_variants: if name in preferred_variants:
@ -335,6 +371,14 @@ def concretize_variants(self, spec):
else: else:
spec.variants[name] = variant.make_default() spec.variants[name] = variant.make_default()
var = spec.variants[name]
if any_set and 'none' in var or None in var:
msg = "Attempted non-deterministic setting of variant"
msg += " '%s' set to 'any' and preference is." % name
msg += "'%s'. Set the variant to a non 'any'" % var.value
msg += " value or set a preference for variant '%s'." % name
raise NonDeterministicVariantError(msg)
return changed return changed
def concretize_compiler(self, spec): def concretize_compiler(self, spec):
@ -761,3 +805,7 @@ def __init__(self, spec):
msg = ("The spec\n '%s'\n is configured as not buildable, " msg = ("The spec\n '%s'\n is configured as not buildable, "
"and no matching external installs were found") "and no matching external installs were found")
super(NoBuildError, self).__init__(msg % spec) super(NoBuildError, self).__init__(msg % spec)
class NonDeterministicVariantError(spack.error.SpecError):
"""Raised when a spec variant is set to 'any' and concretizes to 'none'."""

View File

@ -1114,8 +1114,10 @@ def _add(
new_spec._hash = key new_spec._hash = key
else: else:
# If it is already there, mark it as installed.
# If it is already there, mark it as installed and update
# installation time
self._data[key].installed = True self._data[key].installed = True
self._data[key].installation_time = _now()
self._data[key].explicit = explicit self._data[key].explicit = explicit

View File

@ -50,7 +50,7 @@ class OpenMpi(Package):
__all__ = [] __all__ = []
#: These are variant names used by Spack internally; packages can't use them #: These are variant names used by Spack internally; packages can't use them
reserved_names = ['patches']
reserved_names = ['patches', 'dev_path']
_patch_order_index = 0 _patch_order_index = 0

View File

@ -26,6 +26,7 @@
import spack.schema.env import spack.schema.env
import spack.spec import spack.spec
import spack.store import spack.store
import spack.stage
import spack.util.spack_json as sjson import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml import spack.util.spack_yaml as syaml
import spack.config import spack.config
@ -707,6 +708,15 @@ def _read_manifest(self, f, raw_yaml=None):
configuration = config_dict(self.yaml) configuration = config_dict(self.yaml)
self.concretization = configuration.get('concretization') self.concretization = configuration.get('concretization')
# Retrieve dev-build packages:
self.dev_specs = configuration['develop']
for name, entry in self.dev_specs.items():
# spec must include a concrete version
assert Spec(entry['spec']).version.concrete
# default path is the spec name
if 'path' not in entry:
self.dev_specs[name]['path'] = name
@property @property
def user_specs(self): def user_specs(self):
return self.spec_lists[user_speclist_name] return self.spec_lists[user_speclist_name]
@ -722,6 +732,7 @@ def _set_user_specs_from_lockfile(self):
def clear(self): def clear(self):
self.spec_lists = {user_speclist_name: SpecList()} # specs from yaml self.spec_lists = {user_speclist_name: SpecList()} # specs from yaml
self.dev_specs = {} # dev-build specs from yaml
self.concretized_user_specs = [] # user specs from last concretize self.concretized_user_specs = [] # user specs from last concretize
self.concretized_order = [] # roots of last concretize, in order self.concretized_order = [] # roots of last concretize, in order
self.specs_by_hash = {} # concretized specs by hash self.specs_by_hash = {} # concretized specs by hash
@ -976,6 +987,71 @@ def remove(self, query_spec, list_name=user_speclist_name, force=False):
del self.concretized_order[i] del self.concretized_order[i]
del self.specs_by_hash[dag_hash] del self.specs_by_hash[dag_hash]
def develop(self, spec, path, clone=False):
"""Add dev-build info for package
Args:
spec (Spec): Set constraints on development specs. Must include a
concrete version.
path (string): Path to find code for developer builds. Relative
paths will be resolved relative to the environment.
clone (bool, default False): Clone the package code to the path.
If clone is False Spack will assume the code is already present
at ``path``.
Return:
(bool): True iff the environment was changed.
"""
spec = spec.copy() # defensive copy since we access cached attributes
if not spec.versions.concrete:
raise SpackEnvironmentError(
'Cannot develop spec %s without a concrete version' % spec)
for name, entry in self.dev_specs.items():
if name == spec.name:
e_spec = Spec(entry['spec'])
e_path = entry['path']
if e_spec == spec:
if path == e_path:
tty.msg("Spec %s already configured for development" %
spec)
return False
else:
tty.msg("Updating development path for spec %s" % spec)
break
else:
msg = "Updating development spec for package "
msg += "%s with path %s" % (spec.name, path)
tty.msg(msg)
break
else:
tty.msg("Configuring spec %s for development at path %s" %
(spec, path))
if clone:
# "steal" the source code via staging API
abspath = path if os.path.isabs(path) else os.path.join(
self.path, path)
stage = spec.package.stage
stage.steal_source(abspath)
# If it wasn't already in the list, append it
self.dev_specs[spec.name] = {'path': path, 'spec': str(spec)}
return True
def undevelop(self, spec):
"""Remove develop info for abstract spec ``spec``.
returns True on success, False if no entry existed."""
spec = Spec(spec) # In case it's a spec object
if spec.name in self.dev_specs:
del self.dev_specs[spec.name]
return True
return False
def concretize(self, force=False): def concretize(self, force=False):
"""Concretize user_specs in this environment. """Concretize user_specs in this environment.
@ -1248,6 +1324,53 @@ def _add_concrete_spec(self, spec, concrete, new=True):
self.concretized_order.append(h) self.concretized_order.append(h)
self.specs_by_hash[h] = concrete self.specs_by_hash[h] = concrete
def _spec_needs_overwrite(self, spec):
# Overwrite the install if it's a dev build (non-transitive)
# and the code has been changed since the last install
# or one of the dependencies has been reinstalled since
# the last install
# if it's not installed, we don't need to overwrite it
if not spec.package.installed:
return False
# if spec and all deps aren't dev builds, we don't need to overwrite it
if not any(spec.satisfies(c)
for c in ('dev_path=any', '^dev_path=any')):
return False
# if any dep needs overwrite, or any dep is missing and is a dev build
# then overwrite this package
if any(
self._spec_needs_overwrite(dep) or
((not dep.package.installed) and dep.satisfies('dev_path=any'))
for dep in spec.traverse(root=False)
):
return True
# if it's not a direct dev build and its dependencies haven't
# changed, it hasn't changed.
# We don't merely check satisfaction (spec.satisfies('dev_path=any')
# because we need the value of the variant in the next block of code
dev_path_var = spec.variants.get('dev_path', None)
if not dev_path_var:
return False
# if it is a direct dev build, check whether the code changed
# we already know it is installed
_, record = spack.store.db.query_by_spec_hash(spec.dag_hash())
mtime = fs.last_modification_time_recursive(dev_path_var.value)
return mtime > record.installation_time
def _get_overwrite_specs(self):
ret = []
for dag_hash in self.concretized_order:
spec = self.specs_by_hash[dag_hash]
ret.extend([d.dag_hash() for d in spec.traverse(root=True)
if self._spec_needs_overwrite(d)])
return ret
def install(self, user_spec, concrete_spec=None, **install_args): def install(self, user_spec, concrete_spec=None, **install_args):
"""Install a single spec into an environment. """Install a single spec into an environment.
@ -1260,7 +1383,11 @@ def install(self, user_spec, concrete_spec=None, **install_args):
def _install(self, spec, **install_args): def _install(self, spec, **install_args):
# "spec" must be concrete # "spec" must be concrete
spec.package.do_install(**install_args)
package = spec.package
install_args['overwrite'] = install_args.get(
'overwrite', []) + self._get_overwrite_specs()
package.do_install(**install_args)
if not spec.external: if not spec.external:
# Make sure log directory exists # Make sure log directory exists
@ -1288,14 +1415,18 @@ def install_all(self, args=None):
# a large amount of time due to repeatedly acquiring and releasing # a large amount of time due to repeatedly acquiring and releasing
# locks, this does an initial check across all specs within a single # locks, this does an initial check across all specs within a single
# DB read transaction to reduce time spent in this case. # DB read transaction to reduce time spent in this case.
uninstalled_specs = []
specs_to_install = []
with spack.store.db.read_transaction(): with spack.store.db.read_transaction():
for concretized_hash in self.concretized_order: for concretized_hash in self.concretized_order:
spec = self.specs_by_hash[concretized_hash] spec = self.specs_by_hash[concretized_hash]
if not spec.package.installed:
uninstalled_specs.append(spec)
if not spec.package.installed or (
spec.satisfies('dev_path=any') or
spec.satisfies('^dev_path=any')
):
# If it's a dev build it could need to be reinstalled
specs_to_install.append(spec)
for spec in uninstalled_specs:
for spec in specs_to_install:
# Parse cli arguments and construct a dictionary # Parse cli arguments and construct a dictionary
# that will be passed to Package.do_install API # that will be passed to Package.do_install API
kwargs = dict() kwargs = dict()
@ -1583,6 +1714,17 @@ def _update_and_write_manifest(self, raw_yaml_dict, yaml_dict):
else: else:
view = False view = False
yaml_dict['view'] = view yaml_dict['view'] = view
if self.dev_specs:
# Remove entries that are mirroring defaults
write_dev_specs = copy.deepcopy(self.dev_specs)
for name, entry in write_dev_specs.items():
if entry['path'] == name:
del entry['path']
yaml_dict['develop'] = write_dev_specs
else:
yaml_dict.pop('develop', None)
# Remove yaml sections that are shadowing defaults # Remove yaml sections that are shadowing defaults
# construct garbage path to ensure we don't find a manifest by accident # construct garbage path to ensure we don't find a manifest by accident
with fs.temp_cwd() as env_dir: with fs.temp_cwd() as env_dir:
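
A hedged sketch of the new Environment API that `spack develop` and `spack undevelop` drive; the package name and version are placeholders, and the spec must carry a concrete version:

```python
import spack.environment as ev
import spack.spec

env = ev.get_env(None, 'develop', required=True)  # the active environment
spec = spack.spec.Spec('mypkg@1.2.3')             # placeholder spec

with env.write_transaction():
    # Record the dev entry; clone=False assumes the code is already at `path`.
    if env.develop(spec, path='mypkg', clone=False):
        env.write()

# Later, env.undevelop('mypkg') removes the entry again.
```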

View File

@ -565,7 +565,6 @@ def package_id(pkg):
otherwise, the default is to install as many dependencies as otherwise, the default is to install as many dependencies as
possible (i.e., best effort installation). possible (i.e., best effort installation).
fake (bool): Don't really build; install fake stub files instead. fake (bool): Don't really build; install fake stub files instead.
force (bool): Install again, even if already installed.
install_deps (bool): Install dependencies before installing this install_deps (bool): Install dependencies before installing this
package package
install_source (bool): By default, source is not installed, but install_source (bool): By default, source is not installed, but
@ -575,6 +574,8 @@ def package_id(pkg):
keep_stage (bool): By default, stage is destroyed only if there keep_stage (bool): By default, stage is destroyed only if there
are no exceptions during build. Set to True to keep the stage are no exceptions during build. Set to True to keep the stage
even with exceptions. even with exceptions.
overwrite (list): list of hashes for packages to do overwrite
installs. Default empty list.
restage (bool): Force spack to restage the package source. restage (bool): Force spack to restage the package source.
skip_patch (bool): Skip patch stage of build if True. skip_patch (bool): Skip patch stage of build if True.
stop_before (InstallPhase): stop execution before this stop_before (InstallPhase): stop execution before this
@ -638,6 +639,10 @@ def __init__(self, pkg):
# Cache of installed packages' unique ids # Cache of installed packages' unique ids
self.installed = set() self.installed = set()
# Cache of overwrite information
self.overwrite = set()
self.overwrite_time = time.time()
# Data store layout # Data store layout
self.layout = spack.store.layout self.layout = spack.store.layout
@ -727,7 +732,9 @@ def _check_deps_status(self):
# Check the database to see if the dependency has been installed # Check the database to see if the dependency has been installed
# and flag as such if appropriate # and flag as such if appropriate
rec, installed_in_db = self._check_db(dep) rec, installed_in_db = self._check_db(dep)
if installed_in_db:
if installed_in_db and (
dep.dag_hash() not in self.overwrite or
rec.installation_time > self.overwrite_time):
tty.debug('Flagging {0} as installed per the database' tty.debug('Flagging {0} as installed per the database'
.format(dep_id)) .format(dep_id))
self.installed.add(dep_id) self.installed.add(dep_id)
@ -778,7 +785,10 @@ def _prepare_for_install(self, task, keep_prefix, keep_stage,
if restage and task.pkg.stage.managed_by_spack: if restage and task.pkg.stage.managed_by_spack:
task.pkg.stage.destroy() task.pkg.stage.destroy()
if not partial and self.layout.check_installed(task.pkg.spec):
if not partial and self.layout.check_installed(task.pkg.spec) and (
rec.spec.dag_hash() not in self.overwrite or
rec.installation_time > self.overwrite_time
):
self._update_installed(task) self._update_installed(task)
# Only update the explicit entry once for the explicit package # Only update the explicit entry once for the explicit package
@ -1417,6 +1427,12 @@ def install(self, **kwargs):
# always installed regardless of whether the root was installed # always installed regardless of whether the root was installed
install_package = kwargs.pop('install_package', True) install_package = kwargs.pop('install_package', True)
# take a timestamp with the overwrite argument to check whether another
# process has already overridden the package.
self.overwrite = set(kwargs.get('overwrite', []))
if self.overwrite:
self.overwrite_time = time.time()
# Ensure not attempting to perform an installation when user didn't # Ensure not attempting to perform an installation when user didn't
# want to go that far. # want to go that far.
self._check_last_phase(**kwargs) self._check_last_phase(**kwargs)
@ -1543,7 +1559,23 @@ def install(self, **kwargs):
# Proceed with the installation since we have an exclusive write # Proceed with the installation since we have an exclusive write
# lock on the package. # lock on the package.
try: try:
self._install_task(task, **kwargs)
if pkg.spec.dag_hash() in self.overwrite:
rec, _ = self._check_db(pkg.spec)
if rec and rec.installed:
if rec.installation_time < self.overwrite_time:
# If it's actually overwriting, do a fs transaction
if os.path.exists(rec.path):
with fs.replace_directory_transaction(
rec.path):
self._install_task(task, **kwargs)
else:
tty.debug("Missing installation to overwrite")
self._install_task(task, **kwargs)
else:
# overwriting nothing
self._install_task(task, **kwargs)
else:
self._install_task(task, **kwargs)
self._update_installed(task) self._update_installed(task)
# If we installed then we should keep the prefix # If we installed then we should keep the prefix
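
How an environment drives the overwrite path above, in rough outline; the spec is the mock package used throughout this commit's tests, and the hash list is normally produced by Environment._get_overwrite_specs:

```python
import spack.spec

# Concretize a dev spec (mock package; dev_path is a placeholder).
spec = spack.spec.Spec('dev-build-test-install@0.0.0 dev_path=/path/to/src')
spec.concretize()

# Hashes passed via `overwrite` are rebuilt even if the database says they are
# installed, wrapped in a directory transaction when the old prefix exists.
spec.package.do_install(overwrite=[d.dag_hash() for d in spec.traverse()])
```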

View File

@ -916,6 +916,11 @@ def download_search():
return stage return stage
def _make_stage(self): def _make_stage(self):
# If it's a dev package (not transitively), use a DIY stage object
dev_path_var = self.spec.variants.get('dev_path', None)
if dev_path_var:
return spack.stage.DIYStage(dev_path_var.value)
# Construct a composite stage on top of the composite FetchStrategy # Construct a composite stage on top of the composite FetchStrategy
composite_fetcher = self.fetcher composite_fetcher = self.fetcher
composite_stage = StageComposite() composite_stage = StageComposite()
@ -1230,16 +1235,14 @@ def do_fetch(self, mirror_only=False):
Creates a stage directory and downloads the tarball for this package. Creates a stage directory and downloads the tarball for this package.
Working directory will be set to the stage directory. Working directory will be set to the stage directory.
""" """
if not self.spec.concrete:
raise ValueError("Can only fetch concrete packages.")
if not self.has_code: if not self.has_code:
tty.debug('No fetch required for {0}: package has no code.' tty.debug('No fetch required for {0}: package has no code.'
.format(self.name)) .format(self.name))
start_time = time.time() start_time = time.time()
checksum = spack.config.get('config:checksum') checksum = spack.config.get('config:checksum')
if checksum and self.version not in self.versions:
fetch = self.stage.managed_by_spack
if checksum and fetch and self.version not in self.versions:
tty.warn("There is no checksum on file to fetch %s safely." % tty.warn("There is no checksum on file to fetch %s safely." %
self.spec.cformat('{name}{@version}')) self.spec.cformat('{name}{@version}'))
@ -1275,9 +1278,6 @@ def do_fetch(self, mirror_only=False):
def do_stage(self, mirror_only=False): def do_stage(self, mirror_only=False):
"""Unpacks and expands the fetched tarball.""" """Unpacks and expands the fetched tarball."""
if not self.spec.concrete:
raise ValueError("Can only stage concrete packages.")
# Always create the stage directory at this point. Why? A no-code # Always create the stage directory at this point. Why? A no-code
# package may want to use the installation process to install metadata. # package may want to use the installation process to install metadata.
self.stage.create() self.stage.create()
@ -1587,6 +1587,12 @@ def do_install(self, **kwargs):
their build process. their build process.
Args:""" Args:"""
# Non-transitive dev specs need to keep the dev stage and be built from
# source every time. Transitive ones just need to be built from source.
dev_path_var = self.spec.variants.get('dev_path', None)
if dev_path_var:
kwargs['keep_stage'] = True
builder = PackageInstaller(self) builder = PackageInstaller(self)
builder.install(**kwargs) builder.install(**kwargs)
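
The practical effect of the two hunks above, sketched with the mock package from the tests: a spec that carries a non-transitive dev_path builds straight out of that directory and keeps it afterwards.

```python
import spack.spec
from spack.stage import DIYStage

spec = spack.spec.Spec('dev-build-test-install@0.0.0 dev_path=/path/to/src')
spec.concretize()

stage = spec.package.stage          # a DIYStage rooted at /path/to/src
print(isinstance(stage, DIYStage))  # True, per _make_stage above

# do_install adds keep_stage=True for such specs, so the developer's source
# directory is never deleted after the build.
```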

View File

@ -73,6 +73,25 @@
'type': 'string' 'type': 'string'
}, },
}, },
'develop': {
'type': 'object',
'default': {},
'additionalProperties': False,
'patternProperties': {
r'\w[\w-]*': {
'type': 'object',
'additionalProperties': False,
'properties': {
'spec': {
'type': 'string'
},
'path': {
'type': 'string'
},
},
},
},
},
'definitions': { 'definitions': {
'type': 'array', 'type': 'array',
'default': [], 'default': [],
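
An example manifest accepted by this schema, written the way the new environment tests below write it; the package name and path are placeholders:

```python
# Write a spack.yaml with a develop entry; 'path' may be omitted, in which
# case it defaults to the package name relative to the environment directory.
manifest = """\
env:
  specs:
  - mypkg@1.2.3
  develop:
    mypkg:
      spec: mypkg@1.2.3
      path: /path/to/mypkg/source
"""
with open('spack.yaml', 'w') as f:
    f.write(manifest)
```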

View File

@ -96,6 +96,12 @@
'version': {'type': 'string'}, 'version': {'type': 'string'},
}, },
}, },
'develop': {
'anyOf': [
{'type': 'boolean'},
{'type': 'string'},
],
},
'namespace': {'type': 'string'}, 'namespace': {'type': 'string'},
'parameters': { 'parameters': {
'type': 'object', 'type': 'object',

View File

@ -2063,7 +2063,8 @@ def _concretize_helper(self, concretizer, presets=None, visited=None):
# still need to select a concrete package later. # still need to select a concrete package later.
if not self.virtual: if not self.virtual:
changed |= any( changed |= any(
(concretizer.concretize_architecture(self),
(concretizer.concretize_develop(self),  # special variant
concretizer.concretize_architecture(self),
concretizer.concretize_compiler(self), concretizer.concretize_compiler(self),
concretizer.adjust_target(self), concretizer.adjust_target(self),
# flags must be concretized after compiler # flags must be concretized after compiler
@ -2698,6 +2699,10 @@ def normalize(self, force=False, tests=False, user_spec_deps=None):
if user_spec_deps: if user_spec_deps:
for name, spec in user_spec_deps.items(): for name, spec in user_spec_deps.items():
if not name:
msg = "Attempted to normalize anonymous dependency spec"
msg += " %s" % spec
raise InvalidSpecDetected(msg)
if name not in all_spec_deps: if name not in all_spec_deps:
all_spec_deps[name] = spec all_spec_deps[name] = spec
else: else:
@ -2850,6 +2855,9 @@ def _constrain_dependencies(self, other):
if not other.satisfies_dependencies(self): if not other.satisfies_dependencies(self):
raise UnsatisfiableDependencySpecError(other, self) raise UnsatisfiableDependencySpecError(other, self)
if any(not d.name for d in other.traverse(root=False)):
raise UnconstrainableDependencySpecError(other)
# Handle common first-order constraints directly # Handle common first-order constraints directly
changed = False changed = False
for name in self.common_dependencies(other): for name in self.common_dependencies(other):
@ -4123,8 +4131,23 @@ def do_parse(self):
if not dep: if not dep:
# We're adding a dependency to the last spec # We're adding a dependency to the last spec
self.expect(ID)
dep = self.spec(self.token.value)
if self.accept(ID):
self.previous = self.token
if self.accept(EQ):
# This is an anonymous dep with a key=value
# push tokens to be parsed as part of the
# dep spec
self.push_tokens(
[self.previous, self.token])
dep_name = None
else:
# named dep (standard)
dep_name = self.token.value
self.previous = None
else:
# anonymous dep
dep_name = None
dep = self.spec(dep_name)
# Raise an error if the previous spec is already # Raise an error if the previous spec is already
# concrete (assigned by hash) # concrete (assigned by hash)
@ -4509,6 +4532,14 @@ def __init__(self, provided, required):
provided, required, "dependency") provided, required, "dependency")
class UnconstrainableDependencySpecError(spack.error.SpecError):
"""Raised when attempting to constrain by an anonymous dependency spec"""
def __init__(self, spec):
msg = "Cannot constrain by spec '%s'. Cannot constrain by a" % spec
msg += " spec containing anonymous dependencies"
super(UnconstrainableDependencySpecError, self).__init__(msg)
class AmbiguousHashError(spack.error.SpecError): class AmbiguousHashError(spack.error.SpecError):
def __init__(self, msg, *specs): def __init__(self, msg, *specs):
spec_fmt = '{namespace}.{name}{@version}{%compiler}{compiler_flags}' spec_fmt = '{namespace}.{name}{@version}{%compiler}{compiler_flags}'
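
The parser change above lets anonymous dependencies such as `^dev_path=/some/path` through (the dev-build machinery needs them), while constraining or concretizing by an anonymous dependency is rejected. A sketch of the error path, matching the new tests below and assuming the mock package repo:

```python
import pytest
import spack.error
import spack.spec

# Anonymous dependency specs now parse ...
s = spack.spec.Spec('mpileaks ^cflags=-g')

# ... but normalizing/concretizing them raises a SpecError, as does
# constraining by a spec that contains one.
with pytest.raises(spack.error.SpecError):
    s.concretize()
```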

View File

@ -5,13 +5,15 @@
from __future__ import print_function from __future__ import print_function
import errno
import getpass
import glob
import hashlib
import os import os
import shutil
import stat import stat
import sys import sys
import errno
import hashlib
import tempfile import tempfile
import getpass
from six import string_types from six import string_types
from six import iteritems from six import iteritems
@ -491,6 +493,41 @@ def print_errors(errors):
print_errors(errors) print_errors(errors)
def steal_source(self, dest):
"""Copy the source_path directory in its entirety to directory dest
This operation creates/fetches/expands the stage if it is not already,
and destroys the stage when it is done."""
if not self.created:
self.create()
if not self.expanded and not self.archive_file:
self.fetch()
if not self.expanded:
self.expand_archive()
if not os.path.isdir(dest):
mkdirp(dest)
# glob all files and directories in the source path
hidden_entries = glob.glob(os.path.join(self.source_path, '.*'))
entries = glob.glob(os.path.join(self.source_path, '*'))
# Move all files from stage to destination directory
# Include hidden files for VCS repo history
for entry in hidden_entries + entries:
if os.path.isdir(entry):
d = os.path.join(dest, os.path.basename(entry))
shutil.copytree(entry, d)
else:
shutil.copy2(entry, dest)
# copy archive file if we downloaded from url -- replaces for vcs
if self.archive_file and os.path.exists(self.archive_file):
shutil.copy2(self.archive_file, dest)
# remove leftover stage
self.destroy()
def check(self): def check(self):
"""Check the downloaded archive against a checksum digest. """Check the downloaded archive against a checksum digest.
No-op if this stage checks code out of a repository.""" No-op if this stage checks code out of a repository."""
@ -655,7 +692,8 @@ def _add_to_root_stage(self):
@pattern.composite(method_list=[ @pattern.composite(method_list=[
'fetch', 'create', 'created', 'check', 'expand_archive', 'restage', 'fetch', 'create', 'created', 'check', 'expand_archive', 'restage',
'destroy', 'cache_local', 'cache_mirror', 'managed_by_spack'])
'destroy', 'cache_local', 'cache_mirror', 'steal_source',
'managed_by_spack'])
class StageComposite: class StageComposite:
"""Composite for Stage type objects. The first item in this composite is """Composite for Stage type objects. The first item in this composite is
considered to be the root package, and operations that return a value are considered to be the root package, and operations that return a value are
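
A sketch of the new staging call used by `spack develop` above; the package name and destination are placeholders:

```python
import spack.spec

# Fetch/expand the package source if needed, copy it (including hidden VCS
# directories) into the destination, then destroy the stage.
pkg = spack.spec.Spec('mypkg@1.2.3').package
pkg.stage.steal_source('/path/to/dev/source')
```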

View File

@ -2,16 +2,22 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details. # Spack Project Developers. See the top-level COPYRIGHT file for details.
# #
# SPDX-License-Identifier: (Apache-2.0 OR MIT) # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os import os
import pytest import pytest
import spack.spec import spack.spec
import llnl.util.filesystem as fs
import spack.environment as ev
from spack.main import SpackCommand, SpackCommandError from spack.main import SpackCommand, SpackCommandError
dev_build = SpackCommand('dev-build') dev_build = SpackCommand('dev-build')
install = SpackCommand('install')
env = SpackCommand('env')
def test_dev_build_basics(tmpdir, mock_packages, install_mockery): def test_dev_build_basics(tmpdir, mock_packages, install_mockery):
spec = spack.spec.Spec('dev-build-test-install@0.0.0').concretized()
spec = spack.spec.Spec('dev-build-test-install@0.0.0 dev_path=%s' % tmpdir)
spec.concretize()
with tmpdir.as_cwd(): with tmpdir.as_cwd():
with open(spec.package.filename, 'w') as f: with open(spec.package.filename, 'w') as f:
@ -23,9 +29,12 @@ def test_dev_build_basics(tmpdir, mock_packages, install_mockery):
with open(os.path.join(spec.prefix, spec.package.filename), 'r') as f: with open(os.path.join(spec.prefix, spec.package.filename), 'r') as f:
assert f.read() == spec.package.replacement_string assert f.read() == spec.package.replacement_string
assert os.path.exists(str(tmpdir))
def test_dev_build_before(tmpdir, mock_packages, install_mockery): def test_dev_build_before(tmpdir, mock_packages, install_mockery):
spec = spack.spec.Spec('dev-build-test-install@0.0.0').concretized()
spec = spack.spec.Spec('dev-build-test-install@0.0.0 dev_path=%s' % tmpdir)
spec.concretize()
with tmpdir.as_cwd(): with tmpdir.as_cwd():
with open(spec.package.filename, 'w') as f: with open(spec.package.filename, 'w') as f:
@ -41,7 +50,8 @@ def test_dev_build_before(tmpdir, mock_packages, install_mockery):
def test_dev_build_until(tmpdir, mock_packages, install_mockery): def test_dev_build_until(tmpdir, mock_packages, install_mockery):
spec = spack.spec.Spec('dev-build-test-install@0.0.0').concretized()
spec = spack.spec.Spec('dev-build-test-install@0.0.0 dev_path=%s' % tmpdir)
spec.concretize()
with tmpdir.as_cwd(): with tmpdir.as_cwd():
with open(spec.package.filename, 'w') as f: with open(spec.package.filename, 'w') as f:
@ -59,7 +69,8 @@ def test_dev_build_until(tmpdir, mock_packages, install_mockery):
def test_dev_build_until_last_phase(tmpdir, mock_packages, install_mockery): def test_dev_build_until_last_phase(tmpdir, mock_packages, install_mockery):
# Test that we ignore the last_phase argument if it is already last # Test that we ignore the last_phase argument if it is already last
spec = spack.spec.Spec('dev-build-test-install@0.0.0').concretized()
spec = spack.spec.Spec('dev-build-test-install@0.0.0 dev_path=%s' % tmpdir)
spec.concretize()
with tmpdir.as_cwd(): with tmpdir.as_cwd():
with open(spec.package.filename, 'w') as f: with open(spec.package.filename, 'w') as f:
@ -73,10 +84,12 @@ def test_dev_build_until_last_phase(tmpdir, mock_packages, install_mockery):
assert os.path.exists(spec.prefix) assert os.path.exists(spec.prefix)
assert spack.store.db.query(spec, installed=True) assert spack.store.db.query(spec, installed=True)
assert os.path.exists(str(tmpdir))
def test_dev_build_before_until(tmpdir, mock_packages, install_mockery): def test_dev_build_before_until(tmpdir, mock_packages, install_mockery):
spec = spack.spec.Spec('dev-build-test-install@0.0.0').concretized()
spec = spack.spec.Spec('dev-build-test-install@0.0.0 dev_path=%s' % tmpdir)
spec.concretize()
with tmpdir.as_cwd(): with tmpdir.as_cwd():
with open(spec.package.filename, 'w') as f: with open(spec.package.filename, 'w') as f:
@ -118,7 +131,8 @@ def module(*args):
def test_dev_build_fails_already_installed(tmpdir, mock_packages, def test_dev_build_fails_already_installed(tmpdir, mock_packages,
install_mockery): install_mockery):
spec = spack.spec.Spec('dev-build-test-install@0.0.0').concretized()
spec = spack.spec.Spec('dev-build-test-install@0.0.0 dev_path=%s' % tmpdir)
spec.concretize()
with tmpdir.as_cwd(): with tmpdir.as_cwd():
with open(spec.package.filename, 'w') as f: with open(spec.package.filename, 'w') as f:
@ -147,3 +161,226 @@ def test_dev_build_fails_nonexistent_package_name(mock_packages):
def test_dev_build_fails_no_version(mock_packages): def test_dev_build_fails_no_version(mock_packages):
output = dev_build('dev-build-test-install', fail_on_error=False) output = dev_build('dev-build-test-install', fail_on_error=False)
assert 'dev-build spec must have a single, concrete version' in output assert 'dev-build spec must have a single, concrete version' in output
def test_dev_build_env(tmpdir, mock_packages, install_mockery,
mutable_mock_env_path):
"""Test Spack does dev builds for packages in develop section of env."""
# setup dev-build-test-install package for dev build
build_dir = tmpdir.mkdir('build')
spec = spack.spec.Spec('dev-build-test-install@0.0.0 dev_path=%s' %
build_dir)
spec.concretize()
with build_dir.as_cwd():
with open(spec.package.filename, 'w') as f:
f.write(spec.package.original_string)
# setup environment
envdir = tmpdir.mkdir('env')
with envdir.as_cwd():
with open('spack.yaml', 'w') as f:
f.write("""\
env:
specs:
- dev-build-test-install@0.0.0
develop:
dev-build-test-install:
spec: dev-build-test-install@0.0.0
path: %s
""" % build_dir)
env('create', 'test', './spack.yaml')
with ev.read('test'):
install()
assert spec.package.filename in os.listdir(spec.prefix)
with open(os.path.join(spec.prefix, spec.package.filename), 'r') as f:
assert f.read() == spec.package.replacement_string
def test_dev_build_env_version_mismatch(tmpdir, mock_packages, install_mockery,
mutable_mock_env_path):
"""Test Spack constraints concretization by develop specs."""
# setup dev-build-test-install package for dev build
build_dir = tmpdir.mkdir('build')
spec = spack.spec.Spec('dev-build-test-install@0.0.0 dev_path=%s' % tmpdir)
spec.concretize()
with build_dir.as_cwd():
with open(spec.package.filename, 'w') as f:
f.write(spec.package.original_string)
# setup environment
envdir = tmpdir.mkdir('env')
with envdir.as_cwd():
with open('spack.yaml', 'w') as f:
f.write("""\
env:
specs:
- dev-build-test-install@0.0.0
develop:
dev-build-test-install:
spec: dev-build-test-install@1.1.1
path: %s
""" % build_dir)
env('create', 'test', './spack.yaml')
with ev.read('test'):
with pytest.raises(spack.spec.UnsatisfiableVersionSpecError):
install()
def test_dev_build_multiple(tmpdir, mock_packages, install_mockery,
mutable_mock_env_path, mock_fetch):
"""Test spack install with multiple developer builds"""
# setup dev-build-test-install package for dev build
# Wait to concretize inside the environment to set dev_path on the specs;
# without the environment, the user would need to set dev_path for both the
# root and dependency if they wanted a dev build for both.
leaf_dir = tmpdir.mkdir('leaf')
leaf_spec = spack.spec.Spec('dev-build-test-install@0.0.0')
with leaf_dir.as_cwd():
with open(leaf_spec.package.filename, 'w') as f:
f.write(leaf_spec.package.original_string)
# setup dev-build-test-dependent package for dev build
# don't concretize outside environment -- dev info will be wrong
root_dir = tmpdir.mkdir('root')
root_spec = spack.spec.Spec('dev-build-test-dependent@0.0.0')
with root_dir.as_cwd():
with open(root_spec.package.filename, 'w') as f:
f.write(root_spec.package.original_string)
# setup environment
envdir = tmpdir.mkdir('env')
with envdir.as_cwd():
with open('spack.yaml', 'w') as f:
f.write("""\
env:
specs:
- dev-build-test-install@0.0.0
- dev-build-test-dependent@0.0.0
develop:
dev-build-test-install:
path: %s
spec: dev-build-test-install@0.0.0
dev-build-test-dependent:
spec: dev-build-test-dependent@0.0.0
path: %s
""" % (leaf_dir, root_dir))
env('create', 'test', './spack.yaml')
with ev.read('test'):
# Do concretization inside environment for dev info
leaf_spec.concretize()
root_spec.concretize()
# Do install
install()
for spec in (leaf_spec, root_spec):
assert spec.package.filename in os.listdir(spec.prefix)
with open(os.path.join(spec.prefix, spec.package.filename), 'r') as f:
assert f.read() == spec.package.replacement_string
def test_dev_build_env_dependency(tmpdir, mock_packages, install_mockery,
mock_fetch, mutable_mock_env_path):
"""
Test non-root specs in an environment are properly marked for dev builds.
"""
# setup dev-build-test-install package for dev build
build_dir = tmpdir.mkdir('build')
spec = spack.spec.Spec('dependent-of-dev-build@0.0.0')
dep_spec = spack.spec.Spec('dev-build-test-install')
with build_dir.as_cwd():
with open(dep_spec.package.filename, 'w') as f:
f.write(dep_spec.package.original_string)
# setup environment
envdir = tmpdir.mkdir('env')
with envdir.as_cwd():
with open('spack.yaml', 'w') as f:
f.write("""\
env:
specs:
- dependent-of-dev-build@0.0.0
develop:
dev-build-test-install:
spec: dev-build-test-install@0.0.0
path: %s
""" % build_dir)
env('create', 'test', './spack.yaml')
with ev.read('test'):
# concretize in the environment to get the dev build info
# equivalent to setting dev_build and dev_path variants
# on all specs above
spec.concretize()
dep_spec.concretize()
install()
# Ensure that both specs installed properly
assert dep_spec.package.filename in os.listdir(dep_spec.prefix)
assert os.path.exists(spec.prefix)
# Ensure variants set properly
for dep in (dep_spec, spec['dev-build-test-install']):
assert dep.satisfies('dev_path=%s' % build_dir)
assert spec.satisfies('^dev_path=any')
@pytest.mark.parametrize('test_spec', ['dev-build-test-install',
'dependent-of-dev-build'])
def test_dev_build_rebuild_on_source_changes(
test_spec, tmpdir, mock_packages, install_mockery,
mutable_mock_env_path, mock_fetch):
"""Test dev builds rebuild on changes to source code.
``test_spec = dev-build-test-install`` tests rebuild for changes to package
``test_spec = dependent-of-dev-build`` tests rebuild for changes to dep
"""
# setup dev-build-test-install package for dev build
build_dir = tmpdir.mkdir('build')
spec = spack.spec.Spec('dev-build-test-install@0.0.0 dev_path=%s' %
build_dir)
spec.concretize()
def reset_string():
with build_dir.as_cwd():
with open(spec.package.filename, 'w') as f:
f.write(spec.package.original_string)
reset_string()
# setup environment
envdir = tmpdir.mkdir('env')
with envdir.as_cwd():
with open('spack.yaml', 'w') as f:
f.write("""\
env:
specs:
- %s@0.0.0
develop:
dev-build-test-install:
spec: dev-build-test-install@0.0.0
path: %s
""" % (test_spec, build_dir))
env('create', 'test', './spack.yaml')
with ev.read('test'):
install()
reset_string() # so the package will accept rebuilds
fs.touch(os.path.join(str(build_dir), 'test'))
output = install()
assert 'Installing %s' % test_spec in output

View File

@ -0,0 +1,100 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import pytest
import os
import shutil
import llnl.util.filesystem as fs
import spack.spec
import spack.environment as ev
from spack.main import SpackCommand
develop = SpackCommand('develop')
env = SpackCommand('env')
@pytest.mark.usefixtures(
'mutable_mock_env_path', 'mock_packages', 'mock_fetch')
class TestDevelop(object):
def check_develop(self, env, spec, path=None):
path = path or spec.name
# check in memory representation
assert spec.name in env.dev_specs
dev_specs_entry = env.dev_specs[spec.name]
assert dev_specs_entry['path'] == path
assert dev_specs_entry['spec'] == str(spec)
# check yaml representation
yaml = ev.config_dict(env.yaml)
assert spec.name in yaml['develop']
yaml_entry = yaml['develop'][spec.name]
assert yaml_entry['spec'] == str(spec)
if path == spec.name:
# default paths aren't written out
assert 'path' not in yaml_entry
else:
assert yaml_entry['path'] == path
def test_develop_no_path_no_clone(self):
env('create', 'test')
with ev.read('test') as e:
# develop checks that the path exists
fs.mkdirp(os.path.join(e.path, 'mpich'))
develop('--no-clone', 'mpich@1.0')
self.check_develop(e, spack.spec.Spec('mpich@1.0'))
def test_develop_no_clone(self, tmpdir):
env('create', 'test')
with ev.read('test') as e:
develop('--no-clone', '-p', str(tmpdir), 'mpich@1.0')
self.check_develop(e, spack.spec.Spec('mpich@1.0'), str(tmpdir))
def test_develop(self):
env('create', 'test')
with ev.read('test') as e:
develop('mpich@1.0')
self.check_develop(e, spack.spec.Spec('mpich@1.0'))
def test_develop_no_args(self):
env('create', 'test')
with ev.read('test') as e:
# develop and remove it
develop('mpich@1.0')
shutil.rmtree(os.path.join(e.path, 'mpich'))
# test develop with no args
develop()
self.check_develop(e, spack.spec.Spec('mpich@1.0'))
def test_develop_twice(self):
env('create', 'test')
with ev.read('test') as e:
develop('mpich@1.0')
self.check_develop(e, spack.spec.Spec('mpich@1.0'))
develop('mpich@1.0')
# disk representation isn't updated unless we write
# second develop command doesn't change it, so we don't write
# but we check disk representation
e.write()
self.check_develop(e, spack.spec.Spec('mpich@1.0'))
assert len(e.dev_specs) == 1
def test_develop_update_path(self, tmpdir):
env('create', 'test')
with ev.read('test') as e:
develop('mpich@1.0')
develop('-p', str(tmpdir), 'mpich@1.0')
self.check_develop(e, spack.spec.Spec('mpich@1.0'), str(tmpdir))
assert len(e.dev_specs) == 1
def test_develop_update_spec(self):
env('create', 'test')
with ev.read('test') as e:
develop('mpich@1.0')
develop('mpich@2.0')
self.check_develop(e, spack.spec.Spec('mpich@2.0'))
assert len(e.dev_specs) == 1

View File

@ -0,0 +1,67 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import spack.spec
import spack.environment as ev
from spack.main import SpackCommand
undevelop = SpackCommand('undevelop')
env = SpackCommand('env')
concretize = SpackCommand('concretize')
def test_undevelop(tmpdir, mock_packages, mutable_mock_env_path):
# setup environment
envdir = tmpdir.mkdir('env')
with envdir.as_cwd():
with open('spack.yaml', 'w') as f:
f.write("""\
env:
specs:
- mpich
develop:
mpich:
spec: mpich@1.0
path: /fake/path
""")
env('create', 'test', './spack.yaml')
with ev.read('test'):
before = spack.spec.Spec('mpich').concretized()
undevelop('mpich')
after = spack.spec.Spec('mpich').concretized()
# Removing dev spec from environment changes concretization
assert before.satisfies('dev_path=any')
assert not after.satisfies('dev_path=any')
def test_undevelop_nonexistent(tmpdir, mock_packages, mutable_mock_env_path):
# setup environment
envdir = tmpdir.mkdir('env')
with envdir.as_cwd():
with open('spack.yaml', 'w') as f:
f.write("""\
env:
specs:
- mpich
develop:
mpich:
spec: mpich@1.0
path: /fake/path
""")
env('create', 'test', './spack.yaml')
with ev.read('test') as e:
concretize()
before = e.specs_by_hash
undevelop('package-not-in-develop') # does nothing
concretize('-f')
after = e.specs_by_hash
# nothing should have changed
assert before == after

View File

@ -195,7 +195,7 @@ def test_concretize_with_provides_when(self):
s.satisfies('mpich2') for s in repo.providers_for('mpi@3') s.satisfies('mpich2') for s in repo.providers_for('mpi@3')
) )
def test_provides_handles_multiple_providers_of_same_vesrion(self):
def test_provides_handles_multiple_providers_of_same_version(self):
""" """
""" """
providers = spack.repo.path.providers_for('mpi@3.0') providers = spack.repo.path.providers_for('mpi@3.0')
@ -644,3 +644,12 @@ def test_concretize_anonymous(self):
with pytest.raises(spack.error.SpecError): with pytest.raises(spack.error.SpecError):
s = Spec('+variant') s = Spec('+variant')
s.concretize() s.concretize()
def test_concretize_anonymous_dep(self):
with pytest.raises(spack.error.SpecError):
s = Spec('mpileaks ^%gcc')
s.concretize()
with pytest.raises(spack.error.SpecError):
s = Spec('mpileaks ^cflags=-g')
s.concretize()

View File

@ -9,6 +9,7 @@
import spack.package_prefs import spack.package_prefs
import spack.repo import spack.repo
import spack.util.spack_yaml as syaml import spack.util.spack_yaml as syaml
from spack.concretize import NonDeterministicVariantError
from spack.config import ConfigScope, ConfigError from spack.config import ConfigScope, ConfigError
from spack.spec import Spec from spack.spec import Spec
from spack.version import Version from spack.version import Version
@ -84,6 +85,23 @@ def test_preferred_variants(self):
'mpileaks', debug=True, opt=True, shared=False, static=False 'mpileaks', debug=True, opt=True, shared=False, static=False
) )
def test_preferred_variants_from_any(self):
"""
Test that 'foo=any' concretizes to any non-none value
Test that concretization of variants raises an error attempting
non-deterministic concretization from 'any' when preferred value is
'none'.
"""
update_packages('multivalue-variant', 'variants', 'foo=bar')
assert_variant_values(
'multivalue-variant foo=any', foo=('bar',)
)
update_packages('multivalue-variant', 'variants', 'foo=none')
with pytest.raises(NonDeterministicVariantError):
concretize('multivalue-variant foo=any')
def test_preferred_compilers(self): def test_preferred_compilers(self):
"""Test preferred compilers are applied correctly """Test preferred compilers are applied correctly
""" """

View File

@ -512,12 +512,6 @@ def test_unconcretized_install(install_mockery, mock_fetch, mock_packages):
with pytest.raises(ValueError, match="only install concrete packages"): with pytest.raises(ValueError, match="only install concrete packages"):
spec.package.do_install() spec.package.do_install()
with pytest.raises(ValueError, match="only fetch concrete packages"):
spec.package.do_fetch()
with pytest.raises(ValueError, match="only stage concrete packages"):
spec.package.do_stage()
with pytest.raises(ValueError, match="only patch concrete packages"): with pytest.raises(ValueError, match="only patch concrete packages"):
spec.package.do_patch() spec.package.do_patch()

View File

@ -34,7 +34,6 @@ def _mock(filename, mode):
if not mode == 'w': if not mode == 'w':
raise RuntimeError('opening mode must be "w" [stringio_open]') raise RuntimeError('opening mode must be "w" [stringio_open]')
file_registry[filename] = StringIO()
try: try:
yield file_registry[filename] yield file_registry[filename]
finally: finally:
@ -63,6 +62,7 @@ def _impl(spec_str):
# Get its filename # Get its filename
filename = generator.layout.filename filename = generator.layout.filename
# Retrieve the content # Retrieve the content
content = filename_dict[filename].split('\n') content = filename_dict[filename].split('\n')
generator.remove() generator.remove()

View File

@ -7,6 +7,7 @@
import pytest import pytest
from spack.error import SpecError, UnsatisfiableSpecError from spack.error import SpecError, UnsatisfiableSpecError
from spack.spec import UnconstrainableDependencySpecError
from spack.spec import Spec, SpecFormatSigilError, SpecFormatStringError from spack.spec import Spec, SpecFormatSigilError, SpecFormatStringError
from spack.variant import InvalidVariantValueError, UnknownVariantError from spack.variant import InvalidVariantValueError, UnknownVariantError
from spack.variant import MultipleValuesInExclusiveVariantError from spack.variant import MultipleValuesInExclusiveVariantError
@ -80,7 +81,8 @@ def check_constrain_not_changed(spec, constraint):
def check_invalid_constraint(spec, constraint): def check_invalid_constraint(spec, constraint):
spec = Spec(spec) spec = Spec(spec)
constraint = Spec(constraint) constraint = Spec(constraint)
with pytest.raises(UnsatisfiableSpecError):
with pytest.raises((UnsatisfiableSpecError,
UnconstrainableDependencySpecError)):
spec.constrain(constraint) spec.constrain(constraint)
@ -272,6 +274,8 @@ def test_satisfies_matching_variant(self):
check_satisfies('mpich foo=true', 'mpich+foo') check_satisfies('mpich foo=true', 'mpich+foo')
check_satisfies('mpich~foo', 'mpich foo=FALSE') check_satisfies('mpich~foo', 'mpich foo=FALSE')
check_satisfies('mpich foo=False', 'mpich~foo') check_satisfies('mpich foo=False', 'mpich~foo')
check_satisfies('mpich foo=any', 'mpich~foo')
check_satisfies('mpich +foo', 'mpich foo=any')
def test_satisfies_multi_value_variant(self): def test_satisfies_multi_value_variant(self):
# Check quoting # Check quoting
@ -283,6 +287,12 @@ def test_satisfies_multi_value_variant(self):
'multivalue-variant foo=bar,baz') 'multivalue-variant foo=bar,baz')
# A more constrained spec satisfies a less constrained one # A more constrained spec satisfies a less constrained one
check_satisfies('multivalue-variant foo="bar,baz"',
'multivalue-variant foo=any')
check_satisfies('multivalue-variant foo=any',
'multivalue-variant foo="bar,baz"')
check_satisfies('multivalue-variant foo="bar,baz"', check_satisfies('multivalue-variant foo="bar,baz"',
'multivalue-variant foo="bar"') 'multivalue-variant foo="bar"')
@ -307,6 +317,7 @@ def test_satisfies_single_valued_variant(self):
a.concretize()
assert a.satisfies('foobar=bar')
assert a.satisfies('foobar=any')
# Assert that an autospec generated from a literal
# gives the right result for a single valued variant
@ -441,6 +452,10 @@ def test_unsatisfiable_variants(self):
check_unsatisfiable('mpich', 'mpich~foo', True)
check_unsatisfiable('mpich', 'mpich foo=1', True)
# None and any do not satisfy each other
check_unsatisfiable('foo=none', 'foo=any')
check_unsatisfiable('foo=any', 'foo=none')
def test_unsatisfiable_variant_mismatch(self):
# No match in specs
check_unsatisfiable('mpich~foo', 'mpich+foo')
@ -608,6 +623,11 @@ def test_constrain_multi_value_variant(self):
'multivalue-variant foo="baz"' 'multivalue-variant foo="baz"'
) )
check_constrain(
'libelf foo=bar,baz', 'libelf foo=bar,baz', 'libelf foo=any')
check_constrain(
'libelf foo=bar,baz', 'libelf foo=any', 'libelf foo=bar,baz')
def test_constrain_compiler_flags(self):
check_constrain(
'libelf cflags="-O3" cppflags="-Wall"',
@ -648,12 +668,15 @@ def test_invalid_constraint(self):
check_invalid_constraint('libelf+debug', 'libelf~debug')
check_invalid_constraint('libelf+debug~foo', 'libelf+debug+foo')
check_invalid_constraint('libelf debug=True', 'libelf debug=False')
check_invalid_constraint('libelf foo=none', 'libelf foo=any')
check_invalid_constraint('libelf foo=any', 'libelf foo=none')
check_invalid_constraint(
'libelf cppflags="-O3"', 'libelf cppflags="-O2"')
check_invalid_constraint(
'libelf platform=test target=be os=be', 'libelf target=fe os=fe'
)
check_invalid_constraint('libdwarf', '^%gcc')
def test_constrain_changed(self):
check_constrain_changed('libelf', '@1.0')
@ -661,6 +684,7 @@ def test_constrain_changed(self):
check_constrain_changed('libelf', '%gcc')
check_constrain_changed('libelf%gcc', '%gcc@4.5')
check_constrain_changed('libelf', '+debug')
check_constrain_changed('libelf', 'debug=any')
check_constrain_changed('libelf', '~debug')
check_constrain_changed('libelf', 'debug=2')
check_constrain_changed('libelf', 'cppflags="-O3"')
@ -680,6 +704,7 @@ def test_constrain_not_changed(self):
check_constrain_not_changed('libelf+debug', '+debug')
check_constrain_not_changed('libelf~debug', '~debug')
check_constrain_not_changed('libelf debug=2', 'debug=2')
check_constrain_not_changed('libelf debug=2', 'debug=any')
check_constrain_not_changed(
'libelf cppflags="-O3"', 'cppflags="-O3"')
@ -893,13 +918,14 @@ def test_spec_flags_maintain_order(self):
for x in ('cflags', 'cxxflags', 'fflags')
)
def test_combination_of_any_or_none(self):
# Test that using 'none' and another value raises
with pytest.raises(spack.variant.InvalidVariantValueCombinationError):
Spec('multivalue-variant foo=none,bar')
# Test that using 'any' and another value raises
with pytest.raises(spack.variant.InvalidVariantValueCombinationError):
Spec('multivalue-variant foo=any,bar')
@pytest.mark.skipif(
sys.version_info[0] == 2, reason='__wrapped__ requires python 3'
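Taken together, these checks pin down the wildcard semantics. A minimal sketch of the same behavior at the Spec level, assuming a checkout that includes this change and Spack's mock packages (mpich, multivalue-variant):

from spack.spec import Spec

# '+foo' and concrete multi-values both satisfy the 'any' wildcard
assert Spec('mpich +foo').satisfies(Spec('mpich foo=any'))
assert Spec('multivalue-variant foo=bar,baz').satisfies(
    Spec('multivalue-variant foo=any'))

# 'none' and 'any' deliberately do not satisfy each other
assert not Spec('mpich foo=none').satisfies(Spec('mpich foo=any'))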

View File

@ -25,6 +25,8 @@
except ImportError:
from collections import Sequence
special_variant_values = [None, 'none', 'any']
class Variant(object):
"""Represents a variant in a package, as declared in the
@ -119,13 +121,14 @@ def validate_or_raise(self, vspec, pkg=None):
# Check and record the values that are not allowed
not_allowed_values = [
x for x in value
if x != 'any' and self.single_value_validator(x) is False
]
if not_allowed_values:
raise InvalidVariantValueError(self, not_allowed_values, pkg)
# Validate the group of values if needed
if self.group_validator is not None and value != ('any',):
self.group_validator(pkg.name, self.name, value)
@property
@ -267,6 +270,14 @@ def _value_setter(self, value):
# Tuple is necessary here instead of list because the
# values need to be hashed
value = re.split(r'\s*,\s*', str(value))
value = list(map(lambda x: 'any' if str(x).upper() == 'ANY' else x,
value))
for val in special_variant_values:
if val in value and len(value) > 1:
msg = "'%s' cannot be combined with other variant" % val
msg += " values."
raise InvalidVariantValueCombinationError(msg)
# With multi-value variants it is necessary
# to remove duplicates and give an order
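The guard above rejects 'any' (or 'none') mixed with ordinary values as soon as the value is set. A small hand-run sketch, assuming this change is applied:

import spack.variant as vt

vt.MultiValuedVariant('foo', 'bar,baz')      # plain values are fine
try:
    vt.MultiValuedVariant('foo', 'any,bar')  # mixing 'any' with another value
except vt.InvalidVariantValueCombinationError as e:
    print(e)  # "'any' cannot be combined with other variant values."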
@ -302,7 +313,15 @@ def satisfies(self, other):
""" """
# If names are different then `self` does not satisfy `other` # If names are different then `self` does not satisfy `other`
# (`foo=bar` will never satisfy `baz=bar`) # (`foo=bar` will never satisfy `baz=bar`)
return other.name == self.name if other.name != self.name:
return False
# If the variant is already set to none, it can't satisfy any
if ('none' in self or None in self) and 'any' in other:
return False
# If the variant is set to any, it can't be constrained by none
if 'any' in self and ('none' in other or None in other):
return False
return True
@implicit_variant_conversion
def compatible(self, other):
@ -317,8 +336,17 @@ def compatible(self, other):
Returns:
bool: True or False
"""
# If names are different then `self` is not compatible with `other`
# (`foo=bar` is incompatible with `baz=bar`)
if other.name != self.name:
return False
# If the variant is already set to none, incompatible with any
if ('none' in self or None in self) and 'any' in other:
return False
# If the variant is set to any, it can't be compatible with none
if 'any' in self and ('none' in other or None in other):
return False
return True
@implicit_variant_conversion
def constrain(self, other):
@ -336,7 +364,13 @@ def constrain(self, other):
raise ValueError('variants must have the same name')
old_value = self.value
values = list(sorted(set(self.value + other.value)))
# If we constrain 'any' with another value, just take the other value
if 'any' in values and len(values) > 1:
values.remove('any')
self.value = ','.join(values)
return old_value != self.value
def __contains__(self, item):
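So a wildcard value simply gives way when constrained by concrete values. A rough sketch under the same assumption that this change is in the tree:

from spack.variant import MultiValuedVariant

v = MultiValuedVariant('foo', 'any')
changed = v.constrain(MultiValuedVariant('foo', 'bar,baz'))
assert changed and v.value == ('bar', 'baz')  # 'any' dropped in favor of bar,baz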
@ -367,16 +401,16 @@ def satisfies(self, other):
Returns:
bool: True or False
"""
# If it doesn't satisfy as an AbstractVariant, it doesn't satisfy as a
# MultiValuedVariant; this handles conflicts between 'none' and 'any'
super_sat = super(MultiValuedVariant, self).satisfies(other)
# Otherwise we want all the values in `other` to be also in `self`
return super_sat and (all(v in self.value for v in other.value) or
'any' in other or 'any' in self)
class SingleValuedVariant(AbstractVariant):
"""A variant that can hold multiple values, but one at a time."""
def _value_setter(self, value):
@ -393,12 +427,12 @@ def __str__(self):
@implicit_variant_conversion
def satisfies(self, other):
# If it doesn't satisfy as an AbstractVariant, it doesn't satisfy as a
# SingleValuedVariant; this handles conflicts between 'none' and 'any'
abstract_sat = super(SingleValuedVariant, self).satisfies(other)
return abstract_sat and (self.value == other.value or
other.value == 'any' or self.value == 'any')
def compatible(self, other):
return self.satisfies(other)
@ -408,6 +442,13 @@ def constrain(self, other):
if self.name != other.name:
raise ValueError('variants must have the same name')
if self.value == 'any':
self.value = other.value
return self.value != other.value
if other.value == 'any' and self.value not in ('none', None):
return False
if self.value != other.value:
raise UnsatisfiableVariantSpecError(other.value, self.value)
return False
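The single-valued case mirrors this: a value of 'any' adopts the other side's value instead of raising. A minimal sketch, again assuming this change:

from spack.variant import SingleValuedVariant

v = SingleValuedVariant('foo', 'any')
v.constrain(SingleValuedVariant('foo', 'bar'))  # would raise without the 'any' special case
assert v.value == 'bar'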
@ -420,7 +461,10 @@ def yaml_entry(self):
class BoolValuedVariant(SingleValuedVariant):
"""A variant that can hold either True or False.
BoolValuedVariant can also hold the value 'any', for coerced
comparisons between ``foo=any`` and ``+foo`` or ``~foo``."""
def _value_setter(self, value):
# Check the string representation of the value and turn
@ -431,6 +475,9 @@ def _value_setter(self, value):
elif str(value).upper() == 'FALSE':
self._original_value = value
self._value = False
elif str(value).upper() == 'ANY':
self._original_value = value
self._value = 'any'
else:
msg = 'cannot construct a BoolValuedVariant for "{0}" from '
msg += 'a value that does not represent a bool'
@ -604,6 +651,9 @@ def substitute_abstract_variants(spec):
failed = []
for name, v in spec.variants.items():
if name in spack.directives.reserved_names:
if name == 'dev_path':
new_variant = SingleValuedVariant(name, v._original_value)
spec.variants.substitute(new_variant)
continue
pkg_variant = spec.package_class.variants.get(name, None)
if not pkg_variant:
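The dev_path branch above lets a development path ride along on a spec as a reserved, single-valued variant without being declared by the package. A hypothetical sketch — the package comes from the new mock packages below, the path is made up, and it assumes dev_path is registered among the reserved variant names as this commit does elsewhere:

from spack.spec import Spec
from spack.variant import SingleValuedVariant, substitute_abstract_variants

s = Spec('dev-build-test-install dev_path=/my/checkout')
substitute_abstract_variants(s)
assert isinstance(s.variants['dev_path'], SingleValuedVariant)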
@ -823,6 +873,10 @@ def __init__(self, variant, pkg):
)
class InvalidVariantValueCombinationError(error.SpecError):
"""Raised when a variant has values 'any' or 'none' with other values."""
class InvalidVariantValueError(error.SpecError):
"""Raised when a valid variant has at least one invalid value."""

View File

@ -320,7 +320,7 @@ _spack() {
then
SPACK_COMPREPLY="-h --help -H --all-help --color -C --config-scope -d --debug --timestamp --pdb -e --env -D --env-dir -E --no-env --use-env-repo -k --insecure -l --enable-locks -L --disable-locks -m --mock -p --profile --sorted-profile --lines -v --verbose --stacktrace -V --version --print-shell-vars"
else
SPACK_COMPREPLY="activate add arch blame build-env buildcache cd checksum ci clean clone commands compiler compilers concretize config containerize create deactivate debug dependencies dependents deprecate dev-build develop docs edit env extensions external fetch find flake8 gc gpg graph help info install license list load location log-parse maintainers mirror module patch pkg providers pydoc python reindex remove rm repo resource restage setup spec stage test undevelop uninstall unload url verify versions view"
fi
}
@ -725,6 +725,15 @@ _spack_dev_build() {
fi
}
_spack_develop() {
if $list_options
then
SPACK_COMPREPLY="-h --help -p --path --no-clone --clone -f --force"
else
_all_packages
fi
}
_spack_docs() {
SPACK_COMPREPLY="-h --help"
}
@ -1482,6 +1491,15 @@ _spack_test() {
fi
}
_spack_undevelop() {
if $list_options
then
SPACK_COMPREPLY="-h --help -a --all"
else
_all_packages
fi
}
_spack_uninstall() {
if $list_options
then

View File

@ -0,0 +1,17 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
class DependentOfDevBuild(Package):
homepage = "example.com"
url = "fake.com"
version('0.0.0', sha256='0123456789abcdefgh')
depends_on('dev-build-test-install')
def install(self, spec, prefix):
with open(prefix.filename, 'w') as f:
f.write("This file is installed")

View File

@ -0,0 +1,29 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
class DevBuildTestDependent(Package):
homepage = "example.com"
url = "fake.com"
version('0.0.0', sha256='0123456789abcdefgh')
phases = ['edit', 'install']
filename = 'dev-build-test-file.txt'
original_string = "This file should be edited"
replacement_string = "This file has been edited"
depends_on('dev-build-test-install')
def edit(self, spec, prefix):
with open(self.filename, 'r+') as f:
assert f.read() == self.original_string
f.seek(0)
f.truncate()
f.write(self.replacement_string)
def install(self, spec, prefix):
install(self.filename, prefix)