env: move main Environment class and logic to spack.environment
- `spack.environment` is now the home for most of the infrastructure around Spack environments - refactor `cmd/env.py` to use everything from spack.environment - refactor the cmd/env test to use pytest and fixtures
This commit is contained in:
parent
0e60fcccfb
commit
3e94c4d573
@ -20,10 +20,10 @@
|
||||
|
||||
from spack.version import Version, ver
|
||||
from spack.package import PackageBase, run_after, InstallError
|
||||
from spack.util.environment import EnvironmentModifications
|
||||
from spack.util.executable import Executable
|
||||
from spack.util.prefix import Prefix
|
||||
from spack.build_environment import dso_suffix
|
||||
from spack.environment import EnvironmentModifications
|
||||
|
||||
|
||||
# A couple of utility functions that might be useful in general. If so, they
|
||||
|
@ -5,25 +5,18 @@
|
||||
|
||||
import os
|
||||
import sys
|
||||
import shutil
|
||||
import argparse
|
||||
from contextlib import contextmanager
|
||||
from six.moves import zip_longest
|
||||
|
||||
import spack.modules
|
||||
import spack.util.spack_json as sjson
|
||||
import spack.environment as ev
|
||||
import spack.util.spack_yaml as syaml
|
||||
import spack.schema.env
|
||||
|
||||
import spack.config
|
||||
import spack.cmd.spec
|
||||
import spack.cmd.install
|
||||
import spack.cmd.uninstall
|
||||
import spack.cmd.module
|
||||
import spack.cmd.common.arguments as arguments
|
||||
from spack.config import ConfigScope
|
||||
from spack.spec import Spec, CompilerSpec, FlagMap
|
||||
from spack.repo import Repo
|
||||
from spack.version import VersionList
|
||||
|
||||
import llnl.util.tty as tty
|
||||
import llnl.util.filesystem as fs
|
||||
@ -32,8 +25,6 @@
|
||||
section = "environment"
|
||||
level = "long"
|
||||
|
||||
_db_dirname = fs.join_path(spack.paths.var_path, 'environments')
|
||||
|
||||
|
||||
#: List of subcommands of `spack env`
|
||||
subcommands = [
|
||||
@ -53,368 +44,6 @@
|
||||
]
|
||||
|
||||
|
||||
def get_env_root(name):
|
||||
"""Given an environment name, determines its root directory"""
|
||||
return fs.join_path(_db_dirname, name)
|
||||
|
||||
|
||||
def get_dotenv_dir(env_root):
|
||||
"""@return Directory in an environment that is owned by Spack"""
|
||||
return fs.join_path(env_root, '.env')
|
||||
|
||||
|
||||
def get_write_paths(env_root):
|
||||
"""Determines the names of temporary and permanent directories to
|
||||
write machine-generated environment info."""
|
||||
tmp_new = fs.join_path(env_root, '.env.new')
|
||||
dest = get_dotenv_dir(env_root)
|
||||
tmp_old = fs.join_path(env_root, '.env.old')
|
||||
return tmp_new, dest, tmp_old
|
||||
|
||||
|
||||
class Environment(object):
|
||||
def clear(self):
|
||||
self.user_specs = list()
|
||||
self.concretized_order = list()
|
||||
self.specs_by_hash = dict()
|
||||
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
self.clear()
|
||||
|
||||
# Default config
|
||||
self.yaml = {
|
||||
'configs': ['<env>'],
|
||||
'specs': []
|
||||
}
|
||||
|
||||
@property
|
||||
def path(self):
|
||||
return get_env_root(self.name)
|
||||
|
||||
def repo_path(self):
|
||||
return fs.join_path(get_dotenv_dir(self.path), 'repo')
|
||||
|
||||
def add(self, user_spec, report_existing=True):
|
||||
"""Add a single user_spec (non-concretized) to the Environment"""
|
||||
query_spec = Spec(user_spec)
|
||||
existing = set(x for x in self.user_specs
|
||||
if Spec(x).name == query_spec.name)
|
||||
if existing:
|
||||
if report_existing:
|
||||
tty.die("Package {0} was already added to {1}"
|
||||
.format(query_spec.name, self.name))
|
||||
else:
|
||||
tty.msg("Package {0} was already added to {1}"
|
||||
.format(query_spec.name, self.name))
|
||||
else:
|
||||
tty.msg('Adding %s to environment %s' % (user_spec, self.name))
|
||||
self.user_specs.append(user_spec)
|
||||
|
||||
def remove(self, query_spec):
|
||||
"""Remove specs from an environment that match a query_spec"""
|
||||
query_spec = Spec(query_spec)
|
||||
match_index = -1
|
||||
for i, spec in enumerate(self.user_specs):
|
||||
if Spec(spec).name == query_spec.name:
|
||||
match_index = i
|
||||
break
|
||||
|
||||
if match_index < 0:
|
||||
tty.die("Not found: {0}".format(query_spec))
|
||||
|
||||
del self.user_specs[match_index]
|
||||
if match_index < len(self.concretized_order):
|
||||
spec_hash = self.concretized_order[match_index]
|
||||
del self.concretized_order[match_index]
|
||||
del self.specs_by_hash[spec_hash]
|
||||
|
||||
def concretize(self, force=False):
|
||||
"""Concretize user_specs in an Environment, creating (fully
|
||||
concretized) specs.
|
||||
|
||||
force: bool
|
||||
If set, re-concretize ALL specs, even those that were
|
||||
already concretized.
|
||||
"""
|
||||
|
||||
if force:
|
||||
# Clear previously concretized specs
|
||||
self.specs_by_hash = dict()
|
||||
self.concretized_order = list()
|
||||
|
||||
num_concretized = len(self.concretized_order)
|
||||
new_specs = list()
|
||||
for user_spec in self.user_specs[num_concretized:]:
|
||||
tty.msg('Concretizing %s' % user_spec)
|
||||
|
||||
spec = spack.cmd.parse_specs(user_spec)[0]
|
||||
spec.concretize()
|
||||
new_specs.append(spec)
|
||||
dag_hash = spec.dag_hash()
|
||||
self.specs_by_hash[dag_hash] = spec
|
||||
self.concretized_order.append(spec.dag_hash())
|
||||
|
||||
# Display concretized spec to the user
|
||||
sys.stdout.write(spec.tree(
|
||||
recurse_dependencies=True, install_status=True,
|
||||
hashlen=7, hashes=True))
|
||||
|
||||
return new_specs
|
||||
|
||||
def install(self, install_args=None):
|
||||
"""Do a `spack install` on all the (concretized)
|
||||
specs in an Environment."""
|
||||
|
||||
# Make sure log directory exists
|
||||
logs = fs.join_path(self.path, 'logs')
|
||||
try:
|
||||
os.makedirs(logs)
|
||||
except OSError:
|
||||
if not os.path.isdir(logs):
|
||||
raise
|
||||
|
||||
for concretized_hash in self.concretized_order:
|
||||
spec = self.specs_by_hash[concretized_hash]
|
||||
|
||||
# Parse cli arguments and construct a dictionary
|
||||
# that will be passed to Package.do_install API
|
||||
kwargs = dict()
|
||||
if install_args:
|
||||
spack.cmd.install.update_kwargs_from_args(install_args, kwargs)
|
||||
with pushd(self.path):
|
||||
spec.package.do_install(**kwargs)
|
||||
|
||||
# Link the resulting log file into logs dir
|
||||
logname = '%s-%s.log' % (spec.name, spec.dag_hash(7))
|
||||
logpath = fs.join_path(logs, logname)
|
||||
try:
|
||||
os.remove(logpath)
|
||||
except OSError:
|
||||
pass
|
||||
os.symlink(spec.package.build_log_path, logpath)
|
||||
|
||||
def uninstall(self, args):
|
||||
"""Uninstall all the specs in an Environment."""
|
||||
specs = self._get_environment_specs(recurse_dependencies=True)
|
||||
args.all = False
|
||||
spack.cmd.uninstall.uninstall_specs(args, specs)
|
||||
|
||||
def list(self, stream, **kwargs):
|
||||
"""List the specs in an environment."""
|
||||
for user_spec, concretized_hash in zip_longest(
|
||||
self.user_specs, self.concretized_order):
|
||||
|
||||
stream.write('========= {0}\n'.format(user_spec))
|
||||
|
||||
if concretized_hash:
|
||||
concretized_spec = self.specs_by_hash[concretized_hash]
|
||||
stream.write(concretized_spec.tree(**kwargs))
|
||||
|
||||
def upgrade_dependency(self, dep_name, dry_run=False):
|
||||
# TODO: if you have
|
||||
# w -> x -> y
|
||||
# and
|
||||
# v -> x -> y
|
||||
# then it would be desirable to ensure that w and v refer to the
|
||||
# same x after upgrading y. This is not currently guaranteed.
|
||||
new_order = list()
|
||||
new_deps = list()
|
||||
for i, spec_hash in enumerate(self.concretized_order):
|
||||
spec = self.specs_by_hash[spec_hash]
|
||||
if dep_name in spec:
|
||||
if dry_run:
|
||||
tty.msg("Would upgrade {0} for {1}"
|
||||
.format(spec[dep_name].format(), spec.format()))
|
||||
else:
|
||||
new_spec = upgrade_dependency_version(spec, dep_name)
|
||||
new_order.append(new_spec.dag_hash())
|
||||
self.specs_by_hash[new_spec.dag_hash()] = new_spec
|
||||
new_deps.append(new_spec[dep_name])
|
||||
else:
|
||||
new_order.append(spec_hash)
|
||||
|
||||
if not dry_run:
|
||||
self.concretized_order = new_order
|
||||
return new_deps[0] if new_deps else None
|
||||
|
||||
def reset_os_and_compiler(self, compiler=None):
|
||||
new_order = list()
|
||||
new_specs_by_hash = {}
|
||||
for spec_hash in self.concretized_order:
|
||||
spec = self.specs_by_hash[spec_hash]
|
||||
new_spec = reset_os_and_compiler(spec, compiler)
|
||||
new_order.append(new_spec.dag_hash())
|
||||
new_specs_by_hash[new_spec.dag_hash()] = new_spec
|
||||
self.concretized_order = new_order
|
||||
self.specs_by_hash = new_specs_by_hash
|
||||
|
||||
def _get_environment_specs(self, recurse_dependencies=True):
|
||||
"""Returns the specs of all the packages in an environment.
|
||||
If these specs appear under different user_specs, only one copy
|
||||
is added to the list returned."""
|
||||
package_to_spec = {}
|
||||
spec_list = list()
|
||||
|
||||
for spec_hash in self.concretized_order:
|
||||
spec = self.specs_by_hash[spec_hash]
|
||||
|
||||
specs = spec.traverse(deptype=('link', 'run')) \
|
||||
if recurse_dependencies else (spec,)
|
||||
for dep in specs:
|
||||
if dep.name in package_to_spec:
|
||||
tty.warn("{0} takes priority over {1}"
|
||||
.format(package_to_spec[dep.name].format(),
|
||||
dep.format()))
|
||||
else:
|
||||
package_to_spec[dep.name] = dep
|
||||
spec_list.append(dep)
|
||||
|
||||
return spec_list
|
||||
|
||||
def to_dict(self):
|
||||
"""Used in serializing to JSON"""
|
||||
concretized_order = list(self.concretized_order)
|
||||
concrete_specs = dict()
|
||||
for spec in self.specs_by_hash.values():
|
||||
for s in spec.traverse():
|
||||
if s.dag_hash() not in concrete_specs:
|
||||
concrete_specs[s.dag_hash()] = (
|
||||
s.to_node_dict(all_deps=True))
|
||||
format = {
|
||||
'user_specs': self.user_specs,
|
||||
'concretized_order': concretized_order,
|
||||
'concrete_specs': concrete_specs,
|
||||
}
|
||||
return format
|
||||
|
||||
@staticmethod
|
||||
def from_dict(name, d):
|
||||
"""Used in deserializing from JSON"""
|
||||
env = Environment(name)
|
||||
env.user_specs = list(d['user_specs'])
|
||||
env.concretized_order = list(d['concretized_order'])
|
||||
specs_dict = d['concrete_specs']
|
||||
|
||||
hash_to_node_dict = specs_dict
|
||||
root_hashes = set(env.concretized_order)
|
||||
|
||||
specs_by_hash = {}
|
||||
for dag_hash, node_dict in hash_to_node_dict.items():
|
||||
specs_by_hash[dag_hash] = Spec.from_node_dict(node_dict)
|
||||
|
||||
for dag_hash, node_dict in hash_to_node_dict.items():
|
||||
for dep_name, dep_hash, deptypes in (
|
||||
Spec.dependencies_from_node_dict(node_dict)):
|
||||
specs_by_hash[dag_hash]._add_dependency(
|
||||
specs_by_hash[dep_hash], deptypes)
|
||||
|
||||
env.specs_by_hash = dict(
|
||||
(x, y) for x, y in specs_by_hash.items() if x in root_hashes)
|
||||
|
||||
return env
|
||||
|
||||
|
||||
def reset_os_and_compiler(spec, compiler=None):
|
||||
spec = spec.copy()
|
||||
for x in spec.traverse():
|
||||
x.compiler = None
|
||||
x.architecture = None
|
||||
x.compiler_flags = FlagMap(x)
|
||||
x._concrete = False
|
||||
x._hash = None
|
||||
if compiler:
|
||||
spec.compiler = CompilerSpec(compiler)
|
||||
spec.concretize()
|
||||
return spec
|
||||
|
||||
|
||||
def upgrade_dependency_version(spec, dep_name):
|
||||
spec = spec.copy()
|
||||
for x in spec.traverse():
|
||||
x._concrete = False
|
||||
x._normal = False
|
||||
x._hash = None
|
||||
spec[dep_name].versions = VersionList(':')
|
||||
spec.concretize()
|
||||
return spec
|
||||
|
||||
|
||||
def check_consistent_env(env_root):
    """Die with a helpful message if a prior write() left partial state.

    A crash during write() can leave `.env.new` or `.env.old` behind;
    either one means the environment needs `spack env repair`.
    """
    tmp_new, dest, tmp_old = get_write_paths(env_root)
    partial_state = os.path.exists(tmp_new) or os.path.exists(tmp_old)
    if partial_state:
        tty.die("Partial write state, run 'spack env repair'")
|
||||
|
||||
|
||||
def write(environment, new_repo=None):
    """Writes an in-memory environment back to its location on disk,
    in an atomic manner.

    All machine-generated content is staged in a fresh `.env.new`
    directory first, then swapped into place.
    """
    tmp_new, dest, tmp_old = get_write_paths(get_env_root(environment.name))

    # Stage everything in the temporary directory first.
    fs.mkdirp(tmp_new)
    env_json = fs.join_path(tmp_new, 'environment.json')
    with open(env_json, 'w') as stream:
        sjson.dump(environment.to_dict(), stream=stream)

    # Stage the environment's private repo, preferring a caller-supplied one.
    dest_repo_dir = fs.join_path(tmp_new, 'repo')
    if new_repo:
        shutil.copytree(new_repo.root, dest_repo_dir)
    elif os.path.exists(environment.repo_path()):
        shutil.copytree(environment.repo_path(), dest_repo_dir)

    # Swap staged directory into place atomically: keep the old copy
    # around (as tmp_old) until the new one is installed.
    if os.path.exists(dest):
        shutil.move(dest, tmp_old)
    shutil.move(tmp_new, dest)
    if os.path.exists(tmp_old):
        shutil.rmtree(tmp_old)
|
||||
|
||||
|
||||
def repair(environment_name):
    """Recovers from crash during critical section of write().

    Possible on-disk states after a crash:

        tmp_new, dest    (crash before dest was moved aside)
        tmp_new, tmp_old (crash after dest was moved aside)
        tmp_old, dest    (crash before tmp_old was cleaned up)
    """
    tmp_new, dest, tmp_old = get_write_paths(get_env_root(environment_name))
    if os.path.exists(tmp_old):
        if not os.path.exists(dest):
            # dest was moved aside but the staged dir never made it in:
            # finish the interrupted update.
            shutil.move(tmp_new, dest)
        else:
            shutil.rmtree(tmp_old)
        tty.info("Previous update completed")
    elif os.path.exists(tmp_new):
        tty.info("Previous update did not complete")
    else:
        tty.info("Previous update may have completed")

    # Remove any remaining temporary state so that check_consistent_env
    # passes again.
    if os.path.exists(tmp_new):
        shutil.rmtree(tmp_new)
    if os.path.exists(tmp_old):
        # Bug fix: after completing an interrupted update above, tmp_old
        # could be left behind, leaving the environment permanently in a
        # "partial write" state. Always clean it up.
        shutil.rmtree(tmp_old)
|
||||
|
||||
|
||||
def read(environment_name):
    """Load a named Environment (and its env.yaml, if present) from disk."""
    env_root = get_env_root(environment_name)

    # User-editable configuration lives in env.yaml at the env root.
    env_yaml = spack.config._read_config_file(
        fs.join_path(env_root, 'env.yaml'),
        spack.schema.env.schema)

    # Machine-generated state lives in the Spack-owned .env directory.
    json_path = fs.join_path(get_dotenv_dir(env_root), 'environment.json')
    with open(json_path, 'r') as stream:
        environment_dict = sjson.load(stream)

    environment = Environment.from_dict(environment_name, environment_dict)
    if env_yaml:
        environment.yaml = env_yaml['env']

    return environment
|
||||
|
||||
|
||||
# =============== Modifies Environment
|
||||
|
||||
def setup_create_parser(subparser):
|
||||
@ -425,14 +54,14 @@ def setup_create_parser(subparser):
|
||||
|
||||
|
||||
def environment_create(args):
    """Create a new, empty named environment.

    Dies if an environment with that name already exists.
    """
    if os.path.exists(ev.root(args.environment)):
        # Bug fix: tty.die() exits the process itself; `raise tty.die(...)`
        # would try to re-raise its (None) return value.
        tty.die("Environment already exists: " + args.environment)

    _environment_create(args.environment)
|
||||
|
||||
|
||||
def _environment_create(name, init_config=None):
|
||||
environment = Environment(name)
|
||||
environment = ev.Environment(name)
|
||||
|
||||
user_specs = list()
|
||||
config_sections = {}
|
||||
@ -446,7 +75,7 @@ def _environment_create(name, init_config=None):
|
||||
for user_spec in user_specs:
|
||||
environment.add(user_spec)
|
||||
|
||||
write(environment)
|
||||
ev.write(environment)
|
||||
|
||||
# When creating the environment, the user may specify configuration
|
||||
# to place in the environment initially. Spack does not interfere
|
||||
@ -473,8 +102,8 @@ def setup_add_parser(subparser):
|
||||
|
||||
|
||||
def environment_add(args):
|
||||
check_consistent_env(get_env_root(args.environment))
|
||||
environment = read(args.environment)
|
||||
ev.check_consistency(args.environment)
|
||||
environment = ev.read(args.environment)
|
||||
parsed_specs = spack.cmd.parse_specs(args.package)
|
||||
|
||||
if args.all:
|
||||
@ -493,7 +122,7 @@ def environment_add(args):
|
||||
for spec in parsed_specs:
|
||||
environment.add(str(spec))
|
||||
|
||||
write(environment)
|
||||
ev.write(environment)
|
||||
|
||||
|
||||
def setup_remove_parser(subparser):
|
||||
@ -507,14 +136,14 @@ def setup_remove_parser(subparser):
|
||||
|
||||
|
||||
def environment_remove(args):
|
||||
check_consistent_env(get_env_root(args.environment))
|
||||
environment = read(args.environment)
|
||||
ev.check_consistency(args.environment)
|
||||
environment = ev.read(args.environment)
|
||||
if args.all:
|
||||
environment.clear()
|
||||
else:
|
||||
for spec in spack.cmd.parse_specs(args.package):
|
||||
environment.remove(spec.format())
|
||||
write(environment)
|
||||
ev.write(environment)
|
||||
|
||||
|
||||
def setup_spec_parser(subparser):
|
||||
@ -524,9 +153,9 @@ def setup_spec_parser(subparser):
|
||||
|
||||
|
||||
def environment_spec(args):
|
||||
environment = read(args.environment)
|
||||
prepare_repository(environment, use_repo=args.use_repo)
|
||||
prepare_config_scope(environment)
|
||||
environment = ev.read(args.environment)
|
||||
ev.prepare_repository(environment, use_repo=args.use_repo)
|
||||
ev.prepare_config_scope(environment)
|
||||
spack.cmd.spec.spec(None, args)
|
||||
|
||||
|
||||
@ -539,8 +168,8 @@ def setup_concretize_parser(subparser):
|
||||
|
||||
|
||||
def environment_concretize(args):
|
||||
check_consistent_env(get_env_root(args.environment))
|
||||
environment = read(args.environment)
|
||||
ev.check_consistency(args.environment)
|
||||
environment = ev.read(args.environment)
|
||||
_environment_concretize(
|
||||
environment, use_repo=args.use_repo, force=args.force)
|
||||
|
||||
@ -549,17 +178,17 @@ def _environment_concretize(environment, use_repo=False, force=False):
|
||||
"""Function body separated out to aid in testing."""
|
||||
|
||||
# Change global search paths
|
||||
repo = prepare_repository(environment, use_repo=use_repo)
|
||||
prepare_config_scope(environment)
|
||||
repo = ev.prepare_repository(environment, use_repo=use_repo)
|
||||
ev.prepare_config_scope(environment)
|
||||
|
||||
new_specs = environment.concretize(force=force)
|
||||
|
||||
for spec in new_specs:
|
||||
for dep in spec.traverse():
|
||||
dump_to_environment_repo(dep, repo)
|
||||
ev.dump_to_environment_repo(dep, repo)
|
||||
|
||||
# Moves <env>/.env.new to <env>/.env
|
||||
write(environment, repo)
|
||||
ev.write(environment, repo)
|
||||
|
||||
|
||||
# =============== Does not Modify Environment
|
||||
@ -570,9 +199,9 @@ def setup_install_parser(subparser):
|
||||
|
||||
|
||||
def environment_install(args):
|
||||
check_consistent_env(get_env_root(args.environment))
|
||||
environment = read(args.environment)
|
||||
prepare_repository(environment, use_repo=args.use_repo)
|
||||
ev.check_consistency(args.environment)
|
||||
environment = ev.read(args.environment)
|
||||
ev.prepare_repository(environment, use_repo=args.use_repo)
|
||||
environment.install(args)
|
||||
|
||||
|
||||
@ -582,68 +211,15 @@ def setup_uninstall_parser(subparser):
|
||||
|
||||
|
||||
def environment_uninstall(args):
|
||||
check_consistent_env(get_env_root(args.environment))
|
||||
environment = read(args.environment)
|
||||
prepare_repository(environment)
|
||||
ev.check_consistency(args.environment)
|
||||
environment = ev.read(args.environment)
|
||||
ev.prepare_repository(environment)
|
||||
environment.uninstall(args)
|
||||
|
||||
|
||||
# =======================================
|
||||
|
||||
|
||||
def dump_to_environment_repo(spec, repo):
    """Copy a spec's package recipe into the environment's private repo.

    No-op when provenance for that package has already been dumped.
    """
    pkg_dir = repo.dirname_for_package_name(spec.name)
    if os.path.exists(pkg_dir):
        return
    spack.repo.path.dump_provenance(spec, pkg_dir)
|
||||
|
||||
|
||||
def prepare_repository(environment, remove=None, use_repo=False):
    """Adds environment's repository to the global search path of repos.

    Works on a throwaway copy of the environment's repo so the on-disk
    repo directory is never modified in place. Optionally removes the
    named packages from that copy before returning it.
    """
    import tempfile
    stage_dir = tempfile.mkdtemp()
    new_repo_dir = fs.join_path(stage_dir, 'repo')

    if os.path.exists(environment.repo_path()):
        shutil.copytree(environment.repo_path(), new_repo_dir)
    else:
        spack.repo.create_repo(new_repo_dir, environment.name)

    if remove:
        # Collect the package directories first, then delete them.
        staged_repo = Repo(new_repo_dir)
        doomed = [staged_repo.dirname_for_package_name(pkg_name)
                  for pkg_name in remove]
        for pkg_dir in doomed:
            shutil.rmtree(pkg_dir)

    # Re-open the repo so its index reflects any removals.
    repo = Repo(new_repo_dir)
    if use_repo:
        spack.repo.put_first(repo)
    return repo
|
||||
|
||||
|
||||
def prepare_config_scope(environment):
    """Adds environment's scope to the global search path
    of configuration scopes."""
    env_path = environment.path

    for config_spec in environment.yaml['configs']:
        config_name = os.path.split(config_spec)[1]

        if config_name == '<env>':
            # The environment's own config dir; optional, skip if absent.
            config_dir = fs.join_path(env_path, 'config')
            if not os.path.isdir(config_dir):
                continue
            config_name = environment.name
        else:
            # External user-provided config; may reference env vars.
            expanded = config_spec.format(**os.environ)
            config_dir = os.path.normpath(os.path.join(env_path, expanded))
            if not os.path.isdir(config_dir):
                tty.die('Spack config %s (%s) not found' %
                        (config_name, config_dir))

        tty.msg('Using Spack config %s scope at %s' %
                (config_name, config_dir))
        spack.config.config.push_scope(ConfigScope(config_name, config_dir))
|
||||
|
||||
|
||||
def setup_relocate_parser(subparser):
|
||||
"""reconcretize environment with new OS and/or compiler"""
|
||||
subparser.add_argument('--compiler', help="Compiler spec to use")
|
||||
@ -651,10 +227,10 @@ def setup_relocate_parser(subparser):
|
||||
|
||||
|
||||
def environment_relocate(args):
|
||||
environment = read(args.environment)
|
||||
prepare_repository(environment, use_repo=args.use_repo)
|
||||
environment = ev.read(args.environment)
|
||||
ev.prepare_repository(environment, use_repo=args.use_repo)
|
||||
environment.reset_os_and_compiler(compiler=args.compiler)
|
||||
write(environment)
|
||||
ev.write(environment)
|
||||
|
||||
|
||||
def setup_list_parser(subparser):
|
||||
@ -666,8 +242,7 @@ def setup_list_parser(subparser):
|
||||
|
||||
def environment_list(args):
|
||||
# TODO? option to list packages w/ multiple instances?
|
||||
environment = read(args.environment)
|
||||
import sys
|
||||
environment = ev.read(args.environment)
|
||||
environment.list(
|
||||
sys.stdout, recurse_dependencies=args.recurse_dependencies,
|
||||
hashes=args.long or args.very_long,
|
||||
@ -681,8 +256,8 @@ def setup_stage_parser(subparser):
|
||||
|
||||
|
||||
def environment_stage(args):
|
||||
environment = read(args.environment)
|
||||
prepare_repository(environment, use_repo=args.use_repo)
|
||||
environment = ev.read(args.environment)
|
||||
ev.prepare_repository(environment, use_repo=args.use_repo)
|
||||
for spec in environment.specs_by_hash.values():
|
||||
for dep in spec.traverse():
|
||||
dep.package.do_stage()
|
||||
@ -693,7 +268,7 @@ def setup_location_parser(subparser):
|
||||
|
||||
|
||||
def environment_location(args):
|
||||
environment = read(args.environment)
|
||||
environment = ev.read(args.environment)
|
||||
print(environment.path)
|
||||
|
||||
|
||||
@ -708,14 +283,6 @@ def redirect_stdout(ofname):
|
||||
sys.stdout = original
|
||||
|
||||
|
||||
@contextmanager
def pushd(dir):
    """Context manager: chdir into ``dir`` and restore the original
    working directory on exit.

    Bug fix: the original never restored the cwd when the body raised;
    the yield is now wrapped in try/finally so the chdir is always
    undone, even on exceptions.
    """
    original = os.getcwd()
    os.chdir(dir)
    try:
        yield
    finally:
        os.chdir(original)
|
||||
|
||||
|
||||
def setup_loads_parser(subparser):
|
||||
"""list modules for an installed environment '(see spack module loads)'"""
|
||||
spack.cmd.modules.add_loads_arguments(subparser)
|
||||
@ -730,7 +297,7 @@ def environment_loads(args):
|
||||
|
||||
module_types = list(set(module_types))
|
||||
|
||||
environment = read(args.environment)
|
||||
environment = ev.read(args.environment)
|
||||
recurse_dependencies = args.recurse_dependencies
|
||||
args.recurse_dependencies = False
|
||||
ofname = fs.join_path(environment.path, 'loads')
|
||||
@ -752,13 +319,13 @@ def setup_upgrade_parser(subparser):
|
||||
|
||||
|
||||
def environment_upgrade(args):
|
||||
environment = read(args.environment)
|
||||
repo = prepare_repository(
|
||||
environment = ev.read(args.environment)
|
||||
repo = ev.prepare_repository(
|
||||
environment, use_repo=args.use_repo, remove=[args.dep_name])
|
||||
new_dep = environment.upgrade_dependency(args.dep_name, args.dry_run)
|
||||
if not args.dry_run and new_dep:
|
||||
dump_to_environment_repo(new_dep, repo)
|
||||
write(environment, repo)
|
||||
ev.dump_to_environment_repo(new_dep, repo)
|
||||
ev.write(environment, repo)
|
||||
|
||||
|
||||
def add_use_repo_argument(cmd_parser):
|
||||
|
460
lib/spack/spack/environment.py
Normal file
460
lib/spack/spack/environment.py
Normal file
@ -0,0 +1,460 @@
|
||||
##############################################################################
|
||||
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
|
||||
# Produced at the Lawrence Livermore National Laboratory.
|
||||
#
|
||||
# This file is part of Spack.
|
||||
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
|
||||
# LLNL-CODE-647188
|
||||
#
|
||||
# For details, see https://github.com/spack/spack
|
||||
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License (as
|
||||
# published by the Free Software Foundation) version 2.1, February 1999.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
|
||||
# conditions of the GNU Lesser General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public
|
||||
# License along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
##############################################################################
|
||||
import os
|
||||
import sys
|
||||
import shutil
|
||||
import tempfile
|
||||
from six.moves import zip_longest
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.repo
|
||||
import spack.schema.env
|
||||
import spack.util.spack_json as sjson
|
||||
from spack.config import ConfigScope
|
||||
from spack.spec import Spec, CompilerSpec, FlagMap
|
||||
from spack.version import VersionList
|
||||
|
||||
|
||||
#: path where environments are stored in the spack tree
|
||||
env_path = fs.join_path(spack.paths.var_path, 'environments')
|
||||
|
||||
|
||||
def root(name):
    """Return the on-disk root directory for the named environment.

    All environments live under ``env_path`` in the spack tree.
    """
    env_root = fs.join_path(env_path, name)
    return env_root
|
||||
|
||||
|
||||
def get_dotenv_dir(env_root):
    """Return the ``.env`` subdirectory of an environment root.

    This directory holds the machine-generated state owned by Spack.
    """
    dotenv = fs.join_path(env_root, '.env')
    return dotenv
|
||||
|
||||
|
||||
def get_write_paths(env_root):
    """Determines the names of temporary and permanent directories to
    write machine-generated environment info.

    Returns a ``(tmp_new, dest, tmp_old)`` tuple of paths used by the
    atomic write/repair protocol.
    """
    dest = get_dotenv_dir(env_root)
    tmp_new = fs.join_path(env_root, '.env.new')
    tmp_old = fs.join_path(env_root, '.env.old')
    return tmp_new, dest, tmp_old
|
||||
|
||||
|
||||
def _reset_os_and_compiler(spec, compiler=None):
    """Return a copy of ``spec`` re-concretized with its architecture and
    compiler cleared (optionally forcing ``compiler`` on the root)."""
    new_spec = spec.copy()
    for node in new_spec.traverse():
        # Wipe everything concretization filled in so it can be redone.
        node.compiler = None
        node.architecture = None
        node.compiler_flags = FlagMap(node)
        node._concrete = False
        node._hash = None
    if compiler:
        new_spec.compiler = CompilerSpec(compiler)
    new_spec.concretize()
    return new_spec
|
||||
|
||||
|
||||
def _upgrade_dependency_version(spec, dep_name):
    """Return a copy of ``spec`` re-concretized with the ``dep_name``
    dependency free to move to any (typically the newest) version."""
    new_spec = spec.copy()
    for node in new_spec.traverse():
        node._concrete = False
        node._normal = False
        node._hash = None
    # Open the dependency's version constraint completely.
    new_spec[dep_name].versions = VersionList(':')
    new_spec.concretize()
    return new_spec
|
||||
|
||||
|
||||
class Environment(object):
|
||||
def clear(self):
    """Reset the environment to an empty state: no user specs and
    nothing concretized."""
    self.user_specs = []           # requested (abstract) spec strings
    self.concretized_order = []    # dag hashes in concretization order
    self.specs_by_hash = {}        # dag hash -> concretized Spec
|
||||
|
||||
def __init__(self, name):
    """Create a named, empty environment with the default configuration."""
    self.name = name
    self.clear()

    # Default config: only the environment's own scope, and no specs.
    self.yaml = {
        'configs': ['<env>'],
        'specs': [],
    }
|
||||
|
||||
@property
def path(self):
    """Root directory of this environment on disk."""
    return root(self.name)
|
||||
|
||||
def repo_path(self):
    """Path of this environment's private package repository."""
    dotenv = get_dotenv_dir(self.path)
    return fs.join_path(dotenv, 'repo')
|
||||
|
||||
def add(self, user_spec, report_existing=True):
    """Add a single user_spec (non-concretized) to the Environment.

    If a spec with the same package name is already present, either die
    (``report_existing=True``) or merely report it (``False``).
    """
    query_spec = Spec(user_spec)
    duplicates = [s for s in self.user_specs
                  if Spec(s).name == query_spec.name]
    if not duplicates:
        tty.msg('Adding %s to environment %s' % (user_spec, self.name))
        self.user_specs.append(user_spec)
    elif report_existing:
        tty.die("Package {0} was already added to {1}"
                .format(query_spec.name, self.name))
    else:
        tty.msg("Package {0} was already added to {1}"
                .format(query_spec.name, self.name))
|
||||
|
||||
def remove(self, query_spec):
    """Remove specs from an environment that match a query_spec."""
    query_spec = Spec(query_spec)

    # Locate the first user spec with the same package name.
    match_index = next(
        (i for i, s in enumerate(self.user_specs)
         if Spec(s).name == query_spec.name),
        -1)
    if match_index < 0:
        tty.die("Not found: {0}".format(query_spec))

    del self.user_specs[match_index]
    # If that spec was already concretized, drop its concrete data too.
    if match_index < len(self.concretized_order):
        spec_hash = self.concretized_order.pop(match_index)
        del self.specs_by_hash[spec_hash]
|
||||
|
||||
def concretize(self, force=False):
    """Concretize user_specs in an Environment, creating (fully
    concretized) specs.

    force: bool
        If set, re-concretize ALL specs, even those that were
        already concretized.

    Returns the list of newly concretized specs.
    """
    if force:
        # Clear previously concretized specs
        self.specs_by_hash = {}
        self.concretized_order = []

    # Only user specs without a concrete counterpart need work.
    pending = self.user_specs[len(self.concretized_order):]

    new_specs = []
    for user_spec in pending:
        tty.msg('Concretizing %s' % user_spec)

        spec = spack.cmd.parse_specs(user_spec)[0]
        spec.concretize()
        new_specs.append(spec)

        dag_hash = spec.dag_hash()
        self.specs_by_hash[dag_hash] = spec
        self.concretized_order.append(dag_hash)

        # Display concretized spec to the user
        sys.stdout.write(spec.tree(
            recurse_dependencies=True, install_status=True,
            hashlen=7, hashes=True))

    return new_specs
|
||||
|
||||
def install(self, install_args=None):
    """Do a `spack install` on all the (concretized)
    specs in an Environment."""
    # Make sure log directory exists
    logs = fs.join_path(self.path, 'logs')
    try:
        os.makedirs(logs)
    except OSError:
        if not os.path.isdir(logs):
            raise

    for concretized_hash in self.concretized_order:
        spec = self.specs_by_hash[concretized_hash]

        # Parse cli arguments and construct a dictionary
        # that will be passed to Package.do_install API
        kwargs = {}
        if install_args:
            spack.cmd.install.update_kwargs_from_args(install_args, kwargs)

        with fs.working_dir(self.path):
            spec.package.do_install(**kwargs)

        # Link the resulting log file into logs dir; replace any
        # stale link from a previous install.
        logname = '%s-%s.log' % (spec.name, spec.dag_hash(7))
        logpath = fs.join_path(logs, logname)
        try:
            os.remove(logpath)
        except OSError:
            pass
        os.symlink(spec.package.build_log_path, logpath)
|
||||
|
||||
def uninstall(self, args):
|
||||
"""Uninstall all the specs in an Environment."""
|
||||
specs = self._get_environment_specs(recurse_dependencies=True)
|
||||
args.all = False
|
||||
spack.cmd.uninstall.uninstall_specs(args, specs)
|
||||
|
||||
def list(self, stream, **kwargs):
|
||||
"""List the specs in an environment."""
|
||||
for user_spec, concretized_hash in zip_longest(
|
||||
self.user_specs, self.concretized_order):
|
||||
|
||||
stream.write('========= {0}\n'.format(user_spec))
|
||||
|
||||
if concretized_hash:
|
||||
concretized_spec = self.specs_by_hash[concretized_hash]
|
||||
stream.write(concretized_spec.tree(**kwargs))
|
||||
|
||||
def upgrade_dependency(self, dep_name, dry_run=False):
|
||||
# TODO: if you have
|
||||
# w -> x -> y
|
||||
# and
|
||||
# v -> x -> y
|
||||
# then it would be desirable to ensure that w and v refer to the
|
||||
# same x after upgrading y. This is not currently guaranteed.
|
||||
new_order = list()
|
||||
new_deps = list()
|
||||
for i, spec_hash in enumerate(self.concretized_order):
|
||||
spec = self.specs_by_hash[spec_hash]
|
||||
if dep_name in spec:
|
||||
if dry_run:
|
||||
tty.msg("Would upgrade {0} for {1}"
|
||||
.format(spec[dep_name].format(), spec.format()))
|
||||
else:
|
||||
new_spec = _upgrade_dependency_version(spec, dep_name)
|
||||
new_order.append(new_spec.dag_hash())
|
||||
self.specs_by_hash[new_spec.dag_hash()] = new_spec
|
||||
new_deps.append(new_spec[dep_name])
|
||||
else:
|
||||
new_order.append(spec_hash)
|
||||
|
||||
if not dry_run:
|
||||
self.concretized_order = new_order
|
||||
return new_deps[0] if new_deps else None
|
||||
|
||||
def reset_os_and_compiler(self, compiler=None):
|
||||
new_order = list()
|
||||
new_specs_by_hash = {}
|
||||
for spec_hash in self.concretized_order:
|
||||
spec = self.specs_by_hash[spec_hash]
|
||||
new_spec = _reset_os_and_compiler(spec, compiler)
|
||||
new_order.append(new_spec.dag_hash())
|
||||
new_specs_by_hash[new_spec.dag_hash()] = new_spec
|
||||
self.concretized_order = new_order
|
||||
self.specs_by_hash = new_specs_by_hash
|
||||
|
||||
def _get_environment_specs(self, recurse_dependencies=True):
|
||||
"""Returns the specs of all the packages in an environment.
|
||||
If these specs appear under different user_specs, only one copy
|
||||
is added to the list returned."""
|
||||
package_to_spec = {}
|
||||
spec_list = list()
|
||||
|
||||
for spec_hash in self.concretized_order:
|
||||
spec = self.specs_by_hash[spec_hash]
|
||||
|
||||
specs = spec.traverse(deptype=('link', 'run')) \
|
||||
if recurse_dependencies else (spec,)
|
||||
for dep in specs:
|
||||
if dep.name in package_to_spec:
|
||||
tty.warn("{0} takes priority over {1}"
|
||||
.format(package_to_spec[dep.name].format(),
|
||||
dep.format()))
|
||||
else:
|
||||
package_to_spec[dep.name] = dep
|
||||
spec_list.append(dep)
|
||||
|
||||
return spec_list
|
||||
|
||||
def to_dict(self):
|
||||
"""Used in serializing to JSON"""
|
||||
concretized_order = list(self.concretized_order)
|
||||
concrete_specs = dict()
|
||||
for spec in self.specs_by_hash.values():
|
||||
for s in spec.traverse():
|
||||
if s.dag_hash() not in concrete_specs:
|
||||
concrete_specs[s.dag_hash()] = (
|
||||
s.to_node_dict(all_deps=True))
|
||||
format = {
|
||||
'user_specs': self.user_specs,
|
||||
'concretized_order': concretized_order,
|
||||
'concrete_specs': concrete_specs,
|
||||
}
|
||||
return format
|
||||
|
||||
@staticmethod
|
||||
def from_dict(name, d):
|
||||
"""Used in deserializing from JSON"""
|
||||
env = Environment(name)
|
||||
env.user_specs = list(d['user_specs'])
|
||||
env.concretized_order = list(d['concretized_order'])
|
||||
specs_dict = d['concrete_specs']
|
||||
|
||||
hash_to_node_dict = specs_dict
|
||||
root_hashes = set(env.concretized_order)
|
||||
|
||||
specs_by_hash = {}
|
||||
for dag_hash, node_dict in hash_to_node_dict.items():
|
||||
specs_by_hash[dag_hash] = Spec.from_node_dict(node_dict)
|
||||
|
||||
for dag_hash, node_dict in hash_to_node_dict.items():
|
||||
for dep_name, dep_hash, deptypes in (
|
||||
Spec.dependencies_from_node_dict(node_dict)):
|
||||
specs_by_hash[dag_hash]._add_dependency(
|
||||
specs_by_hash[dep_hash], deptypes)
|
||||
|
||||
env.specs_by_hash = dict(
|
||||
(x, y) for x, y in specs_by_hash.items() if x in root_hashes)
|
||||
|
||||
return env
|
||||
|
||||
|
||||
def check_consistency(name):
    """Die if the named environment's directory shows a partial write."""
    staged, _, previous = get_write_paths(root(name))
    # Either staging directory lying around means write() was interrupted.
    if os.path.exists(staged) or os.path.exists(previous):
        tty.die("Partial write state, run 'spack env repair'")
|
||||
|
||||
|
||||
def write(environment, new_repo=None):
    """Write an in-memory environment back to its location on disk.

    The new state is fully staged in a temporary directory and then
    swapped in, so a crash leaves either the old state or a recoverable
    partial state (see repair()).
    """
    tmp_new, dest, tmp_old = get_write_paths(root(environment.name))

    # Stage the machine-generated state under tmp_new.
    fs.mkdirp(tmp_new)
    # One JSON file holds the whole serialized Environment object.
    with open(fs.join_path(tmp_new, 'environment.json'), 'w') as f:
        sjson.dump(environment.to_dict(), stream=f)

    # Stage the environment's package repository, preferring a freshly
    # prepared repo over whatever copy is already on disk.
    dest_repo_dir = fs.join_path(tmp_new, 'repo')
    if new_repo:
        shutil.copytree(new_repo.root, dest_repo_dir)
    elif os.path.exists(environment.repo_path()):
        shutil.copytree(environment.repo_path(), dest_repo_dir)

    # Swap the staged directory into place, then drop the old state.
    if os.path.exists(dest):
        shutil.move(dest, tmp_old)
    shutil.move(tmp_new, dest)
    if os.path.exists(tmp_old):
        shutil.rmtree(tmp_old)
|
||||
|
||||
|
||||
def repair(environment_name):
    """Recover from a crash during the critical section of write().

    Possible leftover combinations on disk:

        staged + current   (swap never started)
        staged + previous  (old moved aside, staged not moved in)
        previous + current (swap finished, old not yet deleted)
    """
    staged, current, previous = get_write_paths(root(environment_name))
    if os.path.exists(previous):
        if not os.path.exists(current):
            # The old state was moved aside but the staged state never
            # landed: finish the swap.
            shutil.move(staged, current)
        else:
            # The swap finished; just drop the old state.
            shutil.rmtree(previous)
            tty.info("Previous update completed")
    elif os.path.exists(staged):
        tty.info("Previous update did not complete")
    else:
        tty.info("Previous update may have completed")

    # Any staging directory still present at this point is stale.
    if os.path.exists(staged):
        shutil.rmtree(staged)
|
||||
|
||||
|
||||
def read(environment_name):
    """Load an Environment (and its optional env.yaml) from disk."""
    # Check that env is in a consistent state on disk
    env_root = root(environment_name)

    # env.yaml holds user configuration, validated against the schema.
    env_yaml = spack.config._read_config_file(
        fs.join_path(env_root, 'env.yaml'),
        spack.schema.env.schema)

    # The machine-generated state lives in the dot-env directory.
    dotenv_dir = get_dotenv_dir(env_root)
    with open(fs.join_path(dotenv_dir, 'environment.json'), 'r') as f:
        environment_dict = sjson.load(f)
    environment = Environment.from_dict(environment_name, environment_dict)
    if env_yaml:
        environment.yaml = env_yaml['env']

    return environment
|
||||
|
||||
|
||||
def dump_to_environment_repo(spec, repo):
    """Copy a spec's package recipe into the environment's repo.

    A no-op when the package directory already exists in the repo.
    """
    dest_pkg_dir = repo.dirname_for_package_name(spec.name)
    if not os.path.exists(dest_pkg_dir):
        spack.repo.path.dump_provenance(spec, dest_pkg_dir)
|
||||
|
||||
|
||||
def prepare_repository(environment, remove=None, use_repo=False):
    """Stage a copy of the environment's package repository.

    Args:
        environment: environment whose repository should be staged
        remove: iterable of package names to drop from the staged copy
        use_repo: if True, push the staged repo onto the global repo path

    Returns:
        the staged ``spack.repo.Repo``.  The caller owns (and must clean
        up) the temporary directory that holds it.
    """
    repo_stage = tempfile.mkdtemp()
    new_repo_dir = fs.join_path(repo_stage, 'repo')
    if os.path.exists(environment.repo_path()):
        shutil.copytree(environment.repo_path(), new_repo_dir)
    else:
        spack.repo.create_repo(new_repo_dir, environment.name)
    if remove:
        # Resolve every doomed package directory first, then delete them.
        lookup = spack.repo.Repo(new_repo_dir)
        doomed_dirs = [lookup.dirname_for_package_name(pkg_name)
                       for pkg_name in remove]
        for pkg_dir in doomed_dirs:
            shutil.rmtree(pkg_dir)
    # Re-open so the Repo reflects any removals made above.
    repo = spack.repo.Repo(new_repo_dir)
    if use_repo:
        spack.repo.put_first(repo)
    return repo
|
||||
|
||||
|
||||
def prepare_config_scope(environment):
    """Push the environment's configuration scopes onto the global
    search path of configuration scopes."""
    for config_spec in environment.yaml['configs']:
        config_name = os.path.split(config_spec)[1]
        if config_name == '<env>':
            # Use default config for the environment; doesn't have to exist
            config_dir = fs.join_path(environment.path, 'config')
            if not os.path.isdir(config_dir):
                continue
            config_name = environment.name
        else:
            # Use external user-provided config; the spec may reference
            # environment variables (expanded via os.environ below).
            config_dir = os.path.normpath(os.path.join(
                environment.path, config_spec.format(**os.environ)))
            if not os.path.isdir(config_dir):
                tty.die('Spack config %s (%s) not found' %
                        (config_name, config_dir))

        tty.msg('Using Spack config %s scope at %s' %
                (config_name, config_dir))
        spack.config.config.push_scope(ConfigScope(config_name, config_dir))
|
@ -3,145 +3,137 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import unittest
|
||||
import tempfile
|
||||
import shutil
|
||||
from six import StringIO
|
||||
|
||||
import pytest
|
||||
|
||||
import spack.cmd.env
|
||||
import spack.modules
|
||||
import spack.environment as ev
|
||||
import spack.util.spack_yaml as syaml
|
||||
from spack.cmd.env import (Environment, prepare_repository,
|
||||
_environment_concretize, prepare_config_scope,
|
||||
_environment_create)
|
||||
from spack.cmd.env import _environment_concretize, _environment_create
|
||||
from spack.version import Version
|
||||
|
||||
|
||||
class TestEnvironment(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.env_dir = spack.cmd.env._db_dirname
|
||||
spack.cmd.env._db_dirname = tempfile.mkdtemp()
|
||||
# everything here uses the mock_env_path
|
||||
pytestmark = pytest.mark.usefixtures(
|
||||
'mock_env_path', 'config', 'mutable_mock_packages')
|
||||
|
||||
def tearDown(self):
|
||||
shutil.rmtree(spack.cmd.env._db_dirname)
|
||||
spack.cmd.env._db_dirname = self.env_dir
|
||||
|
||||
def test_add(self):
|
||||
c = Environment('test')
|
||||
c.add('mpileaks')
|
||||
assert 'mpileaks' in c.user_specs
|
||||
def test_add():
    """Adding a user spec records it without concretizing."""
    e = ev.Environment('test')
    e.add('mpileaks')
    assert 'mpileaks' in e.user_specs
|
||||
|
||||
@pytest.mark.usefixtures('config', 'mutable_mock_packages')
|
||||
def test_concretize(self):
|
||||
c = Environment('test')
|
||||
c.add('mpileaks')
|
||||
c.concretize()
|
||||
env_specs = c._get_environment_specs()
|
||||
assert any(x.name == 'mpileaks' for x in env_specs)
|
||||
|
||||
@pytest.mark.usefixtures('config', 'mutable_mock_packages',
|
||||
'install_mockery', 'mock_fetch')
|
||||
def test_env_install(self):
|
||||
c = Environment('test')
|
||||
c.add('cmake-client')
|
||||
c.concretize()
|
||||
c.install()
|
||||
env_specs = c._get_environment_specs()
|
||||
spec = next(x for x in env_specs if x.name == 'cmake-client')
|
||||
assert spec.package.installed
|
||||
def test_concretize():
    """Concretizing makes the added spec show up in the env's specs."""
    e = ev.Environment('test')
    e.add('mpileaks')
    e.concretize()
    assert any(s.name == 'mpileaks' for s in e._get_environment_specs())
|
||||
|
||||
@pytest.mark.usefixtures('config', 'mutable_mock_packages')
|
||||
def test_remove_after_concretize(self):
|
||||
c = Environment('test')
|
||||
c.add('mpileaks')
|
||||
c.concretize()
|
||||
c.add('python')
|
||||
c.concretize()
|
||||
c.remove('mpileaks')
|
||||
env_specs = c._get_environment_specs()
|
||||
assert not any(x.name == 'mpileaks' for x in env_specs)
|
||||
|
||||
@pytest.mark.usefixtures('config', 'mutable_mock_packages')
|
||||
def test_reset_compiler(self):
|
||||
c = Environment('test')
|
||||
c.add('mpileaks')
|
||||
c.concretize()
|
||||
def test_env_install(install_mockery, mock_fetch):
    """Environment.install() actually installs the concretized specs."""
    e = ev.Environment('test')
    e.add('cmake-client')
    e.concretize()
    e.install()
    spec = next(s for s in e._get_environment_specs()
                if s.name == 'cmake-client')
    assert spec.package.installed
|
||||
|
||||
first_spec = c.specs_by_hash[c.concretized_order[0]]
|
||||
available = set(['gcc', 'clang'])
|
||||
available.remove(first_spec.compiler.name)
|
||||
new_compiler = next(iter(available))
|
||||
c.reset_os_and_compiler(compiler=new_compiler)
|
||||
|
||||
new_spec = c.specs_by_hash[c.concretized_order[0]]
|
||||
assert new_spec.compiler != first_spec.compiler
|
||||
def test_remove_after_concretize():
    """Removing a user spec also drops its concretized counterpart."""
    e = ev.Environment('test')
    e.add('mpileaks')
    e.concretize()
    e.add('python')
    e.concretize()
    e.remove('mpileaks')
    assert all(s.name != 'mpileaks' for s in e._get_environment_specs())
|
||||
|
||||
@pytest.mark.usefixtures('config', 'mutable_mock_packages')
|
||||
def test_environment_list(self):
|
||||
c = Environment('test')
|
||||
c.add('mpileaks')
|
||||
c.concretize()
|
||||
c.add('python')
|
||||
mock_stream = StringIO()
|
||||
c.list(mock_stream)
|
||||
list_content = mock_stream.getvalue()
|
||||
assert 'mpileaks' in list_content
|
||||
assert 'python' in list_content
|
||||
mpileaks_spec = c.specs_by_hash[c.concretized_order[0]]
|
||||
assert mpileaks_spec.format() in list_content
|
||||
|
||||
@pytest.mark.usefixtures('config', 'mutable_mock_packages')
|
||||
def test_upgrade_dependency(self):
|
||||
c = Environment('test')
|
||||
c.add('mpileaks ^callpath@0.9')
|
||||
c.concretize()
|
||||
def test_reset_compiler():
    """reset_os_and_compiler() re-concretizes with the new compiler."""
    e = ev.Environment('test')
    e.add('mpileaks')
    e.concretize()

    before = e.specs_by_hash[e.concretized_order[0]]
    # Pick whichever mock compiler the spec did NOT concretize with.
    alternatives = set(['gcc', 'clang'])
    alternatives.remove(before.compiler.name)
    e.reset_os_and_compiler(compiler=next(iter(alternatives)))

    after = e.specs_by_hash[e.concretized_order[0]]
    assert after.compiler != before.compiler
|
||||
|
||||
|
||||
def test_environment_list():
    """list() shows both concretized and not-yet-concretized specs."""
    e = ev.Environment('test')
    e.add('mpileaks')
    e.concretize()
    e.add('python')
    out = StringIO()
    e.list(out)
    listing = out.getvalue()
    assert 'mpileaks' in listing
    assert 'python' in listing
    mpileaks_spec = e.specs_by_hash[e.concretized_order[0]]
    assert mpileaks_spec.format() in listing
|
||||
|
||||
|
||||
def test_upgrade_dependency():
    """upgrade_dependency() bumps callpath for every dependent root."""
    e = ev.Environment('test')
    e.add('mpileaks ^callpath@0.9')
    e.concretize()

    e.upgrade_dependency('callpath')
    dependents = [s for s in e._get_environment_specs() if 'callpath' in s]
    assert dependents
    for spec in dependents:
        assert spec['callpath'].version == Version('1.0')
|
||||
|
||||
|
||||
def test_init_config():
    """An environment created with config uses that config scope when
    concretizing (mpileaks is pinned to 2.2 via packages.yaml prefs)."""
    test_config = """\
user_specs:
- mpileaks
packages:
  mpileaks:
    version: [2.2]
"""
    # Drop any cached package preferences so the env's config is seen.
    spack.package_prefs.PackagePrefs._packages_config_cache = None
    spack.package_prefs.PackagePrefs._spec_cache = {}

    _environment_create('test', syaml.load(StringIO(test_config)))
    e = ev.read('test')
    ev.prepare_config_scope(e)
    e.concretize()
    assert any(s.satisfies('mpileaks@2.2')
               for s in e._get_environment_specs())
|
||||
|
||||
@pytest.mark.usefixtures('config', 'mutable_mock_packages')
|
||||
def test_to_dict(self):
|
||||
c = Environment('test')
|
||||
c.add('mpileaks')
|
||||
c.concretize()
|
||||
context_dict = c.to_dict()
|
||||
c_copy = Environment.from_dict('test_copy', context_dict)
|
||||
assert c.specs_by_hash == c_copy.specs_by_hash
|
||||
|
||||
@pytest.mark.usefixtures('config', 'mutable_mock_packages')
|
||||
def test_prepare_repo(self):
|
||||
c = Environment('testx')
|
||||
c.add('mpileaks')
|
||||
_environment_concretize(c)
|
||||
repo = None
|
||||
try:
|
||||
repo = prepare_repository(c)
|
||||
package = repo.get(spack.spec.Spec('mpileaks'))
|
||||
assert package.namespace.split('.')[-1] == 'testx'
|
||||
finally:
|
||||
if repo:
|
||||
shutil.rmtree(repo.root)
|
||||
def test_to_dict():
    """to_dict()/from_dict() round-trips the concretized specs."""
    e = ev.Environment('test')
    e.add('mpileaks')
    e.concretize()
    round_tripped = ev.Environment.from_dict('test_copy', e.to_dict())
    assert e.specs_by_hash == round_tripped.specs_by_hash
|
||||
|
||||
|
||||
def test_prepare_repo():
    """prepare_repository() stages a repo namespaced after the env."""
    e = ev.Environment('testx')
    e.add('mpileaks')
    _environment_concretize(e)
    repo = None
    try:
        repo = ev.prepare_repository(e)
        package = repo.get(spack.spec.Spec('mpileaks'))
        assert package.namespace.split('.')[-1] == 'testx'
    finally:
        # prepare_repository stages into a temp dir the caller owns.
        if repo:
            shutil.rmtree(repo.root)
|
||||
|
@ -663,6 +663,15 @@ def get_rev():
|
||||
yield t
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
def mock_env_path(tmpdir_factory):
    """Fixture for mocking the internal spack environments directory.

    Swaps spack.environment.env_path for a fresh temp directory for the
    whole session, restoring the original afterwards.
    """
    saved_path = spack.environment.env_path
    # NOTE(review): mktemp returns a py.path.local, not a str -- assumes
    # env_path consumers accept path-like objects; confirm.
    spack.environment.env_path = tmpdir_factory.mktemp('mock-env-path')
    yield spack.environment.env_path
    spack.environment.env_path = saved_path
|
||||
|
||||
|
||||
##########
|
||||
# Mock packages
|
||||
##########
|
||||
|
Loading…
Reference in New Issue
Block a user