env: rework environments

- env.yaml is now meaningful; it contains authoritative user specs

  - concretize diffs user specs in env.yaml and env.json to allow user to
    add/remove by simply updating env.yaml

  - comments are preserved when env.yaml is updated by add/unadd

  - env.yaml can contain configuration and include external configuration
    either from merged files or from config scopes

  - there is only one file format to remember (env.yaml, no separate init
    format)

- env.json is now env.lock, and it stores the *last* user specs to be
  concretized, along with full provenance.
  - internal structure was modified slightly for readability
  - env.lock contains a _meta section with metadata, in case needed

- added more tests for environments

- env commands follow Spack conventions; no more `spack env foo install`
This commit is contained in:
Todd Gamblin 2018-10-01 01:54:43 -07:00
parent 6af5dfbbc2
commit ce230fa3f4
8 changed files with 1154 additions and 525 deletions

View File

@ -6,19 +6,22 @@
import os
import sys
import argparse
from contextlib import contextmanager
import spack.environment as ev
import spack.util.spack_yaml as syaml
import spack.config
import spack.cmd.install
import spack.cmd.uninstall
import spack.cmd.module
import spack.cmd.common.arguments as arguments
import shutil
import tempfile
import llnl.util.tty as tty
import llnl.util.filesystem as fs
from llnl.util.tty.colify import colify
from llnl.util.tty.color import colorize
import spack.config
import spack.schema.env
import spack.cmd.install
import spack.cmd.uninstall
import spack.cmd.modules
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.util.string as string
description = "manage virtual environments"
section = "environment"
@ -28,301 +31,384 @@
#: List of subcommands of `spack env`
subcommands = [
'create',
'destroy',
['list', 'ls'],
'add',
'remove',
['remove', 'rm'],
'upgrade',
'concretize',
'status',
['status', 'st'],
'loads',
'relocate',
'stage',
'install',
'uninstall'
'uninstall',
]
# =============== Modifies Environment
def setup_create_parser(subparser):
"""create a new environment."""
#
# env create
#
def env_create_setup_parser(subparser):
"""create a new environment"""
subparser.add_argument('env', help='name of environment to create')
subparser.add_argument('envfile', nargs='?', default=None,
help='YAML initialization file (optional)')
def env_create(args):
if args.envfile:
with open(args.envfile) as f:
_environment_create(args.env, f)
else:
_environment_create(args.env)
def _environment_create(name, env_yaml=None):
"""Create a new environment, with an optional yaml description.
Arguments:
name (str): name of the environment to create
env_yaml (str or file): yaml text or file object containing
configuration information.
"""
if os.path.exists(ev.root(name)):
tty.die("'%s': environment already exists" % name)
env = ev.Environment(name, env_yaml)
env.write()
tty.msg("Created environment '%s' in %s" % (name, env.path))
return env
#
# env remove
#
def env_destroy_setup_parser(subparser):
"""destroy an existing environment"""
subparser.add_argument(
'envfile', nargs='?', help='optional initialization file')
'env', nargs='+', help='environment(s) to destroy')
arguments.add_common_arguments(subparser, ['yes_to_all'])
def environment_create(args):
if os.path.exists(ev.root(args.env)):
raise tty.die("Environment already exists: " + args.env)
_environment_create(args.env)
def env_destroy(args):
    """Destroy one or more environments, asking for confirmation first.

    Dies early if any named environment does not exist or is not writable,
    so we never destroy only part of the requested set.
    """
    for env in args.env:
        # exists() takes an environment *name* and applies root() itself,
        # so pass the name directly (passing ev.root(env) double-rooted it)
        if not ev.exists(env):
            tty.die("No such environment: '%s'" % env)
        elif not os.access(ev.root(env), os.W_OK):
            # report the specific offending environment, not the whole list
            tty.die("insufficient permissions to modify environment: '%s'"
                    % env)

    if not args.yes_to_all:
        answer = tty.get_yes_or_no(
            'Really destroy %s %s?' % (
                string.plural(len(args.env), 'environment', show_n=False),
                string.comma_and(args.env)),
            default=False)
        if not answer:
            tty.die("Will not destroy any environments")

    for env in args.env:
        ev.Environment(env).destroy()
        tty.msg("Successfully destroyed environment '%s'" % env)
def _environment_create(name, init_config=None):
environment = ev.Environment(name)
user_specs = list()
config_sections = {}
if init_config:
for key, val in init_config.items():
if key == 'user_specs':
user_specs.extend(val)
else:
config_sections[key] = val
for user_spec in user_specs:
environment.add(user_spec)
ev.write(environment)
# When creating the environment, the user may specify configuration
# to place in the environment initially. Spack does not interfere
# with this configuration after initialization so it is handled here
if len(config_sections) > 0:
config_basedir = fs.join_path(environment.path, 'config')
os.mkdir(config_basedir)
for key, val in config_sections.items():
yaml_section = syaml.dump({key: val}, default_flow_style=False)
yaml_file = '{0}.yaml'.format(key)
yaml_path = fs.join_path(config_basedir, yaml_file)
with open(yaml_path, 'w') as f:
f.write(yaml_section)
#
# env list
#
def env_list_setup_parser(subparser):
"""list available environments"""
pass
def setup_add_parser(subparser):
def env_list(args):
    """List the names of all existing environments, one per line.

    The currently active environment (if any) is highlighted in green.
    A count header is printed only when stdout is a terminal, so the
    plain name list stays script-friendly when piped.
    """
    names = ev.list_environments()

    color_names = []
    for name in names:
        # ev.active is the module-global active environment (or None)
        if ev.active and name == ev.active.name:
            name = colorize('@*g{%s}' % name)
        color_names.append(name)

    # say how many there are if writing to a tty
    if sys.stdout.isatty():
        if not names:
            tty.msg('No environments')
        else:
            tty.msg('%d environments' % len(names))

    colify(color_names, indent=4)
#
# env add
#
def env_add_setup_parser(subparser):
"""add a spec to an environment"""
subparser.add_argument(
'-a', '--all', action='store_true', dest='all',
help="Add all specs listed in env.yaml")
'-e', '--env', help='add spec to environment with this name')
subparser.add_argument(
'package', nargs=argparse.REMAINDER,
help="Spec of the package to add")
'specs', nargs=argparse.REMAINDER, help="spec of the package to add")
def environment_add(args):
ev.check_consistency(args.environment)
environment = ev.read(args.environment)
parsed_specs = spack.cmd.parse_specs(args.package)
def env_add(args):
if not args.env:
tty.die('spack env unadd requires an active env or argument')
if args.all:
# Don't allow command-line specs with --all
if len(parsed_specs) > 0:
tty.die('Cannot specify --all and specs too on the command line')
yaml_specs = environment.yaml['specs']
if len(yaml_specs) == 0:
tty.msg('No specs to add from env.yaml')
# Add list of specs from env.yaml file
for user_spec, _ in yaml_specs.items(): # OrderedDict
environment.add(str(user_spec), report_existing=False)
else:
for spec in parsed_specs:
environment.add(str(spec))
ev.write(environment)
env = ev.read(args.env)
for spec in spack.cmd.parse_specs(args.specs):
if not env.add(spec):
tty.msg("Package {0} was already added to {1}"
.format(spec.name, env.name))
else:
tty.msg('Adding %s to environment %s' % (spec, env.name))
env.write()
def setup_remove_parser(subparser):
#
# env remove
#
def env_remove_setup_parser(subparser):
"""remove a spec from an environment"""
subparser.add_argument(
'-e', '--env', help='remove spec with this name from environment')
subparser.add_argument(
'-a', '--all', action='store_true', dest='all',
help="Remove all specs from (clear) the environment")
subparser.add_argument(
'package', nargs=argparse.REMAINDER,
help="Spec of the package to remove")
'specs', nargs=argparse.REMAINDER, help="specs to be removed")
def environment_remove(args):
ev.check_consistency(args.environment)
environment = ev.read(args.environment)
def env_remove(args):
env = get_env(args, 'remove')
if args.all:
environment.clear()
env.clear()
else:
for spec in spack.cmd.parse_specs(args.package):
environment.remove(spec.format())
ev.write(environment)
for spec in spack.cmd.parse_specs(args.specs):
tty.msg('Removing %s from environment %s' % (spec, env.name))
env.remove(spec)
env.write()
def setup_concretize_parser(subparser):
#
# env concretize
#
def env_concretize_setup_parser(subparser):
"""concretize user specs and write lockfile"""
subparser.add_argument(
'env', nargs='?', help='concretize all packages for this environment')
subparser.add_argument(
'-f', '--force', action='store_true',
help="Re-concretize even if already concretized.")
add_use_repo_argument(subparser)
def environment_concretize(args):
ev.check_consistency(args.environment)
environment = ev.read(args.environment)
def env_concretize(args):
if not args.env:
tty.die('spack env status requires an active env or argument')
environment = ev.read(args.env)
_environment_concretize(
environment, use_repo=args.use_repo, force=args.force)
environment, use_repo=bool(args.exact_env), force=args.force)
def _environment_concretize(environment, use_repo=False, force=False):
"""Function body separated out to aid in testing."""
# Change global search paths
repo = ev.prepare_repository(environment, use_repo=use_repo)
ev.prepare_config_scope(environment)
new_specs = environment.concretize(force=force)
for spec in new_specs:
for dep in spec.traverse():
ev.dump_to_environment_repo(dep, repo)
# Moves <env>/.env.new to <env>/.env
ev.write(environment, repo)
environment.write(dump_packages=new_specs)
# =============== Does not Modify Environment
def setup_install_parser(subparser):
# REMOVE
# env install
#
def env_install_setup_parser(subparser):
"""install all concretized specs in an environment"""
subparser.add_argument(
'env', nargs='?', help='install all packages in this environment')
spack.cmd.install.add_common_arguments(subparser)
add_use_repo_argument(subparser)
def environment_install(args):
ev.check_consistency(args.environment)
environment = ev.read(args.environment)
ev.prepare_repository(environment, use_repo=args.use_repo)
environment.install(args)
def env_install(args):
if not args.env:
tty.die('spack env status requires an active env or argument')
env = ev.read(args.env)
env.install(args)
def setup_uninstall_parser(subparser):
# REMOVE
# env uninstall
#
def env_uninstall_setup_parser(subparser):
"""uninstall packages from an environment"""
subparser.add_argument(
'env', nargs='?', help='uninstall all packages in this environment')
spack.cmd.uninstall.add_common_arguments(subparser)
def environment_uninstall(args):
ev.check_consistency(args.environment)
environment = ev.read(args.environment)
ev.prepare_repository(environment)
def env_uninstall(args):
if not args.env:
tty.die('spack env uninstall requires an active env or argument')
environment = ev.read(args.env)
environment.uninstall(args)
# =======================================
def setup_relocate_parser(subparser):
#
# env relocate
#
def env_relocate_setup_parser(subparser):
"""reconcretize environment with new OS and/or compiler"""
subparser.add_argument('--compiler', help="Compiler spec to use")
add_use_repo_argument(subparser)
def environment_relocate(args):
environment = ev.read(args.environment)
ev.prepare_repository(environment, use_repo=args.use_repo)
def env_relocate(args):
environment = ev.read(args.env)
environment.reset_os_and_compiler(compiler=args.compiler)
ev.write(environment)
environment.write()
def setup_status_parser(subparser):
#
# env status
#
def env_status_setup_parser(subparser):
"""get install status of specs in an environment"""
subparser.add_argument(
'env', nargs='?', help='name of environment to show status for')
arguments.add_common_arguments(
subparser,
['recurse_dependencies', 'long', 'very_long', 'install_status'])
def environment_status(args):
def env_status(args):
if not args.env:
tty.die('spack env status requires an active env or argument')
# TODO? option to show packages w/ multiple instances?
environment = ev.read(args.environment)
environment.list(
environment = ev.read(args.env)
environment.status(
sys.stdout, recurse_dependencies=args.recurse_dependencies,
hashes=args.long or args.very_long,
hashlen=None if args.very_long else 7,
install_status=args.install_status)
def setup_stage_parser(subparser):
"""Download all source files for all packages in an environment"""
add_use_repo_argument(subparser)
#
# env stage
#
def env_stage_setup_parser(subparser):
"""download all source files for all packages in an environment"""
subparser.add_argument(
'env', nargs='?', help='name of env to generate loads file for')
def environment_stage(args):
environment = ev.read(args.environment)
ev.prepare_repository(environment, use_repo=args.use_repo)
def env_stage(args):
if not args.env:
tty.die('spack env loads requires an active env or argument')
environment = ev.read(args.env)
for spec in environment.specs_by_hash.values():
for dep in spec.traverse():
dep.package.do_stage()
@contextmanager
def redirect_stdout(ofname):
    """Context manager redirecting STDOUT to the file named ``ofname``.

    Arguments:
        ofname (str): path of the file to write stdout to

    The original ``sys.stdout`` is restored even if the body raises;
    the previous version skipped restoration on exception, leaving
    stdout pointing at a closed file.
    """
    with open(ofname, 'w') as f:
        original = sys.stdout
        sys.stdout = f
        try:
            yield
        finally:
            sys.stdout = original
def setup_loads_parser(subparser):
#
# env loads
#
def env_loads_setup_parser(subparser):
"""list modules for an installed environment '(see spack module loads)'"""
subparser.add_argument(
'env', nargs='?', help='name of env to generate loads file for')
subparser.add_argument(
'-m', '--module-type', choices=('tcl', 'lmod'),
help='type of module system to generate loads for')
spack.cmd.modules.add_loads_arguments(subparser)
def environment_loads(args):
def env_loads(args):
if not args.env:
tty.die('spack env loads requires an active env or argument')
# Set the module types that have been selected
module_types = args.module_type
if module_types is None:
module_type = args.module_type
if module_type is None:
# If no selection has been made select all of them
module_types = ['tcl']
module_type = 'tcl'
module_types = list(set(module_types))
environment = ev.read(args.environment)
environment = ev.read(args.env)
recurse_dependencies = args.recurse_dependencies
args.recurse_dependencies = False
ofname = fs.join_path(environment.path, 'loads')
with redirect_stdout(ofname):
loads_file = fs.join_path(environment.path, 'loads')
with open(loads_file, 'w') as f:
specs = environment._get_environment_specs(
recurse_dependencies=recurse_dependencies)
spack.cmd.module.loads(module_types, specs, args)
spack.cmd.modules.loads(module_type, specs, args, f)
print('To load this environment, type:')
print(' source %s' % ofname)
print(' source %s' % loads_file)
def setup_upgrade_parser(subparser):
#
# env upgrade
#
def env_upgrade_setup_parser(subparser):
"""upgrade a dependency package in an environment to the latest version"""
subparser.add_argument('dep_name', help='Dependency package to upgrade')
subparser.add_argument('--dry-run', action='store_true', dest='dry_run',
help="Just show the updates that would take place")
add_use_repo_argument(subparser)
def environment_upgrade(args):
environment = ev.read(args.environment)
repo = ev.prepare_repository(
environment, use_repo=args.use_repo, remove=[args.dep_name])
new_dep = environment.upgrade_dependency(args.dep_name, args.dry_run)
def env_upgrade(args):
    """Upgrade a dependency package in an environment to the latest version.

    If the environment carries its own package repos, they are copied to a
    temporary staging area first so the upgraded package can be re-dumped
    without clobbering the environment's stored copy.
    """
    env = ev.read(args.env)

    if os.path.exists(env.repos_path):
        repo_stage = tempfile.mkdtemp()
        # was os.path.join_path, which does not exist (AttributeError)
        new_repos_path = os.path.join(repo_stage, 'repos')
        shutil.copytree(env.repos_path, new_repos_path)

        repo = spack.environment.make_repo_path(new_repos_path)
        if args.dep_name in repo:
            shutil.rmtree(repo.dirname_for_package_name(args.dep_name))
        spack.repo.path.put_first(repo)

    new_dep = env.upgrade_dependency(args.dep_name, args.dry_run)
    if not args.dry_run and new_dep:
        env.write(new_dep)
def add_use_repo_argument(cmd_parser):
cmd_parser.add_argument(
'--use-env-repo', action='store_true', dest='use_repo',
help='Use package definitions stored in the environment')
#: Dictionary mapping subcommand names and aliases to functions
subcommand_functions = {}
#
# spack env
#
def setup_parser(subparser):
sp = subparser.add_subparsers(
metavar='SUBCOMMAND', dest='environment_command')
sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='env_command')
for name in subcommands:
setup_parser_cmd_name = 'setup_%s_parser' % name
if isinstance(name, (list, tuple)):
name, aliases = name[0], name[1:]
else:
aliases = []
# add commands to subcommands dict
function_name = 'env_%s' % name
function = globals()[function_name]
for alias in [name] + aliases:
subcommand_functions[alias] = function
# make a subparser and run the command's setup function on it
setup_parser_cmd_name = 'env_%s_setup_parser' % name
setup_parser_cmd = globals()[setup_parser_cmd_name]
subsubparser = sp.add_parser(name, help=setup_parser_cmd.__doc__)
subsubparser = sp.add_parser(
name, aliases=aliases, help=setup_parser_cmd.__doc__)
setup_parser_cmd(subsubparser)
def env(parser, args, **kwargs):
"""Look for a function called environment_<name> and call it."""
function_name = 'environment_%s' % args.environment_command
action = globals()[function_name]
action = subcommand_functions[args.env_command]
action(args)

View File

@ -8,6 +8,7 @@
import collections
import os.path
import shutil
import sys
from llnl.util import filesystem, tty
@ -104,7 +105,7 @@ def one_spec_or_raise(specs):
return specs[0]
def loads(module_type, specs, args):
def loads(module_type, specs, args, out=sys.stdout):
"""Prompt the list of modules associated with a list of specs"""
# Get a comprehensive list of specs
@ -147,7 +148,8 @@ def loads(module_type, specs, args):
d['comment'] = '' if not args.shell else '# {0}\n'.format(
spec.format())
d['name'] = mod
print(prompt_template.format(**d))
out.write(prompt_template.format(**d))
out.write('\n')
def find(module_type, specs, args):

View File

@ -3,8 +3,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from __future__ import print_function
"""This module implements Spack's configuration file handling.
This implements Spack's configuration system, which handles merging
@ -109,6 +107,14 @@
scopes_metavar = '{defaults,system,site,user}[/PLATFORM]'
def first_existing(dictionary, keys):
    """Get the first key in ``keys`` that is present in ``dictionary``.

    Note: this returns the *key* itself, not ``dictionary[key]`` (the old
    docstring incorrectly said "value").

    Arguments:
        dictionary (dict): mapping to search
        keys (iterable): candidate keys, checked in order

    Raises:
        KeyError: if none of ``keys`` is present in ``dictionary``
    """
    try:
        return next(k for k in keys if k in dictionary)
    except StopIteration:
        raise KeyError("None of %s is in dict!" % keys)
def _extend_with_default(validator_class):
"""Add support for the 'default' attr for properties and patternProperties.
@ -204,7 +210,10 @@ def __init__(self, name, path, schema, yaml_path=None):
Arguments:
schema (dict): jsonschema for the file to read
yaml_path (list): list of dict keys in the schema where
config data can be found.
config data can be found;
Elements of ``yaml_path`` can be tuples or lists to represent an
"or" of keys (e.g. "env" or "spack" is ``('env', 'spack')``)
"""
super(SingleFileScope, self).__init__(name, path)
@ -231,6 +240,13 @@ def get_section(self, section):
return None
for key in self.yaml_path:
if self._raw_data is None:
return None
# support tuples as "or" in the yaml path
if isinstance(key, (list, tuple)):
key = first_existing(self._raw_data, key)
self._raw_data = self._raw_data[key]
# data in self.sections looks (awkwardly) like this:
@ -246,8 +262,16 @@ def get_section(self, section):
# }
# }
# }
return self.sections.setdefault(
section, {section: self._raw_data.get(section)})
#
# UNLESS there is no section, in which case it is stored as:
# {
# 'config': None,
# ...
# }
value = self._raw_data.get(section)
self.sections.setdefault(
section, None if value is None else {section: value})
return self.sections[section]
def write_section(self, section):
_validate(self.sections, self.schema)
@ -663,14 +687,20 @@ def _validate_section_name(section):
% (section, " ".join(section_schemas.keys())))
def _validate(data, schema):
def _validate(data, schema, set_defaults=True):
"""Validate data read in from a Spack YAML file.
Arguments:
data (dict or list): data read from a Spack YAML file
schema (dict or list): jsonschema to validate data
set_defaults (bool): whether to set defaults based on the schema
This leverages the line information (start_mark, end_mark) stored
on Spack YAML structures.
"""
import jsonschema
if not hasattr(_validate, 'validator'):
default_setting_validator = _extend_with_default(
jsonschema.Draft4Validator)
@ -856,13 +886,22 @@ class ConfigFileError(ConfigError):
class ConfigFormatError(ConfigError):
"""Raised when a configuration format does not match its schema."""
def __init__(self, validation_error, data):
def __init__(self, validation_error, data, filename=None, line=None):
self.filename = filename # record this for ruamel.yaml
location = '<unknown file>'
mark = self._get_mark(validation_error, data)
if mark:
location = '%s' % mark.name
if mark.line is not None:
location += ':%d' % (mark.line + 1)
# spack yaml has its own file/line marks -- try to find them
if not filename and not line:
mark = self._get_mark(validation_error, data)
if mark:
filename = mark.name
line = mark.line + 1
if filename:
location = '%s' % filename
if line is not None:
location += ':%d' % line
message = '%s: %s' % (location, validation_error.message)
super(ConfigError, self).__init__(message)

View File

@ -4,19 +4,24 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import re
import sys
import shutil
import tempfile
from contextlib import contextmanager
from six.moves import zip_longest
import jsonschema
import ruamel.yaml
import llnl.util.filesystem as fs
import llnl.util.tty as tty
import spack.error
import spack.repo
import spack.schema.env
import spack.spec
import spack.util.spack_json as sjson
from spack.config import ConfigScope
import spack.config
from spack.spec import Spec, CompilerSpec, FlagMap
from spack.version import VersionList
@ -26,7 +31,51 @@
#: path where environments are stored in the spack tree
env_path = fs.join_path(spack.paths.var_path, 'environments')
env_path = os.path.join(spack.paths.var_path, 'environments')
#: Name of the input yaml file in an environment
env_yaml_name = 'env.yaml'
#: Name of the lock file with concrete specs
env_lock_name = 'env.lock'
#: default env.yaml file to put in new environments
default_env_yaml = """\
# This is a Spack Environment file.
#
# It describes a set of packages to be installed, along with
# configuration settings.
env:
# add package specs to the `specs` list
specs:
-
"""
#: regex for validating environment names
valid_environment_name_re = r'^\w[\w-]*$'
#: version of the lockfile format. Must increase monotonically.
lockfile_format_version = 1
#: legal first keys in an environment.yaml file
env_schema_keys = ('env', 'spack')
#: jsonschema validator for environments
_validator = None
def valid_env_name(name):
    # Returns a match object (truthy) when `name` matches the environment
    # name regex, else None -- callers use it as a boolean test.
    return re.match(valid_environment_name_re, name)
def validate_env_name(name):
    """Return ``name`` if it is a legal environment name, else raise.

    Raises:
        ValueError: if the name does not match the environment name regex
    """
    if not valid_env_name(name):
        raise ValueError((
            "'%s': names must start with a letter, and only contain "
            "letters, numbers, _, and -.") % name)
    return name
def activate(name, exact=False):
@ -48,9 +97,10 @@ def activate(name, exact=False):
active = read(name)
prepare_config_scope(active)
prepare_repository(active, use_repo=exact)
if exact:
spack.repo.path.put_first(active.repo)
tty.msg("Using environmennt '%s'" % active.name)
tty.debug("Using environmennt '%s'" % active.name)
def deactivate():
@ -68,21 +118,50 @@ def deactivate():
def root(name):
"""Get the root directory for an environment by name."""
return fs.join_path(env_path, name)
return os.path.join(env_path, name)
def get_dotenv_dir(env_root):
def exists(name):
"""Whether an environment exists or not."""
return os.path.exists(root(name))
def manifest_path(name):
return os.path.join(root(name), env_yaml_name)
def lockfile_path(name):
return os.path.join(root(name), env_lock_name)
def dotenv_path(env_root):
"""@return Directory in an environment that is owned by Spack"""
return fs.join_path(env_root, '.env')
return os.path.join(env_root, '.env')
def get_write_paths(env_root):
"""Determines the names of temporary and permanent directories to
write machine-generated environment info."""
tmp_new = fs.join_path(env_root, '.env.new')
dest = get_dotenv_dir(env_root)
tmp_old = fs.join_path(env_root, '.env.old')
return tmp_new, dest, tmp_old
def repos_path(dotenv_path):
return os.path.join(dotenv_path, 'repos')
def log_path(dotenv_path):
return os.path.join(dotenv_path, 'logs')
def config_dict(yaml_data):
    """Get the configuration scope section out of an env.yaml"""
    return yaml_data[spack.config.first_existing(yaml_data, env_schema_keys)]
def list_environments():
    """List the names of environments that currently exist.

    Returns an empty list when the environments directory has not been
    created yet; the previous version raised OSError from os.listdir()
    in that case.
    """
    if not os.path.isdir(env_path):
        return []

    names = []
    for candidate in sorted(os.listdir(env_path)):
        # an environment is a validly-named directory containing env.yaml
        yaml_path = os.path.join(env_path, candidate, env_yaml_name)
        if valid_env_name(candidate) and os.path.exists(yaml_path):
            names.append(candidate)
    return names
def _reset_os_and_compiler(spec, compiler=None):
@ -110,94 +189,231 @@ def _upgrade_dependency_version(spec, dep_name):
return spec
class Environment(object):
def clear(self):
self.user_specs = list()
self.concretized_order = list()
self.specs_by_hash = dict()
def validate(data, filename=None):
global _validator
if _validator is None:
_validator = jsonschema.Draft4Validator(spack.schema.env.schema)
try:
_validator.validate(data)
except jsonschema.ValidationError as e:
raise spack.config.ConfigFormatError(
e, data, filename, e.instance.lc.line + 1)
def __init__(self, name):
self.name = name
def _read_yaml(str_or_file):
"""Read YAML from a file for round-trip parsing."""
data = ruamel.yaml.load(str_or_file, ruamel.yaml.RoundTripLoader)
filename = getattr(str_or_file, 'name', None)
validate(data, filename)
return data
def _write_yaml(data, str_or_file):
"""Write YAML to a file preserving comments and dict order."""
filename = getattr(str_or_file, 'name', None)
validate(data, filename)
ruamel.yaml.dump(data, str_or_file, Dumper=ruamel.yaml.RoundTripDumper,
default_flow_style=False)
class Environment(object):
def __init__(self, name, env_yaml=None):
"""Create a new environment, optionally with an initialization file.
Arguments:
name (str): name for this environment
env_yaml (str or file): raw YAML or a file to initialize the
environment
"""
self.name = validate_env_name(name)
self.clear()
# Default config
self.yaml = {
'configs': ['<env>'],
'specs': []
}
# use read_yaml to preserve comments
if env_yaml is None:
env_yaml = default_env_yaml
self.yaml = _read_yaml(env_yaml)
# initialize user specs from the YAML
spec_list = config_dict(self.yaml).get('specs')
if spec_list:
self.user_specs = [Spec(s) for s in spec_list if s is not None]
def clear(self):
self.user_specs = [] # current user specs
self.concretized_user_specs = [] # user specs from last concretize
self.concretized_order = [] # roots of last concretize, in order
self.specs_by_hash = {} # concretized specs by hash
self._repo = None # RepoPath for this env (memoized)
@property
def path(self):
return root(self.name)
def repo_path(self):
return fs.join_path(get_dotenv_dir(self.path), 'repo')
@property
def manifest_path(self):
return manifest_path(self.name)
@property
def lock_path(self):
return lockfile_path(self.name)
@property
def dotenv_path(self):
return dotenv_path(self.path)
@property
def repos_path(self):
return repos_path(self.dotenv_path)
@property
def repo(self):
if self._repo is None:
self._repo = make_repo_path(self.repos_path)
return self._repo
def included_config_scopes(self):
    """List of included configuration scopes from the environment.

    Scopes are in order from lowest to highest precedence, i.e., the
    order they should be pushed on the stack, but the opposite of the
    order they appear in the env.yaml file.
    """
    scopes = []

    # load config scopes added via 'include:', in reverse so that
    # highest-precedence scopes are last.
    includes = config_dict(self.yaml).get('include', [])
    for config_path in reversed(includes):  # index was unused; enumerate dropped
        # allow paths to contain environment variables
        config_path = config_path.format(**os.environ)

        # treat relative paths as relative to the environment
        if not os.path.isabs(config_path):
            config_path = os.path.join(self.path, config_path)
        config_path = os.path.normpath(os.path.realpath(config_path))

        if os.path.isdir(config_path):
            # directories are treated as regular ConfigScopes
            config_name = 'env:%s:%s' % (
                self.name, os.path.basename(config_path))
            scope = spack.config.ConfigScope(config_name, config_path)
        else:
            # files are assumed to be SingleFileScopes
            base, ext = os.path.splitext(os.path.basename(config_path))
            config_name = 'env:%s:%s' % (self.name, base)
            # NOTE(review): spack.schema.merged is not among the imports
            # visible in this diff -- confirm it is imported elsewhere
            scope = spack.config.SingleFileScope(
                config_name, config_path, spack.schema.merged.schema)

        scopes.append(scope)

    return scopes
def env_file_config_scope(self):
"""Get the configuration scope for the environment's manifest file."""
config_name = 'env:%s' % self.name
return spack.config.SingleFileScope(config_name,
self.manifest_path,
spack.schema.env.schema,
[env_schema_keys])
def config_scopes(self):
"""A list of all configuration scopes for this environment."""
return self.included_config_scopes() + [self.env_file_config_scope()]
def destroy(self):
"""Remove this environment from Spack entirely."""
shutil.rmtree(self.path)
def add(self, user_spec, report_existing=True):
"""Add a single user_spec (non-concretized) to the Environment"""
query_spec = Spec(user_spec)
existing = set(x for x in self.user_specs
if Spec(x).name == query_spec.name)
if existing:
if report_existing:
tty.die("Package {0} was already added to {1}"
.format(query_spec.name, self.name))
else:
tty.msg("Package {0} was already added to {1}"
.format(query_spec.name, self.name))
else:
tty.msg('Adding %s to environment %s' % (user_spec, self.name))
self.user_specs.append(user_spec)
"""Add a single user_spec (non-concretized) to the Environment
Returns:
(bool): True if the spec was added, False if it was already
present and did not need to be added
"""
spec = Spec(user_spec)
existing = set(s for s in self.user_specs if s.name == spec.name)
if not existing:
self.user_specs.append(spec)
return bool(not existing)
def remove(self, query_spec):
"""Remove specs from an environment that match a query_spec"""
query_spec = Spec(query_spec)
match_index = -1
for i, spec in enumerate(self.user_specs):
if Spec(spec).name == query_spec.name:
match_index = i
break
matches = [s for s in self.user_specs if s.satisfies(query_spec)]
if match_index < 0:
tty.die("Not found: {0}".format(query_spec))
if not matches:
raise EnvError("Not found: {0}".format(query_spec))
del self.user_specs[match_index]
if match_index < len(self.concretized_order):
spec_hash = self.concretized_order[match_index]
del self.concretized_order[match_index]
del self.specs_by_hash[spec_hash]
for spec in matches:
self.user_specs.remove(spec)
if spec in self.concretized_user_specs:
i = self.concretized_user_specs.index(spec)
del self.concretized_user_specs[i]
dag_hash = self.concretized_order[i]
del self.concretized_order[i]
del self.specs_by_hash[dag_hash]
def concretize(self, force=False):
"""Concretize user_specs in an Environment, creating (fully
concretized) specs.
"""Concretize user_specs in this environment.
Only concretizes specs that haven't been concretized yet unless
force is ``True``.
This only modifies the environment in memory. ``write()`` will
write out a lockfile containing concretized specs.
Arguments:
force (bool): re-concretize ALL specs, even those that were
already concretized
Return:
(list): list of newly concretized specs
force: bool
If set, re-concretize ALL specs, even those that were
already concretized.
"""
if force:
# Clear previously concretized specs
self.specs_by_hash = dict()
self.concretized_order = list()
self.concretized_user_specs = []
self.concretized_order = []
self.specs_by_hash = {}
num_concretized = len(self.concretized_order)
new_specs = list()
for user_spec in self.user_specs[num_concretized:]:
tty.msg('Concretizing %s' % user_spec)
# keep any concretized specs whose user specs are still in the manifest
new_concretized_user_specs = []
new_concretized_order = []
new_specs_by_hash = {}
for s, h in zip(self.concretized_user_specs, self.concretized_order):
if s in self.user_specs:
new_concretized_user_specs.append(s)
new_concretized_order.append(h)
new_specs_by_hash[h] = self.specs_by_hash[h]
spec = spack.cmd.parse_specs(user_spec)[0]
spec.concretize()
new_specs.append(spec)
dag_hash = spec.dag_hash()
self.specs_by_hash[dag_hash] = spec
self.concretized_order.append(spec.dag_hash())
# concretize any new user specs that we haven't concretized yet
new_specs = []
for uspec in self.user_specs:
if uspec not in new_concretized_user_specs:
tty.msg('Concretizing %s' % uspec)
cspec = uspec.concretized()
dag_hash = cspec.dag_hash()
# Display concretized spec to the user
sys.stdout.write(spec.tree(
recurse_dependencies=True, install_status=True,
hashlen=7, hashes=True))
new_concretized_user_specs.append(uspec)
new_concretized_order.append(dag_hash)
new_specs_by_hash[dag_hash] = cspec
new_specs.append(cspec)
# Display concretized spec to the user
sys.stdout.write(cspec.tree(
recurse_dependencies=True, install_status=True,
hashlen=7, hashes=True))
# save the new concretized state
self.concretized_user_specs = new_concretized_user_specs
self.concretized_order = new_concretized_order
self.specs_by_hash = new_specs_by_hash
# return only the newly concretized specs
return new_specs
def install(self, install_args=None):
@ -205,12 +421,8 @@ def install(self, install_args=None):
specs in an Environment."""
# Make sure log directory exists
logs = fs.join_path(self.path, 'logs')
try:
os.makedirs(logs)
except OSError:
if not os.path.isdir(logs):
raise
logs_dir = log_path(self.dotenv_path)
fs.mkdirp(logs_dir)
for concretized_hash in self.concretized_order:
spec = self.specs_by_hash[concretized_hash]
@ -224,13 +436,11 @@ def install(self, install_args=None):
spec.package.do_install(**kwargs)
# Link the resulting log file into logs dir
logname = '%s-%s.log' % (spec.name, spec.dag_hash(7))
logpath = fs.join_path(logs, logname)
try:
os.remove(logpath)
except OSError:
pass
os.symlink(spec.package.build_log_path, logpath)
build_log_link = os.path.join(
logs_dir, '%s-%s.log' % (spec.name, spec.dag_hash(7)))
if os.path.exists(build_log_link):
os.remove(build_log_link)
os.symlink(spec.package.build_log_path, build_log_link)
def uninstall(self, args):
"""Uninstall all the specs in an Environment."""
@ -238,7 +448,7 @@ def uninstall(self, args):
args.all = False
spack.cmd.uninstall.uninstall_specs(args, specs)
def list(self, stream, **kwargs):
def status(self, stream, **kwargs):
"""List the specs in an environment."""
for user_spec, concretized_hash in zip_longest(
self.user_specs, self.concretized_order):
@ -310,181 +520,181 @@ def _get_environment_specs(self, recurse_dependencies=True):
return spec_list
def to_dict(self):
"""Used in serializing to JSON"""
concretized_order = list(self.concretized_order)
concrete_specs = dict()
def _to_lockfile_dict(self):
"""Create a dictionary to store a lockfile for this environment."""
concrete_specs = {}
for spec in self.specs_by_hash.values():
for s in spec.traverse():
if s.dag_hash() not in concrete_specs:
concrete_specs[s.dag_hash()] = (
s.to_node_dict(all_deps=True))
format = {
'user_specs': self.user_specs,
'concretized_order': concretized_order,
dag_hash = s.dag_hash()
if dag_hash not in concrete_specs:
concrete_specs[dag_hash] = s.to_node_dict(all_deps=True)
hash_spec_list = zip(
self.concretized_order, self.concretized_user_specs)
# this is the lockfile we'll write out
data = {
# metadata about the format
'_meta': {
'file-type': 'spack-lockfile',
'lockfile-version': lockfile_format_version,
},
# users specs + hashes are the 'roots' of the environment
'roots': [{
'hash': h,
'spec': str(s)
} for h, s in hash_spec_list],
# Concrete specs by hash, including dependencies
'concrete_specs': concrete_specs,
}
return format
@staticmethod
def from_dict(name, d):
"""Used in deserializing from JSON"""
env = Environment(name)
env.user_specs = list(d['user_specs'])
env.concretized_order = list(d['concretized_order'])
specs_dict = d['concrete_specs']
return data
hash_to_node_dict = specs_dict
root_hashes = set(env.concretized_order)
def _read_lockfile_dict(self, d):
"""Read a lockfile dictionary into this environment."""
roots = d['roots']
self.concretized_user_specs = [Spec(r['spec']) for r in roots]
self.concretized_order = [r['hash'] for r in roots]
json_specs_by_hash = d['concrete_specs']
root_hashes = set(self.concretized_order)
specs_by_hash = {}
for dag_hash, node_dict in hash_to_node_dict.items():
for dag_hash, node_dict in json_specs_by_hash.items():
specs_by_hash[dag_hash] = Spec.from_node_dict(node_dict)
for dag_hash, node_dict in hash_to_node_dict.items():
for dag_hash, node_dict in json_specs_by_hash.items():
for dep_name, dep_hash, deptypes in (
Spec.dependencies_from_node_dict(node_dict)):
specs_by_hash[dag_hash]._add_dependency(
specs_by_hash[dep_hash], deptypes)
env.specs_by_hash = dict(
self.specs_by_hash = dict(
(x, y) for x, y in specs_by_hash.items() if x in root_hashes)
return env
def write(self, dump_packages=None):
"""Writes an in-memory environment to its location on disk.
Arguments:
dump_packages (list of Spec): specs of packages whose
package.py files should be written to the env's repo
"""
# ensure path in var/spack/environments
fs.mkdirp(self.path)
def check_consistency(name):
"""check whether an environment directory is consistent"""
env_root = root(name)
tmp_new, dest, tmp_old = get_write_paths(env_root)
if os.path.exists(tmp_new) or os.path.exists(tmp_old):
tty.die("Partial write state, run 'spack env repair'")
if self.specs_by_hash:
# ensure the prefix/.env directory exists
tmp_env = '%s.tmp' % self.dotenv_path
fs.mkdirp(tmp_env)
# dump package.py files for specified specs
tmp_repos_path = repos_path(tmp_env)
dump_packages = dump_packages or []
for spec in dump_packages:
for dep in spec.traverse():
if not dep.concrete:
raise ValueError('specs passed to environment.write() '
'must be concrete!')
def write(environment, new_repo=None):
"""Writes an in-memory environment back to its location on disk,
in an atomic manner."""
root = os.path.join(tmp_repos_path, dep.namespace)
repo = spack.repo.create_or_construct(root, dep.namespace)
pkg_dir = repo.dirname_for_package_name(dep.name)
tmp_new, dest, tmp_old = get_write_paths(root(environment.name))
fs.mkdirp(pkg_dir)
spack.repo.path.dump_provenance(dep, pkg_dir)
# Write the machine-generated stuff
fs.mkdirp(tmp_new)
# create one file for the environment object
with open(fs.join_path(tmp_new, 'environment.json'), 'w') as f:
sjson.dump(environment.to_dict(), stream=f)
# move the new .env directory into place.
move_move_rm(tmp_env, self.dotenv_path)
dest_repo_dir = fs.join_path(tmp_new, 'repo')
if new_repo:
shutil.copytree(new_repo.root, dest_repo_dir)
elif os.path.exists(environment.repo_path()):
shutil.copytree(environment.repo_path(), dest_repo_dir)
# Swap in new directory atomically
if os.path.exists(dest):
shutil.move(dest, tmp_old)
shutil.move(tmp_new, dest)
if os.path.exists(tmp_old):
shutil.rmtree(tmp_old)
def repair(environment_name):
"""Recovers from crash during critical section of write().
Possibilities:
tmp_new, dest
tmp_new, tmp_old
tmp_old, dest
"""
tmp_new, dest, tmp_old = get_write_paths(root(environment_name))
if os.path.exists(tmp_old):
if not os.path.exists(dest):
shutil.move(tmp_new, dest)
# write the lock file last
with write_tmp_and_move(self.lock_path) as f:
sjson.dump(self._to_lockfile_dict(), stream=f)
else:
shutil.rmtree(tmp_old)
tty.info("Previous update completed")
elif os.path.exists(tmp_new):
tty.info("Previous update did not complete")
else:
tty.info("Previous update may have completed")
if os.path.exists(self.lock_path):
os.unlink(self.lock_path)
if os.path.exists(tmp_new):
shutil.rmtree(tmp_new)
# invalidate _repo cache
self._repo = None
# put the new user specs in the YAML
yaml_spec_list = config_dict(self.yaml).setdefault('specs', [])
yaml_spec_list[:] = [str(s) for s in self.user_specs]
# if all that worked, write out the manifest file at the top level
with write_tmp_and_move(self.manifest_path) as f:
_write_yaml(self.yaml, f)
def read(environment_name):
def read(env_name):
"""Read environment state from disk."""
# Check that env is in a consistent state on disk
env_root = root(environment_name)
env_root = root(env_name)
if not os.path.isdir(env_root):
raise EnvError("no such environment '%s'" % environment_name)
raise EnvError("no such environment '%s'" % env_name)
if not os.access(env_root, os.R_OK):
raise EnvError("can't read environment '%s'" % environment_name)
raise EnvError("can't read environment '%s'" % env_name)
# Read env.yaml file
env_yaml = spack.config._read_config_file(
fs.join_path(env_root, 'env.yaml'),
spack.schema.env.schema)
# read yaml file
with open(manifest_path(env_name)) as f:
env = Environment(env_name, f.read())
dotenv_dir = get_dotenv_dir(env_root)
with open(fs.join_path(dotenv_dir, 'environment.json'), 'r') as f:
environment_dict = sjson.load(f)
environment = Environment.from_dict(environment_name, environment_dict)
if env_yaml:
environment.yaml = env_yaml['env']
# read lockfile, if it exists
lock_path = lockfile_path(env_name)
if os.path.exists(lock_path):
with open(lock_path) as f:
lockfile_dict = sjson.load(f)
env._read_lockfile_dict(lockfile_dict)
return environment
return env
def dump_to_environment_repo(spec, repo):
dest_pkg_dir = repo.dirname_for_package_name(spec.name)
if not os.path.exists(dest_pkg_dir):
spack.repo.path.dump_provenance(spec, dest_pkg_dir)
def move_move_rm(src, dest):
"""Move dest out of the way, put src in its place."""
dirname = os.path.dirname(dest)
basename = os.path.basename(dest)
old = os.path.join(dirname, '.%s.old' % basename)
if os.path.exists(dest):
shutil.move(dest, old)
shutil.move(src, dest)
if os.path.exists(old):
shutil.rmtree(old)
def prepare_repository(environment, remove=None, use_repo=False):
"""Adds environment's repository to the global search path of repos"""
repo_stage = tempfile.mkdtemp()
new_repo_dir = fs.join_path(repo_stage, 'repo')
if os.path.exists(environment.repo_path()):
shutil.copytree(environment.repo_path(), new_repo_dir)
else:
spack.repo.create_repo(new_repo_dir, environment.name)
if remove:
remove_dirs = []
repo = spack.repo.Repo(new_repo_dir)
for pkg_name in remove:
remove_dirs.append(repo.dirname_for_package_name(pkg_name))
for d in remove_dirs:
shutil.rmtree(d)
repo = spack.repo.Repo(new_repo_dir)
if use_repo:
spack.repo.put_first(repo)
return repo
@contextmanager
def write_tmp_and_move(filename):
"""Write to a temporary file, then move into place."""
dirname = os.path.dirname(filename)
basename = os.path.basename(filename)
tmp = os.path.join(dirname, '.%s.tmp' % basename)
with open(tmp, 'w') as f:
yield f
shutil.move(tmp, filename)
def prepare_config_scope(environment):
"""Adds environment's scope to the global search path
of configuration scopes"""
def make_repo_path(root):
"""Make a RepoPath from the repo subdirectories in an environment."""
path = spack.repo.RepoPath()
# Load up configs
for config_spec in environment.yaml['configs']:
config_name = os.path.split(config_spec)[1]
if config_name == '<env>':
# Use default config for the environment; doesn't have to exist
config_dir = fs.join_path(environment.path, 'config')
if not os.path.isdir(config_dir):
if os.path.isdir(root):
for repo_root in os.listdir(root):
repo_root = os.path.join(root, repo_root)
if not os.path.isdir(repo_root):
continue
config_name = environment.name
else:
# Use external user-provided config
config_dir = os.path.normpath(os.path.join(
environment.path, config_spec.format(**os.environ)))
if not os.path.isdir(config_dir):
tty.die('Spack config %s (%s) not found' %
(config_name, config_dir))
spack.config.config.push_scope(ConfigScope(config_name, config_dir))
repo = spack.repo.Repo(repo_root)
path.put_last(repo)
return path
def prepare_config_scope(env):
"""Add env's scope to the global configuration search path."""
for scope in env.config_scopes():
spack.config.config.push_scope(scope)
class EnvError(spack.error.SpackError):

View File

@ -357,7 +357,6 @@ def __init__(self, *repos, **kwargs):
self.repos = []
self.by_namespace = NamespaceTrie()
self.by_path = {}
self._all_package_names = None
self._provider_index = None
@ -374,36 +373,29 @@ def __init__(self, *repos, **kwargs):
"To remove the bad repository, run this command:",
" spack repo rm %s" % repo)
def _add(self, repo):
"""Add a repository to the namespace and path indexes.
Checks for duplicates -- two repos can't have the same root
directory, and they provide have the same namespace.
"""
if repo.root in self.by_path:
raise DuplicateRepoError("Duplicate repository: '%s'" % repo.root)
if repo.namespace in self.by_namespace:
raise DuplicateRepoError(
"Package repos '%s' and '%s' both provide namespace %s"
% (repo.root, self.by_namespace[repo.namespace].root,
repo.namespace))
# Add repo to the pkg indexes
self.by_namespace[repo.full_namespace] = repo
self.by_path[repo.root] = repo
def put_first(self, repo):
"""Add repo first in the search path."""
self._add(repo)
if isinstance(repo, RepoPath):
for r in reversed(repo.repos):
self.put_first(r)
return
self.repos.insert(0, repo)
self.by_namespace[repo.full_namespace] = repo
def put_last(self, repo):
"""Add repo last in the search path."""
self._add(repo)
if isinstance(repo, RepoPath):
for r in repo.repos:
self.put_last(r)
return
self.repos.append(repo)
# don't mask any higher-precedence repos with same namespace
if repo.full_namespace not in self.by_namespace:
self.by_namespace[repo.full_namespace] = repo
def remove(self, repo):
"""Remove a repo from the search path."""
if repo in self.repos:
@ -1079,6 +1071,14 @@ def create_repo(root, namespace=None):
return full_path, namespace
def create_or_construct(path, namespace=None):
"""Create a repository, or just return a Repo if it already exists."""
if not os.path.exists(path):
mkdirp(path)
create_repo(path, namespace)
return Repo(path, namespace)
def _path():
"""Get the singleton RepoPath instance for Spack.
@ -1159,10 +1159,6 @@ class BadRepoError(RepoError):
"""Raised when repo layout is invalid."""
class DuplicateRepoError(RepoError):
"""Raised when duplicate repos are added to a RepoPath."""
class UnknownEntityError(RepoError):
"""Raised when we encounter a package spack doesn't have."""

View File

@ -3,137 +3,431 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import shutil
import os
from six import StringIO
import pytest
import llnl.util.filesystem as fs
import spack.modules
import spack.environment as ev
import spack.util.spack_yaml as syaml
from spack.cmd.env import _environment_concretize, _environment_create
from spack.version import Version
from spack.spec import Spec
from spack.main import SpackCommand
# everything here uses the mock_env_path
pytestmark = pytest.mark.usefixtures(
'mock_env_path', 'config', 'mutable_mock_packages')
'mutable_mock_env_path', 'config', 'mutable_mock_packages')
env = SpackCommand('env')
def test_add():
c = ev.Environment('test')
c.add('mpileaks')
assert 'mpileaks' in c.user_specs
e = ev.Environment('test')
e.add('mpileaks')
assert Spec('mpileaks') in e.user_specs
def test_env_list():
env('create', 'foo')
env('create', 'bar')
env('create', 'baz')
out = env('list')
assert 'foo' in out
assert 'bar' in out
assert 'baz' in out
def test_env_destroy():
env('create', 'foo')
env('create', 'bar')
out = env('list')
assert 'foo' in out
assert 'bar' in out
env('destroy', '-y', 'foo')
out = env('list')
assert 'foo' not in out
assert 'bar' in out
env('destroy', '-y', 'bar')
out = env('list')
assert 'foo' not in out
assert 'bar' not in out
def test_concretize():
c = ev.Environment('test')
c.add('mpileaks')
c.concretize()
env_specs = c._get_environment_specs()
e = ev.Environment('test')
e.add('mpileaks')
e.concretize()
env_specs = e._get_environment_specs()
assert any(x.name == 'mpileaks' for x in env_specs)
def test_env_install(install_mockery, mock_fetch):
c = ev.Environment('test')
c.add('cmake-client')
c.concretize()
c.install()
env_specs = c._get_environment_specs()
e = ev.Environment('test')
e.add('cmake-client')
e.concretize()
e.install()
env_specs = e._get_environment_specs()
spec = next(x for x in env_specs if x.name == 'cmake-client')
assert spec.package.installed
def test_remove_after_concretize():
c = ev.Environment('test')
c.add('mpileaks')
c.concretize()
c.add('python')
c.concretize()
c.remove('mpileaks')
env_specs = c._get_environment_specs()
e = ev.Environment('test')
e.add('mpileaks')
e.concretize()
e.add('python')
e.concretize()
e.remove('mpileaks')
env_specs = e._get_environment_specs()
assert not any(x.name == 'mpileaks' for x in env_specs)
def test_reset_compiler():
c = ev.Environment('test')
c.add('mpileaks')
c.concretize()
def test_remove_command():
env('create', 'test')
first_spec = c.specs_by_hash[c.concretized_order[0]]
env('add', '-e', 'test', 'mpileaks')
assert 'mpileaks' in env('status', 'test')
env('remove', '-e', 'test', 'mpileaks')
assert 'mpileaks' not in env('status', 'test')
env('add', '-e', 'test', 'mpileaks')
assert 'mpileaks' in env('status', 'test')
env('concretize', 'test')
assert 'mpileaks' in env('status', 'test')
env('remove', '-e', 'test', 'mpileaks')
assert 'mpileaks' not in env('status', 'test')
def test_reset_compiler():
e = ev.Environment('test')
e.add('mpileaks')
e.concretize()
first_spec = e.specs_by_hash[e.concretized_order[0]]
available = set(['gcc', 'clang'])
available.remove(first_spec.compiler.name)
new_compiler = next(iter(available))
c.reset_os_and_compiler(compiler=new_compiler)
e.reset_os_and_compiler(compiler=new_compiler)
new_spec = c.specs_by_hash[c.concretized_order[0]]
new_spec = e.specs_by_hash[e.concretized_order[0]]
assert new_spec.compiler != first_spec.compiler
def test_environment_list():
c = ev.Environment('test')
c.add('mpileaks')
c.concretize()
c.add('python')
def test_environment_status():
e = ev.Environment('test')
e.add('mpileaks')
e.concretize()
e.add('python')
mock_stream = StringIO()
c.list(mock_stream)
e.status(mock_stream)
list_content = mock_stream.getvalue()
assert 'mpileaks' in list_content
assert 'python' in list_content
mpileaks_spec = c.specs_by_hash[c.concretized_order[0]]
mpileaks_spec = e.specs_by_hash[e.concretized_order[0]]
assert mpileaks_spec.format() in list_content
def test_upgrade_dependency():
c = ev.Environment('test')
c.add('mpileaks ^callpath@0.9')
c.concretize()
e = ev.Environment('test')
e.add('mpileaks ^callpath@0.9')
e.concretize()
c.upgrade_dependency('callpath')
env_specs = c._get_environment_specs()
e.upgrade_dependency('callpath')
env_specs = e._get_environment_specs()
callpath_dependents = list(x for x in env_specs if 'callpath' in x)
assert callpath_dependents
for spec in callpath_dependents:
assert spec['callpath'].version == Version('1.0')
def test_init_config():
test_config = """\
user_specs:
- mpileaks
packages:
mpileaks:
version: [2.2]
def test_to_lockfile_dict():
e = ev.Environment('test')
e.add('mpileaks')
e.concretize()
context_dict = e._to_lockfile_dict()
e_copy = ev.Environment('test_copy')
e_copy._read_lockfile_dict(context_dict)
assert e.specs_by_hash == e_copy.specs_by_hash
def test_env_repo():
e = ev.Environment('testx')
e.add('mpileaks')
_environment_concretize(e)
package = e.repo.get(spack.spec.Spec('mpileaks'))
assert package.namespace == 'spack.pkg.builtin.mock'
def test_user_removed_spec():
"""Ensure a user can remove from any position in the env.yaml file."""
initial_yaml = """\
env:
specs:
- mpileaks
- hypre
- libelf
"""
spack.package_prefs.PackagePrefs._packages_config_cache = None
spack.package_prefs.PackagePrefs._spec_cache = {}
before = ev.Environment('test', initial_yaml)
before.concretize()
before.write()
# user modifies yaml externally to spack and removes hypre
with open(before.manifest_path, 'w') as f:
f.write("""\
env:
specs:
- mpileaks
- libelf
""")
after = ev.read('test')
after.concretize()
after.write()
env_specs = after._get_environment_specs()
read = ev.read('test')
env_specs = read._get_environment_specs()
assert not any(x.name == 'hypre' for x in env_specs)
def test_init_with_file_and_remove(tmpdir):
"""Ensure a user can remove from any position in the env.yaml file."""
path = tmpdir.join('spack.yaml')
with tmpdir.as_cwd():
with open(str(path), 'w') as f:
f.write("""\
env:
specs:
- mpileaks
""")
env('create', 'test', 'spack.yaml')
out = env('list')
assert 'test' in out
out = env('status', 'test')
assert 'mpileaks' in out
env('destroy', '-y', 'test')
out = env('list')
assert 'test' not in out
def test_env_with_config():
test_config = """\
env:
specs:
- mpileaks
packages:
mpileaks:
version: [2.2]
"""
spack.package_prefs.PackagePrefs.clear_caches()
_environment_create('test', test_config)
e = ev.read('test')
ev.prepare_config_scope(e)
e.concretize()
_environment_create('test', syaml.load(StringIO(test_config)))
c = ev.read('test')
ev.prepare_config_scope(c)
c.concretize()
assert any(x.satisfies('mpileaks@2.2')
for x in c._get_environment_specs())
for x in e._get_environment_specs())
def test_to_dict():
c = ev.Environment('test')
c.add('mpileaks')
c.concretize()
context_dict = c.to_dict()
c_copy = ev.Environment.from_dict('test_copy', context_dict)
assert c.specs_by_hash == c_copy.specs_by_hash
def test_env_with_included_config_file():
test_config = """\
env:
include:
- ./included-config.yaml
specs:
- mpileaks
"""
spack.package_prefs.PackagePrefs.clear_caches()
_environment_create('test', test_config)
e = ev.read('test')
print(e.path)
with open(os.path.join(e.path, 'included-config.yaml'), 'w') as f:
f.write("""\
packages:
mpileaks:
version: [2.2]
""")
ev.prepare_config_scope(e)
e.concretize()
assert any(x.satisfies('mpileaks@2.2')
for x in e._get_environment_specs())
def test_prepare_repo():
c = ev.Environment('testx')
c.add('mpileaks')
_environment_concretize(c)
repo = None
try:
repo = ev.prepare_repository(c)
package = repo.get(spack.spec.Spec('mpileaks'))
assert package.namespace.split('.')[-1] == 'testx'
finally:
if repo:
shutil.rmtree(repo.root)
def test_env_with_included_config_scope():
config_scope_path = os.path.join(ev.root('test'), 'config')
test_config = """\
env:
include:
- %s
specs:
- mpileaks
""" % config_scope_path
spack.package_prefs.PackagePrefs.clear_caches()
_environment_create('test', test_config)
e = ev.read('test')
fs.mkdirp(config_scope_path)
with open(os.path.join(config_scope_path, 'packages.yaml'), 'w') as f:
f.write("""\
packages:
mpileaks:
version: [2.2]
""")
ev.prepare_config_scope(e)
e.concretize()
assert any(x.satisfies('mpileaks@2.2')
for x in e._get_environment_specs())
def test_env_config_precedence():
test_config = """\
env:
packages:
libelf:
version: [0.8.12]
include:
- ./included-config.yaml
specs:
- mpileaks
"""
spack.package_prefs.PackagePrefs.clear_caches()
_environment_create('test', test_config)
e = ev.read('test')
print(e.path)
with open(os.path.join(e.path, 'included-config.yaml'), 'w') as f:
f.write("""\
packages:
mpileaks:
version: [2.2]
libelf:
version: [0.8.11]
""")
ev.prepare_config_scope(e)
e.concretize()
# ensure included scope took effect
assert any(
x.satisfies('mpileaks@2.2') for x in e._get_environment_specs())
# ensure env file takes precedence
assert any(
x.satisfies('libelf@0.8.12') for x in e._get_environment_specs())
def test_bad_env_yaml_format(tmpdir):
filename = str(tmpdir.join('spack.yaml'))
with open(filename, 'w') as f:
f.write("""\
env:
spacks:
- mpileaks
""")
with tmpdir.as_cwd():
with pytest.raises(spack.config.ConfigFormatError) as e:
env('create', 'test', './spack.yaml')
assert './spack.yaml:2' in str(e)
assert "'spacks' was unexpected" in str(e)
def test_env_loads(install_mockery, mock_fetch):
env('create', 'test')
env('add', '-e', 'test', 'mpileaks')
env('concretize', 'test')
env('install', '--fake', 'test')
env('loads', 'test')
e = ev.read('test')
loads_file = os.path.join(e.path, 'loads')
assert os.path.exists(loads_file)
with open(loads_file) as f:
contents = f.read()
assert 'module load mpileaks' in contents
@pytest.mark.disable_clean_stage_check
def test_env_stage(mock_stage, mock_fetch, install_mockery):
env('create', 'test')
env('add', '-e', 'test', 'mpileaks')
env('add', '-e', 'test', 'zmpi')
env('concretize', 'test')
env('stage', 'test')
root = str(mock_stage)
def check_stage(spec):
spec = Spec(spec).concretized()
for dep in spec.traverse():
stage_name = "%s-%s-%s" % (dep.name, dep.version, dep.dag_hash())
assert os.path.isdir(os.path.join(root, stage_name))
check_stage('mpileaks')
check_stage('zmpi')
def test_env_commands_die_with_no_env_arg():
# these fail in argparse when given no arg
with pytest.raises(SystemExit):
env('create')
with pytest.raises(SystemExit):
env('destroy')
# these have an optional env arg and raise errors via tty.die
with pytest.raises(spack.main.SpackCommandError):
env('concretize')
with pytest.raises(spack.main.SpackCommandError):
env('status')
with pytest.raises(spack.main.SpackCommandError):
env('loads')
with pytest.raises(spack.main.SpackCommandError):
env('stage')
with pytest.raises(spack.main.SpackCommandError):
env('install')
with pytest.raises(spack.main.SpackCommandError):
env('uninstall')
with pytest.raises(spack.main.SpackCommandError):
env('add')
with pytest.raises(spack.main.SpackCommandError):
env('remove')

View File

@ -663,11 +663,11 @@ def get_rev():
yield t
@pytest.fixture(scope='session')
def mock_env_path(tmpdir_factory):
@pytest.fixture()
def mutable_mock_env_path(tmpdir_factory):
"""Fixture for mocking the internal spack environments directory."""
saved_path = spack.environment.env_path
spack.environment.env_path = tmpdir_factory.mktemp('mock-env-path')
spack.environment.env_path = str(tmpdir_factory.mktemp('mock-env-path'))
yield spack.environment.env_path
spack.environment.env_path = saved_path

View File

@ -35,7 +35,7 @@ def quote(sequence, q="'"):
return ['%s%s%s' % (q, e, q) for e in sequence]
def plural(n, singular, plural=None):
def plural(n, singular, plural=None, show_n=True):
"""Pluralize <singular> word by adding an s if n != 1.
Arguments:
@ -43,13 +43,15 @@ def plural(n, singular, plural=None):
singular (str): singular form of word
plural (str, optional): optional plural form, for when it's not just
singular + 's'
show_n (bool): whether to include n in the result string (default True)
Returns:
(str): "1 thing" if n == 1 or "n things" if n != 1
"""
number = '%s ' % n if show_n else ''
if n == 1:
return "%d %s" % (n, singular)
return "%s%s" % (number, singular)
elif plural is not None:
return "%d %s" % (n, plural)
return "%s%s" % (number, plural)
else:
return "%d %ss" % (n, singular)
return "%s%ss" % (number, singular)