Merge branch 'develop' into features/shared
Conflicts: share/spack/spack-completion.bash
This commit is contained in:
@@ -4167,16 +4167,23 @@ want to clean up the temporary directory, or if the package isn't
|
||||
downloading properly, you might want to run *only* the ``fetch`` stage
|
||||
of the build.
|
||||
|
||||
Spack performs best-effort installation of package dependencies by default,
|
||||
which means it will continue to install as many dependencies as possible
|
||||
after detecting failures. If you are trying to install a package with a
|
||||
lot of dependencies where one or more may fail to build, you might want to
|
||||
try the ``--fail-fast`` option to stop the installation process on the first
|
||||
failure.
|
||||
|
||||
A typical package workflow might look like this:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack edit mypackage
|
||||
$ spack install mypackage
|
||||
$ spack install --fail-fast mypackage
|
||||
... build breaks! ...
|
||||
$ spack clean mypackage
|
||||
$ spack edit mypackage
|
||||
$ spack install mypackage
|
||||
$ spack install --fail-fast mypackage
|
||||
... repeat clean/install until install works ...
|
||||
|
||||
Below are some commands that will allow you some finer-grained
|
||||
|
||||
@@ -162,6 +162,18 @@ def clean_environment():
|
||||
if 'PKGCONF' in varname:
|
||||
env.unset(varname)
|
||||
|
||||
# Unset the following variables because they can affect installation of
|
||||
# Autotools and CMake packages.
|
||||
build_system_vars = [
|
||||
'CC', 'CFLAGS', 'CPP', 'CPPFLAGS', # C variables
|
||||
'CXX', 'CCC', 'CXXFLAGS', 'CXXCPP', # C++ variables
|
||||
'F77', 'FFLAGS', 'FLIBS', # Fortran77 variables
|
||||
'FC', 'FCFLAGS', 'FCLIBS', # Fortran variables
|
||||
'LDFLAGS', 'LIBS' # linker variables
|
||||
]
|
||||
for v in build_system_vars:
|
||||
env.unset(v)
|
||||
|
||||
build_lang = spack.config.get('config:build_language')
|
||||
if build_lang:
|
||||
# Override language-related variables. This can be used to force
|
||||
|
||||
@@ -32,6 +32,7 @@ def update_kwargs_from_args(args, kwargs):
|
||||
that will be passed to Package.do_install API"""
|
||||
|
||||
kwargs.update({
|
||||
'fail_fast': args.fail_fast,
|
||||
'keep_prefix': args.keep_prefix,
|
||||
'keep_stage': args.keep_stage,
|
||||
'restage': not args.dont_restage,
|
||||
@@ -80,6 +81,9 @@ def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'--overwrite', action='store_true',
|
||||
help="reinstall an existing spec, even if it has dependents")
|
||||
subparser.add_argument(
|
||||
'--fail-fast', action='store_true',
|
||||
help="stop all builds if any build fails (default is best effort)")
|
||||
subparser.add_argument(
|
||||
'--keep-prefix', action='store_true',
|
||||
help="don't remove the install prefix if installation fails")
|
||||
|
||||
@@ -289,15 +289,18 @@ def refresh(module_type, specs, args):
|
||||
msg = 'Nothing to be done for {0} module files.'
|
||||
tty.msg(msg.format(module_type))
|
||||
return
|
||||
|
||||
# If we arrived here we have at least one writer
|
||||
module_type_root = writers[0].layout.dirname()
|
||||
spack.modules.common.generate_module_index(module_type_root, writers)
|
||||
|
||||
# Proceed regenerating module files
|
||||
tty.msg('Regenerating {name} module files'.format(name=module_type))
|
||||
if os.path.isdir(module_type_root) and args.delete_tree:
|
||||
shutil.rmtree(module_type_root, ignore_errors=False)
|
||||
filesystem.mkdirp(module_type_root)
|
||||
|
||||
# Dump module index after potentially removing module tree
|
||||
spack.modules.common.generate_module_index(
|
||||
module_type_root, writers, overwrite=args.delete_tree)
|
||||
for x in writers:
|
||||
try:
|
||||
x.write(overwrite=True)
|
||||
|
||||
@@ -355,11 +355,13 @@ def _get_compiler_link_paths(self, paths):
|
||||
for flag_type in flags:
|
||||
for flag in self.flags.get(flag_type, []):
|
||||
compiler_exe.add_default_arg(flag)
|
||||
|
||||
output = ''
|
||||
with self._compiler_environment():
|
||||
output = str(compiler_exe(
|
||||
self.verbose_flag, fin, '-o', fout,
|
||||
output=str, error=str)) # str for py2
|
||||
return _parse_non_system_link_dirs(output)
|
||||
return _parse_non_system_link_dirs(output)
|
||||
except spack.util.executable.ProcessError as pe:
|
||||
tty.debug('ProcessError: Command exited with non-zero status: ' +
|
||||
pe.long_message)
|
||||
@@ -549,24 +551,27 @@ def _compiler_environment(self):
|
||||
# store environment to replace later
|
||||
backup_env = os.environ.copy()
|
||||
|
||||
# load modules and set env variables
|
||||
for module in self.modules:
|
||||
# On cray, mic-knl module cannot be loaded without cce module
|
||||
# See: https://github.com/spack/spack/issues/3153
|
||||
if os.environ.get("CRAY_CPU_TARGET") == 'mic-knl':
|
||||
spack.util.module_cmd.load_module('cce')
|
||||
spack.util.module_cmd.load_module(module)
|
||||
try:
|
||||
# load modules and set env variables
|
||||
for module in self.modules:
|
||||
# On cray, mic-knl module cannot be loaded without cce module
|
||||
# See: https://github.com/spack/spack/issues/3153
|
||||
if os.environ.get("CRAY_CPU_TARGET") == 'mic-knl':
|
||||
spack.util.module_cmd.load_module('cce')
|
||||
spack.util.module_cmd.load_module(module)
|
||||
|
||||
# apply other compiler environment changes
|
||||
env = spack.util.environment.EnvironmentModifications()
|
||||
env.extend(spack.schema.environment.parse(self.environment))
|
||||
env.apply_modifications()
|
||||
# apply other compiler environment changes
|
||||
env = spack.util.environment.EnvironmentModifications()
|
||||
env.extend(spack.schema.environment.parse(self.environment))
|
||||
env.apply_modifications()
|
||||
|
||||
yield
|
||||
|
||||
# Restore environment
|
||||
os.environ.clear()
|
||||
os.environ.update(backup_env)
|
||||
yield
|
||||
except BaseException:
|
||||
raise
|
||||
finally:
|
||||
# Restore environment regardless of whether inner code succeeded
|
||||
os.environ.clear()
|
||||
os.environ.update(backup_env)
|
||||
|
||||
|
||||
class CompilerAccessError(spack.error.SpackError):
|
||||
|
||||
@@ -549,6 +549,9 @@ def package_id(pkg):
|
||||
dirty (bool): Don't clean the build environment before installing.
|
||||
explicit (bool): True if package was explicitly installed, False
|
||||
if package was implicitly installed (as a dependency).
|
||||
fail_fast (bool): Fail if any dependency fails to install;
|
||||
otherwise, the default is to install as many dependencies as
|
||||
possible (i.e., best effort installation).
|
||||
fake (bool): Don't really build; install fake stub files instead.
|
||||
force (bool): Install again, even if already installed.
|
||||
install_deps (bool): Install dependencies before installing this
|
||||
@@ -1385,11 +1388,14 @@ def install(self, **kwargs):
|
||||
|
||||
Args:"""
|
||||
|
||||
fail_fast = kwargs.get('fail_fast', False)
|
||||
install_deps = kwargs.get('install_deps', True)
|
||||
keep_prefix = kwargs.get('keep_prefix', False)
|
||||
keep_stage = kwargs.get('keep_stage', False)
|
||||
restage = kwargs.get('restage', False)
|
||||
|
||||
fail_fast_err = 'Terminating after first install failure'
|
||||
|
||||
# install_package defaults True and is popped so that dependencies are
|
||||
# always installed regardless of whether the root was installed
|
||||
install_package = kwargs.pop('install_package', True)
|
||||
@@ -1449,6 +1455,10 @@ def install(self, **kwargs):
|
||||
if pkg_id in self.failed or spack.store.db.prefix_failed(spec):
|
||||
tty.warn('{0} failed to install'.format(pkg_id))
|
||||
self._update_failed(task)
|
||||
|
||||
if fail_fast:
|
||||
raise InstallError(fail_fast_err)
|
||||
|
||||
continue
|
||||
|
||||
# Attempt to get a write lock. If we can't get the lock then
|
||||
@@ -1530,14 +1540,28 @@ def install(self, **kwargs):
|
||||
self._update_installed(task)
|
||||
raise
|
||||
|
||||
except (Exception, KeyboardInterrupt, SystemExit) as exc:
|
||||
# Assuming best effort installs so suppress the exception and
|
||||
# mark as a failure UNLESS this is the explicit package.
|
||||
except KeyboardInterrupt as exc:
|
||||
# The build has been terminated with a Ctrl-C so terminate.
|
||||
err = 'Failed to install {0} due to {1}: {2}'
|
||||
tty.error(err.format(pkg.name, exc.__class__.__name__,
|
||||
str(exc)))
|
||||
raise
|
||||
|
||||
except (Exception, SystemExit) as exc:
|
||||
# Best effort installs suppress the exception and mark the
|
||||
# package as a failure UNLESS this is the explicit package.
|
||||
err = 'Failed to install {0} due to {1}: {2}'
|
||||
tty.error(err.format(pkg.name, exc.__class__.__name__,
|
||||
str(exc)))
|
||||
|
||||
self._update_failed(task, True, exc)
|
||||
|
||||
if fail_fast:
|
||||
# The user requested the installation to terminate on
|
||||
# failure.
|
||||
raise InstallError('{0}: {1}'
|
||||
.format(fail_fast_err, str(exc)))
|
||||
|
||||
if pkg_id == self.pkg_id:
|
||||
raise
|
||||
|
||||
|
||||
@@ -222,8 +222,15 @@ def root_path(name):
|
||||
return spack.util.path.canonicalize_path(path)
|
||||
|
||||
|
||||
def generate_module_index(root, modules):
|
||||
entries = syaml.syaml_dict()
|
||||
def generate_module_index(root, modules, overwrite=False):
|
||||
index_path = os.path.join(root, 'module-index.yaml')
|
||||
if overwrite or not os.path.exists(index_path):
|
||||
entries = syaml.syaml_dict()
|
||||
else:
|
||||
with open(index_path) as index_file:
|
||||
yaml_content = syaml.load(index_file)
|
||||
entries = yaml_content['module_index']
|
||||
|
||||
for m in modules:
|
||||
entry = {
|
||||
'path': m.layout.filename,
|
||||
@@ -231,7 +238,6 @@ def generate_module_index(root, modules):
|
||||
}
|
||||
entries[m.spec.dag_hash()] = entry
|
||||
index = {'module_index': entries}
|
||||
index_path = os.path.join(root, 'module-index.yaml')
|
||||
llnl.util.filesystem.mkdirp(root)
|
||||
with open(index_path, 'w') as index_file:
|
||||
syaml.dump(index, default_flow_style=False, stream=index_file)
|
||||
|
||||
@@ -14,6 +14,7 @@
|
||||
|
||||
module = spack.main.SpackCommand('module')
|
||||
|
||||
|
||||
#: make sure module files are generated for all the tests here
|
||||
@pytest.fixture(scope='module', autouse=True)
|
||||
def ensure_module_files_are_there(
|
||||
@@ -168,10 +169,10 @@ def test_loads_recursive_blacklisted(database, module_configuration):
|
||||
output = module('lmod', 'loads', '-r', 'mpileaks ^mpich')
|
||||
lines = output.split('\n')
|
||||
|
||||
assert any(re.match(r'[^#]*module load.*mpileaks', l) for l in lines)
|
||||
assert not any(re.match(r'[^#]module load.*callpath', l) for l in lines)
|
||||
assert any(re.match(r'## blacklisted or missing.*callpath', l)
|
||||
for l in lines)
|
||||
assert any(re.match(r'[^#]*module load.*mpileaks', ln) for ln in lines)
|
||||
assert not any(re.match(r'[^#]module load.*callpath', ln) for ln in lines)
|
||||
assert any(re.match(r'## blacklisted or missing.*callpath', ln)
|
||||
for ln in lines)
|
||||
|
||||
# TODO: currently there is no way to separate stdout and stderr when
|
||||
# invoking a SpackCommand. Supporting this requires refactoring
|
||||
|
||||
@@ -18,6 +18,7 @@
|
||||
import spack.compilers as compilers
|
||||
|
||||
from spack.compiler import Compiler
|
||||
from spack.util.executable import ProcessError
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
@@ -653,3 +654,62 @@ def module(*args):
|
||||
compiler = compilers[0]
|
||||
version = compiler.get_real_version()
|
||||
assert version == test_version
|
||||
|
||||
|
||||
def test_compiler_get_real_version_fails(working_env, monkeypatch, tmpdir):
    """``get_real_version()`` must leave ``os.environ`` untouched when the
    compiler invocation fails with a ``ProcessError``.

    Args:
        working_env: fixture that restores the environment after the test
        monkeypatch: pytest fixture for attribute patching
        tmpdir: pytest fixture providing a temporary directory
    """
    # Version the fake compiler would report on success
    test_version = '2.2.2'

    # Create a fake compiler script that only reports a version when CMP_ON
    # is set.
    # NOTE(review): the script reads $CMP_ON but the module stub below sets
    # SPACK_TEST_CMP_ON -- confirm which variable is intended; for this
    # failure test it is irrelevant because __call__ is patched to raise.
    gcc = str(tmpdir.join('gcc'))
    with open(gcc, 'w') as f:
        f.write("""#!/bin/bash
if [[ $CMP_ON == "1" ]]; then
    echo "$CMP_VER"
fi
""")
    fs.set_executable(gcc)

    # Register the fake compiler in the configuration
    compiler_info = {
        'spec': 'gcc@foo',
        'paths': {
            'cc': gcc,
            'cxx': None,
            'f77': None,
            'fc': None,
        },
        'flags': {},
        'operating_system': 'fake',
        'target': 'fake',
        'modules': ['turn_on'],
        'environment': {
            'set': {'CMP_VER': test_version},
        },
        'extra_rpaths': [],
    }
    compiler_dict = {'compiler': compiler_info}

    # Stub module command: loading the module "turns the compiler on" by
    # setting an environment marker we can check for leakage afterwards.
    def module(*args):
        if args[0] == 'show':
            return ''
        elif args[0] == 'load':
            os.environ['SPACK_TEST_CMP_ON'] = "1"
    monkeypatch.setattr(spack.util.module_cmd, 'module', module)

    # Make every compiler invocation fail
    def _call(*args, **kwargs):
        raise ProcessError("Failed intentionally")
    monkeypatch.setattr(spack.util.executable.Executable, '__call__', _call)

    compilers = spack.compilers.get_compilers([compiler_dict])
    assert len(compilers) == 1
    compiler = compilers[0]

    # Use pytest.raises instead of try/except + `assert False`: clearer
    # intent, and a bare assert would be stripped under `python -O`.
    with pytest.raises(ProcessError):
        compiler.get_real_version()

    # Confirm the environment does not change after the failed call
    assert 'SPACK_TEST_CMP_ON' not in os.environ
|
||||
|
||||
@@ -718,6 +718,61 @@ def test_install_failed(install_mockery, monkeypatch, capsys):
|
||||
assert 'Warning: b failed to install' in out
|
||||
|
||||
|
||||
def test_install_fail_on_interrupt(install_mockery, monkeypatch):
    """An install hit by Ctrl-C must terminate immediately."""
    err_msg = 'mock keyboard interrupt'

    def _raise_interrupt(installer, task, **kwargs):
        raise KeyboardInterrupt(err_msg)

    # Simulate the user interrupting the very first install task.
    monkeypatch.setattr(inst.PackageInstaller, '_install_task', _raise_interrupt)

    _, installer = create_installer('a')

    # The interrupt should propagate out of install() unchanged.
    with pytest.raises(KeyboardInterrupt, match=err_msg):
        installer.install()
|
||||
|
||||
|
||||
def test_install_fail_fast_on_detect(install_mockery, monkeypatch, capsys):
    """A fail_fast install must abort once a prior failure is detected."""
    # Report every prefix as already failed: dependency b cannot install,
    # which in turn causes the build of a to be skipped.
    monkeypatch.setattr(spack.database.Database, 'prefix_failed', _true)

    _, installer = create_installer('a')

    with pytest.raises(spack.installer.InstallError):
        installer.install(fail_fast=True)

    captured = str(capsys.readouterr())
    assert 'Skipping build of a' in captured
|
||||
|
||||
|
||||
def test_install_fail_fast_on_except(install_mockery, monkeypatch, capsys):
    """Test fail_fast install when an install failure results from an error."""
    err_msg = 'mock patch failure'

    def _patch(installer, task, **kwargs):
        raise RuntimeError(err_msg)

    spec, installer = create_installer('a')

    # Raise a non-KeyboardInterrupt exception to trigger fast failure.
    #
    # This will prevent b from installing, which will cause the build of a
    # to be skipped.
    monkeypatch.setattr(spack.package.PackageBase, 'do_patch', _patch)

    # Fix: ``pytest.raises`` takes ``match=``, not ``matches=``; with the
    # misspelled keyword the error message was never actually verified.
    with pytest.raises(spack.installer.InstallError, match=err_msg):
        installer.install(fail_fast=True)

    out = str(capsys.readouterr())
    assert 'Skipping build of a' in out
|
||||
|
||||
|
||||
def test_install_lock_failures(install_mockery, monkeypatch, capfd):
|
||||
"""Cover basic install lock failure handling in a single pass."""
|
||||
def _requeued(installer, task):
|
||||
|
||||
@@ -236,6 +236,7 @@ def test_module_index(
|
||||
|
||||
w1, s1 = factory('mpileaks')
|
||||
w2, s2 = factory('callpath')
|
||||
w3, s3 = factory('openblas')
|
||||
|
||||
test_root = str(tmpdir_factory.mktemp('module-root'))
|
||||
|
||||
@@ -246,6 +247,22 @@ def test_module_index(
|
||||
assert index[s1.dag_hash()].use_name == w1.layout.use_name
|
||||
assert index[s2.dag_hash()].path == w2.layout.filename
|
||||
|
||||
spack.modules.common.generate_module_index(test_root, [w3])
|
||||
|
||||
index = spack.modules.common.read_module_index(test_root)
|
||||
|
||||
assert len(index) == 3
|
||||
assert index[s1.dag_hash()].use_name == w1.layout.use_name
|
||||
assert index[s2.dag_hash()].path == w2.layout.filename
|
||||
|
||||
spack.modules.common.generate_module_index(
|
||||
test_root, [w3], overwrite=True)
|
||||
|
||||
index = spack.modules.common.read_module_index(test_root)
|
||||
|
||||
assert len(index) == 1
|
||||
assert index[s3.dag_hash()].use_name == w3.layout.use_name
|
||||
|
||||
def test_suffixes(self, module_configuration, factory):
|
||||
"""Tests adding suffixes to module file name."""
|
||||
module_configuration('suffix')
|
||||
|
||||
@@ -27,9 +27,29 @@ def test_chmod_real_entries_ignores_suid_sgid(tmpdir):
|
||||
|
||||
def test_chmod_rejects_group_writable_suid(tmpdir):
|
||||
path = str(tmpdir.join('file').ensure())
|
||||
mode = stat.S_ISUID | stat.S_ISGID | stat.S_ISVTX
|
||||
mode = stat.S_ISUID
|
||||
fs.chmod_x(path, mode)
|
||||
|
||||
perms = stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO
|
||||
perms = stat.S_IWGRP
|
||||
with pytest.raises(InvalidPermissionsError):
|
||||
set_permissions(path, perms)
|
||||
|
||||
|
||||
def test_chmod_rejects_world_writable_suid(tmpdir):
    """Requesting world-write permissions on an suid file must be rejected."""
    target = str(tmpdir.join('file').ensure())
    # Mark the file setuid first ...
    fs.chmod_x(target, stat.S_ISUID)

    # ... then ask for world-writable permissions, which must fail.
    with pytest.raises(InvalidPermissionsError):
        set_permissions(target, stat.S_IWOTH)
|
||||
|
||||
|
||||
def test_chmod_rejects_world_writable_sgid(tmpdir):
    """Requesting world-write permissions on an sgid file must be rejected."""
    target = str(tmpdir.join('file').ensure())
    # Mark the file setgid first ...
    fs.chmod_x(target, stat.S_ISGID)

    # ... then ask for world-writable permissions, which must fail.
    with pytest.raises(InvalidPermissionsError):
        set_permissions(target, stat.S_IWOTH)
|
||||
|
||||
@@ -337,7 +337,8 @@ def test_make_elf_binaries_relative(hello_world, copy_binary, tmpdir):
|
||||
[str(new_binary)], [str(orig_binary)], str(orig_binary.dirpath())
|
||||
)
|
||||
|
||||
assert rpaths_for(new_binary) == '$ORIGIN/lib:$ORIGIN/lib64:/opt/local/lib'
|
||||
# Some compilers add rpaths so ensure changes included in final result
|
||||
assert '$ORIGIN/lib:$ORIGIN/lib64:/opt/local/lib' in rpaths_for(new_binary)
|
||||
|
||||
|
||||
def test_raise_if_not_relocatable(monkeypatch):
|
||||
|
||||
@@ -376,7 +376,7 @@ def unset(self, name, **kwargs):
|
||||
"""Stores a request to unset an environment variable.
|
||||
|
||||
Args:
|
||||
name: name of the environment variable to be set
|
||||
name: name of the environment variable to be unset
|
||||
"""
|
||||
kwargs.update(self._get_outside_caller_attributes())
|
||||
item = UnsetEnv(name, **kwargs)
|
||||
|
||||
@@ -25,10 +25,19 @@ def set_permissions(path, perms, group=None):
|
||||
# Preserve higher-order bits of file permissions
|
||||
perms |= os.stat(path).st_mode & (st.S_ISUID | st.S_ISGID | st.S_ISVTX)
|
||||
|
||||
# Do not let users create world writable suid binaries
|
||||
if perms & st.S_ISUID and perms & st.S_IWGRP:
|
||||
raise InvalidPermissionsError(
|
||||
"Attepting to set suid with world writable")
|
||||
# Do not let users create world/group writable suid binaries
|
||||
if perms & st.S_ISUID:
|
||||
if perms & st.S_IWOTH:
|
||||
raise InvalidPermissionsError(
|
||||
"Attempting to set suid with world writable")
|
||||
if perms & st.S_IWGRP:
|
||||
raise InvalidPermissionsError(
|
||||
"Attempting to set suid with group writable")
|
||||
# Or world writable sgid binaries
|
||||
if perms & st.S_ISGID:
|
||||
if perms & st.S_IWOTH:
|
||||
raise InvalidPermissionsError(
|
||||
"Attempting to set sgid with world writable")
|
||||
|
||||
fs.chmod_x(path, perms)
|
||||
|
||||
|
||||
Reference in New Issue
Block a user