Compare commits: v0.15.1-sh ... v0.15.3
25 commits
0f25462ea6
ae4bbbd241
24bd9e3039
0efb8ef412
69775fcc07
ce772420dd
9cc01dc574
8d8cf6201b
d6d839cd3e
3534717151
e289d481ea
ed8250e055
40cd845479
3b45241566
d5b0f85ea3
c6241e72a6
f528022a7d
665a47607e
12958497dc
3a8bc7ffc6
9cbe358f84
27af499b52
24dff9cf20
e4265d3135
5e5cc99147
CHANGELOG.md (31 additions)
```diff
@@ -1,3 +1,34 @@
+# v0.15.3 (2020-07-28)
+
+This release contains the following bugfixes:
+
+* Fix handling of relative view paths (#17721)
+* Fixes for binary relocation (#17418, #17455)
+* Fix redundant printing of error messages in build environment (#17709)
+
+It also adds a support script for Spack tutorials:
+
+* Add a tutorial setup script to share/spack (#17705, #17722)
+
+# v0.15.2 (2020-07-23)
+
+This minor release includes two new features:
+
+* Spack install verbosity is decreased, and more debug levels are added (#17546)
+* The $spack/share/spack/keys directory contains public keys that may be optionally trusted for public binary mirrors (#17684)
+
+This release also includes several important fixes:
+
+* MPICC and related variables are now cleaned in the build environment (#17450)
+* LLVM flang only builds CUDA offload components when +cuda (#17466)
+* CI pipelines no longer upload user environments that can contain secrets to the internet (#17545)
+* CI pipelines add bootstrapped compilers to the compiler config (#17536)
+* `spack buildcache list` does not exit on first failure and lists later mirrors (#17565)
+* Apple's "gcc" executable that is an apple-clang compiler does not generate a gcc compiler config (#17589)
+* Mixed compiler toolchains are merged more naturally across different compiler suffixes (#17590)
+* Cray Shasta platforms detect the OS properly (#17467)
+* Additional minor fixes.
+
 # v0.15.1 (2020-07-10)
 
 This minor release includes several important fixes:
```
```diff
@@ -174,8 +174,9 @@ def _lock(self, op, timeout=None):
             # If the file were writable, we'd have opened it 'r+'
             raise LockROFileError(self.path)
 
-        tty.debug("{0} locking [{1}:{2}]: timeout {3} sec"
-                  .format(lock_type[op], self._start, self._length, timeout))
+        self._log_debug("{0} locking [{1}:{2}]: timeout {3} sec"
+                        .format(lock_type[op], self._start, self._length,
+                                timeout))
 
         poll_intervals = iter(Lock._poll_interval_generator())
         start_time = time.time()
@@ -211,14 +212,14 @@ def _poll_lock(self, op):
             # help for debugging distributed locking
             if self.debug:
                 # All locks read the owner PID and host
-                self._read_debug_data()
-                tty.debug('{0} locked {1} [{2}:{3}] (owner={4})'
-                          .format(lock_type[op], self.path,
-                                  self._start, self._length, self.pid))
+                self._read_log_debug_data()
+                self._log_debug('{0} locked {1} [{2}:{3}] (owner={4})'
+                                .format(lock_type[op], self.path,
+                                        self._start, self._length, self.pid))
 
                 # Exclusive locks write their PID/host
                 if op == fcntl.LOCK_EX:
-                    self._write_debug_data()
+                    self._write_log_debug_data()
 
             return True
 
@@ -245,7 +246,7 @@ def _ensure_parent_directory(self):
                 raise
         return parent
 
-    def _read_debug_data(self):
+    def _read_log_debug_data(self):
        """Read PID and host data out of the file if it is there."""
         self.old_pid = self.pid
         self.old_host = self.host
@@ -257,7 +258,7 @@ def _read_debug_data(self):
             _, _, self.host = host.rpartition('=')
             self.pid = int(self.pid)
 
-    def _write_debug_data(self):
+    def _write_log_debug_data(self):
         """Write PID and host data to the file, recording old values."""
         self.old_pid = self.pid
         self.old_host = self.host
@@ -473,9 +474,6 @@ def release_write(self, release_fn=None):
         else:
             return False
 
-    def _debug(self, *args):
-        tty.debug(*args)
-
     def _get_counts_desc(self):
         return '(reads {0}, writes {1})'.format(self._reads, self._writes) \
             if tty.is_verbose() else ''
```
```diff
@@ -484,58 +482,50 @@ def _log_acquired(self, locktype, wait_time, nattempts):
         attempts_part = _attempts_str(wait_time, nattempts)
         now = datetime.now()
         desc = 'Acquired at %s' % now.strftime("%H:%M:%S.%f")
-        self._debug(self._status_msg(locktype, '{0}{1}'.
-                    format(desc, attempts_part)))
+        self._log_debug(self._status_msg(locktype, '{0}{1}'
+                        .format(desc, attempts_part)))
 
     def _log_acquiring(self, locktype):
-        self._debug2(self._status_msg(locktype, 'Acquiring'))
+        self._log_debug(self._status_msg(locktype, 'Acquiring'), level=3)
+
+    def _log_debug(self, *args, **kwargs):
+        """Output lock debug messages."""
+        kwargs['level'] = kwargs.get('level', 2)
+        tty.debug(*args, **kwargs)
 
     def _log_downgraded(self, wait_time, nattempts):
         attempts_part = _attempts_str(wait_time, nattempts)
         now = datetime.now()
         desc = 'Downgraded at %s' % now.strftime("%H:%M:%S.%f")
-        self._debug(self._status_msg('READ LOCK', '{0}{1}'
-                    .format(desc, attempts_part)))
+        self._log_debug(self._status_msg('READ LOCK', '{0}{1}'
+                        .format(desc, attempts_part)))
 
     def _log_downgrading(self):
-        self._debug2(self._status_msg('WRITE LOCK', 'Downgrading'))
+        self._log_debug(self._status_msg('WRITE LOCK', 'Downgrading'), level=3)
 
     def _log_released(self, locktype):
         now = datetime.now()
         desc = 'Released at %s' % now.strftime("%H:%M:%S.%f")
-        self._debug(self._status_msg(locktype, desc))
+        self._log_debug(self._status_msg(locktype, desc))
 
     def _log_releasing(self, locktype):
-        self._debug2(self._status_msg(locktype, 'Releasing'))
+        self._log_debug(self._status_msg(locktype, 'Releasing'), level=3)
 
     def _log_upgraded(self, wait_time, nattempts):
         attempts_part = _attempts_str(wait_time, nattempts)
         now = datetime.now()
         desc = 'Upgraded at %s' % now.strftime("%H:%M:%S.%f")
-        self._debug(self._status_msg('WRITE LOCK', '{0}{1}'.
-                    format(desc, attempts_part)))
+        self._log_debug(self._status_msg('WRITE LOCK', '{0}{1}'.
+                        format(desc, attempts_part)))
 
     def _log_upgrading(self):
-        self._debug2(self._status_msg('READ LOCK', 'Upgrading'))
+        self._log_debug(self._status_msg('READ LOCK', 'Upgrading'), level=3)
 
     def _status_msg(self, locktype, status):
         status_desc = '[{0}] {1}'.format(status, self._get_counts_desc())
         return '{0}{1.desc}: {1.path}[{1._start}:{1._length}] {2}'.format(
             locktype, self, status_desc)
 
-    def _debug2(self, *args):
-        # TODO: Easy place to make a single, temporary change to the
-        # TODO: debug level associated with the more detailed messages.
-        # TODO:
-        # TODO: Someday it would be great if we could switch this to
-        # TODO: another level, perhaps _between_ debug and verbose, or
-        # TODO: some other form of filtering so the first level of
-        # TODO: debugging doesn't have to generate these messages. Using
-        # TODO: verbose here did not work as expected because tests like
-        # TODO: test_spec_json will write the verbose messages to the
-        # TODO: output that is used to check test correctness.
-        tty.debug(*args)
-
 
 class LockTransaction(object):
     """Simple nested transaction context manager that uses a file lock.
```
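All lock logging above now funnels through a single `_log_debug` helper that defaults to debug level 2, with the noisiest transitions opting into level 3. A minimal sketch of that pattern in isolation (assuming `llnl.util.tty` from a Spack checkout is importable; the class here is a hypothetical stand-in, not Spack's `Lock`):

```python
import llnl.util.tty as tty

class FileLock(object):
    """Hypothetical stand-in showing the logging pattern above."""

    def _log_debug(self, *args, **kwargs):
        # One helper owns the default level: routine lock chatter only
        # appears at debug level 2 (spack -dd) and beyond.
        kwargs['level'] = kwargs.get('level', 2)
        tty.debug(*args, **kwargs)

    def _log_acquiring(self, locktype):
        # Extra-noisy begin/end transitions opt into level 3 (spack -ddd).
        self._log_debug('{0} Acquiring'.format(locktype), level=3)
```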
```diff
@@ -19,7 +19,8 @@
 
 from llnl.util.tty.color import cprint, cwrite, cescape, clen
 
-_debug = False
+# Globals
+_debug = 0
 _verbose = False
 _stacktrace = False
 _timestamp = False
@@ -29,21 +30,26 @@
 indent = "  "
 
 
+def debug_level():
+    return _debug
+
+
 def is_verbose():
     return _verbose
 
 
-def is_debug():
-    return _debug
+def is_debug(level=1):
+    return _debug >= level
 
 
 def is_stacktrace():
     return _stacktrace
 
 
-def set_debug(flag):
+def set_debug(level=0):
     global _debug
-    _debug = flag
+    assert level >= 0, 'Debug level must be a positive value'
+    _debug = level
 
 
 def set_verbose(flag):
@@ -132,12 +138,17 @@ def process_stacktrace(countback):
     return st_text
 
 
+def show_pid():
+    return is_debug(2)
+
+
 def get_timestamp(force=False):
     """Get a string timestamp"""
     if _debug or _timestamp or force:
         # Note inclusion of the PID is useful for parallel builds.
-        return '[{0}, {1}] '.format(
-            datetime.now().strftime("%Y-%m-%d-%H:%M:%S.%f"), os.getpid())
+        pid = ', {0}'.format(os.getpid()) if show_pid() else ''
+        return '[{0}{1}] '.format(
+            datetime.now().strftime("%Y-%m-%d-%H:%M:%S.%f"), pid)
     else:
         return ''
```
```diff
@@ -197,7 +208,8 @@ def verbose(message, *args, **kwargs):
 
 
 def debug(message, *args, **kwargs):
-    if _debug:
+    level = kwargs.get('level', 1)
+    if is_debug(level):
         kwargs.setdefault('format', 'g')
         kwargs.setdefault('stream', sys.stderr)
         info(message, *args, **kwargs)
```
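Taken together, the hunks above replace the boolean debug flag with an integer level: `set_debug()` stores a level, `is_debug(level)` compares against it, and each `debug()` call declares the level at which it becomes visible. A quick sketch of the resulting behavior (assuming `llnl.util.tty` from a Spack checkout is importable):

```python
import llnl.util.tty as tty

tty.set_debug(2)  # e.g., the level `spack -dd` would set

tty.debug('shown: default level is 1')             # 1 <= 2
tty.debug('shown: level 2 message', level=2)       # 2 <= 2
tty.debug('suppressed: level 3 message', level=3)  # 3 >  2

assert tty.is_debug()   # debug level >= 1
assert tty.is_debug(2)  # debug level >= 2
assert not tty.is_debug(3)
```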
```diff
@@ -323,14 +323,14 @@ class log_output(object):
     work within test frameworks like nose and pytest.
     """
 
-    def __init__(self, file_like=None, echo=False, debug=False, buffer=False):
+    def __init__(self, file_like=None, echo=False, debug=0, buffer=False):
         """Create a new output log context manager.
 
         Args:
             file_like (str or stream): open file object or name of file where
                 output should be logged
             echo (bool): whether to echo output in addition to logging it
-            debug (bool): whether to enable tty debug mode during logging
+            debug (int): positive to enable tty debug mode during logging
             buffer (bool): pass buffer=True to skip unbuffering output; note
                 this doesn't set up any *new* buffering
 
@@ -355,7 +355,7 @@ def __init__(self, file_like=None, echo=False, debug=False, buffer=False):
         self._active = False  # used to prevent re-entry
 
     def __call__(self, file_like=None, echo=None, debug=None, buffer=None):
-        """Thie behaves the same as init. It allows a logger to be reused.
+        """This behaves the same as init. It allows a logger to be reused.
 
         Arguments are the same as for ``__init__()``. Args here take
         precedence over those passed to ``__init__()``.
```
```diff
@@ -5,7 +5,7 @@
 
 
 #: major, minor, patch version for Spack, in a tuple
-spack_version_info = (0, 15, 1)
+spack_version_info = (0, 15, 3)
 
 #: String containing Spack version joined with .'s
 spack_version = '.'.join(str(v) for v in spack_version_info)
```
```diff
@@ -466,8 +466,8 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
         web_util.push_to_url(
             specfile_path, remote_specfile_path, keep_original=False)
 
-    tty.msg('Buildache for "%s" written to \n   %s' %
-            (spec, remote_spackfile_path))
+    tty.debug('Buildcache for "{0}" written to \n   {1}'
+              .format(spec, remote_spackfile_path))
 
     try:
         # create an index.html for the build_cache directory so specs can be
@@ -602,15 +602,11 @@ def is_backup_file(file):
         if not is_backup_file(text_name):
             text_names.append(text_name)
 
-    # If we are installing back to the same location don't replace anything
+    # If we are not installing back to the same install tree do the relocation
     if old_layout_root != new_layout_root:
-        paths_to_relocate = [old_spack_prefix, old_layout_root]
-        paths_to_relocate.extend(prefix_to_hash.keys())
-        files_to_relocate = list(filter(
-            lambda pathname: not relocate.file_is_relocatable(
-                pathname, paths_to_relocate=paths_to_relocate),
-            map(lambda filename: os.path.join(workdir, filename),
-                buildinfo['relocate_binaries'])))
+        files_to_relocate = [os.path.join(workdir, filename)
+                             for filename in buildinfo.get('relocate_binaries')
+                             ]
         # If the buildcache was not created with relativized rpaths
         # do the relocation of path in binaries
         if (spec.architecture.platform == 'darwin' or
@@ -646,6 +642,13 @@ def is_backup_file(file):
                                    new_spack_prefix,
                                    prefix_to_prefix)
 
+        paths_to_relocate = [old_prefix, old_layout_root]
+        paths_to_relocate.extend(prefix_to_hash.keys())
+        files_to_relocate = list(filter(
+            lambda pathname: not relocate.file_is_relocatable(
+                pathname, paths_to_relocate=paths_to_relocate),
+            map(lambda filename: os.path.join(workdir, filename),
+                buildinfo['relocate_binaries'])))
         # relocate the install prefixes in binary files including dependencies
         relocate.relocate_text_bin(files_to_relocate,
                                    old_prefix, new_prefix,
@@ -653,6 +656,17 @@ def is_backup_file(file):
                                    new_spack_prefix,
                                    prefix_to_prefix)
 
+    # If we are installing back to the same location
+    # relocate the sbang location if the spack directory changed
+    else:
+        if old_spack_prefix != new_spack_prefix:
+            relocate.relocate_text(text_names,
+                                   old_layout_root, new_layout_root,
+                                   old_prefix, new_prefix,
+                                   old_spack_prefix,
+                                   new_spack_prefix,
+                                   prefix_to_prefix)
+
 
 def extract_tarball(spec, filename, allow_root=False, unsigned=False,
                     force=False):
```
```diff
@@ -828,13 +842,13 @@ def get_spec(spec=None, force=False):
 
         mirror_dir = url_util.local_file_path(fetch_url_build_cache)
         if mirror_dir:
-            tty.msg("Finding buildcaches in %s" % mirror_dir)
+            tty.debug('Finding buildcaches in {0}'.format(mirror_dir))
             link = url_util.join(fetch_url_build_cache, specfile_name)
             urls.add(link)
 
         else:
-            tty.msg("Finding buildcaches at %s" %
-                    url_util.format(fetch_url_build_cache))
+            tty.debug('Finding buildcaches at {0}'
+                      .format(url_util.format(fetch_url_build_cache)))
             link = url_util.join(fetch_url_build_cache, specfile_name)
             urls.add(link)
 
@@ -857,8 +871,8 @@ def get_specs(allarch=False):
         fetch_url_build_cache = url_util.join(
             mirror.fetch_url, _build_cache_relative_path)
 
-        tty.msg("Finding buildcaches at %s" %
-                url_util.format(fetch_url_build_cache))
+        tty.debug('Finding buildcaches at {0}'
+                  .format(url_util.format(fetch_url_build_cache)))
 
         index_url = url_util.join(fetch_url_build_cache, 'index.json')
 
@@ -869,8 +883,8 @@ def get_specs(allarch=False):
         except (URLError, web_util.SpackWebError) as url_err:
             tty.error('Failed to read index {0}'.format(index_url))
             tty.debug(url_err)
-            # Just return whatever specs we may already have cached
-            return _cached_specs
+            # Continue on to the next mirror
+            continue
 
         tmpdir = tempfile.mkdtemp()
         index_file_path = os.path.join(tmpdir, 'index.json')
```
```diff
@@ -909,15 +923,15 @@ def get_keys(install=False, trust=False, force=False):
 
         mirror_dir = url_util.local_file_path(fetch_url_build_cache)
         if mirror_dir:
-            tty.msg("Finding public keys in %s" % mirror_dir)
+            tty.debug('Finding public keys in {0}'.format(mirror_dir))
             files = os.listdir(str(mirror_dir))
             for file in files:
                 if re.search(r'\.key', file) or re.search(r'\.pub', file):
                     link = url_util.join(fetch_url_build_cache, file)
                     keys.add(link)
         else:
-            tty.msg("Finding public keys at %s" %
-                    url_util.format(fetch_url_build_cache))
+            tty.debug('Finding public keys at {0}'
+                      .format(url_util.format(fetch_url_build_cache)))
             # For s3 mirror need to request index.html directly
             p, links = web_util.spider(
                 url_util.join(fetch_url_build_cache, 'index.html'))
@@ -935,14 +949,14 @@ def get_keys(install=False, trust=False, force=False):
                 stage.fetch()
             except fs.FetchError:
                 continue
-        tty.msg('Found key %s' % link)
+        tty.debug('Found key {0}'.format(link))
         if install:
             if trust:
                 Gpg.trust(stage.save_filename)
-                tty.msg('Added this key to trusted keys.')
+                tty.debug('Added this key to trusted keys.')
             else:
-                tty.msg('Will not add this key to trusted keys.'
-                        'Use -t to install all downloaded keys')
+                tty.debug('Will not add this key to trusted keys.'
+                          'Use -t to install all downloaded keys')
 
 
 def needs_rebuild(spec, mirror_url, rebuild_on_errors=False):
@@ -1029,7 +1043,7 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None,
     """
     rebuilds = {}
     for mirror in spack.mirror.MirrorCollection(mirrors).values():
-        tty.msg('Checking for built specs at %s' % mirror.fetch_url)
+        tty.debug('Checking for built specs at {0}'.format(mirror.fetch_url))
 
         rebuild_list = []
 
```
```diff
@@ -174,6 +174,14 @@ def clean_environment():
     for v in build_system_vars:
         env.unset(v)
 
+    # Unset mpi environment vars. These flags should only be set by
+    # mpi providers for packages with mpi dependencies
+    mpi_vars = [
+        'MPICC', 'MPICXX', 'MPIFC', 'MPIF77', 'MPIF90'
+    ]
+    for v in mpi_vars:
+        env.unset(v)
+
     build_lang = spack.config.get('config:build_language')
     if build_lang:
         # Override language-related variables. This can be used to force
```
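The new block above queues `env.unset()` modifications for the MPI wrapper variables so that only MPI providers can set them. As a simplified, hypothetical analogue with plain `os.environ` (Spack's `env` object records modifications rather than applying them immediately):

```python
import os

# Hypothetical simplified analogue of the env.unset() calls above.
mpi_vars = ['MPICC', 'MPICXX', 'MPIFC', 'MPIF77', 'MPIF90']
for v in mpi_vars:
    os.environ.pop(v, None)  # no error if a variable is absent
```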
```diff
@@ -613,7 +613,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
             debug_flag = '-d '
 
         job_scripts = [
-            'spack env activate .',
+            'spack env activate --without-view .',
            'spack {0}ci rebuild'.format(debug_flag),
         ]
 
@@ -1043,17 +1043,10 @@ def copy_stage_logs_to_artifacts(job_spec, job_log_dir):
         tty.debug('job package: {0}'.format(job_pkg))
         stage_dir = job_pkg.stage.path
         tty.debug('stage dir: {0}'.format(stage_dir))
-        build_env_src = os.path.join(stage_dir, 'spack-build-env.txt')
         build_out_src = os.path.join(stage_dir, 'spack-build-out.txt')
-        build_env_dst = os.path.join(
-            job_log_dir, 'spack-build-env.txt')
         build_out_dst = os.path.join(
             job_log_dir, 'spack-build-out.txt')
-        tty.debug('Copying logs to artifacts:')
-        tty.debug('  1: {0} -> {1}'.format(
-            build_env_src, build_env_dst))
-        shutil.copyfile(build_env_src, build_env_dst)
-        tty.debug('  2: {0} -> {1}'.format(
+        tty.debug('Copying build log ({0}) to artifacts ({1})'.format(
             build_out_src, build_out_dst))
         shutil.copyfile(build_out_src, build_out_dst)
     except Exception as inst:
```
```diff
@@ -351,6 +351,9 @@ def env_status(args):
                     % (ev.manifest_name, env.path))
         else:
             tty.msg('In environment %s' % env.name)
+
+        # Check if environment views can be safely activated
+        env.check_views()
     else:
         tty.msg('No active environment')
 
```
```diff
@@ -28,7 +28,7 @@
 
 
 @llnl.util.lang.memoized
-def get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
+def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
     """Invokes the compiler at a given path passing a single
     version argument and returns the output.
 
@@ -42,6 +42,18 @@ def get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
     return output
 
 
+def get_compiler_version_output(compiler_path, *args, **kwargs):
+    """Wrapper for _get_compiler_version_output()."""
+    # This ensures that we memoize compiler output by *absolute path*,
+    # not just executable name. If we don't do this, and the path changes
+    # (e.g., during testing), we can get incorrect results.
+    if not os.path.isabs(compiler_path):
+        compiler_path = spack.util.executable.which_string(
+            compiler_path, required=True)
+
+    return _get_compiler_version_output(compiler_path, *args, **kwargs)
+
+
 def tokenize_flags(flags_str):
     """Given a compiler flag specification as a string, this returns a list
     where the entries are the flags. For compiler options which set values
```
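The wrapper above exists because memoization keys the cache on the arguments as given: a bare name like `gcc` would be cached once even if `PATH` later resolves it to a different binary. A self-contained sketch of the idea, using `functools.lru_cache` as a stand-in for `llnl.util.lang.memoized` and a hypothetical `which()`:

```python
import functools
import os

@functools.lru_cache(maxsize=None)
def _version_output(compiler_path):
    # Stand-in for running `compiler_path <version-arg>`.
    return 'version output of {0}'.format(compiler_path)

def version_output(compiler_path, which=lambda name: '/usr/bin/' + name):
    # Normalize to an absolute path *before* hitting the cache, so two
    # different binaries that happen to share a name cannot share an entry.
    if not os.path.isabs(compiler_path):
        compiler_path = which(compiler_path)
    return _version_output(compiler_path)
```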
```diff
@@ -650,23 +650,18 @@ def make_compiler_list(detected_versions):
     Returns:
         list of Compiler objects
     """
-    # We don't sort on the path of the compiler
-    sort_fn = lambda x: (x.id, x.variation, x.language)
-    compilers_s = sorted(detected_versions, key=sort_fn)
+    group_fn = lambda x: (x.id, x.variation, x.language)
+    sorted_compilers = sorted(detected_versions, key=group_fn)
 
     # Gather items in a dictionary by the id, name variation and language
     compilers_d = {}
-    for sort_key, group in itertools.groupby(compilers_s, key=sort_fn):
+    for sort_key, group in itertools.groupby(sorted_compilers, key=group_fn):
         compiler_id, name_variation, language = sort_key
         by_compiler_id = compilers_d.setdefault(compiler_id, {})
         by_name_variation = by_compiler_id.setdefault(name_variation, {})
         by_name_variation[language] = next(x.path for x in group)
 
-    # For each unique compiler id select the name variation with most entries
-    # i.e. the one that supports most languages
-    compilers = []
-
-    def _default(cmp_id, paths):
+    def _default_make_compilers(cmp_id, paths):
         operating_system, compiler_name, version = cmp_id
         compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
         spec = spack.spec.CompilerSpec(compiler_cls.name, version)
```
```diff
@@ -677,16 +672,38 @@ def _default(cmp_id, paths):
         )
         return [compiler]
 
-    for compiler_id, by_compiler_id in compilers_d.items():
-        _, selected_name_variation = max(
-            (len(by_compiler_id[variation]), variation)
-            for variation in by_compiler_id
-        )
+    # For compilers with the same compiler id:
+    #
+    # - Prefer with C compiler to without
+    # - Prefer with C++ compiler to without
+    # - Prefer no variations to variations (e.g., clang to clang-gpu)
+    #
+    sort_fn = lambda variation: (
+        'cc' not in by_compiler_id[variation],   # None last
+        'cxx' not in by_compiler_id[variation],  # None last
+        getattr(variation, 'prefix', None),
+        getattr(variation, 'suffix', None),
+    )
+
+    compilers = []
+    for compiler_id, by_compiler_id in compilers_d.items():
+        ordered = sorted(by_compiler_id, key=sort_fn)
+        selected_variation = ordered[0]
+        selected = by_compiler_id[selected_variation]
+
+        # fill any missing parts from subsequent entries
+        for lang in ['cxx', 'f77', 'fc']:
+            if lang not in selected:
+                next_lang = next((
+                    by_compiler_id[v][lang] for v in ordered
+                    if lang in by_compiler_id[v]), None)
+                if next_lang:
+                    selected[lang] = next_lang
 
-        # Add it to the list of compilers
-        selected = by_compiler_id[selected_name_variation]
         operating_system, _, _ = compiler_id
-        make_compilers = getattr(operating_system, 'make_compilers', _default)
+        make_compilers = getattr(
+            operating_system, 'make_compilers', _default_make_compilers)
 
         compilers.extend(make_compilers(compiler_id, selected))
 
     return compilers
```
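The lambda above sorts by tuples: booleans first (`False` sorts before `True`, so variations that do provide `cc`/`cxx` win), then prefix and suffix (so an undecorated name beats something like `clang-gpu`). A toy illustration with hypothetical stand-in data:

```python
from collections import namedtuple

Variation = namedtuple('Variation', ['prefix', 'suffix'])

by_compiler_id = {
    Variation(None, '-gpu'): {'cxx': '/path/clang++-gpu'},
    Variation(None, None): {'cc': '/path/clang', 'cxx': '/path/clang++'},
}

sort_fn = lambda variation: (
    'cc' not in by_compiler_id[variation],   # has a C compiler? sorts first
    'cxx' not in by_compiler_id[variation],
    variation.prefix or '',                  # '' keeps the sketch sortable
    variation.suffix or '',
)

ordered = sorted(by_compiler_id, key=sort_fn)
assert ordered[0] == Variation(None, None)   # plain clang beats clang-gpu
```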
```diff
@@ -23,7 +23,12 @@ def extract_version_from_output(cls, output):
         ver = 'unknown'
         match = re.search(
             # Apple's LLVM compiler has its own versions, so suffix them.
-            r'^Apple (?:LLVM|clang) version ([^ )]+)', output
+            r'^Apple (?:LLVM|clang) version ([^ )]+)',
+            output,
+            # Multi-line, since 'Apple clang' may not be on the first line
+            # in particular, when run as gcc, it seems to output
+            # "Configured with: --prefix=..." as the first line
+            re.M,
         )
         if match:
             ver = match.group(match.lastindex)
```
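The `re.M` flag added above makes `^` anchor at every line start rather than only the start of the string, which matters because the Apple version line may not come first. A quick check with a plausible, hypothetical output sample:

```python
import re

output = ('Configured with: --prefix=/usr\n'
          'Apple clang version 11.0.0 (clang-1100.0.33.17)\n')

pattern = r'^Apple (?:LLVM|clang) version ([^ )]+)'

match = re.search(pattern, output, re.M)
assert match and match.group(1) == '11.0.0'

# Without re.M, ^ only matches at the very start, so nothing is found.
assert re.search(pattern, output) is None
```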
```diff
@@ -5,13 +5,13 @@
 
 import re
 
-import spack.compilers.clang
+import spack.compiler
+import spack.compilers.apple_clang as apple_clang
 
-from spack.compiler import Compiler, UnsupportedCompilerFlag
 from spack.version import ver
 
 
-class Gcc(Compiler):
+class Gcc(spack.compiler.Compiler):
     # Subclasses use possible names of C compiler
     cc_names = ['gcc']
 
```
```diff
@@ -64,10 +64,8 @@ def cxx98_flag(self):
     @property
     def cxx11_flag(self):
         if self.version < ver('4.3'):
-            raise UnsupportedCompilerFlag(self,
-                                          "the C++11 standard",
-                                          "cxx11_flag",
-                                          " < 4.3")
+            raise spack.compiler.UnsupportedCompilerFlag(
+                self, "the C++11 standard", "cxx11_flag", " < 4.3")
         elif self.version < ver('4.7'):
             return "-std=c++0x"
         else:
@@ -76,10 +74,8 @@ def cxx11_flag(self):
     @property
     def cxx14_flag(self):
         if self.version < ver('4.8'):
-            raise UnsupportedCompilerFlag(self,
-                                          "the C++14 standard",
-                                          "cxx14_flag",
-                                          "< 4.8")
+            raise spack.compiler.UnsupportedCompilerFlag(
+                self, "the C++14 standard", "cxx14_flag", "< 4.8")
         elif self.version < ver('4.9'):
             return "-std=c++1y"
         elif self.version < ver('6.0'):
@@ -90,10 +86,8 @@ def cxx14_flag(self):
     @property
     def cxx17_flag(self):
         if self.version < ver('5.0'):
-            raise UnsupportedCompilerFlag(self,
-                                          "the C++17 standard",
-                                          "cxx17_flag",
-                                          "< 5.0")
+            raise spack.compiler.UnsupportedCompilerFlag(
+                self, "the C++17 standard", "cxx17_flag", "< 5.0")
         elif self.version < ver('6.0'):
             return "-std=c++1z"
         else:
@@ -102,19 +96,15 @@ def cxx17_flag(self):
     @property
     def c99_flag(self):
         if self.version < ver('4.5'):
-            raise UnsupportedCompilerFlag(self,
-                                          "the C99 standard",
-                                          "c99_flag",
-                                          "< 4.5")
+            raise spack.compiler.UnsupportedCompilerFlag(
+                self, "the C99 standard", "c99_flag", "< 4.5")
         return "-std=c99"
 
     @property
     def c11_flag(self):
         if self.version < ver('4.7'):
-            raise UnsupportedCompilerFlag(self,
-                                          "the C11 standard",
-                                          "c11_flag",
-                                          "< 4.7")
+            raise spack.compiler.UnsupportedCompilerFlag(
+                self, "the C11 standard", "c11_flag", "< 4.7")
         return "-std=c11"
 
     @property
```
```diff
@@ -152,10 +142,10 @@ def default_version(cls, cc):
 
         7.2.0
     """
-    # Skip any gcc versions that are actually clang, like Apple's gcc.
-    # Returning "unknown" makes them not detected by default.
-    # Users can add these manually to compilers.yaml at their own risk.
-    if spack.compilers.clang.Clang.default_version(cc) != 'unknown':
+    # Apple's gcc is actually apple clang, so skip it. Returning
+    # "unknown" ensures this compiler is not detected by default.
+    # Users can add it manually to compilers.yaml at their own risk.
+    if apple_clang.AppleClang.default_version(cc) != 'unknown':
         return 'unknown'
 
     version = super(Gcc, cls).default_version(cc)
```
```diff
@@ -13,7 +13,9 @@
             "0.14.2": "0.14.2",
             "0.15": "0.15",
             "0.15.0": "0.15.0",
-            "0.15.1": "0.15.1"
+            "0.15.1": "0.15.1",
+            "0.15.2": "0.15.2",
+            "0.15.3": "0.15.3"
         }
     },
     "ubuntu:16.04": {
@@ -30,7 +32,9 @@
             "0.14.2": "0.14.2",
             "0.15": "0.15",
             "0.15.0": "0.15.0",
-            "0.15.1": "0.15.1"
+            "0.15.1": "0.15.1",
+            "0.15.2": "0.15.2",
+            "0.15.3": "0.15.3"
         }
     },
     "centos:7": {
@@ -47,7 +51,9 @@
             "0.14.2": "0.14.2",
             "0.15": "0.15",
             "0.15.0": "0.15.0",
-            "0.15.1": "0.15.1"
+            "0.15.1": "0.15.1",
+            "0.15.2": "0.15.2",
+            "0.15.3": "0.15.3"
         }
     },
     "centos:6": {
@@ -64,7 +70,9 @@
             "0.14.2": "0.14.2",
             "0.15": "0.15",
             "0.15.0": "0.15.0",
-            "0.15.1": "0.15.1"
+            "0.15.1": "0.15.1",
+            "0.15.2": "0.15.2",
+            "0.15.3": "0.15.3"
         }
     }
 }
```
```diff
@@ -175,9 +175,20 @@ def activate(
     # MANPATH, PYTHONPATH, etc. All variables that end in PATH (case-sensitive)
     # become PATH variables.
     #
-    if add_view and default_view_name in env.views:
-        with spack.store.db.read_transaction():
-            cmds += env.add_default_view_to_shell(shell)
+    try:
+        if add_view and default_view_name in env.views:
+            with spack.store.db.read_transaction():
+                cmds += env.add_default_view_to_shell(shell)
+    except (spack.repo.UnknownPackageError,
+            spack.repo.UnknownNamespaceError) as e:
+        tty.error(e)
+        tty.die(
+            'Environment view is broken due to a missing package or repo.\n',
+            '  To activate without views enabled, activate with:\n',
+            '    spack env activate -V {0}\n'.format(env.name),
+            '  To remove it and resolve the issue, '
+            'force concretize with the command:\n',
+            '    spack -e {0} concretize --force'.format(env.name))
 
     return cmds
 
@@ -230,9 +241,15 @@ def deactivate(shell='sh'):
         cmds += '  unset SPACK_OLD_PS1; export SPACK_OLD_PS1;\n'
         cmds += 'fi;\n'
 
-    if default_view_name in _active_environment.views:
-        with spack.store.db.read_transaction():
-            cmds += _active_environment.rm_default_view_from_shell(shell)
+    try:
+        if default_view_name in _active_environment.views:
+            with spack.store.db.read_transaction():
+                cmds += _active_environment.rm_default_view_from_shell(shell)
+    except (spack.repo.UnknownPackageError,
+            spack.repo.UnknownNamespaceError) as e:
+        tty.warn(e)
+        tty.warn('Could not fully deactivate view due to missing package '
+                 'or repo, shell environment may be corrupt.')
 
     tty.debug("Deactivated environmennt '%s'" % _active_environment.name)
     _active_environment = None
```
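Both hunks apply the same shape of fix: wrap the view work in `try`/`except` over the two repo errors so a broken view degrades into a message instead of a traceback. The control flow reduced to a runnable sketch (the error class and view function are stand-ins, not Spack's):

```python
class UnknownPackageError(Exception):
    """Stand-in for spack.repo.UnknownPackageError."""

def add_default_view_to_shell(shell):
    raise UnknownPackageError('no such package: foo')  # simulate a broken view

cmds = ''
try:
    cmds += add_default_view_to_shell('sh')
except UnknownPackageError as e:
    print('error: {0}'.format(e))
    print('error: environment view is broken; activate with -V to skip views')
```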
```diff
@@ -446,8 +463,9 @@ def _eval_conditional(string):
 
 
 class ViewDescriptor(object):
-    def __init__(self, root, projections={}, select=[], exclude=[],
+    def __init__(self, base_path, root, projections={}, select=[], exclude=[],
                  link=default_view_link):
+        self.base = base_path
         self.root = root
         self.projections = projections
         self.select = select
@@ -477,15 +495,19 @@ def to_dict(self):
         return ret
 
     @staticmethod
-    def from_dict(d):
-        return ViewDescriptor(d['root'],
+    def from_dict(base_path, d):
+        return ViewDescriptor(base_path,
+                              d['root'],
                               d.get('projections', {}),
                               d.get('select', []),
                               d.get('exclude', []),
                               d.get('link', default_view_link))
 
     def view(self):
-        return YamlFilesystemView(self.root, spack.store.layout,
+        root = self.root
+        if not os.path.isabs(root):
+            root = os.path.normpath(os.path.join(self.base, self.root))
+        return YamlFilesystemView(root, spack.store.layout,
                                   ignore_conflicts=True,
                                   projections=self.projections)
```
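Resolving `root` against `self.base` is what makes relative view roots (the #17721 fix in the changelog) track the environment directory. The path arithmetic on its own, with hypothetical paths:

```python
import os

base = '/home/user/envs/myenv'  # hypothetical environment directory
root = '../view'                # relative view root from the manifest

if not os.path.isabs(root):
    root = os.path.normpath(os.path.join(base, root))

assert root == '/home/user/envs/view'
```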
```diff
@@ -527,20 +549,29 @@ def regenerate(self, all_specs, roots):
         installed_specs_for_view = set(
             s for s in specs_for_view if s in self and s.package.installed)
 
-        view = self.view()
+        # To ensure there are no conflicts with packages being installed
+        # that cannot be resolved or have repos that have been removed
+        # we always regenerate the view from scratch. We must first make
+        # sure the root directory exists for the very first time though.
+        root = self.root
+        if not os.path.isabs(root):
+            root = os.path.normpath(os.path.join(self.base, self.root))
+        fs.mkdirp(root)
+        with fs.replace_directory_transaction(root):
+            view = self.view()
 
-        view.clean()
-        specs_in_view = set(view.get_all_specs())
-        tty.msg("Updating view at {0}".format(self.root))
+            view.clean()
+            specs_in_view = set(view.get_all_specs())
+            tty.msg("Updating view at {0}".format(self.root))
 
-        rm_specs = specs_in_view - installed_specs_for_view
-        add_specs = installed_specs_for_view - specs_in_view
+            rm_specs = specs_in_view - installed_specs_for_view
+            add_specs = installed_specs_for_view - specs_in_view
 
-        # pass all_specs in, as it's expensive to read all the
-        # spec.yaml files twice.
-        view.remove_specs(*rm_specs, with_dependents=False,
-                          all_specs=specs_in_view)
-        view.add_specs(*add_specs, with_dependencies=False)
+            # pass all_specs in, as it's expensive to read all the
+            # spec.yaml files twice.
+            view.remove_specs(*rm_specs, with_dependents=False,
+                              all_specs=specs_in_view)
+            view.add_specs(*add_specs, with_dependencies=False)
 
 
 class Environment(object):
```
```diff
@@ -586,9 +617,11 @@ def __init__(self, path, init_file=None, with_view=None):
             self.views = {}
         elif with_view is True:
             self.views = {
-                default_view_name: ViewDescriptor(self.view_path_default)}
+                default_view_name: ViewDescriptor(self.path,
+                                                  self.view_path_default)}
         elif isinstance(with_view, six.string_types):
-            self.views = {default_view_name: ViewDescriptor(with_view)}
+            self.views = {default_view_name: ViewDescriptor(self.path,
+                                                            with_view)}
         # If with_view is None, then defer to the view settings determined by
         # the manifest file
 
@@ -659,11 +692,14 @@ def _read_manifest(self, f, raw_yaml=None):
         # enable_view can be boolean, string, or None
         if enable_view is True or enable_view is None:
             self.views = {
-                default_view_name: ViewDescriptor(self.view_path_default)}
+                default_view_name: ViewDescriptor(self.path,
+                                                  self.view_path_default)}
         elif isinstance(enable_view, six.string_types):
-            self.views = {default_view_name: ViewDescriptor(enable_view)}
+            self.views = {default_view_name: ViewDescriptor(self.path,
+                                                            enable_view)}
         elif enable_view:
-            self.views = dict((name, ViewDescriptor.from_dict(values))
+            path = self.path
+            self.views = dict((name, ViewDescriptor.from_dict(path, values))
                               for name, values in enable_view.items())
         else:
             self.views = {}
```
```diff
@@ -1097,7 +1133,7 @@ def update_default_view(self, viewpath):
             if name in self.views:
                 self.default_view.root = viewpath
             else:
-                self.views[name] = ViewDescriptor(viewpath)
+                self.views[name] = ViewDescriptor(self.path, viewpath)
         else:
             self.views.pop(name, None)
 
@@ -1111,6 +1147,24 @@ def regenerate_views(self):
         for view in self.views.values():
             view.regenerate(specs, self.roots())
 
+    def check_views(self):
+        """Checks if the environments default view can be activated."""
+        try:
+            # This is effectively a no-op, but it touches all packages in the
+            # default view if they are installed.
+            for view_name, view in self.views.items():
+                for _, spec in self.concretized_specs():
+                    if spec in view and spec.package.installed:
+                        tty.debug(
+                            'Spec %s in view %s' % (spec.name, view_name))
+        except (spack.repo.UnknownPackageError,
+                spack.repo.UnknownNamespaceError) as e:
+            tty.warn(e)
+            tty.warn(
+                'Environment %s includes out of date packages or repos. '
+                'Loading the environment view will require reconcretization.'
+                % self.name)
+
     def _env_modifications_for_default_view(self, reverse=False):
         all_mods = spack.util.environment.EnvironmentModifications()
 
```
```diff
@@ -1490,9 +1544,10 @@ def write(self, regenerate_views=True):
         default_name = default_view_name
         if self.views and len(self.views) == 1 and default_name in self.views:
             path = self.default_view.root
-            if self.default_view == ViewDescriptor(self.view_path_default):
+            if self.default_view == ViewDescriptor(self.path,
+                                                   self.view_path_default):
                 view = True
-            elif self.default_view == ViewDescriptor(path):
+            elif self.default_view == ViewDescriptor(self.path, path):
                 view = path
             else:
                 view = dict((name, view.to_dict())
```
```diff
@@ -289,10 +289,11 @@ def candidate_urls(self):
     @_needs_stage
     def fetch(self):
         if self.archive_file:
-            tty.msg("Already downloaded %s" % self.archive_file)
+            tty.debug('Already downloaded {0}'.format(self.archive_file))
             return
 
         url = None
+        errors = []
         for url in self.candidate_urls:
             try:
                 partial_file, save_file = self._fetch_from_url(url)
@@ -300,8 +301,10 @@ def fetch(self):
                     os.rename(partial_file, save_file)
                 break
             except FetchError as e:
-                tty.msg(str(e))
-                pass
+                errors.append(str(e))
+
+        for msg in errors:
+            tty.debug(msg)
 
         if not self.archive_file:
             raise FailedDownloadError(url)
@@ -312,7 +315,7 @@ def _fetch_from_url(self, url):
         if self.stage.save_filename:
             save_file = self.stage.save_filename
             partial_file = self.stage.save_filename + '.part'
-        tty.msg("Fetching %s" % url)
+        tty.debug('Fetching {0}'.format(url))
         if partial_file:
             save_args = ['-C',
                          '-',  # continue partial downloads
@@ -327,6 +330,8 @@ def _fetch_from_url(self, url):
             '-',  # print out HTML headers
             '-L',  # resolve 3xx redirects
             url,
+            '--stderr',  # redirect stderr output
+            '-',  # redirect to stdout
         ]
 
         if not spack.config.get('config:verify_ssl'):
```
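The `fetch()` change above stops printing each per-URL failure as it happens; failures are collected and emitted at debug level only after the loop. The control flow as a sketch with a stubbed fetcher and hypothetical URLs:

```python
def fetch_from_url(url):
    raise IOError('connection refused')  # stub: every URL fails

candidate_urls = ['https://mirror-a/pkg.tar.gz', 'https://mirror-b/pkg.tar.gz']

errors = []
for url in candidate_urls:
    try:
        fetch_from_url(url)
        break                      # first success wins
    except IOError as e:
        errors.append(str(e))      # defer reporting

for msg in errors:
    print('debug:', msg)           # stands in for tty.debug(msg)
```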
```diff
@@ -412,8 +417,8 @@ def cachable(self):
     @_needs_stage
     def expand(self):
         if not self.expand_archive:
-            tty.msg("Staging unexpanded archive %s in %s" % (
-                    self.archive_file, self.stage.source_path))
+            tty.debug('Staging unexpanded archive {0} in {1}'
+                      .format(self.archive_file, self.stage.source_path))
             if not self.stage.expanded:
                 mkdirp(self.stage.source_path)
             dest = os.path.join(self.stage.source_path,
@@ -421,7 +426,7 @@ def expand(self):
             shutil.move(self.archive_file, dest)
             return
 
-        tty.msg("Staging archive: %s" % self.archive_file)
+        tty.debug('Staging archive: {0}'.format(self.archive_file))
 
         if not self.archive_file:
             raise NoArchiveFileError(
@@ -564,7 +569,7 @@ def fetch(self):
             raise
 
         # Notify the user how we fetched.
-        tty.msg('Using cached archive: %s' % path)
+        tty.debug('Using cached archive: {0}'.format(path))
 
 
 class VCSFetchStrategy(FetchStrategy):
@@ -594,7 +599,8 @@ def __init__(self, **kwargs):
 
     @_needs_stage
     def check(self):
-        tty.msg("No checksum needed when fetching with %s" % self.url_attr)
+        tty.debug('No checksum needed when fetching with {0}'
+                  .format(self.url_attr))
 
     @_needs_stage
     def expand(self):
@@ -672,7 +678,7 @@ def go(self):
 
     @_needs_stage
     def fetch(self):
-        tty.msg("Getting go resource:", self.url)
+        tty.debug('Getting go resource: {0}'.format(self.url))
 
         with working_dir(self.stage.path):
             try:
```
```diff
@@ -788,10 +794,10 @@ def _repo_info(self):
     @_needs_stage
     def fetch(self):
         if self.stage.expanded:
-            tty.msg("Already fetched {0}".format(self.stage.source_path))
+            tty.debug('Already fetched {0}'.format(self.stage.source_path))
             return
 
-        tty.msg("Cloning git repository: {0}".format(self._repo_info()))
+        tty.debug('Cloning git repository: {0}'.format(self._repo_info()))
 
         git = self.git
         if self.commit:
@@ -959,10 +965,10 @@ def mirror_id(self):
     @_needs_stage
     def fetch(self):
         if self.stage.expanded:
-            tty.msg("Already fetched %s" % self.stage.source_path)
+            tty.debug('Already fetched {0}'.format(self.stage.source_path))
             return
 
-        tty.msg("Checking out subversion repository: %s" % self.url)
+        tty.debug('Checking out subversion repository: {0}'.format(self.url))
 
         args = ['checkout', '--force', '--quiet']
         if self.revision:
@@ -1068,13 +1074,14 @@ def mirror_id(self):
     @_needs_stage
     def fetch(self):
         if self.stage.expanded:
-            tty.msg("Already fetched %s" % self.stage.source_path)
+            tty.debug('Already fetched {0}'.format(self.stage.source_path))
             return
 
         args = []
         if self.revision:
             args.append('at revision %s' % self.revision)
-        tty.msg("Cloning mercurial repository:", self.url, *args)
+        tty.debug('Cloning mercurial repository: {0} {1}'
+                  .format(self.url, args))
 
         args = ['clone']
 
@@ -1130,7 +1137,7 @@ def __init__(self, *args, **kwargs):
     @_needs_stage
     def fetch(self):
         if self.archive_file:
-            tty.msg("Already downloaded %s" % self.archive_file)
+            tty.debug('Already downloaded {0}'.format(self.archive_file))
             return
 
         parsed_url = url_util.parse(self.url)
@@ -1138,7 +1145,7 @@ def fetch(self):
             raise FetchError(
                 'S3FetchStrategy can only fetch from s3:// urls.')
 
-        tty.msg("Fetching %s" % self.url)
+        tty.debug('Fetching {0}'.format(self.url))
 
         basename = os.path.basename(parsed_url.path)
 
```
```diff
@@ -215,18 +215,18 @@ def _hms(seconds):
 
 def _install_from_cache(pkg, cache_only, explicit, unsigned=False):
     """
-    Install the package from binary cache
+    Extract the package from binary cache
 
     Args:
         pkg (PackageBase): the package to install from the binary cache
-        cache_only (bool): only install from binary cache
+        cache_only (bool): only extract from binary cache
         explicit (bool): ``True`` if installing the package was explicitly
             requested by the user, otherwise, ``False``
         unsigned (bool): ``True`` if binary package signatures to be checked,
             otherwise, ``False``
 
     Return:
-        (bool) ``True`` if the package was installed from binary cache,
+        (bool) ``True`` if the package was extracted from binary cache,
         ``False`` otherwise
     """
     installed_from_cache = _try_install_from_binary_cache(pkg, explicit,
@@ -237,10 +237,10 @@ def _install_from_cache(pkg, cache_only, explicit, unsigned=False):
         if cache_only:
             tty.die('{0} when cache-only specified'.format(pre))
 
-        tty.debug('{0}: installing from source'.format(pre))
+        tty.msg('{0}: installing from source'.format(pre))
         return False
 
-    tty.debug('Successfully installed {0} from binary cache'.format(pkg_id))
+    tty.debug('Successfully extracted {0} from binary cache'.format(pkg_id))
     _print_installed_pkg(pkg.spec.prefix)
     spack.hooks.post_install(pkg.spec)
     return True
@@ -275,17 +275,17 @@ def _process_external_package(pkg, explicit):
     if spec.external_module:
         tty.msg('{0} has external module in {1}'
                 .format(pre, spec.external_module))
-        tty.msg('{0} is actually installed in {1}'
-                .format(pre, spec.external_path))
+        tty.debug('{0} is actually installed in {1}'
+                  .format(pre, spec.external_path))
     else:
-        tty.msg("{0} externally installed in {1}"
+        tty.msg('{0} externally installed in {1}'
                 .format(pre, spec.external_path))
 
     try:
         # Check if the package was already registered in the DB.
         # If this is the case, then just exit.
         rec = spack.store.db.get_record(spec)
-        tty.msg('{0} already registered in DB'.format(pre))
+        tty.debug('{0} already registered in DB'.format(pre))
 
         # Update the value of rec.explicit if it is necessary
         _update_explicit_entry_in_db(pkg, rec, explicit)
```
```diff
@@ -294,11 +294,11 @@ def _process_external_package(pkg, explicit):
         # If not, register it and generate the module file.
         # For external packages we just need to run
         # post-install hooks to generate module files.
-        tty.msg('{0} generating module file'.format(pre))
+        tty.debug('{0} generating module file'.format(pre))
         spack.hooks.post_install(spec)
 
         # Add to the DB
-        tty.msg('{0} registering into DB'.format(pre))
+        tty.debug('{0} registering into DB'.format(pre))
         spack.store.db.add(spec, None, explicit=explicit)
 
 
@@ -314,7 +314,7 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned):
             otherwise, ``False``
 
     Return:
-        (bool) ``True`` if the package was installed from binary cache,
+        (bool) ``True`` if the package was extracted from binary cache,
         else ``False``
     """
     tarball = binary_distribution.download_tarball(binary_spec)
@@ -325,7 +325,7 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned):
         return False
 
     pkg_id = package_id(pkg)
-    tty.msg('Installing {0} from binary cache'.format(pkg_id))
+    tty.msg('Extracting {0} from binary cache'.format(pkg_id))
     binary_distribution.extract_tarball(binary_spec, tarball, allow_root=False,
                                         unsigned=unsigned, force=False)
     pkg.installed_from_binary_cache = True
@@ -335,10 +335,10 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned):
 
 def _try_install_from_binary_cache(pkg, explicit, unsigned=False):
     """
-    Try to install the package from binary cache.
+    Try to extract the package from binary cache.
 
     Args:
-        pkg (PackageBase): the package to be installed from binary cache
+        pkg (PackageBase): the package to be extracted from binary cache
         explicit (bool): the package was explicitly requested by the user
         unsigned (bool): ``True`` if binary package signatures to be checked,
             otherwise, ``False``
@@ -369,7 +369,7 @@ def _update_explicit_entry_in_db(pkg, rec, explicit):
     with spack.store.db.write_transaction():
         rec = spack.store.db.get_record(pkg.spec)
         message = '{s.name}@{s.version} : marking the package explicit'
-        tty.msg(message.format(s=pkg.spec))
+        tty.debug(message.format(s=pkg.spec))
         rec.explicit = True
 
 
```
```diff
@@ -452,7 +452,8 @@ def install_msg(name, pid):
     Return:
         (str) Colorized installing message
     """
-    return '{0}: '.format(pid) + colorize('@*{Installing} @*g{%s}' % name)
+    pre = '{0}: '.format(pid) if tty.show_pid() else ''
+    return pre + colorize('@*{Installing} @*g{%s}' % name)
 
 
 def log(pkg):
@@ -1057,11 +1058,15 @@ def _install_task(self, task, **kwargs):
         if use_cache and \
                 _install_from_cache(pkg, cache_only, explicit, unsigned):
             self._update_installed(task)
+            if task.compiler:
+                spack.compilers.add_compilers_to_config(
+                    spack.compilers.find_compilers([pkg.spec.prefix]))
             return
 
         pkg.run_tests = (tests is True or tests and pkg.name in tests)
 
-        pre = '{0}: {1}:'.format(self.pid, pkg.name)
+        pid = '{0}: '.format(self.pid) if tty.show_pid() else ''
+        pre = '{0}{1}:'.format(pid, pkg.name)
 
         def build_process():
             """
@@ -1080,8 +1085,8 @@ def build_process():
             pkg.do_stage()
 
             pkg_id = package_id(pkg)
-            tty.msg('{0} Building {1} [{2}]'
-                    .format(pre, pkg_id, pkg.build_system_class))
+            tty.debug('{0} Building {1} [{2}]'
+                      .format(pre, pkg_id, pkg.build_system_class))
 
             # get verbosity from do_install() parameter or saved value
             echo = verbose
@@ -1102,8 +1107,8 @@ def build_process():
             if install_source and os.path.isdir(source_path):
                 src_target = os.path.join(pkg.spec.prefix, 'share',
                                           pkg.name, 'src')
-                tty.msg('{0} Copying source to {1}'
-                        .format(pre, src_target))
+                tty.debug('{0} Copying source to {1}'
+                          .format(pre, src_target))
                 fs.install_tree(pkg.stage.source_path, src_target)
 
             # Do the real install in the source directory.
```
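`install_msg()` and `_install_task()` above now include the PID prefix only when `tty.show_pid()` is true, i.e. at debug level 2 or higher, where PIDs help untangle parallel builds. The prefix construction in isolation (assuming `llnl.util.tty` from a Spack checkout is importable):

```python
import os
import llnl.util.tty as tty

tty.set_debug(2)  # at -dd, show_pid() becomes true

pid = '{0}: '.format(os.getpid()) if tty.show_pid() else ''
print(pid + 'Installing zlib')   # "12345: Installing zlib" under -dd,
                                 # plain "Installing zlib" otherwise
```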
```diff
@@ -1125,7 +1130,7 @@ def build_process():
                 pass
 
             # cache debug settings
-            debug_enabled = tty.is_debug()
+            debug_level = tty.debug_level()
 
             # Spawn a daemon that reads from a pipe and redirects
             # everything to log_path
@@ -1134,11 +1139,11 @@ def build_process():
                         pkg.phases, pkg._InstallPhase_phases):
 
                     with logger.force_echo():
-                        inner_debug = tty.is_debug()
-                        tty.set_debug(debug_enabled)
+                        inner_debug_level = tty.debug_level()
+                        tty.set_debug(debug_level)
                         tty.msg("{0} Executing phase: '{1}'"
                                 .format(pre, phase_name))
-                        tty.set_debug(inner_debug)
+                        tty.set_debug(inner_debug_level)
 
                     # Redirect stdout and stderr to daemon pipe
                     phase = getattr(pkg, phase_attr)
@@ -1154,11 +1159,11 @@ def build_process():
             pkg._total_time = time.time() - start_time
             build_time = pkg._total_time - pkg._fetch_time
 
-            tty.msg('{0} Successfully installed {1}'
-                    .format(pre, pkg_id),
-                    'Fetch: {0}. Build: {1}. Total: {2}.'
-                    .format(_hms(pkg._fetch_time), _hms(build_time),
-                            _hms(pkg._total_time)))
+            tty.debug('{0} Successfully installed {1}'
+                      .format(pre, pkg_id),
+                      'Fetch: {0}. Build: {1}. Total: {2}.'
+                      .format(_hms(pkg._fetch_time), _hms(build_time),
+                              _hms(pkg._total_time)))
             _print_installed_pkg(pkg.prefix)
 
             # preserve verbosity across runs
```
```diff
@@ -1189,7 +1194,8 @@ def build_process():
         except spack.build_environment.StopPhase as e:
             # A StopPhase exception means that do_install was asked to
             # stop early from clients, and is not an error at this point
-            tty.debug('{0} {1}'.format(self.pid, str(e)))
+            pre = '{0}'.format(self.pid) if tty.show_pid() else ''
+            tty.debug('{0}{1}'.format(pid, str(e)))
             tty.debug('Package stage directory : {0}'
                       .format(pkg.stage.source_path))
 
@@ -1562,9 +1568,14 @@ def install(self, **kwargs):
             except (Exception, SystemExit) as exc:
                 # Best effort installs suppress the exception and mark the
                 # package as a failure UNLESS this is the explicit package.
-                err = 'Failed to install {0} due to {1}: {2}'
-                tty.error(err.format(pkg.name, exc.__class__.__name__,
-                          str(exc)))
+                if (not isinstance(exc, spack.error.SpackError) or
+                        not exc.printed):
+                    # SpackErrors can be printed by the build process or at
+                    # lower levels -- skip printing if already printed.
+                    # TODO: sort out this and SpackEror.print_context()
+                    err = 'Failed to install {0} due to {1}: {2}'
+                    tty.error(
+                        err.format(pkg.name, exc.__class__.__name__, str(exc)))
 
                 self._update_failed(task, True, exc)
 
```
```diff
@@ -362,8 +362,9 @@ def make_argument_parser(**kwargs):
         '-C', '--config-scope', dest='config_scopes', action='append',
         metavar='DIR', help="add a custom configuration scope")
     parser.add_argument(
-        '-d', '--debug', action='store_true',
-        help="write out debug logs during compile")
+        '-d', '--debug', action='count', default=0,
+        help="write out debug messages "
+             "(more d's for more verbosity: -d, -dd, -ddd, etc.)")
     parser.add_argument(
         '--timestamp', action='store_true',
         help="Add a timestamp to tty output")
@@ -438,7 +439,7 @@ def setup_main_options(args):
     tty.set_debug(args.debug)
     tty.set_stacktrace(args.stacktrace)
 
-    # debug must be set first so that it can even affect behvaior of
+    # debug must be set first so that it can even affect behavior of
     # errors raised by spack.config.
     if args.debug:
         spack.error.debug = True
@@ -705,7 +706,7 @@ def main(argv=None):
     if not args.no_env:
         env = ev.find_environment(args)
         if env:
-            ev.activate(env, args.use_env_repo)
+            ev.activate(env, args.use_env_repo, add_view=False)
 
     # make spack.config aware of any command line configuration scopes
     if args.config_scopes:
```
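`action='count'` is what turns repeated `-d` flags into the integer level consumed by `tty.set_debug()`, and `default=0` maps an absent flag to "no debug". A standalone check of that standard argparse behavior:

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-d', '--debug', action='count', default=0)

assert parser.parse_args([]).debug == 0
assert parser.parse_args(['-d']).debug == 1
assert parser.parse_args(['-ddd']).debug == 3
```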
```diff
@@ -97,6 +97,9 @@ def __str__(self):
     def _detect_crayos_version(cls):
         if os.path.isfile(_cle_release_file):
             release_attrs = read_cle_release_file()
+            if 'RELEASE' not in release_attrs:
+                # This Cray system uses a base OS not CLE/CNL
+                return None
             v = spack.version.Version(release_attrs['RELEASE'])
             return v[0]
         elif os.path.isfile(_clerelease_file):
```
```diff
@@ -1121,9 +1121,8 @@ def do_fetch(self, mirror_only=False):
             raise ValueError("Can only fetch concrete packages.")
 
         if not self.has_code:
-            tty.msg(
-                "No fetch required for %s: package has no code." % self.name
-            )
+            tty.debug('No fetch required for {0}: package has no code.'
+                      .format(self.name))
 
         start_time = time.time()
         checksum = spack.config.get('config:checksum')
@@ -1139,7 +1138,8 @@ def do_fetch(self, mirror_only=False):
             ignore_checksum = tty.get_yes_or_no("  Fetch anyway?",
                                                 default=False)
             if ignore_checksum:
-                tty.msg("Fetching with no checksum.", ck_msg)
+                tty.debug('Fetching with no checksum. {0}'
+                          .format(ck_msg))
 
         if not ignore_checksum:
             raise FetchError("Will not fetch %s" %
```
```diff
@@ -1195,7 +1195,7 @@ def do_patch(self):
 
         # If there are no patches, note it.
         if not patches and not has_patch_fun:
-            tty.msg("No patches needed for %s" % self.name)
+            tty.debug('No patches needed for {0}'.format(self.name))
             return
 
         # Construct paths to special files in the archive dir used to
@@ -1208,15 +1208,15 @@ def do_patch(self):
         # If we encounter an archive that failed to patch, restage it
         # so that we can apply all the patches again.
         if os.path.isfile(bad_file):
-            tty.msg("Patching failed last time. Restaging.")
+            tty.debug('Patching failed last time. Restaging.')
             self.stage.restage()
 
         # If this file exists, then we already applied all the patches.
         if os.path.isfile(good_file):
-            tty.msg("Already patched %s" % self.name)
+            tty.debug('Already patched {0}'.format(self.name))
             return
         elif os.path.isfile(no_patches_file):
-            tty.msg("No patches needed for %s" % self.name)
+            tty.debug('No patches needed for {0}'.format(self.name))
             return
 
         # Apply all the patches for specs that match this one
@@ -1225,7 +1225,7 @@ def do_patch(self):
             try:
                 with working_dir(self.stage.source_path):
                     patch.apply(self.stage)
-                    tty.msg('Applied patch %s' % patch.path_or_url)
+                    tty.debug('Applied patch {0}'.format(patch.path_or_url))
                 patched = True
             except spack.error.SpackError as e:
                 tty.debug(e)
@@ -1239,7 +1239,7 @@ def do_patch(self):
             try:
                 with working_dir(self.stage.source_path):
                     self.patch()
-                tty.msg("Ran patch() for %s" % self.name)
+                tty.debug('Ran patch() for {0}'.format(self.name))
                 patched = True
             except spack.multimethod.NoSuchMethodError:
                 # We are running a multimethod without a default case.
@@ -1249,12 +1249,12 @@ def do_patch(self):
                     # directive, AND the patch function didn't apply, say
                     # no patches are needed. Otherwise, we already
                     # printed a message for each patch.
-                    tty.msg("No patches needed for %s" % self.name)
+                    tty.debug('No patches needed for {0}'.format(self.name))
             except spack.error.SpackError as e:
                 tty.debug(e)
 
                 # Touch bad file if anything goes wrong.
-                tty.msg("patch() function failed for %s" % self.name)
+                tty.msg('patch() function failed for {0}'.format(self.name))
                 touch(bad_file)
                 raise
 
```
@@ -1341,7 +1341,7 @@ def _has_make_target(self, target):
            if os.path.exists(makefile):
                break
        else:
            tty.msg('No Makefile found in the build directory')
            tty.debug('No Makefile found in the build directory')
            return False

        # Check if 'target' is a valid target.
@@ -1372,7 +1372,8 @@ def _has_make_target(self, target):

        for missing_target_msg in missing_target_msgs:
            if missing_target_msg.format(target) in stderr:
                tty.msg("Target '" + target + "' not found in " + makefile)
                tty.debug("Target '{0}' not found in {1}"
                          .format(target, makefile))
                return False

        return True
@@ -1400,7 +1401,7 @@ def _has_ninja_target(self, target):

        # Check if we have a Ninja build script
        if not os.path.exists('build.ninja'):
            tty.msg('No Ninja build script found in the build directory')
            tty.debug('No Ninja build script found in the build directory')
            return False

        # Get a list of all targets in the Ninja build script
@@ -1412,7 +1413,8 @@ def _has_ninja_target(self, target):
                   if line.startswith(target + ':')]

        if not matches:
            tty.msg("Target '" + target + "' not found in build.ninja")
            tty.debug("Target '{0}' not found in build.ninja"
                      .format(target))
            return False

        return True
@@ -1719,11 +1721,12 @@ def uninstall_by_spec(spec, force=False, deprecator=None):
            if specs:
                if deprecator:
                    spack.store.db.deprecate(specs[0], deprecator)
                    tty.msg("Deprecating stale DB entry for "
                            "%s" % spec.short_spec)
                    tty.debug('Deprecating stale DB entry for {0}'
                              .format(spec.short_spec))
                else:
                    spack.store.db.remove(specs[0])
                    tty.msg("Removed stale DB entry for %s" % spec.short_spec)
                    tty.debug('Removed stale DB entry for {0}'
                              .format(spec.short_spec))
                return
            else:
                raise InstallError(str(spec) + " is not installed.")
@@ -1767,7 +1770,7 @@ def uninstall_by_spec(spec, force=False, deprecator=None):
        if pkg is not None:
            spack.hooks.post_uninstall(spec)

        tty.msg("Successfully uninstalled %s" % spec.short_spec)
        tty.msg('Successfully uninstalled {0}'.format(spec.short_spec))

    def do_uninstall(self, force=False):
        """Uninstall this package by spec."""
@@ -20,7 +20,7 @@
_craype_name_to_target_name = {
    'x86-cascadelake': 'cascadelake',
    'x86-naples': 'zen',
    'x86-rome': 'zen',  # Cheating because we have the wrong modules on rzcrayz
    'x86-rome': 'zen2',
    'x86-skylake': 'skylake_avx512',
    'mic-knl': 'mic_knl',
    'interlagos': 'bulldozer',
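This dictionary maps CrayPE module names to Spack microarchitecture names, so `x86-rome` now resolves to the correct `zen2` target instead of plain `zen`. A hypothetical lookup helper to show how such a mapping is consumed (the function name and module format here are illustrative, not Spack's actual API):

```python
# Hypothetical illustration: translate a loaded 'craype-*' target module
# name into a Spack microarchitecture name via the mapping above.
def target_from_craype_module(module_name):
    prefix = 'craype-'
    name = module_name[len(prefix):] if module_name.startswith(prefix) else module_name
    # Fall back to the bare name when there is no explicit mapping.
    return _craype_name_to_target_name.get(name, name)

# target_from_craype_module('craype-x86-rome')  # -> 'zen2'
```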
@@ -804,15 +804,17 @@ def relocate_text(
        where they should be relocated
    """
    # TODO: reduce the number of arguments (8 seems too much)
    sbang_regex = r'#!/bin/bash {0}/bin/sbang'.format(orig_spack)
    new_sbang = r'#!/bin/bash {0}/bin/sbang'.format(new_spack)
    orig_sbang = '#!/bin/bash {0}/bin/sbang'.format(orig_spack)
    new_sbang = '#!/bin/bash {0}/bin/sbang'.format(new_spack)

    for file in files:
        _replace_prefix_text(file, orig_install_prefix, new_install_prefix)
        for orig_dep_prefix, new_dep_prefix in new_prefixes.items():
            _replace_prefix_text(file, orig_dep_prefix, new_dep_prefix)
        _replace_prefix_text(file, orig_layout_root, new_layout_root)
        _replace_prefix_text(file, sbang_regex, new_sbang)
        # relocate the sbang location only if the spack directory changed
        if orig_spack != new_spack:
            _replace_prefix_text(file, orig_sbang, new_sbang)


def relocate_text_bin(
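Two things change in this relocation fix: the shebang pattern becomes a plain string rather than a raw regex-style pattern, and the sbang rewrite is skipped entirely when the Spack prefix did not move. A rough sketch of the kind of plain-text replacement `_replace_prefix_text` performs, to make the guard's purpose concrete (illustrative only; the real helper is considerably more careful about padding and binary-safe matching):

```python
# Illustrative only: a naive plain-text prefix replacement of the kind
# relocate_text() relies on. Running this with old == new is a no-op,
# which is exactly why the 'orig_spack != new_spack' guard is cheap
# insurance against touching already-correct shebang lines.
def replace_prefix_text(path, old, new):
    with open(path, 'rb') as f:
        data = f.read()
    with open(path, 'wb') as f:
        f.write(data.replace(old.encode('utf-8'), new.encode('utf-8')))
```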
@@ -32,7 +32,8 @@
            'enum': [
                'develop',
                '0.14', '0.14.0', '0.14.1', '0.14.2',
                '0.15', '0.15.0', '0.15.1',
                '0.15', '0.15.0', '0.15.1', '0.15.2',
                '0.15.3',
            ]
        }
    },
|
@@ -414,10 +414,11 @@ def fetch(self, mirror_only=False):
|
||||
# Join URLs of mirror roots with mirror paths. Because
|
||||
# urljoin() will strip everything past the final '/' in
|
||||
# the root, so we add a '/' if it is not present.
|
||||
urls = []
|
||||
mirror_urls = []
|
||||
for mirror in spack.mirror.MirrorCollection().values():
|
||||
for rel_path in self.mirror_paths:
|
||||
urls.append(url_util.join(mirror.fetch_url, rel_path))
|
||||
mirror_urls.append(
|
||||
url_util.join(mirror.fetch_url, rel_path))
|
||||
|
||||
# If this archive is normally fetched from a tarball URL,
|
||||
# then use the same digest. `spack mirror` ensures that
|
||||
@@ -435,7 +436,8 @@ def fetch(self, mirror_only=False):
            self.skip_checksum_for_mirror = not bool(digest)

            # Add URL strategies for all the mirrors with the digest
            for url in urls:
            # Insert fetchers in the order that the URLs are provided.
            for url in reversed(mirror_urls):
                fetchers.insert(
                    0, fs.from_url_scheme(
                        url, digest, expand=expand, extension=extension))
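The reversal is what makes the new comment true: `fetchers.insert(0, ...)` prepends, which on its own would reverse the mirror order, so iterating the URLs in reverse restores their configured priority while still keeping all mirrors ahead of any fetchers already in the list. A quick self-contained check of that reasoning (list contents are placeholders):

```python
# insert(0, ...) reverses iteration order; reversing the input first
# cancels that out, so mirrors keep their configured priority and still
# land ahead of the pre-existing default fetcher.
fetchers = ['default-fetcher']
mirror_urls = ['mirror1', 'mirror2', 'mirror3']
for url in reversed(mirror_urls):
    fetchers.insert(0, url)
assert fetchers == ['mirror1', 'mirror2', 'mirror3', 'default-fetcher']
```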
@@ -457,6 +459,11 @@ def generate_fetchers():
            for fetcher in dynamic_fetchers:
                yield fetcher

        def print_errors(errors):
            for msg in errors:
                tty.debug(msg)

        errors = []
        for fetcher in generate_fetchers():
            try:
                fetcher.stage = self
@@ -467,14 +474,18 @@ def generate_fetchers():
                # Don't bother reporting when something is not cached.
                continue
            except spack.error.SpackError as e:
                tty.msg("Fetching from %s failed." % fetcher)
                errors.append('Fetching from {0} failed.'.format(fetcher))
                tty.debug(e)
                continue
        else:
            err_msg = "All fetchers failed for %s" % self.name
            print_errors(errors)

            err_msg = 'All fetchers failed for {0}'.format(self.name)
            self.fetcher = self.default_fetcher
            raise fs.FetchError(err_msg, None)

        print_errors(errors)

    def check(self):
        """Check the downloaded archive against a checksum digest.
        No-op if this stage checks code out of a repository."""
@@ -535,9 +546,9 @@ def expand_archive(self):
           downloaded."""
        if not self.expanded:
            self.fetcher.expand()
            tty.msg("Created stage in %s" % self.path)
            tty.debug('Created stage in {0}'.format(self.path))
        else:
            tty.msg("Already staged %s in %s" % (self.name, self.path))
            tty.debug('Already staged {0} in {1}'.format(self.name, self.path))

    def restage(self):
        """Removes the expanded archive path if it exists, then re-expands
@@ -708,13 +719,13 @@ def __exit__(self, exc_type, exc_val, exc_tb):
        pass

    def fetch(self, *args, **kwargs):
        tty.msg("No need to fetch for DIY.")
        tty.debug('No need to fetch for DIY.')

    def check(self):
        tty.msg("No checksum needed for DIY.")
        tty.debug('No checksum needed for DIY.')

    def expand_archive(self):
        tty.msg("Using source directory: %s" % self.source_path)
        tty.debug('Using source directory: {0}'.format(self.source_path))

    @property
    def expanded(self):
@@ -732,7 +743,7 @@ def destroy(self):
        pass

    def cache_local(self):
        tty.msg("Sources for DIY stages are not cached")
        tty.debug('Sources for DIY stages are not cached')


def ensure_access(file):
@@ -782,12 +793,12 @@ def get_checksums_for_versions(
    max_len = max(len(str(v)) for v in sorted_versions)
    num_ver = len(sorted_versions)

    tty.msg("Found {0} version{1} of {2}:".format(
            num_ver, '' if num_ver == 1 else 's', name),
            "",
            *spack.cmd.elide_list(
                ["{0:{1}} {2}".format(str(v), max_len, url_dict[v])
                 for v in sorted_versions]))
    tty.debug('Found {0} version{1} of {2}:'.format(
              num_ver, '' if num_ver == 1 else 's', name),
              '',
              *spack.cmd.elide_list(
                  ['{0:{1}} {2}'.format(str(v), max_len, url_dict[v])
                   for v in sorted_versions]))
    print()

    if batch:
@@ -802,9 +813,10 @@ def get_checksums_for_versions(
    versions = sorted_versions[:archives_to_fetch]
    urls = [url_dict[v] for v in versions]

    tty.msg("Downloading...")
    tty.debug('Downloading...')
    version_hashes = []
    i = 0
    errors = []
    for url, version in zip(urls, versions):
        try:
            if fetch_options:
@@ -825,10 +837,12 @@ def get_checksums_for_versions(
                    hashlib.sha256, stage.archive_file)))
            i += 1
        except FailedDownloadError:
            tty.msg("Failed to fetch {0}".format(url))
            errors.append('Failed to fetch {0}'.format(url))
        except Exception as e:
            tty.msg("Something failed on {0}, skipping.".format(url),
                    " ({0})".format(e))
            tty.msg('Something failed on {0}, skipping. ({1})'.format(url, e))

    for msg in errors:
        tty.debug(msg)

    if not version_hashes:
        tty.die("Could not fetch any versions for {0}".format(name))
@@ -843,8 +857,8 @@ def get_checksums_for_versions(
    ])

    num_hash = len(version_hashes)
    tty.msg("Checksummed {0} version{1} of {2}:".format(
        num_hash, '' if num_hash == 1 else 's', name))
    tty.debug('Checksummed {0} version{1} of {2}:'.format(
              num_hash, '' if num_hash == 1 else 's', name))

    return version_lines
lib/spack/spack/test/cache_fetch.py (new file, 36 lines)
@@ -0,0 +1,36 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
import pytest

from llnl.util.filesystem import mkdirp, touch

from spack.stage import Stage
from spack.fetch_strategy import CacheURLFetchStrategy, NoCacheError


def test_fetch_missing_cache(tmpdir):
    """Ensure an error is raised on fetch when the cache file is missing."""
    testpath = str(tmpdir)

    fetcher = CacheURLFetchStrategy(url='file:///not-a-real-cache-file')
    with Stage(fetcher, path=testpath):
        with pytest.raises(NoCacheError, match=r'No cache'):
            fetcher.fetch()


def test_fetch(tmpdir):
    """Ensure a fetch after expanding is effectively a no-op."""
    testpath = str(tmpdir)
    cache = os.path.join(testpath, 'cache.tar.gz')
    touch(cache)
    url = 'file:///{0}'.format(cache)

    fetcher = CacheURLFetchStrategy(url=url)
    with Stage(fetcher, path=testpath) as stage:
        source_path = stage.source_path
        mkdirp(source_path)
        fetcher.fetch()
@@ -751,7 +751,6 @@ def test_push_mirror_contents(tmpdir, mutable_mock_env_path, env_deactivate,

        logs_dir_list = os.listdir(logs_dir.strpath)

        assert('spack-build-env.txt' in logs_dir_list)
        assert('spack-build-out.txt' in logs_dir_list)

        # Also just make sure that if something goes wrong with the
@@ -3,6 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import shutil
import sys

import pytest

@@ -14,7 +16,7 @@


@pytest.fixture
def no_compilers_yaml(mutable_config, monkeypatch):
def no_compilers_yaml(mutable_config):
    """Creates a temporary configuration without compilers.yaml"""

    for scope, local_config in mutable_config.scopes.items():
@@ -64,7 +66,7 @@ def test_compiler_find_without_paths(no_compilers_yaml, working_env, tmpdir):
    with tmpdir.as_cwd():
        with open('gcc', 'w') as f:
            f.write("""\
#!/bin/bash
#!/bin/sh
echo "0.0.0"
""")
        os.chmod('gcc', 0o700)
@@ -75,6 +77,33 @@ def test_compiler_find_without_paths(no_compilers_yaml, working_env, tmpdir):
    assert 'gcc' in output


@pytest.mark.regression('17589')
def test_compiler_find_no_apple_gcc(no_compilers_yaml, working_env, tmpdir):
    with tmpdir.as_cwd():
        # make a script to emulate apple gcc's version args
        with open('gcc', 'w') as f:
            f.write("""\
#!/bin/sh
if [ "$1" = "-dumpversion" ]; then
    echo "4.2.1"
elif [ "$1" = "--version" ]; then
    echo "Configured with: --prefix=/dummy"
    echo "Apple clang version 11.0.0 (clang-1100.0.33.16)"
    echo "Target: x86_64-apple-darwin18.7.0"
    echo "Thread model: posix"
    echo "InstalledDir: /dummy"
else
    echo "clang: error: no input files"
fi
""")
        os.chmod('gcc', 0o700)

        os.environ['PATH'] = str(tmpdir)
        output = compiler('find', '--scope=site')

        assert 'gcc' not in output


def test_compiler_remove(mutable_config, mock_packages):
    args = spack.util.pattern.Bunch(
        all=True, compiler_spec='gcc@4.5.0', add_paths=[], scope=None
@@ -103,3 +132,121 @@ def test_compiler_add(
    new_compiler = new_compilers - old_compilers
    assert any(c.version == spack.version.Version(mock_compiler_version)
               for c in new_compiler)


@pytest.fixture
def clangdir(tmpdir):
    """Create a directory with some dummy compiler scripts in it.

    Scripts are:
      - clang
      - clang++
      - gcc-8
      - g++-8
      - gfortran-8

    """
    with tmpdir.as_cwd():
        with open('clang', 'w') as f:
            f.write("""\
#!/bin/sh
if [ "$1" = "--version" ]; then
    echo "clang version 11.0.0 (clang-1100.0.33.16)"
    echo "Target: x86_64-apple-darwin18.7.0"
    echo "Thread model: posix"
    echo "InstalledDir: /dummy"
else
    echo "clang: error: no input files"
    exit 1
fi
""")
        shutil.copy('clang', 'clang++')

        gcc_script = """\
#!/bin/sh
if [ "$1" = "-dumpversion" ]; then
    echo "8"
elif [ "$1" = "-dumpfullversion" ]; then
    echo "8.4.0"
elif [ "$1" = "--version" ]; then
    echo "{0} (GCC) 8.4.0 20120313 (Red Hat 8.4.0-1)"
    echo "Copyright (C) 2010 Free Software Foundation, Inc."
else
    echo "{1}: fatal error: no input files"
    echo "compilation terminated."
    exit 1
fi
"""
        with open('gcc-8', 'w') as f:
            f.write(gcc_script.format('gcc', 'gcc-8'))
        with open('g++-8', 'w') as f:
            f.write(gcc_script.format('g++', 'g++-8'))
        with open('gfortran-8', 'w') as f:
            f.write(gcc_script.format('GNU Fortran', 'gfortran-8'))
        os.chmod('clang', 0o700)
        os.chmod('clang++', 0o700)
        os.chmod('gcc-8', 0o700)
        os.chmod('g++-8', 0o700)
        os.chmod('gfortran-8', 0o700)

        yield tmpdir


@pytest.mark.regression('17590')
def test_compiler_find_mixed_suffixes(
        no_compilers_yaml, working_env, clangdir):
    """Ensure that we'll mix compilers with different suffixes when necessary.
    """
    os.environ['PATH'] = str(clangdir)
    output = compiler('find', '--scope=site')

    assert 'clang@11.0.0' in output
    assert 'gcc@8.4.0' in output

    config = spack.compilers.get_compiler_config('site', False)
    clang = next(c['compiler'] for c in config
                 if c['compiler']['spec'] == 'clang@11.0.0')
    gcc = next(c['compiler'] for c in config
               if c['compiler']['spec'] == 'gcc@8.4.0')

    gfortran_path = str(clangdir.join('gfortran-8'))

    assert clang['paths'] == {
        'cc': str(clangdir.join('clang')),
        'cxx': str(clangdir.join('clang++')),
        # we only auto-detect mixed clang on macos
        'f77': gfortran_path if sys.platform == 'darwin' else None,
        'fc': gfortran_path if sys.platform == 'darwin' else None,
    }

    assert gcc['paths'] == {
        'cc': str(clangdir.join('gcc-8')),
        'cxx': str(clangdir.join('g++-8')),
        'f77': gfortran_path,
        'fc': gfortran_path,
    }


@pytest.mark.regression('17590')
def test_compiler_find_prefer_no_suffix(
        no_compilers_yaml, working_env, clangdir):
    """Ensure that we'll pick 'clang' over 'clang-gpu' when there is a choice.
    """
    with clangdir.as_cwd():
        shutil.copy('clang', 'clang-gpu')
        shutil.copy('clang++', 'clang++-gpu')
        os.chmod('clang-gpu', 0o700)
        os.chmod('clang++-gpu', 0o700)

    os.environ['PATH'] = str(clangdir)
    output = compiler('find', '--scope=site')

    assert 'clang@11.0.0' in output
    assert 'gcc@8.4.0' in output

    config = spack.compilers.get_compiler_config('site', False)
    clang = next(c['compiler'] for c in config
                 if c['compiler']['spec'] == 'clang@11.0.0')

    assert clang['paths']['cc'] == str(clangdir.join('clang'))
    assert clang['paths']['cxx'] == str(clangdir.join('clang++'))
@@ -16,7 +16,7 @@

from spack.cmd.env import _env_create
from spack.spec import Spec
from spack.main import SpackCommand
from spack.main import SpackCommand, SpackCommandError
from spack.stage import stage_prefix

from spack.util.mock_package import MockPackageMultiRepo
@@ -284,6 +284,45 @@ def test_environment_status(capsys, tmpdir):
    assert 'in current directory' in env('status')


def test_env_status_broken_view(
    mutable_mock_env_path, mock_archive, mock_fetch, mock_packages,
    install_mockery
):
    with ev.create('test'):
        install('trivial-install-test-package')

    # switch to a new repo that doesn't include the installed package
    # test that Spack detects the missing package and warns the user
    new_repo = MockPackageMultiRepo()
    with spack.repo.swap(new_repo):
        output = env('status')
        assert 'In environment test' in output
        assert 'Environment test includes out of date' in output

    # Test that the warning goes away when it's fixed
    output = env('status')
    assert 'In environment test' in output
    assert 'Environment test includes out of date' not in output


def test_env_activate_broken_view(
    mutable_mock_env_path, mock_archive, mock_fetch, mock_packages,
    install_mockery
):
    with ev.create('test'):
        install('trivial-install-test-package')

    # switch to a new repo that doesn't include the installed package
    # test that Spack detects the missing package and fails gracefully
    new_repo = MockPackageMultiRepo()
    with spack.repo.swap(new_repo):
        with pytest.raises(SpackCommandError):
            env('activate', '--sh', 'test')

    # test replacing repo fixes it
    env('activate', '--sh', 'test')


def test_to_lockfile_dict():
    e = ev.create('test')
    e.add('mpileaks')
@@ -29,6 +29,9 @@
install = SpackCommand('install')
env = SpackCommand('env')
add = SpackCommand('add')
mirror = SpackCommand('mirror')
uninstall = SpackCommand('uninstall')
buildcache = SpackCommand('buildcache')


@pytest.fixture()
@@ -130,8 +133,8 @@ def test_package_output(tmpdir, capsys, install_mockery, mock_fetch):

    # make sure that output from the actual package file appears in the
    # right place in the build log.
    assert re.search(r"BEFORE INSTALL\n==>( \[.+\])? './configure'", out)
    assert "'install'\nAFTER INSTALL" in out
    assert "BEFORE INSTALL" in out
    assert "AFTER INSTALL" in out


@pytest.mark.disable_clean_stage_check
@@ -177,10 +180,12 @@ def test_show_log_on_error(mock_packages, mock_archive, mock_fetch,
    assert install.error.pkg.name == 'build-error'
    assert 'Full build log:' in out

    # Message shows up for ProcessError (1), ChildError (1), and output (1)
    print(out)

    # Message shows up for ProcessError (1) and output (1)
    errors = [line for line in out.split('\n')
              if 'configure: error: cannot run C compiled programs' in line]
    assert len(errors) == 3
    assert len(errors) == 2


def test_install_overwrite(
@@ -733,6 +738,40 @@ def test_compiler_bootstrap(
    install('a%gcc@2.0')


def test_compiler_bootstrap_from_binary_mirror(
        install_mockery_mutable_config, mock_packages, mock_fetch,
        mock_archive, mutable_config, monkeypatch, tmpdir):
    """Make sure installing compiler from buildcache registers compiler"""

    # Create a temp mirror directory for buildcache usage
    mirror_dir = tmpdir.join('mirror_dir')
    mirror_url = 'file://{0}'.format(mirror_dir.strpath)

    # Install a compiler, because we want to put it in a buildcache
    install('gcc@2.0')

    # Put installed compiler in the buildcache
    buildcache('create', '-u', '-a', '-f', '-d', mirror_dir.strpath, 'gcc@2.0')

    # Now uninstall the compiler
    uninstall('-y', 'gcc@2.0')

    monkeypatch.setattr(spack.concretize.Concretizer,
                        'check_for_compiler_existence', False)
    spack.config.set('config:install_missing_compilers', True)
    assert CompilerSpec('gcc@2.0') not in compilers.all_compiler_specs()

    # Configure the mirror where we put that buildcache w/ the compiler
    mirror('add', 'test-mirror', mirror_url)

    # Now make sure that when the compiler is installed from binary mirror,
    # it also gets configured as a compiler. Test succeeds if it does not
    # raise an error
    install('--no-check-signature', '--cache-only', '--only',
            'dependencies', 'b%gcc@2.0')
    install('--no-cache', '--only', 'package', 'b%gcc@2.0')


@pytest.mark.regression('16221')
def test_compiler_bootstrap_already_installed(
    install_mockery_mutable_config, mock_packages, mock_fetch,
@@ -344,10 +344,9 @@ def test_nosource_pkg_install(

    # Make sure install works even though there is no associated code.
    pkg.do_install()

    # Also make sure an error is raised if `do_fetch` is called.
    pkg.do_fetch()
    assert "No fetch required for nosource" in capfd.readouterr()[0]
    out = capfd.readouterr()
    assert "Installing dependency-install" in out[0]
    assert "Missing a source id for nosource" in out[1]


def test_nosource_pkg_install_post_install(
@@ -99,10 +99,21 @@ def test_hms(sec, result):
    assert inst._hms(sec) == result


def test_install_msg():
def test_install_msg(monkeypatch):
    """Test results of call to install_msg based on debug level."""
    name = 'some-package'
    pid = 123456
    expected = "{0}: Installing {1}".format(pid, name)
    install_msg = 'Installing {0}'.format(name)

    monkeypatch.setattr(tty, '_debug', 0)
    assert inst.install_msg(name, pid) == install_msg

    monkeypatch.setattr(tty, '_debug', 1)
    assert inst.install_msg(name, pid) == install_msg

    # Expect the PID to be added at debug level 2
    monkeypatch.setattr(tty, '_debug', 2)
    expected = "{0}: {1}".format(pid, install_msg)
    assert inst.install_msg(name, pid) == expected
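The test pins down the new contract: the PID prefix only appears at debug level 2 and above. A minimal sketch of an `install_msg` satisfying that contract, reading the same `tty._debug` attribute the test monkeypatches (the real implementation lives in Spack's installer module and may differ in detail):

```python
import llnl.util.tty as tty

def install_msg(name, pid):
    # Prefix the message with the caller's PID only at debug level 2+,
    # where it helps tell concurrent installs apart; otherwise return
    # the plain message.
    pre = '{0}: '.format(pid) if tty._debug > 1 else ''
    return pre + 'Installing {0}'.format(name)
```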
@@ -151,7 +162,6 @@ def test_process_external_package_module(install_mockery, monkeypatch, capfd):

    out = capfd.readouterr()[0]
    assert 'has external module in {0}'.format(spec.external_module) in out
    assert 'is actually installed in {0}'.format(spec.external_path) in out


def test_process_binary_cache_tarball_none(install_mockery, monkeypatch,
@@ -180,7 +190,7 @@ def _spec(spec):
    spec = spack.spec.Spec('a').concretized()
    assert inst._process_binary_cache_tarball(spec.package, spec, False, False)

    assert 'Installing a from binary cache' in capfd.readouterr()[0]
    assert 'Extracting a from binary cache' in capfd.readouterr()[0]


def test_try_install_from_binary_cache(install_mockery, mock_packages,
lib/spack/spack/test/llnl/util/tty/tty.py (new file, 87 lines)
@@ -0,0 +1,87 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os

import pytest
import llnl.util.tty as tty


def test_get_timestamp(monkeypatch):
    """Ensure the results of get_timestamp are reasonable."""

    # Debug disabled should return an empty string
    monkeypatch.setattr(tty, '_debug', 0)
    assert not tty.get_timestamp(False), 'Expected an empty string'

    # Debug disabled but force the timestamp should return a string
    assert tty.get_timestamp(True), 'Expected a timestamp/non-empty string'

    pid_str = ' {0}'.format(os.getpid())

    # Level 1 debugging should return a timestamp WITHOUT the pid
    monkeypatch.setattr(tty, '_debug', 1)
    out_str = tty.get_timestamp(False)
    assert out_str and pid_str not in out_str, 'Expected no PID in results'

    # Level 2 debugging should also return a timestamp WITH the pid
    monkeypatch.setattr(tty, '_debug', 2)
    out_str = tty.get_timestamp(False)
    assert out_str and pid_str in out_str, 'Expected PID in results'


@pytest.mark.parametrize('msg,enabled,trace,newline', [
    ('', False, False, False),           # Nothing is output
    (Exception(''), True, False, True),  # Exception output
    ('trace', True, True, False),        # stacktrace output
    ('newline', True, False, True),      # newline in output
    ('no newline', True, False, False)   # no newline output
])
def test_msg(capfd, monkeypatch, enabled, msg, trace, newline):
    """Ensure the output from msg with options is appropriate."""

    # temporarily use the parameterized settings
    monkeypatch.setattr(tty, '_msg_enabled', enabled)
    monkeypatch.setattr(tty, '_stacktrace', trace)

    expected = [msg if isinstance(msg, str) else 'Exception: ']
    if newline:
        expected[0] = '{0}\n'.format(expected[0])
    if trace:
        expected.insert(0, '.py')

    tty.msg(msg, newline=newline)
    out = capfd.readouterr()[0]
    for msg in expected:
        assert msg in out


@pytest.mark.parametrize('msg,trace,wrap', [
    (Exception(''), False, False),  # Exception output
    ('trace', True, False),         # stacktrace output
    ('wrap', False, True),          # wrap in output
])
def test_info(capfd, monkeypatch, msg, trace, wrap):
    """Ensure the output from info with options is appropriate."""

    # temporarily use the parameterized settings
    monkeypatch.setattr(tty, '_stacktrace', trace)

    expected = [msg if isinstance(msg, str) else 'Exception: ']
    if trace:
        expected.insert(0, '.py')

    extra = 'This extra argument *should* make for a sufficiently long line' \
        ' that needs to be wrapped if the option is enabled.'
    args = [msg, extra]

    num_newlines = 3 if wrap else 2

    tty.info(*args, wrap=wrap, countback=3)
    out = capfd.readouterr()[0]
    for msg in expected:
        assert msg in out

    assert out.count('\n') == num_newlines
@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
import pytest

import spack.fetch_strategy as spack_fs
@@ -27,3 +28,19 @@ def test_s3fetchstrategy_bad_url(tmpdir):
    assert fetcher.archive_file is None
    with pytest.raises(spack_fs.FetchError):
        fetcher.fetch()


def test_s3fetchstrategy_downloaded(tmpdir):
    """Ensure fetch with archive file already downloaded is a noop."""
    testpath = str(tmpdir)
    archive = os.path.join(testpath, 's3.tar.gz')

    class Archived_S3FS(spack_fs.S3FetchStrategy):
        @property
        def archive_file(self):
            return archive

    url = 's3:///{0}'.format(archive)
    fetcher = Archived_S3FS(url=url)
    with spack_stage.Stage(fetcher, path=testpath):
        fetcher.fetch()
@@ -6,7 +6,10 @@
import sys
import os

import pytest

import llnl.util.filesystem as fs

import spack
import spack.util.executable as ex
from spack.hooks.sbang import filter_shebangs_in_directory
@@ -35,3 +38,18 @@ def test_read_unicode(tmpdir, working_env):
    # read the unicode back in and see whether things work
    script = ex.Executable('./%s' % script_name)
    assert u'\xc3' == script(output=str).strip()


def test_which(tmpdir):
    os.environ["PATH"] = str(tmpdir)
    assert ex.which("spack-test-exe") is None
    with pytest.raises(ex.CommandNotFoundError):
        ex.which("spack-test-exe", required=True)

    with tmpdir.as_cwd():
        fs.touch("spack-test-exe")
        fs.set_executable('spack-test-exe')

        exe = ex.which("spack-test-exe")
        assert exe is not None
        assert exe.path == str(tmpdir.join("spack-test-exe"))
@@ -239,7 +239,8 @@ def which_string(*args, **kwargs):
        return exe

    if required:
        tty.die("spack requires '%s'. Make sure it is in your path." % args[0])
        raise CommandNotFoundError(
            "spack requires '%s'. Make sure it is in your path." % args[0])

    return None

@@ -266,3 +267,7 @@ def which(*args, **kwargs):

class ProcessError(spack.error.SpackError):
    """ProcessErrors are raised when Executables exit with an error code."""


class CommandNotFoundError(spack.error.SpackError):
    """Raised when ``which()`` can't find a required executable."""
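Since `which(..., required=True)` now raises instead of calling `tty.die()`, callers can recover from a missing executable rather than having the whole process exit. A small usage sketch (the command names are placeholders):

```python
from spack.util.executable import which, CommandNotFoundError

tar = which('tar')  # returns an Executable, or None when nothing is found

try:
    which('some-missing-tool', required=True)
except CommandNotFoundError as e:
    # the caller decides how to handle the missing tool now
    print('not on PATH: {0}'.format(e))
```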
@@ -77,6 +77,8 @@ def __init__(self):
    def get(self, spec):
        if not isinstance(spec, spack.spec.Spec):
            spec = Spec(spec)
        if spec.name not in self.spec_to_pkg:
            raise spack.repo.UnknownPackageError(spec.fullname)
        return self.spec_to_pkg[spec.name]

    def get_pkg_class(self, name):
share/spack/keys/tutorial.pub (new file, 38 lines)
@@ -0,0 +1,38 @@
-----BEGIN PGP PUBLIC KEY BLOCK-----

mQENBF1IgqcBCADqSIBM0TT4+6Acv6SUpQ2l1Ql+UVRtJ74VGFOw+8I8aBWcBryB
wNsS/Drxn9M9rX8il2aGtAmwc1dhTh0JvdZO7KqG8Q4vvWOytdLnGSE61LV4147q
S/dJiYH2DCvhMKpOByIsEiuoTrUHzd1EQBnEPSwAQV8oWPrc1++f3iYmRemsOBCT
BldAu7Y5RwjI3qQ6GazoCF5rd1uyiMYrpT4amEKFE91VRe+IG8XfEaSTapOc/hO3
Sw4fzPelA2qD12I+JMj56vM0fQy3TXD5qngIb+leb2jGI+0bTz8RGS0xSMYVvftA
upzQPaQIfzijVBt3tFSayx/NXKR0p+EuCqGBABEBAAG0MFNwYWNrIEJ1aWxkIFBp
cGVsaW5lIChEZW1vIEtleSkgPGtleUBzcGFjay5kZW1vPokBTgQTAQgAOBYhBDHI
4nh6FErErdiO0pX4aBGV4jnYBQJdSIKnAhsvBQsJCAcCBhUKCQgLAgQWAgMBAh4B
AheAAAoJEJX4aBGV4jnYpf0IAJDYEjpm0h1pNswTvmnEhgNVbojCGRfAts7F5uf8
IFXGafKQsekMWZh0Ig0YXVn72jsOuNK/+keErMfXM3DFNTq0Ki7mcFedR9r5EfLf
4YW2n6mphsfMgsg8NwKVLFYWyhQQ4OzhdydPxkGVhEebHwfHNQ3aIcqbFmzkhxnX
CIYh2Flf3T306tKX4lXbhsXKG1L/bLtDiFRaMCBp66HGZ8u9Dbyy/W8aDwyx4duD
MG+y2OrhOf+zEu3ZPFyc/jsjmfnUtIfQVyRajh/8vh+i9fkvFlLaOQittNElt3z1
8+ybGjE9qWY/mvR2ZqnP8SVkGvxSpBVfVXiFFdepvuPAcLu5AQ0EXUiCpwEIAJ2s
npNBAVocDUSdOF/Z/eCRvy3epuYm5f1Ge1ao9K2qWYno2FatnsYxK4qqB5yGRkfj
sEzAGP8JtJvqDSuB5Xk7CIjRNOwoSB3hqvmxWh2h+HsITUhMl11FZ0Cllz+etXcK
APz2ZHSKnA3R8uf4JzIr1cHLS+gDBoj8NgBCZhcyva2b5UC///FLm1+/Lpvekd0U
n7B524hbXhFUG+UMfHO/U1c4TvCMt7RGMoWUtRzfO6XB1VQCwWJBVcVGl8Yy59Zk
3K76VbFWQWOq6fRBE0xHBAga7pOgCc9qrb+FGl1IHUT8aV8CzkxckHlNb3PlntmE
lXZLPcGFWaPtGtuIJVsAEQEAAYkCbAQYAQgAIBYhBDHI4nh6FErErdiO0pX4aBGV
4jnYBQJdSIKnAhsuAUAJEJX4aBGV4jnYwHQgBBkBCAAdFiEEneR3pKqi9Rnivv07
CYCNVr37XP0FAl1IgqcACgkQCYCNVr37XP13RQf/Ttxidgo9upF8jxrWnT5YhM6D
ozzGWzqE+/KDBX+o4f33o6uzozjESRXQUKdclC9ftDJQ84lFTMs3Z+/12ZDqCV2k
2qf0VfXg4e5xMq4tt6hojXUeYSfeGZXNU9LzjURCcMD+amIKjVztFg4kl3KHW3Pi
/aPTr4xWWgy2tZ1FDEuA5J6AZiKKJSVeoSPOGANouPqm4fNj273XFXQepIhQ5wve
4No0abxfXcLt5Yp3y06rNCBC9QdC++19N5+ajn2z9Qd2ZwztPb0mNuqHAok4vrlE
1c4WBWk93Nfy9fKImalGENpPDz0td2H9pNC9IafOWltGSWSINRrU1GeaNXS/uAOT
CADjcDN+emLbDTTReW4FLoQ0mPJ0tACgszGW50PtncTMPSj4uxSktQPWWk41oD9q
gpXm1Vgto4GvPWYs/ewR6Kyd8K0YkBxbRFyYOmycu3/zzYJnry+EHdvtQspwUDPg
QlI/avDrncERzICsbd86Jz0CMY4kzpg5v9dt/N6WnHlSk/S+vv4pPUDSz26Q4Ehh
iDvDavLGyzKSlVzWQ4bzzlQxXbDL6TZyVAQ4DBI4sI+WGtLbfD51EI5G9BfmDsbw
XJ0Dt2yEwRfDUx/lYbAMvhUnWEu2DSpYdJb8GG0GKTGqU4YpvO1JgTCsLSLIAHfT
tQMw04Gs+kORRNbggsdTD4sR
=N5Wp
-----END PGP PUBLIC KEY BLOCK-----
share/spack/setup-tutorial-env.sh (new executable file, 129 lines)
@@ -0,0 +1,129 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

###############################################################################
#
# This file is part of Spack and sets up the environment for the Spack
# tutorial. It is intended to be run on Ubuntu 18.04, an Ubuntu 18.04
# container, or an AWS Cloud9 environment.
#
# Components:
#   1. apt installs for packages used in the tutorial
#      these include compilers and externals used by the tutorial and
#      basic spack requirements like python and curl
#   2. spack configuration files
#      these set the default configuration for Spack to use x86_64 and
#      suppress certain gpg warnings. The gpg warnings are not relevant for
#      the tutorial, and the default x86_64 architecture allows us to run
#      the same tutorial on any x86_64 architecture without needing new
#      binary packages.
#   3. aws cloud9 configuration to expand available storage
#      when we run on aws cloud9 we have to expand the storage from 10G to
#      30G because we install too much software for a default cloud9 instance
###############################################################################

####
# Ensure we're on Ubuntu 18.04
####

if [ -f /etc/os-release ]; then
    . /etc/os-release
fi
if [ x"$UBUNTU_CODENAME" != "xbionic" ]; then
    echo "The tutorial setup script must be run on Ubuntu 18.04."
    return 1 &>/dev/null || exit 1  # works if sourced or run
fi

####
# Install packages needed for tutorial
####

# compilers, basic system components, externals
# There are retries around these because apt fails frequently on new
# instances, due to unattended updates running in the background and taking
# the lock.
until sudo apt-get update -y; do
    echo "==> apt-get update failed. retrying..."
    sleep 5
done

until sudo apt-get install -y --no-install-recommends \
    autoconf make python3 python3-pip \
    build-essential ca-certificates curl git gnupg2 iproute2 emacs \
    file openssh-server tcl unzip vim wget \
    clang g++ g++-6 gcc gcc-6 gfortran gfortran-6 \
    zlib1g zlib1g-dev mpich; do
    echo "==> apt-get install failed. retrying..."
    sleep 5
done

####
# Upgrade boto3 python package on AWS systems
####
pip3 install --upgrade boto3


####
# Spack configuration settings for tutorial
####

# create spack system config
sudo mkdir -p /etc/spack

# set default arch to x86_64
sudo tee /etc/spack/packages.yaml << EOF > /dev/null
packages:
  all:
    target: [x86_64]
EOF

# suppress gpg warnings
sudo tee /etc/spack/config.yaml << EOF > /dev/null
config:
  suppress_gpg_warnings: true
EOF

####
# AWS: set volume size to at least 30G
####

# Hardcode the specified size to 30G
SIZE=30

# Get the ID of the environment host Amazon EC2 instance.
INSTANCEID=$(curl http://169.254.169.254/latest/meta-data//instance-id)

# Get the ID of the Amazon EBS volume associated with the instance.
VOLUMEID=$(aws ec2 describe-instances \
    --instance-id $INSTANCEID \
    --query "Reservations[0].Instances[0].BlockDeviceMappings[0].Ebs.VolumeId" \
    --output text)

# Resize the EBS volume.
aws ec2 modify-volume --volume-id $VOLUMEID --size $SIZE

# Wait for the resize to finish.
while [ \
    "$(aws ec2 describe-volumes-modifications \
        --volume-id $VOLUMEID \
        --filters Name=modification-state,Values="optimizing","completed" \
        --query "length(VolumesModifications)" \
        --output text)" != "1" ]; do
    sleep 1
done

if [ -e /dev/xvda1 ]
then
    # Rewrite the partition table so that the partition takes up all the
    # space that it can.
    sudo growpart /dev/xvda 1

    # Expand the size of the file system.
    sudo resize2fs /dev/xvda1
else
    # Rewrite the partition table so that the partition takes up all the
    # space that it can.
    sudo growpart /dev/nvme0n1 1

    # Expand the size of the file system.
    sudo resize2fs /dev/nvme0n1p1
fi
@@ -15,6 +15,6 @@ class AdeptUtils(CMakePackage):
    version('1.0.1', sha256='259f777aeb368ede3583d3617bb779f0fde778319bf2122fdd216bdf223c015e')
    version('1.0', sha256='fed29366c9bcf5f3799220ae3b351d2cb338e2aa42133d61584ea650aa8d6ff7')

    depends_on('boost')
    depends_on('boost@:1.72.0')
    depends_on('mpi')
    depends_on('cmake@2.8:', type='build')
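The new constraint uses Spack's open-ended range syntax: `@:1.72.0` admits any boost up to and including 1.72.0, presumably because newer boost releases broke this package's build at the time. A quick sanity check of that semantics, assuming the usual `ver()`/`Version` behavior of this release line:

```python
# Assumes spack.version's ver() parses ':1.72.0' into a range with an
# open lower bound and an inclusive upper bound.
from spack.version import Version, ver

rng = ver(':1.72.0')
assert Version('1.72.0') in rng      # upper bound is inclusive
assert Version('1.73.0') not in rng  # newer releases are excluded
```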
@@ -238,6 +238,7 @@ def post_install(self):
            spec['libelf'].prefix.include,
            spec['hwloc'].prefix.include))

        cmake(*args)
        make()
        make('install')
        # Only build if offload target.
        cmake(*args)
        make()
        make('install')