Compare commits
65 Commits
test/envir...f/env-loca

Commits (SHA1):
79e821a43e
c16f166dcc
4ae9730ffe
9debeaf4e7
47cdefcbe8
2acc356ed4
36bf6c9009
29fbad20a2
5f78703af8
9b390bdc2c
bc427e8435
3c6e6e22be
4a8f755632
51de7ed7ee
047110c086
7fa16089fc
065eaa739f
8d8e88c177
8cc69cecfc
9c690a1ef5
3532d6ff16
7494893d3b
54e4a72b8e
b86980461f
23b5932f73
93760847e8
a35d2f39af
af17cc60a9
b0528cae3f
372a18392a
70db49dfed
1d24c196da
fa3d768947
9b6a109c7e
663967d984
b47ff2a2de
f7b4993810
3e07eb8cf0
e773396747
e19cc2385e
e867662e1b
6aad926838
41cf807804
cf8b919954
47957dccf4
38313cadf4
730d005a56
2f7c850a20
5aa7a564d3
d50c8f1727
0ac6dfa8f3
0b0ffe645d
9984c838c8
b1bd61321d
c144558245
ef43044672
da7294cd90
addb891f42
bce2d38bfc
2cae95334c
076d60ce35
9ecdafd8de
76fde639e8
490b5eef7c
3f2e77e5fa
.git-blame-ignore-revs
@@ -1,5 +1,3 @@
 # .git-blame-ignore-revs
-# Formatted entire codebase with black 23
-603569e321013a1a63a637813c94c2834d0a0023
-# Formatted entire codebase with black 22
+# Formatted entire codebase with black
 f52f6e99dbf1131886a80112b8c79dfc414afb7c
.gitattributes (vendored): 1 line changed
@@ -1,4 +1,3 @@
 *.py diff=python
-*.lp linguist-language=Prolog
 lib/spack/external/* linguist-vendored
 *.bat text eol=crlf
.github/workflows/valid-style.yml (vendored): 2 lines changed
@@ -44,7 +44,7 @@ jobs:
       cache: 'pip'
     - name: Install Python packages
       run: |
-        python3 -m pip install --upgrade pip six setuptools types-six black==23.1.0 mypy isort clingo flake8
+        python3 -m pip install --upgrade pip six setuptools types-six black==22.12.0 mypy isort clingo flake8
     - name: Setup git configuration
       run: |
         # Need this for the git tests to succeed.
CHANGELOG.md: 25 lines changed
@@ -1,28 +1,3 @@
-# v0.19.1 (2023-02-07)
-
-### Spack Bugfixes
-
-* `buildcache create`: make "file exists" less verbose (#35019)
-* `spack mirror create`: don't change paths to urls (#34992)
-* Improve error message for requirements (#33988)
-* uninstall: fix accidental cubic complexity (#34005)
-* scons: fix signature for `install_args` (#34481)
-* Fix `combine_phase_logs` text encoding issues (#34657)
-* Use a module-like object to propagate changes in the MRO, when setting build env (#34059)
-* PackageBase should not define builder legacy attributes (#33942)
-* Forward lookup of the "run_tests" attribute (#34531)
-* Bugfix for timers (#33917, #33900)
-* Fix path handling in prefix inspections (#35318)
-* Fix libtool filter for Fujitsu compilers (#34916)
-* Bug fix for duplicate rpath errors on macOS when creating build caches (#34375)
-* FileCache: delete the new cache file on exception (#34623)
-* Propagate exceptions from Spack python console (#34547)
-* Tests: Fix a bug/typo in a `config_values.py` fixture (#33886)
-* Various CI fixes (#33953, #34560, #34560, #34828)
-* Docs: remove monitors and analyzers, typos (#34358, #33926)
-* bump release version for tutorial command (#33859)
-
-
 # v0.19.0 (2022-11-11)

 `v0.19.0` is a major feature release.
@@ -226,7 +226,7 @@ for %%Z in ("%_pa_new_path%") do if EXIST %%~sZ\NUL (
 exit /b 0

 :: set module system roots
-:_sp_multi_pathadd
+:_sp_multi_pathadd
 for %%I in (%~2) do (
     for %%Z in (%_sp_compatible_sys_types%) do (
         :pathadd "%~1" "%%I\%%Z"
@@ -185,7 +185,7 @@ config:
   # when Spack needs to manage its own package metadata and all operations are
   # expected to complete within the default time limit. The timeout should
   # therefore generally be left untouched.
-  db_lock_timeout: 60
+  db_lock_timeout: 3


   # How long to wait when attempting to modify a package (e.g. to install it).
lib/spack/docs/.gitignore (vendored): 1 line changed
@@ -5,4 +5,3 @@ llnl*.rst
 _build
 .spack-env
 spack.lock
-_spack_root
@@ -366,7 +366,7 @@ If the ``pyproject.toml`` lists ``mesonpy`` as the ``build-backend``,
 it uses the meson build system. Meson uses the default
 ``pyproject.toml`` keys to list dependencies.

-See https://meson-python.readthedocs.io/en/latest/tutorials/introduction.html
+See https://meson-python.readthedocs.io/en/latest/usage/start.html
 for more information.

 """
@@ -58,7 +58,9 @@ Testing
 ``WafPackage`` also provides ``test`` and ``installtest`` methods,
 which are run after the ``build`` and ``install`` phases, respectively.
 By default, these phases do nothing, but you can override them to
-run package-specific unit tests.
+run package-specific unit tests. For example, the
+`py-py2cairo <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/py-py2cairo/package.py>`_
+package uses:

 .. code-block:: python

@@ -89,7 +89,6 @@
 # Enable todo items
 todo_include_todos = True

-
 #
 # Disable duplicate cross-reference warnings.
 #
@@ -354,7 +353,9 @@ class SpackStyle(DefaultStyle):

 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title, author, documentclass [howto/manual]).
-latex_documents = [("index", "Spack.tex", "Spack Documentation", "Todd Gamblin", "manual")]
+latex_documents = [
+    ("index", "Spack.tex", "Spack Documentation", "Todd Gamblin", "manual"),
+]

 # The name of an image file (relative to this directory) to place at the top of
 # the title page.
@@ -401,7 +402,7 @@ class SpackStyle(DefaultStyle):
         "Spack",
         "One line description of project.",
         "Miscellaneous",
-    )
+    ),
 ]

 # Documents to append as an appendix to all manuals.
@@ -417,4 +418,6 @@ class SpackStyle(DefaultStyle):
 # -- Extension configuration -------------------------------------------------

 # sphinx.ext.intersphinx
-intersphinx_mapping = {"python": ("https://docs.python.org/3", None)}
+intersphinx_mapping = {
+    "python": ("https://docs.python.org/3", None),
+}
@@ -222,7 +222,7 @@ and location. (See the *Configuration settings* section of ``man
 ccache`` to learn more about the default settings and how to change
 them). Please note that we currently disable ccache's ``hash_dir``
 feature to avoid an issue with the stage directory (see
-https://github.com/spack/spack/pull/3761#issuecomment-294352232).
+https://github.com/LLNL/spack/pull/3761#issuecomment-294352232).

 -----------------------
 ``shared_linking:type``
@@ -118,7 +118,7 @@ make another change, test that change, etc. We use `pytest
 <http://pytest.org/>`_ as our tests framework, and these types of
 arguments are just passed to the ``pytest`` command underneath. See `the
 pytest docs
-<https://doc.pytest.org/en/latest/how-to/usage.html#specifying-which-tests-to-run>`_
+<http://doc.pytest.org/en/latest/usage.html#specifying-tests-selecting-tests>`_
 for more details on test selection syntax.

 ``spack unit-test`` has a few special options that can help you
@@ -147,7 +147,7 @@ you want to know about. For example, to see just the tests in

 You can also combine any of these options with a ``pytest`` keyword
 search. See the `pytest usage docs
-<https://doc.pytest.org/en/latest/how-to/usage.html#specifying-which-tests-to-run>`_
+<https://docs.pytest.org/en/stable/usage.html#specifying-tests-selecting-tests>`_:
 for more details on test selection syntax. For example, to see the names of all tests that have "spec"
 or "concretize" somewhere in their names:

@@ -21,7 +21,7 @@ be present on the machine where Spack is run:
    :header-rows: 1

 These requirements can be easily installed on most modern Linux systems;
-on macOS, the Command Line Tools package is required, and a full XCode suite
+on macOS, the Command Line Tools package is required, and a full XCode suite
 may be necessary for some packages such as Qt and apple-gl. Spack is designed
 to run on HPC platforms like Cray. Not all packages should be expected
 to work on all platforms.
@@ -1506,7 +1506,7 @@ Spack On Windows

 Windows support for Spack is currently under development. While this work is still in an early stage,
 it is currently possible to set up Spack and perform a few operations on Windows. This section will guide
-you through the steps needed to install Spack and start running it on a fresh Windows machine.
+you through the steps needed to install Spack and start running it on a fresh Windows machine.

 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 Step 1: Install prerequisites
@@ -1516,7 +1516,7 @@ To use Spack on Windows, you will need the following packages:

 Required:
 * Microsoft Visual Studio
-* Python
+* Python
 * Git

 Optional:
@@ -1547,8 +1547,8 @@ Intel Fortran
 """""""""""""

 For Fortran-based packages on Windows, we strongly recommend Intel's oneAPI Fortran compilers.
-The suite is free to download from Intel's website, located at
-https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/fortran-compiler.html.
+The suite is free to download from Intel's website, located at
+https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/fortran-compiler.html#gs.70t5tw.
 The executable of choice for Spack will be Intel's Beta Compiler, ifx, which supports the classic
 compiler's (ifort's) frontend and runtime libraries by using LLVM.
@@ -1392,7 +1392,7 @@ Go
 ^^

 Go isn't a VCS, it is a programming language with a builtin command,
-`go get <https://pkg.go.dev/cmd/go#hdr-Add_dependencies_to_current_module_and_install_them>`_,
+`go get <https://golang.org/cmd/go/#hdr-Download_and_install_packages_and_dependencies>`_,
 that fetches packages and their dependencies automatically.
 The destination directory will be the standard stage source path.
@@ -2117,7 +2117,7 @@ dynamic loader where to find its dependencies at runtime. You may be
 familiar with `LD_LIBRARY_PATH
 <http://tldp.org/HOWTO/Program-Library-HOWTO/shared-libraries.html>`_
 on Linux or `DYLD_LIBRARY_PATH
-<https://developer.apple.com/library/archive/documentation/System/Conceptual/ManPages_iPhoneOS/man3/dyld.3.html>`_
+<https://developer.apple.com/library/mac/documentation/Darwin/Reference/ManPages/man1/dyld.1.html>`_
 on Mac OS X. RPATH is similar to these paths, in that it tells
 the loader where to find libraries. Unlike them, it is embedded in
 the binary and not set in each user's environment.
@@ -2429,7 +2429,7 @@ package, and a `canonical hash <https://github.com/spack/spack/pull/28156>`_ of
 the ``package.py`` recipes). ``test`` dependencies do not affect the package
 hash, as they are only used to construct a test environment *after* building and
 installing a given package installation. Older versions of Spack did not include
-build dependencies in the hash, but this has been
+build dependencies in the hash, but this has been
 `fixed <https://github.com/spack/spack/pull/28504>`_ as of |Spack v0.18|_.

 .. |Spack v0.18| replace:: Spack ``v0.18``
@@ -84,74 +84,6 @@
     "visit_directory_tree",
 ]

-if sys.version_info < (3, 7, 4):
-    # monkeypatch shutil.copystat to fix PermissionError when copying read-only
-    # files on Lustre when using Python < 3.7.4
-
-    def copystat(src, dst, follow_symlinks=True):
-        """Copy file metadata
-
-        Copy the permission bits, last access time, last modification time, and
-        flags from `src` to `dst`. On Linux, copystat() also copies the "extended
-        attributes" where possible. The file contents, owner, and group are
-        unaffected. `src` and `dst` are path names given as strings.
-
-        If the optional flag `follow_symlinks` is not set, symlinks aren't
-        followed if and only if both `src` and `dst` are symlinks.
-        """
-
-        def _nop(args, ns=None, follow_symlinks=None):
-            pass
-
-        # follow symlinks (aka don't not follow symlinks)
-        follow = follow_symlinks or not (os.path.islink(src) and os.path.islink(dst))
-        if follow:
-            # use the real function if it exists
-            def lookup(name):
-                return getattr(os, name, _nop)
-
-        else:
-            # use the real function only if it exists
-            # *and* it supports follow_symlinks
-            def lookup(name):
-                fn = getattr(os, name, _nop)
-                if sys.version_info >= (3, 3):
-                    if fn in os.supports_follow_symlinks:  # novermin
-                        return fn
-                return _nop
-
-        st = lookup("stat")(src, follow_symlinks=follow)
-        mode = stat.S_IMODE(st.st_mode)
-        lookup("utime")(dst, ns=(st.st_atime_ns, st.st_mtime_ns), follow_symlinks=follow)
-
-        # We must copy extended attributes before the file is (potentially)
-        # chmod()'ed read-only, otherwise setxattr() will error with -EACCES.
-        shutil._copyxattr(src, dst, follow_symlinks=follow)
-
-        try:
-            lookup("chmod")(dst, mode, follow_symlinks=follow)
-        except NotImplementedError:
-            # if we got a NotImplementedError, it's because
-            #   * follow_symlinks=False,
-            #   * lchown() is unavailable, and
-            #   * either
-            #       * fchownat() is unavailable or
-            #       * fchownat() doesn't implement AT_SYMLINK_NOFOLLOW.
-            #         (it returned ENOSUP.)
-            # therefore we're out of options--we simply cannot chown the
-            # symlink. give up, suppress the error.
-            # (which is what shutil always did in this circumstance.)
-            pass
-        if hasattr(st, "st_flags"):
-            try:
-                lookup("chflags")(dst, st.st_flags, follow_symlinks=follow)
-            except OSError as why:
-                for err in "EOPNOTSUPP", "ENOTSUP":
-                    if hasattr(errno, err) and why.errno == getattr(errno, err):
-                        break
-                else:
-                    raise
-
-    shutil.copystat = copystat
-

 def getuid():
     if is_windows:
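For context, ``shutil.copy2`` calls ``shutil.copystat`` after copying file contents,
so the monkeypatch removed above is hit by any metadata-preserving copy. A minimal
sketch of that code path (assumption: a plain POSIX filesystem, illustrative file
names only):

    import os
    import shutil
    import stat
    import tempfile

    # Create a read-only source file, then copy it with metadata preserved.
    src = tempfile.NamedTemporaryFile(delete=False).name
    os.chmod(src, stat.S_IRUSR)

    dst = src + ".copy"
    shutil.copy2(src, dst)  # copies contents, then calls shutil.copystat(src, dst)

    # The permission bits were propagated to the destination copy.
    print(oct(stat.S_IMODE(os.stat(dst).st_mode)))  # prints 0o400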
@@ -336,6 +268,7 @@ def groupid_to_group(x):
         regex = re.escape(regex)
     filenames = path_to_os_path(*filenames)
     for filename in filenames:
+
         msg = 'FILTER FILE: {0} [replacing "{1}"]'
         tty.debug(msg.format(filename, regex))
@@ -1287,6 +1220,7 @@ def traverse_tree(
         # target is relative to the link, then that may not resolve properly
         # relative to our cwd - see resolve_link_target_relative_to_the_link
         if os.path.isdir(source_child) and (follow_links or not os.path.islink(source_child)):
+
             # When follow_nonexisting isn't set, don't descend into dirs
             # in source that do not exist in dest
             if follow_nonexisting or os.path.exists(dest_child):
@@ -1728,6 +1662,7 @@ def find(root, files, recursive=True):

 @system_path_filter
 def _find_recursive(root, search_files):
+
     # The variable here is **on purpose** a defaultdict. The idea is that
     # we want to poke the filesystem as little as possible, but still maintain
     # stability in the order of the answer. Thus we are recording each library
@@ -2698,28 +2633,3 @@ def temporary_dir(
         yield tmp_dir
     finally:
         remove_directory_contents(tmp_dir)
-
-
-def filesummary(path, print_bytes=16) -> Tuple[int, bytes]:
-    """Create a small summary of the given file. Does not error
-    when file does not exist.
-
-    Args:
-        print_bytes (int): Number of bytes to print from start/end of file
-
-    Returns:
-        Tuple of size and byte string containing first n .. last n bytes.
-        Size is 0 if file cannot be read."""
-    try:
-        n = print_bytes
-        with open(path, "rb") as f:
-            size = os.fstat(f.fileno()).st_size
-            if size <= 2 * n:
-                short_contents = f.read(2 * n)
-            else:
-                short_contents = f.read(n)
-                f.seek(-n, 2)
-                short_contents += b"..." + f.read(n)
-        return size, short_contents
-    except OSError:
-        return 0, b""
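A minimal sketch of how the ``filesummary`` helper removed above behaves, per its
docstring (assumption: a Spack checkout from the side of the diff that still
ships it, with ``llnl.util.filesystem`` importable):

    from llnl.util.filesystem import filesummary

    # Short files come back whole; longer ones as the first and last 16 bytes
    # joined by b"...". Unreadable or missing files yield (0, b"").
    size, preview = filesummary("/etc/os-release")
    print(size, preview)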
@@ -198,7 +198,7 @@ def _memoized_function(*args, **kwargs):
         except TypeError as e:
             # TypeError is raised when indexing into a dict if the key is unhashable.
             raise UnhashableArguments(
-                "args + kwargs '{}' was not hashable for function '{}'".format(key, func.__name__)
+                "args + kwargs '{}' was not hashable for function '{}'".format(key, func.__name__),
             ) from e

     return _memoized_function
@@ -237,7 +237,6 @@ def decorator_with_or_without_args(decorator):
         @decorator
-
     """

     # See https://stackoverflow.com/questions/653368 for more on this
     @functools.wraps(decorator)
     def new_dec(*args, **kwargs):
@@ -991,7 +990,8 @@ def enum(**kwargs):


 def stable_partition(
-    input_iterable: Iterable, predicate_fn: Callable[[Any], bool]
+    input_iterable: Iterable,
+    predicate_fn: Callable[[Any], bool],
 ) -> Tuple[List[Any], List[Any]]:
     """Partition the input iterable according to a custom predicate.

@@ -1104,7 +1104,11 @@ def __enter__(self):

     def __exit__(self, exc_type, exc_value, tb):
         if exc_value is not None:
-            self._handler._receive_forwarded(self._context, exc_value, traceback.format_tb(tb))
+            self._handler._receive_forwarded(
+                self._context,
+                exc_value,
+                traceback.format_tb(tb),
+            )

         # Suppress any exception from being re-raised:
         # https://docs.python.org/3/reference/datamodel.html#object.__exit__.
@@ -75,7 +75,7 @@ def __init__(self, ignore=None):
         # so that we have a fast lookup and can run mkdir in order.
         self.directories = OrderedDict()

-        # Files to link. Maps dst_rel to (src_root, src_rel)
+        # Files to link. Maps dst_rel to (src_rel, src_root)
         self.files = OrderedDict()

     def before_visit_dir(self, root, rel_path, depth):
@@ -430,11 +430,6 @@ class MergeConflictError(Exception):
     pass


-class ConflictingSpecsError(MergeConflictError):
-    def __init__(self, spec_1, spec_2):
-        super(MergeConflictError, self).__init__(spec_1, spec_2)
-
-
 class SingleMergeConflictError(MergeConflictError):
     def __init__(self, path):
         super(MergeConflictError, self).__init__("Package merge blocked by file: %s" % path)
@@ -18,7 +18,7 @@ class Barrier:

     Python 2 doesn't have multiprocessing barriers so we implement this.

-    See https://greenteapress.com/semaphores/LittleBookOfSemaphores.pdf, p. 41.
+    See http://greenteapress.com/semaphores/downey08semaphores.pdf, p. 41.
     """

     def __init__(self, n, timeout=None):
@@ -108,6 +108,7 @@ class SuppressOutput:
     """Class for disabling output in a scope using 'with' keyword"""

     def __init__(self, msg_enabled=True, warn_enabled=True, error_enabled=True):
+
         self._msg_enabled_initial = _msg_enabled
         self._warn_enabled_initial = _warn_enabled
         self._error_enabled_initial = _error_enabled
@@ -11,7 +11,6 @@
 import io
 import os
 import sys
-from typing import IO, Any, List, Optional

 from llnl.util.tty import terminal_size
 from llnl.util.tty.color import cextra, clen
@@ -98,16 +97,7 @@ def config_uniform_cols(elts, console_width, padding, cols=0):
     return config


-def colify(
-    elts: List[Any],
-    cols: int = 0,
-    output: Optional[IO] = None,
-    indent: int = 0,
-    padding: int = 2,
-    tty: Optional[bool] = None,
-    method: str = "variable",
-    console_cols: Optional[int] = None,
-):
+def colify(elts, **options):
     """Takes a list of elements as input and finds a good columnization
     of them, similar to how gnu ls does. This supports both
     uniform-width and variable-width (tighter) columns.
@@ -116,21 +106,31 @@ def colify(
     using ``str()``.

     Keyword Arguments:
-        output: A file object to write to. Default is ``sys.stdout``
-        indent: Optionally indent all columns by some number of spaces
-        padding: Spaces between columns. Default is 2
-        width: Width of the output. Default is 80 if tty not detected
-        cols: Force number of columns. Default is to size to terminal, or
+        output (typing.IO): A file object to write to. Default is ``sys.stdout``
+        indent (int): Optionally indent all columns by some number of spaces
+        padding (int): Spaces between columns. Default is 2
+        width (int): Width of the output. Default is 80 if tty not detected
+        cols (int): Force number of columns. Default is to size to terminal, or
             single-column if no tty
-        tty: Whether to attempt to write to a tty. Default is to autodetect a
+        tty (bool): Whether to attempt to write to a tty. Default is to autodetect a
             tty. Set to False to force single-column output
-        method: Method to use to fit columns. Options are variable or uniform.
+        method (str): Method to use to fit columns. Options are variable or uniform.
             Variable-width columns are tighter, uniform columns are all the same width
             and fit less data on the screen
-        console_cols: number of columns on this console (default: autodetect)
     """
-    if output is None:
-        output = sys.stdout
+    # Get keyword arguments or set defaults
+    cols = options.pop("cols", 0)
+    output = options.pop("output", sys.stdout)
+    indent = options.pop("indent", 0)
+    padding = options.pop("padding", 2)
+    tty = options.pop("tty", None)
+    method = options.pop("method", "variable")
+    console_cols = options.pop("width", None)
+
+    if options:
+        raise TypeError(
+            "'%s' is an invalid keyword argument for this function." % next(options.iterkeys())
+        )

     # elts needs to be an array of strings so we can count the elements
     elts = [str(elt) for elt in elts]
@@ -153,11 +153,10 @@ def colify(
         cols = 1

     # Specify the number of character columns to use.
-    if console_cols is None:
+    if not console_cols:
         console_rows, console_cols = terminal_size()
-    elif not isinstance(console_cols, int):
+    elif type(console_cols) != int:
         raise ValueError("Number of columns must be an int")
-
     console_cols = max(1, console_cols - indent)

     # Choose a method. Variable-width colums vs uniform-width.
@@ -193,13 +192,7 @@ def colify(
     return (config.cols, tuple(config.widths))


-def colify_table(
-    table: List[List[Any]],
-    output: Optional[IO] = None,
-    indent: int = 0,
-    padding: int = 2,
-    console_cols: Optional[int] = None,
-):
+def colify_table(table, **options):
     """Version of ``colify()`` for data expressed in rows, (list of lists).

     Same as regular colify but:
@@ -225,38 +218,20 @@ def transpose():
         for row in table:
             yield row[i]

-    colify(
-        transpose(),
-        cols=columns,  # this is always the number of cols in the table
-        tty=True,  # don't reduce to 1 column for non-tty
-        output=output,
-        indent=indent,
-        padding=padding,
-        console_cols=console_cols,
-    )
+    if "cols" in options:
+        raise ValueError("Cannot override columsn in colify_table.")
+    options["cols"] = columns
+
+    # don't reduce to 1 column for non-tty
+    options["tty"] = True
+
+    colify(transpose(), **options)


-def colified(
-    elts: List[Any],
-    cols: int = 0,
-    output: Optional[IO] = None,
-    indent: int = 0,
-    padding: int = 2,
-    tty: Optional[bool] = None,
-    method: str = "variable",
-    console_cols: Optional[int] = None,
-):
+def colified(elts, **options):
     """Invokes the ``colify()`` function but returns the result as a string
     instead of writing it to an output string."""
     sio = io.StringIO()
-    colify(
-        elts,
-        cols=cols,
-        output=sio,
-        indent=indent,
-        padding=padding,
-        tty=tty,
-        method=method,
-        console_cols=console_cols,
-    )
+    options["output"] = sio
+    colify(elts, **options)
     return sio.getvalue()
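For reference, a minimal sketch of driving the ``**options``-style API restored
here (assumption: run from a Spack checkout so ``llnl.util.tty.colify`` imports):

    from llnl.util.tty.colify import colified, colify

    items = ["autoconf", "cmake", "gcc", "meson", "ninja", "python"]

    # colify() writes columns to a stream (stdout by default);
    # colified() returns the same layout as a string.
    colify(items, indent=2)
    print(colified(items, cols=3, padding=4))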
@@ -161,7 +161,10 @@ def _is_background(self):
     def _get_canon_echo_flags(self):
         """Get current termios canonical and echo settings."""
         cfg = termios.tcgetattr(self.stream)
-        return (bool(cfg[3] & termios.ICANON), bool(cfg[3] & termios.ECHO))
+        return (
+            bool(cfg[3] & termios.ICANON),
+            bool(cfg[3] & termios.ECHO),
+        )

     def _enable_keyboard_input(self):
         """Disable canonical input and echoing on ``self.stream``."""
@@ -77,7 +77,10 @@ def __init__(self, pid, controller_fd, timeout=1, sleep_time=1e-1, debug=False):
     def get_canon_echo_attrs(self):
         """Get echo and canon attributes of the terminal of controller_fd."""
         cfg = termios.tcgetattr(self.controller_fd)
-        return (bool(cfg[3] & termios.ICANON), bool(cfg[3] & termios.ECHO))
+        return (
+            bool(cfg[3] & termios.ICANON),
+            bool(cfg[3] & termios.ECHO),
+        )

     def horizontal_line(self, name):
         """Labled horizontal line for debugging."""
@@ -89,7 +92,11 @@ def status(self):
         if self.debug:
             canon, echo = self.get_canon_echo_attrs()
             sys.stderr.write(
-                "canon: %s, echo: %s\n" % ("on" if canon else "off", "on" if echo else "off")
+                "canon: %s, echo: %s\n"
+                % (
+                    "on" if canon else "off",
+                    "on" if echo else "off",
+                )
             )
             sys.stderr.write("input: %s\n" % self.input_on())
             sys.stderr.write("bg: %s\n" % self.background())
@@ -321,7 +321,8 @@ def _check_patch_urls(pkgs, error_cls):
                 errors.append(
                     error_cls(
                         "patch URL in package {0} must end with {1}".format(
-                            pkg_cls.name, full_index_arg
+                            pkg_cls.name,
+                            full_index_arg,
                         ),
                         [patch.url],
                     )
@@ -40,8 +40,6 @@
 import spack.relocate as relocate
 import spack.repo
 import spack.store
-import spack.traverse as traverse
 import spack.util.crypto
-import spack.util.file_cache as file_cache
 import spack.util.gpg
 import spack.util.spack_json as sjson
@@ -211,7 +209,10 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
                         break
                 else:
                     self._mirrors_for_spec[dag_hash].append(
-                        {"mirror_url": mirror_url, "spec": indexed_spec}
+                        {
+                            "mirror_url": mirror_url,
+                            "spec": indexed_spec,
+                        }
                     )
         finally:
             shutil.rmtree(tmpdir)
@@ -294,7 +295,10 @@ def update_spec(self, spec, found_list):
                 break
         else:
             current_list.append(
-                {"mirror_url": new_entry["mirror_url"], "spec": new_entry["spec"]}
+                {
+                    "mirror_url": new_entry["mirror_url"],
+                    "spec": new_entry["spec"],
+                }
             )

     def update(self, with_cooldown=False):
@@ -362,7 +366,8 @@ def update(self, with_cooldown=False):
                 # May need to fetch the index and update the local caches
                 try:
                     needs_regen = self._fetch_and_cache_index(
-                        cached_mirror_url, cache_entry=cache_entry
+                        cached_mirror_url,
+                        cache_entry=cache_entry,
                     )
                     self._last_fetch_times[cached_mirror_url] = (now, True)
                     all_methods_failed = False
@@ -554,12 +559,7 @@ class NoChecksumException(spack.error.SpackError):
     Raised if file fails checksum verification.
     """

-    def __init__(self, path, size, contents, algorithm, expected, computed):
-        super(NoChecksumException, self).__init__(
-            f"{algorithm} checksum failed for {path}",
-            f"Expected {expected} but got {computed}. "
-            f"File size = {size} bytes. Contents = {contents!r}",
-        )
+    pass


 class NewLayoutException(spack.error.SpackError):
@@ -1198,42 +1198,40 @@ def _build_tarball(
     ):
         raise NoOverwriteException(url_util.format(remote_specfile_path))

-    pkg_dir = os.path.basename(spec.prefix.rstrip(os.path.sep))
-    workdir = os.path.join(tmpdir, pkg_dir)
-
-    # TODO: We generally don't want to mutate any files, but when using relative
-    # mode, Spack unfortunately *does* mutate rpaths and links ahead of time.
-    # For now, we only make a full copy of the spec prefix when in relative mode.
-
-    if relative:
-        # tarfile is used because it preserves hardlink etc best.
-        binaries_dir = workdir
-        temp_tarfile_name = tarball_name(spec, ".tar")
-        temp_tarfile_path = os.path.join(tarfile_dir, temp_tarfile_name)
-        with closing(tarfile.open(temp_tarfile_path, "w")) as tar:
-            tar.add(name="%s" % spec.prefix, arcname=".")
-        with closing(tarfile.open(temp_tarfile_path, "r")) as tar:
-            tar.extractall(workdir)
-        os.remove(temp_tarfile_path)
-    else:
-        binaries_dir = spec.prefix
-        mkdirp(os.path.join(workdir, ".spack"))
+    # make a copy of the install directory to work with
+    workdir = os.path.join(tmpdir, os.path.basename(spec.prefix))
+    # install_tree copies hardlinks
+    # create a temporary tarfile from prefix and exract it to workdir
+    # tarfile preserves hardlinks
+    temp_tarfile_name = tarball_name(spec, ".tar")
+    temp_tarfile_path = os.path.join(tarfile_dir, temp_tarfile_name)
+    with closing(tarfile.open(temp_tarfile_path, "w")) as tar:
+        tar.add(name="%s" % spec.prefix, arcname=".")
+    with closing(tarfile.open(temp_tarfile_path, "r")) as tar:
+        tar.extractall(workdir)
+    os.remove(temp_tarfile_path)

     # create info for later relocation and create tar
     write_buildinfo_file(spec, workdir, relative)

     # optionally make the paths in the binaries relative to each other
     # in the spack install tree before creating tarball
-    try:
-        if relative:
+    if relative:
+        try:
             make_package_relative(workdir, spec, allow_root)
-        elif not allow_root:
-            ensure_package_relocatable(workdir, binaries_dir)
-    except Exception as e:
-        shutil.rmtree(workdir)
-        shutil.rmtree(tarfile_dir)
-        shutil.rmtree(tmpdir)
-        tty.die(e)
+        except Exception as e:
+            shutil.rmtree(workdir)
+            shutil.rmtree(tarfile_dir)
+            shutil.rmtree(tmpdir)
+            tty.die(e)
+    else:
+        try:
+            check_package_relocatable(workdir, spec, allow_root)
+        except Exception as e:
+            shutil.rmtree(workdir)
+            shutil.rmtree(tarfile_dir)
+            shutil.rmtree(tmpdir)
+            tty.die(e)

     # create gzip compressed tarball of the install prefix
     # On AMD Ryzen 3700X and an SSD disk, we have the following on compression speed:
@@ -1241,13 +1239,7 @@ def _build_tarball(
     # compresslevel=9 python default: llvm takes 12mins, roughly 2.1GB
     # So we follow gzip.
     with closing(tarfile.open(tarfile_path, "w:gz", compresslevel=6)) as tar:
-        tar.add(name=binaries_dir, arcname=pkg_dir)
-        if not relative:
-            # Add buildinfo file
-            buildinfo_path = buildinfo_file_name(workdir)
-            buildinfo_arcname = buildinfo_file_name(pkg_dir)
-            tar.add(name=buildinfo_path, arcname=buildinfo_arcname)
+        tar.add(name="%s" % workdir, arcname="%s" % os.path.basename(spec.prefix))

+    # remove copy of install directory
+    shutil.rmtree(workdir)
@@ -1308,48 +1300,57 @@ def _build_tarball(
     return None


-def nodes_to_be_packaged(specs, root=True, dependencies=True):
+def nodes_to_be_packaged(specs, include_root=True, include_dependencies=True):
     """Return the list of nodes to be packaged, given a list of specs.

     Args:
         specs (List[spack.spec.Spec]): list of root specs to be processed
-        root (bool): include the root of each spec in the nodes
-        dependencies (bool): include the dependencies of each
+        include_root (bool): include the root of each spec in the nodes
+        include_dependencies (bool): include the dependencies of each
             spec in the nodes
     """
-    if not root and not dependencies:
-        return []
-    elif dependencies:
-        nodes = traverse.traverse_nodes(specs, root=root, deptype="all")
-    else:
-        nodes = set(specs)
+    if not include_root and not include_dependencies:
+        return set()

-    # Limit to installed non-externals.
-    packageable = lambda n: not n.external and n.installed
+    def skip_node(current_node):
+        if current_node.external or current_node.virtual:
+            return True
+        return spack.store.db.query_one(current_node) is None

-    # Mass install check
-    with spack.store.db.read_transaction():
-        return list(filter(packageable, nodes))
+    expanded_set = set()
+    for current_spec in specs:
+        if not include_dependencies:
+            nodes = [current_spec]
+        else:
+            nodes = [
+                n
+                for n in current_spec.traverse(
+                    order="post", root=include_root, deptype=("link", "run")
+                )
+            ]
+
+        for node in nodes:
+            if not skip_node(node):
+                expanded_set.add(node)
+
+    return expanded_set


-def push(specs, push_url, include_root: bool = True, include_dependencies: bool = True, **kwargs):
+def push(specs, push_url, specs_kwargs=None, **kwargs):
     """Create a binary package for each of the specs passed as input and push them
     to a given push URL.

     Args:
         specs (List[spack.spec.Spec]): installed specs to be packaged
         push_url (str): url where to push the binary package
-        include_root (bool): include the root of each spec in the nodes
-        include_dependencies (bool): include the dependencies of each
-            spec in the nodes
+        specs_kwargs (dict): dictionary with two possible boolean keys, "include_root"
+            and "include_dependencies", which determine which part of each spec is
+            packaged and pushed to the mirror
         **kwargs: TODO

     """
-    # Be explicit about the arugment type
-    if type(include_root) != bool or type(include_dependencies) != bool:
-        raise ValueError("Expected include_root/include_dependencies to be True/False")
-
-    nodes = nodes_to_be_packaged(specs, root=include_root, dependencies=include_dependencies)
+    specs_kwargs = specs_kwargs or {"include_root": True, "include_dependencies": True}
+    nodes = nodes_to_be_packaged(specs, **specs_kwargs)

     # TODO: This seems to be an easy target for task
     # TODO: distribution using a parallel pool
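A sketch of calling the restored ``push()`` signature, where ``specs_kwargs``
selects roots and/or dependencies (hypothetical mirror URL and spec list, shown
only to illustrate the argument shapes):

    import spack.binary_distribution as bindist

    # installed_specs is assumed to be a list of installed, concrete
    # spack.spec.Spec objects; the push URL is a made-up local mirror.
    specs_kwargs = {"include_root": True, "include_dependencies": False}
    bindist.push(
        installed_specs,
        "file:///tmp/spack-mirror",
        specs_kwargs,
        force=True,
        allow_root=True,
        unsigned=True,
    )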
@@ -1566,11 +1567,16 @@ def make_package_relative(workdir, spec, allow_root):
         relocate.make_link_relative(cur_path_names, orig_path_names)


-def ensure_package_relocatable(workdir, binaries_dir):
-    """Check if package binaries are relocatable."""
+def check_package_relocatable(workdir, spec, allow_root):
+    """
+    Check if package binaries are relocatable.
+    Change links to placeholder links.
+    """
     buildinfo = read_buildinfo_file(workdir)
-    binaries = [os.path.join(binaries_dir, f) for f in buildinfo["relocate_binaries"]]
-    relocate.ensure_binaries_are_relocatable(binaries)
+    cur_path_names = list()
+    for filename in buildinfo["relocate_binaries"]:
+        cur_path_names.append(os.path.join(workdir, filename))
+    allow_root or relocate.ensure_binaries_are_relocatable(cur_path_names)


def dedupe_hardlinks_if_necessary(root, buildinfo):
@@ -1773,15 +1779,14 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum
             raise UnsignedPackageException(
                 "To install unsigned packages, use the --no-check-signature option."
             )

-    # compute the sha256 checksum of the tarball
+    # get the sha256 checksum of the tarball
     local_checksum = checksum_tarball(tarfile_path)
-    expected = remote_checksum["hash"]

     # if the checksums don't match don't install
-    if local_checksum != expected:
-        size, contents = fsys.filesummary(tarfile_path)
-        raise NoChecksumException(tarfile_path, size, contents, "sha256", expected, local_checksum)
+    if local_checksum != remote_checksum["hash"]:
+        raise NoChecksumException(
+            "Package tarball failed checksum verification.\n" "It cannot be installed."
+        )

     return tarfile_path
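For context, a minimal sketch of the sha256 comparison these checksum hunks
revert to (the real helper is ``spack.binary_distribution.checksum_tarball``;
this standalone version only illustrates the idea):

    import hashlib

    def checksum_tarball_sketch(path):
        # Stream the file in 1 MiB chunks so large tarballs stay out of memory.
        h = hashlib.sha256()
        with open(path, "rb") as f:
            for block in iter(lambda: f.read(1 << 20), b""):
                h.update(block)
        return h.hexdigest()

    # Install proceeds only when the local digest equals the mirror metadata,
    # e.g. checksum_tarball_sketch(tarfile_path) == remote_checksum["hash"].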
@@ -1839,14 +1844,12 @@ def extract_tarball(spec, download_result, allow_root=False, unsigned=False, for

     # compute the sha256 checksum of the tarball
     local_checksum = checksum_tarball(tarfile_path)
-    expected = bchecksum["hash"]

     # if the checksums don't match don't install
-    if local_checksum != expected:
-        size, contents = fsys.filesummary(tarfile_path)
+    if local_checksum != bchecksum["hash"]:
         _delete_staged_downloads(download_result)
         raise NoChecksumException(
-            tarfile_path, size, contents, "sha256", expected, local_checksum
+            "Package tarball failed checksum verification.\n" "It cannot be installed."
         )

     new_relative_prefix = str(os.path.relpath(spec.prefix, spack.store.layout.root))
@@ -1937,11 +1940,8 @@ def install_root_node(spec, allow_root, unsigned=False, force=False, sha256=None
     tarball_path = download_result["tarball_stage"].save_filename
     msg = msg.format(tarball_path, sha256)
     if not checker.check(tarball_path):
-        size, contents = fsys.filesummary(tarball_path)
         _delete_staged_downloads(download_result)
-        raise NoChecksumException(
-            tarball_path, size, contents, checker.hash_name, sha256, checker.sum
-        )
+        raise spack.binary_distribution.NoChecksumException(msg)
     tty.debug("Verified SHA256 checksum of the build cache")

     # don't print long padded paths while extracting/relocating binaries
@@ -2015,7 +2015,12 @@ def try_direct_fetch(spec, mirrors=None):
         fetched_spec = Spec.from_json(specfile_contents)
         fetched_spec._mark_concrete()

-        found_specs.append({"mirror_url": mirror.fetch_url, "spec": fetched_spec})
+        found_specs.append(
+            {
+                "mirror_url": mirror.fetch_url,
+                "spec": fetched_spec,
+            }
+        )

     return found_specs
@@ -2317,7 +2322,11 @@ def download_single_spec(concrete_spec, destination, mirror_url=None):
     local_tarball_path = os.path.join(destination, tarball_dir_name)

     files_to_fetch = [
-        {"url": [tarball_path_name], "path": local_tarball_path, "required": True},
+        {
+            "url": [tarball_path_name],
+            "path": local_tarball_path,
+            "required": True,
+        },
         {
             "url": [
                 tarball_name(concrete_spec, ".spec.json.sig"),
@@ -2438,7 +2447,12 @@ def conditional_fetch(self):
             response.headers.get("Etag", None) or response.headers.get("etag", None)
         )

-        return FetchIndexResult(etag=etag, hash=computed_hash, data=result, fresh=False)
+        return FetchIndexResult(
+            etag=etag,
+            hash=computed_hash,
+            data=result,
+            fresh=False,
+        )


 class EtagIndexFetcher:
@@ -5,7 +5,11 @@
 """Function and classes needed to bootstrap Spack itself."""

 from .config import ensure_bootstrap_configuration, is_bootstrapping
-from .core import all_core_root_specs, ensure_core_dependencies, ensure_patchelf_in_path_or_raise
+from .core import (
+    all_core_root_specs,
+    ensure_core_dependencies,
+    ensure_patchelf_in_path_or_raise,
+)
 from .environment import BootstrapEnvironment, ensure_environment_dependencies
 from .status import status_message
@@ -59,7 +59,10 @@ def _try_import_from_store(module, query_spec, query_info=None):
     # to be picked up and used, possibly depending on something in the store, first
     # allows the bootstrap version to work when an incompatible version is in
     # sys.path
-    orders = [module_paths + sys.path, sys.path + module_paths]
+    orders = [
+        module_paths + sys.path,
+        sys.path + module_paths,
+    ]
     for path in orders:
         sys.path = path
         try:
@@ -53,7 +53,12 @@
 import spack.util.url
 import spack.version

-from ._common import _executables_in_store, _python_import, _root_spec, _try_import_from_store
+from ._common import (
+    _executables_in_store,
+    _python_import,
+    _root_spec,
+    _try_import_from_store,
+)
 from .config import spack_python_interpreter, spec_for_current_python

 #: Name of the file containing metadata about the bootstrapping source
@@ -171,7 +171,7 @@ def mypy_root_spec():

 def black_root_spec():
     """Return the root spec used to bootstrap black"""
-    return _root_spec("py-black@:23.1.0")
+    return _root_spec("py-black@:22.12.0")


 def flake8_root_spec():
@@ -1016,6 +1016,7 @@ def get_cmake_prefix_path(pkg):
 def _setup_pkg_and_run(
     serialized_pkg, function, kwargs, child_pipe, input_multiprocess_fd, jsfd1, jsfd2
 ):
+
     context = kwargs.get("context", "build")

     try:
@@ -110,7 +110,11 @@ class AutotoolsBuilder(BaseBuilder):
     phases = ("autoreconf", "configure", "build", "install")

     #: Names associated with package methods in the old build-system format
-    legacy_methods = ("configure_args", "check", "installcheck")
+    legacy_methods = (
+        "configure_args",
+        "check",
+        "installcheck",
+    )

     #: Names associated with package attributes in the old build-system format
     legacy_attributes = (
@@ -31,6 +31,7 @@ def cmake_cache_option(name, boolean_value, comment=""):

 class CachedCMakeBuilder(CMakeBuilder):
+
     #: Phases of a Cached CMake package
     #: Note: the initconfig phase is used for developer builds as a final phase to stop on
     phases: Tuple[str, ...] = ("initconfig", "cmake", "build", "install")
@@ -252,7 +252,10 @@ def std_args(pkg, generator=None):

         if platform.mac_ver()[0]:
             args.extend(
-                [define("CMAKE_FIND_FRAMEWORK", "LAST"), define("CMAKE_FIND_APPBUNDLE", "LAST")]
+                [
+                    define("CMAKE_FIND_FRAMEWORK", "LAST"),
+                    define("CMAKE_FIND_APPBUNDLE", "LAST"),
+                ]
             )

         # Set up CMake rpath
@@ -38,7 +38,10 @@ class GenericBuilder(BaseBuilder):
     legacy_methods: Tuple[str, ...] = ()

     #: Names associated with package attributes in the old build-system format
-    legacy_attributes: Tuple[str, ...] = ("archive_files", "install_time_test_callbacks")
+    legacy_attributes: Tuple[str, ...] = (
+        "archive_files",
+        "install_time_test_callbacks",
+    )

     #: Callback names for post-install phase tests
     install_time_test_callbacks = []
@@ -857,7 +857,10 @@ def scalapack_libs(self):
             raise_lib_error("Cannot find a BLACS library for the given MPI.")

         int_suff = "_" + self.intel64_int_suffix
-        scalapack_libnames = ["libmkl_scalapack" + int_suff, blacs_lib + int_suff]
+        scalapack_libnames = [
+            "libmkl_scalapack" + int_suff,
+            blacs_lib + int_suff,
+        ]
         sca_libs = find_libraries(
             scalapack_libnames, root=self.component_lib_dir("mkl"), shared=("+shared" in self.spec)
         )
@@ -1158,7 +1161,9 @@ def _determine_license_type(self):
         #
         # Ideally, we just tell the installer to look around on the system.
         # Thankfully, we neither need to care nor emulate where it looks:
-        license_type = {"ACTIVATION_TYPE": "exist_lic"}
+        license_type = {
+            "ACTIVATION_TYPE": "exist_lic",
+        }

         # However (and only), if the spack-internal Intel license file has been
         # populated beyond its templated explanatory comments, proffer it to
@@ -68,7 +68,10 @@ def unpack(self, pkg, spec, prefix):

     @staticmethod
     def _generate_tree_line(name, prefix):
-        return """{{ name = "{name}", root = "{prefix}" }};""".format(name=name, prefix=prefix)
+        return """{{ name = "{name}", root = "{prefix}" }};""".format(
+            name=name,
+            prefix=prefix,
+        )

     def generate_luarocks_config(self, pkg, spec, prefix):
         spec = self.pkg.spec
@@ -37,7 +37,11 @@ class IntelOneApiPackage(Package):
         conflicts(c, msg="This package in only available for x86_64 and Linux")

     # Add variant to toggle environment modifications from vars.sh
-    variant("envmods", default=True, description="Toggles environment modifications")
+    variant(
+        "envmods",
+        default=True,
+        description="Toggles environment modifications",
+    )

     @staticmethod
     def update_description(cls):
@@ -21,7 +21,7 @@
 import spack.package_base
 import spack.spec
 import spack.store
-from spack.directives import build_system, depends_on, extends, maintainers
+from spack.directives import build_system, depends_on, extends
 from spack.error import NoHeadersError, NoLibrariesError, SpecError
 from spack.version import Version
@@ -29,7 +29,7 @@

 class PythonExtension(spack.package_base.PackageBase):
-    maintainers("adamjstewart", "pradyunsg")
+    maintainers = ["adamjstewart"]

     @property
     def import_modules(self):
@@ -113,9 +113,6 @@ def view_file_conflicts(self, view, merge_map):
         return conflicts

     def add_files_to_view(self, view, merge_map, skip_if_exists=True):
-        if not self.extendee_spec:
-            return super().add_files_to_view(view, merge_map, skip_if_exists)
-
         bin_dir = self.spec.prefix.bin
         python_prefix = self.extendee_spec.prefix
         python_is_external = self.extendee_spec.external
@@ -187,6 +184,8 @@ class PythonPackage(PythonExtension):
     #: Package name, version, and extension on PyPI
     pypi: Optional[str] = None

+    maintainers = ["adamjstewart", "pradyunsg"]
+
     # To be used in UI queries that require to know which
     # build-system class we are using
     build_system_class = "PythonPackage"
@@ -7,7 +7,7 @@

 import llnl.util.lang as lang

-from spack.directives import extends, maintainers
+from spack.directives import extends

 from .generic import GenericBuilder, Package

@@ -71,7 +71,7 @@ class RPackage(Package):

     GenericBuilder = RBuilder

-    maintainers("glennpj")
+    maintainers = ["glennpj"]

     #: This attribute is used in UI queries that need to know the build
     #: system base class
@@ -11,7 +11,7 @@

 import spack.builder
 from spack.build_environment import SPACK_NO_PARALLEL_MAKE, determine_number_of_jobs
-from spack.directives import build_system, extends, maintainers
+from spack.directives import build_system, extends
 from spack.package_base import PackageBase
 from spack.util.environment import env_flag
 from spack.util.executable import Executable, ProcessError
@@ -23,7 +23,7 @@ class RacketPackage(PackageBase):
     """

     #: Package name, version, and extension on PyPI
-    maintainers("elfprince13")
+    maintainers = ["elfprince13"]
     # To be used in UI queries that require to know which
     # build-system class we are using
     build_system_class = "RacketPackage"
@@ -7,7 +7,7 @@

 import spack.builder
 import spack.package_base
-from spack.directives import build_system, extends, maintainers
+from spack.directives import build_system, extends

 from ._checks import BaseBuilder

@@ -15,7 +15,7 @@
 class RubyPackage(spack.package_base.PackageBase):
     """Specialized class for building Ruby gems."""

-    maintainers("Kerilk")
+    maintainers = ["Kerilk"]

     #: This attribute is used in UI queries that need to know the build
     #: system base class
@@ -61,7 +61,10 @@ def import_modules(self):
             list: list of strings of module names
         """
         modules = []
-        root = os.path.join(self.prefix, self.spec["python"].package.platlib)
+        root = os.path.join(
+            self.prefix,
+            self.spec["python"].package.platlib,
+        )

         # Some Python libraries are packages: collections of modules
         # distributed in directories containing __init__.py files
@@ -42,7 +42,9 @@
 from spack.reporters import CDash, CDashConfiguration
 from spack.reporters.cdash import build_stamp as cdash_build_stamp

-JOB_RETRY_CONDITIONS = ["always"]
+JOB_RETRY_CONDITIONS = [
+    "always",
+]

 TEMP_STORAGE_MIRROR_NAME = "ci_temporary_mirror"
 SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
@@ -127,7 +129,10 @@ def _remove_reserved_tags(tags):


 def _get_spec_string(spec):
-    format_elements = ["{name}{@version}", "{%compiler}"]
+    format_elements = [
+        "{name}{@version}",
+        "{%compiler}",
+    ]

     if spec.architecture:
         format_elements.append(" {arch=architecture}")
@@ -323,7 +328,12 @@ def _compute_spec_deps(spec_list, check_index_only=False, mirrors_to_check=None)
     dependencies = []

     def append_dep(s, d):
-        dependencies.append({"spec": s, "depends": d})
+        dependencies.append(
+            {
+                "spec": s,
+                "depends": d,
+            }
+        )

     for spec in spec_list:
         for s in spec.traverse(deptype=all):
@@ -336,7 +346,10 @@ def append_dep(s, d):
             )

             skey = _spec_deps_key(s)
-            spec_labels[skey] = {"spec": s, "needs_rebuild": not up_to_date_mirrors}
+            spec_labels[skey] = {
+                "spec": s,
+                "needs_rebuild": not up_to_date_mirrors,
+            }

             for d in s.dependencies(deptype=all):
                 dkey = _spec_deps_key(d)
@@ -355,7 +368,10 @@ def append_dep(s, d):
             }
         )

-    deps_json_obj = {"specs": specs, "dependencies": dependencies}
+    deps_json_obj = {
+        "specs": specs,
+        "dependencies": dependencies,
+    }

     return deps_json_obj
@@ -394,7 +410,14 @@ def _copy_attributes(attrs_list, src_dict, dest_dict):

 def _find_matching_config(spec, gitlab_ci):
     runner_attributes = {}
-    overridable_attrs = ["image", "tags", "variables", "before_script", "script", "after_script"]
+    overridable_attrs = [
+        "image",
+        "tags",
+        "variables",
+        "before_script",
+        "script",
+        "after_script",
+    ]

     _copy_attributes(overridable_attrs, gitlab_ci, runner_attributes)
@@ -662,14 +685,28 @@ def generate_gitlab_ci_yaml(
             except AttributeError:
                 phase_name = phase
                 strip_compilers = False
-            phases.append({"name": phase_name, "strip-compilers": strip_compilers})
+            phases.append(
+                {
+                    "name": phase_name,
+                    "strip-compilers": strip_compilers,
+                }
+            )

             for bs in env.spec_lists[phase_name]:
                 bootstrap_specs.append(
-                    {"spec": bs, "phase-name": phase_name, "strip-compilers": strip_compilers}
+                    {
+                        "spec": bs,
+                        "phase-name": phase_name,
+                        "strip-compilers": strip_compilers,
+                    }
                 )

-        phases.append({"name": "specs", "strip-compilers": False})
+        phases.append(
+            {
+                "name": "specs",
+                "strip-compilers": False,
+            }
+        )

     # If a remote mirror override (alternate buildcache destination) was
     # specified, add it here in case it has already built hashes we might
@@ -1072,9 +1109,15 @@ def generate_gitlab_ci_yaml(
                     "variables": variables,
                     "script": job_script,
                     "tags": tags,
-                    "artifacts": {"paths": artifact_paths, "when": "always"},
+                    "artifacts": {
+                        "paths": artifact_paths,
+                        "when": "always",
+                    },
                     "needs": sorted(job_dependencies, key=lambda d: d["job"]),
-                    "retry": {"max": 2, "when": JOB_RETRY_CONDITIONS},
+                    "retry": {
+                        "max": 2,
+                        "when": JOB_RETRY_CONDITIONS,
+                    },
                     "interruptible": True,
                 }
@@ -1092,7 +1135,10 @@ def generate_gitlab_ci_yaml(
                 if image_name:
                     job_object["image"] = image_name
                     if image_entry is not None:
-                        job_object["image"] = {"name": image_name, "entrypoint": image_entry}
+                        job_object["image"] = {
+                            "name": image_name,
+                            "entrypoint": image_entry,
+                        }

                 output_object[job_name] = job_object
                 job_id += 1
@@ -1135,7 +1181,11 @@ def generate_gitlab_ci_yaml(

     service_job_retries = {
         "max": 2,
-        "when": ["runner_system_failure", "stuck_or_timeout_failure", "script_failure"],
+        "when": [
+            "runner_system_failure",
+            "stuck_or_timeout_failure",
+            "script_failure",
+        ],
     }

     if job_id > 0:
@@ -1307,7 +1357,9 @@ def generate_gitlab_ci_yaml(
         _copy_attributes(default_attrs, service_job_config, noop_job)

         if "script" not in noop_job:
-            noop_job["script"] = ['echo "All specs already up to date, nothing to rebuild."']
+            noop_job["script"] = [
+                'echo "All specs already up to date, nothing to rebuild."',
+            ]

         noop_job["retry"] = service_job_retries
@@ -1437,8 +1489,9 @@ def _push_mirror_contents(env, specfile_path, sign_binaries, mirror_url):
     hashes = env.all_hashes() if env else None
     matches = spack.store.specfile_matches(specfile_path, hashes=hashes)
     push_url = spack.mirror.Mirror.from_url(mirror_url).push_url
+    spec_kwargs = {"include_root": True, "include_dependencies": False}
     kwargs = {"force": True, "allow_root": True, "unsigned": unsigned}
-    bindist.push(matches, push_url, include_root=True, include_dependencies=False, **kwargs)
+    bindist.push(matches, push_url, spec_kwargs, **kwargs)


 def push_mirror_contents(env, specfile_path, mirror_url, sign_binaries):
@@ -1501,7 +1554,10 @@ def copy_files_to_artifacts(src, artifacts_dir):
     try:
         fs.copy(src, artifacts_dir)
     except Exception as err:
-        tty.warn(f"Unable to copy files ({src}) to artifacts {artifacts_dir} due to: {err}")
+        msg = ("Unable to copy files ({0}) to artifacts {1} due to " "exception: {2}").format(
+            src, artifacts_dir, str(err)
+        )
+        tty.error(msg)


 def copy_stage_logs_to_artifacts(job_spec, job_log_dir):
@@ -1564,7 +1620,9 @@ def download_and_extract_artifacts(url, work_dir):
|
||||
"""
|
||||
tty.msg("Fetching artifacts from: {0}\n".format(url))
|
||||
|
||||
headers = {"Content-Type": "application/zip"}
|
||||
headers = {
|
||||
"Content-Type": "application/zip",
|
||||
}
|
||||
|
||||
token = os.environ.get("GITLAB_PRIVATE_TOKEN", None)
|
||||
if token:
|
||||
@@ -2023,7 +2081,10 @@ def write_broken_spec(url, pkg_name, stack_name, job_url, pipeline_url, spec_dic
|
||||
with open(file_path, "w") as fd:
|
||||
fd.write(syaml.dump(broken_spec_details))
|
||||
web_util.push_to_url(
|
||||
file_path, url, keep_original=False, extra_args={"ContentType": "text/plain"}
|
||||
file_path,
|
||||
url,
|
||||
keep_original=False,
|
||||
extra_args={"ContentType": "text/plain"},
|
||||
)
|
||||
except Exception as err:
|
||||
# If there is an S3 error (e.g., access denied or connection
|
||||
@@ -2101,7 +2162,14 @@ def run_standalone_tests(**kwargs):
|
||||
tty.error("Reproduction directory is required for stand-alone tests")
|
||||
return
|
||||
|
||||
test_args = ["spack", "--color=always", "--backtrace", "--verbose", "test", "run"]
|
||||
test_args = [
|
||||
"spack",
|
||||
"--color=always",
|
||||
"--backtrace",
|
||||
"--verbose",
|
||||
"test",
|
||||
"run",
|
||||
]
|
||||
if fail_fast:
|
||||
test_args.append("--fail-fast")
|
||||
|
||||

@@ -2251,9 +2319,19 @@ def populate_buildgroup(self, job_names):

        opener = build_opener(HTTPHandler)

        parent_group_id = self.create_buildgroup(opener, headers, url, self.build_group, "Daily")
        parent_group_id = self.create_buildgroup(
            opener,
            headers,
            url,
            self.build_group,
            "Daily",
        )
        group_id = self.create_buildgroup(
            opener, headers, url, "Latest {0}".format(self.build_group), "Latest"
            opener,
            headers,
            url,
            "Latest {0}".format(self.build_group),
            "Latest",
        )

        if not parent_group_id or not group_id:

@@ -2263,9 +2341,13 @@ def populate_buildgroup(self, job_names):

        data = {
            "dynamiclist": [
                {"match": name, "parentgroupid": parent_group_id, "site": self.site}
                {
                    "match": name,
                    "parentgroupid": parent_group_id,
                    "site": self.site,
                }
                for name in job_names
            ]
            ],
        }

        enc_data = json.dumps(data).encode("utf-8")

@@ -43,6 +43,7 @@ def matches(obj, proto):
        return all((key in obj and matches(obj[key], val)) for key, val in proto.items())

    if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):

        if not (isinstance(proto, collections.abc.Sequence) and not isinstance(proto, str)):
            return False

@@ -161,7 +161,9 @@ class _UnquotedFlags(object):
    """

    flags_arg_pattern = re.compile(
        r'^({0})=([^\'"].*)$'.format("|".join(spack.spec.FlagMap.valid_compiler_flags()))
        r'^({0})=([^\'"].*)$'.format(
            "|".join(spack.spec.FlagMap.valid_compiler_flags()),
        )
    )

    def __init__(self, all_unquoted_flag_pairs: List[Tuple[Match[str], str]]):

@@ -225,6 +227,7 @@ def parse_specs(args, **kwargs):
        return specs

    except spack.error.SpecError as e:

        msg = e.message
        if e.long_message:
            msg += e.long_message

@@ -53,6 +53,7 @@ def packages(parser, args):


def packages_https(parser, args):

    # Since packages takes a long time, --all is required without name
    if not args.check_all and not args.name:
        tty.die("Please specify one or more packages to audit, or --all.")

@@ -5,7 +5,7 @@
import spack.cmd.common.env_utility as env_utility

description = (
    "run a command in a spec's install environment, or dump its environment to screen or file"
    "run a command in a spec's install environment, " "or dump its environment to screen or file"
)
section = "build"
level = "long"

@@ -103,7 +103,9 @@ def setup_parser(subparser):
        help="Regenerate buildcache index after building package(s)",
    )
    create.add_argument(
        "--spec-file", default=None, help="Create buildcache entry for spec from json or yaml file"
        "--spec-file",
        default=None,
        help="Create buildcache entry for spec from json or yaml file",
    )
    create.add_argument(
        "--only",

@@ -400,7 +402,7 @@ def _matching_specs(specs, spec_file):
        return spack.store.find(constraints, hashes=hashes)

    if env:
        return [concrete for _, concrete in env.concretized_specs()]
        return [env.specs_by_hash[h] for h in env.concretized_order]

    tty.die(
        "build cache file creation requires at least one"

@@ -459,6 +461,10 @@ def create_fn(args):

    msg = "Pushing binary packages to {0}/build_cache".format(url)
    tty.msg(msg)
    specs_kwargs = {
        "include_root": "package" in args.things_to_install,
        "include_dependencies": "dependencies" in args.things_to_install,
    }
    kwargs = {
        "key": args.key,
        "force": args.force,

@@ -467,13 +473,7 @@ def create_fn(args):
        "allow_root": args.allow_root,
        "regenerate_index": args.rebuild_index,
    }
    bindist.push(
        matches,
        url,
        include_root="package" in args.things_to_install,
        include_dependencies="dependencies" in args.things_to_install,
        **kwargs,
    )
    bindist.push(matches, url, specs_kwargs, **kwargs)


def install_fn(args):

@@ -20,7 +20,9 @@ def setup_parser(subparser):
        help="name of the list to remove specs from",
    )
    subparser.add_argument(
        "--match-spec", dest="match_spec", help="if name is ambiguous, supply a spec to match"
        "--match-spec",
        dest="match_spec",
        help="if name is ambiguous, supply a spec to match",
    )
    subparser.add_argument(
        "-a",

@@ -530,28 +530,39 @@ def ci_rebuild(args):
    if not verify_binaries:
        install_args.append("--no-check-signature")

    if cdash_handler:
        # Add additional arguments to `spack install` for CDash reporting.
        install_args.extend(cdash_handler.args())

    slash_hash = "/{}".format(job_spec.dag_hash())

    # Arguments when installing dependencies from cache
    deps_install_args = install_args

    # Arguments when installing the root from sources
    root_install_args = install_args + [
        "--keep-stage",
        "--only=package",
        "--use-buildcache=package:never,dependencies:only",
        slash_hash,
    ]
    if cdash_handler:
        # Add additional arguments to `spack install` for CDash reporting.
        root_install_args.extend(cdash_handler.args())
    root_install_args.append(slash_hash)

    # ["x", "y"] -> "'x' 'y'"
    args_to_string = lambda args: " ".join("'{}'".format(arg) for arg in args)

    commands = [
        # apparently there's a race when spack bootstraps? do it up front once
        [SPACK_COMMAND, "-e", env.path, "bootstrap", "now"],
        [
            SPACK_COMMAND,
            "-e",
            env.path,
            "bootstrap",
            "now",
        ],
        [
            SPACK_COMMAND,
            "-e",
            env.path,
            "config",
            "add",
            "config:db_lock_timeout:120",  # 2 minutes for processes to fight for a db lock
        ],
        [
            SPACK_COMMAND,
            "-e",

@@ -13,7 +13,11 @@

import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.argparsewriter import ArgparseCompletionWriter, ArgparseRstWriter, ArgparseWriter
from llnl.util.argparsewriter import (
    ArgparseCompletionWriter,
    ArgparseRstWriter,
    ArgparseWriter,
)
from llnl.util.tty.colify import colify

import spack.cmd

@@ -38,7 +42,7 @@
        "format": "bash",
        "header": os.path.join(spack.paths.share_path, "bash", "spack-completion.in"),
        "update": os.path.join(spack.paths.share_path, "spack-completion.bash"),
    }
    },
}


@@ -12,11 +12,7 @@
import spack.build_environment as build_environment
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.error
import spack.paths
import spack.spec
import spack.store
from spack import traverse
from spack.util.environment import dump_environment, pickle_environment


@@ -42,41 +38,6 @@ def setup_parser(subparser):
    )


class AreDepsInstalledVisitor:
    def __init__(self, context="build"):
        if context not in ("build", "test"):
            raise ValueError("context can only be build or test")

        if context == "build":
            self.direct_deps = ("build", "link", "run")
        else:
            self.direct_deps = ("build", "test", "link", "run")

        self.has_uninstalled_deps = False

    def accept(self, item):
        # The root may be installed or uninstalled.
        if item.depth == 0:
            return True

        # Early exit after we've seen an uninstalled dep.
        if self.has_uninstalled_deps:
            return False

        spec = item.edge.spec
        if not spec.external and not spec.installed:
            self.has_uninstalled_deps = True
            return False

        return True

    def neighbors(self, item):
        # Direct deps: follow build & test edges.
        # Transitive deps: follow link / run.
        deptypes = self.direct_deps if item.depth == 0 else ("link", "run")
        return item.edge.spec.edges_to_dependencies(deptype=deptypes)


def emulate_env_utility(cmd_name, context, args):
    if not args.spec:
        tty.die("spack %s requires a spec." % cmd_name)

@@ -104,27 +65,6 @@ def emulate_env_utility(cmd_name, context, args):

    spec = spack.cmd.matching_spec_from_env(spec)

    # Require that dependencies are installed.
    visitor = AreDepsInstalledVisitor(context=context)

    # Mass install check needs read transaction.
    with spack.store.db.read_transaction():
        traverse.traverse_breadth_first_with_visitor([spec], traverse.CoverNodesVisitor(visitor))

    if visitor.has_uninstalled_deps:
        raise spack.error.SpackError(
            f"Not all dependencies of {spec.name} are installed. "
            f"Cannot setup {context} environment:",
            spec.tree(
                status_fn=spack.spec.Spec.install_status,
                hashlen=7,
                hashes=True,
                # This shows more than necessary, but we cannot dynamically change deptypes
                # in Spec.tree(...).
                deptypes="all" if context == "build" else ("build", "test", "link", "run"),
            ),
        )

    build_environment.setup_package(spec.package, args.dirty, context)

    if args.dump:

@@ -408,7 +408,13 @@ def config_prefer_upstream(args):
    pkgs = {}
    for spec in pref_specs:
        # Collect all the upstream compilers and versions for this package.
        pkg = pkgs.get(spec.name, {"version": [], "compiler": []})
        pkg = pkgs.get(
            spec.name,
            {
                "version": [],
                "compiler": [],
            },
        )
        pkgs[spec.name] = pkg

        # We have no existing variant if this is our first added version.

@@ -16,10 +16,19 @@
import spack.stage
import spack.util.web
from spack.spec import Spec
from spack.url import UndetectableNameError, UndetectableVersionError, parse_name, parse_version
from spack.url import (
    UndetectableNameError,
    UndetectableVersionError,
    parse_name,
    parse_version,
)
from spack.util.editor import editor
from spack.util.executable import ProcessError, which
from spack.util.naming import mod_to_class, simplify_name, valid_fully_qualified_module_name
from spack.util.naming import (
    mod_to_class,
    simplify_name,
    valid_fully_qualified_module_name,
)

description = "create a new package file"
section = "packaging"

@@ -96,5 +96,8 @@ def report(args):


def debug(parser, args):
    action = {"create-db-tarball": create_db_tarball, "report": report}
    action = {
        "create-db-tarball": create_db_tarball,
        "report": report,
    }
    action[args.debug_command](args)

@@ -33,7 +33,12 @@
level = "long"

# Arguments for display_specs when we find ambiguity
display_args = {"long": True, "show_flags": True, "variants": True, "indent": 4}
display_args = {
    "long": True,
    "show_flags": True,
    "variants": True,
    "indent": 4,
}


def setup_parser(sp):

@@ -80,12 +80,22 @@ def compare_specs(a, b, to_string=False, color=None):
    # specs and to descend into dependency hashes so we include all facts.
    a_facts = set(
        shift(func)
        for func in setup.spec_clauses(a, body=True, expand_hashes=True, concrete_build_deps=True)
        for func in setup.spec_clauses(
            a,
            body=True,
            expand_hashes=True,
            concrete_build_deps=True,
        )
        if func.name == "attr"
    )
    b_facts = set(
        shift(func)
        for func in setup.spec_clauses(b, body=True, expand_hashes=True, concrete_build_deps=True)
        for func in setup.spec_clauses(
            b,
            body=True,
            expand_hashes=True,
            concrete_build_deps=True,
        )
        if func.name == "attr"
    )

@@ -148,7 +148,8 @@ def env_activate(args):

    if not args.shell:
        spack.cmd.common.shell_init_instructions(
            "spack env activate", " eval `spack env activate {sh_arg} [...]`"
            "spack env activate",
            " eval `spack env activate {sh_arg} [...]`",
        )
        return 1

@@ -237,7 +238,8 @@ def env_deactivate_setup_parser(subparser):
def env_deactivate(args):
    if not args.shell:
        spack.cmd.common.shell_init_instructions(
            "spack env deactivate", " eval `spack env deactivate {sh_arg}`"
            "spack env deactivate",
            " eval `spack env deactivate {sh_arg}`",
        )
        return 1

@@ -38,7 +38,11 @@ def setup_parser(subparser):
        default=False,
        help="packages with detected externals won't be built with Spack",
    )
    find_parser.add_argument("--exclude", action="append", help="packages to exclude from search")
    find_parser.add_argument(
        "--exclude",
        action="append",
        help="packages to exclude from search",
    )
    find_parser.add_argument(
        "-p",
        "--path",

@@ -183,6 +187,7 @@ def external_read_cray_manifest(args):
def _collect_and_consume_cray_manifest_files(
    manifest_file=None, manifest_directory=None, dry_run=False, fail_on_error=False
):

    manifest_files = []
    if manifest_file:
        manifest_files.append(manifest_file)

@@ -25,7 +25,10 @@ def setup_parser(subparser):
        help="fetch only missing (not yet installed) dependencies",
    )
    subparser.add_argument(
        "-D", "--dependencies", action="store_true", help="also fetch all dependencies"
        "-D",
        "--dependencies",
        action="store_true",
        help="also fetch all dependencies",
    )
    arguments.add_common_arguments(subparser, ["specs"])
    subparser.epilog = (

@@ -9,7 +9,13 @@
import spack.config
import spack.environment as ev
import spack.store
from spack.graph import DAGWithDependencyTypes, SimpleDAG, graph_ascii, graph_dot, static_graph_dot
from spack.graph import (
    DAGWithDependencyTypes,
    SimpleDAG,
    graph_ascii,
    graph_dot,
    static_graph_dot,
)

description = "generate graphs of package dependency relationships"
section = "basic"

@@ -87,7 +87,9 @@
"""


guides = {"spec": spec_guide}
guides = {
    "spec": spec_guide,
}


def setup_parser(subparser):

@@ -496,7 +496,9 @@ def reporter_factory(specs):
        return None

    context_manager = spack.report.build_context_manager(
        reporter=args.reporter(), filename=report_filename(args, specs=specs), specs=specs
        reporter=args.reporter(),
        filename=report_filename(args, specs=specs),
        specs=specs,
    )
    return context_manager

@@ -58,7 +58,10 @@

#: licensed files that can have LGPL language in them
#: so far, just this command -- so it can find LGPL things elsewhere
lgpl_exceptions = [r"lib/spack/spack/cmd/license.py", r"lib/spack/spack/test/cmd/license.py"]
lgpl_exceptions = [
    r"lib/spack/spack/cmd/license.py",
    r"lib/spack/spack/test/cmd/license.py",
]


def _all_spack_files(root=spack.paths.prefix):

@@ -126,6 +129,7 @@ def error_messages(self):


def _check_license(lines, path):

    found = []

    for line in lines:

@@ -98,7 +98,8 @@ def load(parser, args):
    if not args.shell:
        specs_str = " ".join(args.constraint) or "SPECS"
        spack.cmd.common.shell_init_instructions(
            "spack load", " eval `spack load {sh_arg} %s`" % specs_str
            "spack load",
            " eval `spack load {sh_arg} %s`" % specs_str,
        )
        return 1


@@ -27,7 +27,12 @@
"""

# Arguments for display_specs when we find ambiguity
display_args = {"long": True, "show_flags": False, "variants": False, "indent": 4}
display_args = {
    "long": True,
    "show_flags": False,
    "variants": False,
    "indent": 4,
}


def setup_parser(subparser):

@@ -445,7 +445,9 @@ def mirror_create(args):

    mirror_specs = concrete_specs_from_user(args)
    create_mirror_for_individual_specs(
        mirror_specs, path=path, skip_unstable_versions=args.skip_unstable_versions
        mirror_specs,
        path=path,
        skip_unstable_versions=args.skip_unstable_versions,
    )


@@ -465,7 +467,9 @@ def create_mirror_for_all_specs(path, skip_unstable_versions, selection_fn):
def create_mirror_for_all_specs_inside_environment(path, skip_unstable_versions, selection_fn):
    mirror_specs = concrete_specs_from_environment(selection_fn=selection_fn)
    create_mirror_for_individual_specs(
        mirror_specs, path=path, skip_unstable_versions=skip_unstable_versions
        mirror_specs,
        path=path,
        skip_unstable_versions=skip_unstable_versions,
    )


@@ -180,7 +180,10 @@ def loads(module_type, specs, args, out=None):
        for spec in specs
    )

    module_commands = {"tcl": "module load ", "lmod": "module load "}
    module_commands = {
        "tcl": "module load ",
        "lmod": "module load ",
    }

    d = {"command": "" if not args.shell else module_commands[module_type], "prefix": args.prefix}

@@ -365,14 +368,18 @@ def refresh(module_type, specs, args):


def modules_cmd(parser, args, module_type, callbacks=callbacks):

    # Qualifiers to be used when querying the db for specs
    constraint_qualifiers = {"refresh": {"installed": True, "known": True}}
    constraint_qualifiers = {
        "refresh": {"installed": True, "known": True},
    }
    query_args = constraint_qualifiers.get(args.subparser_name, {})

    # Get the specs that match the query from the DB
    specs = args.specs(**query_args)

    try:

        callbacks[args.subparser_name](module_type, specs, args)

    except MultipleSpecsMatch:

@@ -182,7 +182,11 @@ def solve(parser, args):
    # set up solver parameters
    # Note: reuse and other concretizer prefs are passed as configuration
    result = solver.solve(
        specs, out=output, timers=args.timers, stats=args.stats, setup_only=setup_only
        specs,
        out=output,
        timers=args.timers,
        stats=args.stats,
        setup_only=setup_only,
    )
    if not setup_only:
        _process_result(result, show, required_format, kwargs)

@@ -110,7 +110,7 @@ def spec(parser, args):
    else:
        tty.die("spack spec requires at least one spec or an active environment")

    for input, output in specs:
    for (input, output) in specs:
        # With -y, just print YAML to output.
        if args.format:
            if args.format == "yaml":

@@ -30,13 +30,20 @@ def grouper(iterable, n, fillvalue=None):


#: List of directories to exclude from checks -- relative to spack root
exclude_directories = [os.path.relpath(spack.paths.external_path, spack.paths.prefix)]
exclude_directories = [
    os.path.relpath(spack.paths.external_path, spack.paths.prefix),
]

#: Order in which tools should be run. flake8 is last so that it can
#: double-check the results of other tools (if, e.g., --fix was provided)
#: The list maps an executable name to a method to ensure the tool is
#: bootstrapped or present in the environment.
tool_names = ["isort", "black", "flake8", "mypy"]
tool_names = [
    "isort",
    "black",
    "flake8",
    "mypy",
]

#: tools we run in spack style
tools = {}

@@ -45,7 +52,7 @@ def grouper(iterable, n, fillvalue=None):
mypy_ignores = [
    # same as `disable_error_code = "annotation-unchecked"` in pyproject.toml, which
    # doesn't exist in mypy 0.971 for Python 3.6
    "[annotation-unchecked]"
    "[annotation-unchecked]",
]


@@ -143,7 +150,10 @@ def setup_parser(subparser):
        help="branch to compare against to determine changed files (default: develop)",
    )
    subparser.add_argument(
        "-a", "--all", action="store_true", help="check all files, not just changed files"
        "-a",
        "--all",
        action="store_true",
        help="check all files, not just changed files",
    )
    subparser.add_argument(
        "-r",

@@ -168,7 +178,10 @@ def setup_parser(subparser):
        help="format automatically if possible (e.g., with isort, black)",
    )
    subparser.add_argument(
        "--root", action="store", default=None, help="style check a different spack instance"
        "--root",
        action="store",
        default=None,
        help="style check a different spack instance",
    )

    tool_group = subparser.add_mutually_exclusive_group()

@@ -198,7 +211,6 @@ def rewrite_and_print_output(
    output, args, re_obj=re.compile(r"^(.+):([0-9]+):"), replacement=r"{0}:{1}:"
):
    """rewrite ouput with <file>:<line>: format to respect path args"""

    # print results relative to current working directory
    def translate(match):
        return replacement.format(cwd_relative(match.group(1), args), *list(match.groups()[1:]))

@@ -269,10 +281,24 @@ def run_mypy(mypy_cmd, file_list, args):
        os.path.join(spack.paths.prefix, "pyproject.toml"),
        "--show-error-codes",
    ]
    mypy_arg_sets = [common_mypy_args + ["--package", "spack", "--package", "llnl"]]
    mypy_arg_sets = [
        common_mypy_args
        + [
            "--package",
            "spack",
            "--package",
            "llnl",
        ]
    ]
    if "SPACK_MYPY_CHECK_PACKAGES" in os.environ:
        mypy_arg_sets.append(
            common_mypy_args + ["--package", "packages", "--disable-error-code", "no-redef"]
            common_mypy_args
            + [
                "--package",
                "packages",
                "--disable-error-code",
                "no-redef",
            ]
        )

    returncode = 0

@@ -33,7 +33,9 @@ def setup_parser(subparser):

    # Run
    run_parser = sp.add_parser(
        "run", description=test_run.__doc__, help=spack.cmd.first_line(test_run.__doc__)
        "run",
        description=test_run.__doc__,
        help=spack.cmd.first_line(test_run.__doc__),
    )

    alias_help_msg = "Provide an alias for this test-suite"

@@ -78,7 +80,9 @@ def setup_parser(subparser):

    # List
    list_parser = sp.add_parser(
        "list", description=test_list.__doc__, help=spack.cmd.first_line(test_list.__doc__)
        "list",
        description=test_list.__doc__,
        help=spack.cmd.first_line(test_list.__doc__),
    )
    list_parser.add_argument(
        "-a",

@@ -92,7 +96,9 @@ def setup_parser(subparser):

    # Find
    find_parser = sp.add_parser(
        "find", description=test_find.__doc__, help=spack.cmd.first_line(test_find.__doc__)
        "find",
        description=test_find.__doc__,
        help=spack.cmd.first_line(test_find.__doc__),
    )
    find_parser.add_argument(
        "filter",

@@ -102,7 +108,9 @@ def setup_parser(subparser):

    # Status
    status_parser = sp.add_parser(
        "status", description=test_status.__doc__, help=spack.cmd.first_line(test_status.__doc__)
        "status",
        description=test_status.__doc__,
        help=spack.cmd.first_line(test_status.__doc__),
    )
    status_parser.add_argument(
        "names", nargs=argparse.REMAINDER, help="Test suites for which to print status"

@@ -139,7 +147,9 @@ def setup_parser(subparser):

    # Remove
    remove_parser = sp.add_parser(
        "remove", description=test_remove.__doc__, help=spack.cmd.first_line(test_remove.__doc__)
        "remove",
        description=test_remove.__doc__,
        help=spack.cmd.first_line(test_remove.__doc__),
    )
    arguments.add_common_arguments(remove_parser, ["yes_to_all"])
    remove_parser.add_argument(

@@ -179,7 +189,11 @@ def test_run(args):
    specs = spack.cmd.parse_specs(args.specs) if args.specs else [None]
    specs_to_test = []
    for spec in specs:
        matching = spack.store.db.query_local(spec, hashes=hashes, explicit=explicit)
        matching = spack.store.db.query_local(
            spec,
            hashes=hashes,
            explicit=explicit,
        )
        if spec and not matching:
            tty.warn("No {0}installed packages match spec {1}".format(explicit_str, spec))
            """

@@ -214,7 +228,14 @@ def test_run(args):


def report_filename(args, test_suite):
    return os.path.abspath(args.log_file or "test-{}".format(test_suite.name))
    if args.log_file:
        if os.path.isabs(args.log_file):
            return args.log_file
        else:
            log_dir = os.getcwd()
            return os.path.join(log_dir, args.log_file)
    else:
        return os.path.join(os.getcwd(), "test-%s" % test_suite.name)


def create_reporter(args, specs_to_test, test_suite):

@@ -5,7 +5,7 @@
import spack.cmd.common.env_utility as env_utility

description = (
    "run a command in a spec's test environment, or dump its environment to screen or file"
    "run a command in a spec's test environment, " "or dump its environment to screen or file"
)
section = "admin"
level = "long"

@@ -31,7 +31,12 @@
"""

# Arguments for display_specs when we find ambiguity
display_args = {"long": True, "show_flags": False, "variants": False, "indent": 4}
display_args = {
    "long": True,
    "show_flags": False,
    "variants": False,
    "indent": 4,
}


def setup_parser(subparser):

@@ -128,7 +133,7 @@ def find_matching_specs(env, specs, allow_multiple_matches=False, force=False, o
    return specs_from_cli


def installed_runtime_dependents(specs, env):
def installed_dependents(specs, env):
    """Map each spec to a list of its installed dependents.

    Args:

@@ -155,10 +160,10 @@ def installed_runtime_dependents(specs, env):

    for spec in specs:
        for dpt in traverse.traverse_nodes(
            spec.dependents(deptype=("link", "run")),
            spec.dependents(deptype="all"),
            direction="parents",
            visited=visited,
            deptype=("link", "run"),
            deptype="all",
            root=True,
            key=lambda s: s.dag_hash(),
        ):

@@ -231,7 +236,12 @@ def do_uninstall(specs, force=False):
    hashes_to_remove = set(s.dag_hash() for s in specs)

    for s in traverse.traverse_nodes(
        specs, order="topo", direction="children", root=True, cover="nodes", deptype="all"
        specs,
        order="topo",
        direction="children",
        root=True,
        cover="nodes",
        deptype="all",
    ):
        if s.dag_hash() in hashes_to_remove:
            spack.package_base.PackageBase.uninstall_by_spec(s, force=force)

@@ -255,7 +265,7 @@ def get_uninstall_list(args, specs, env):
    # args.all takes care of the case where '-a' is given in the cli
    base_uninstall_specs = set(find_matching_specs(env, specs, args.all, args.force))

    active_dpts, outside_dpts = installed_runtime_dependents(base_uninstall_specs, env)
    active_dpts, outside_dpts = installed_dependents(base_uninstall_specs, env)
    # It will be useful to track the unified set of specs with dependents, as
    # well as to separately track specs in the current env with dependents
    spec_to_dpts = {}

@@ -77,7 +77,8 @@ def unload(parser, args):
        specs_str = " ".join(args.specs) or "SPECS"

        spack.cmd.common.shell_init_instructions(
            "spack unload", " eval `spack unload {sh_arg}` %s" % specs_str
            "spack unload",
            " eval `spack unload {sh_arg}` %s" % specs_str,
        )
        return 1


@@ -106,7 +106,12 @@ def setup_parser(subparser):


def url(parser, args):
    action = {"parse": url_parse, "list": url_list, "summary": url_summary, "stats": url_stats}
    action = {
        "parse": url_parse,
        "list": url_list,
        "summary": url_summary,
        "stats": url_stats,
    }

    action[args.subcommand](args)

@@ -619,9 +619,11 @@ def _default(search_paths):
    command_arguments = []
    files_to_be_tested = fs.files_in(*search_paths)
    for compiler_name in spack.compilers.supported_compilers():

        compiler_cls = class_for_compiler_name(compiler_name)

        for language in ("cc", "cxx", "f77", "fc"):

            # Select only the files matching a regexp
            for (file, full_path), regexp in itertools.product(
                files_to_be_tested, compiler_cls.search_regexps(language)

@@ -36,89 +36,36 @@ def extract_version_from_output(cls, output):
            ver = match.group(match.lastindex)
        return ver

    # C++ flags based on CMake Modules/Compiler/AppleClang-CXX.cmake

    @property
    def cxx11_flag(self):
        # Adapted from CMake's AppleClang-CXX rules
        # Spack's AppleClang detection only valid from Xcode >= 4.6
        if self.real_version < spack.version.ver("4.0"):
        if self.real_version < spack.version.ver("4.0.0"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C++11 standard", "cxx11_flag", "Xcode < 4.0"
                self, "the C++11 standard", "cxx11_flag", "Xcode < 4.0.0"
            )
        return "-std=c++11"

    @property
    def cxx14_flag(self):
        if self.real_version < spack.version.ver("5.1"):
        # Adapted from CMake's rules for AppleClang
        if self.real_version < spack.version.ver("5.1.0"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C++14 standard", "cxx14_flag", "Xcode < 5.1"
                self, "the C++14 standard", "cxx14_flag", "Xcode < 5.1.0"
            )
        elif self.real_version < spack.version.ver("6.1"):
        elif self.real_version < spack.version.ver("6.1.0"):
            return "-std=c++1y"

        return "-std=c++14"

    @property
    def cxx17_flag(self):
        if self.real_version < spack.version.ver("6.1"):
        # Adapted from CMake's rules for AppleClang
        if self.real_version < spack.version.ver("6.1.0"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C++17 standard", "cxx17_flag", "Xcode < 6.1"
                self, "the C++17 standard", "cxx17_flag", "Xcode < 6.1.0"
            )
        elif self.real_version < spack.version.ver("10.0"):
            return "-std=c++1z"
        return "-std=c++17"

    @property
    def cxx20_flag(self):
        if self.real_version < spack.version.ver("10.0"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C++20 standard", "cxx20_flag", "Xcode < 10.0"
            )
        elif self.real_version < spack.version.ver("13.0"):
            return "-std=c++2a"
        return "-std=c++20"

    @property
    def cxx23_flag(self):
        if self.real_version < spack.version.ver("13.0"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C++23 standard", "cxx23_flag", "Xcode < 13.0"
            )
        return "-std=c++2b"

    # C flags based on CMake Modules/Compiler/AppleClang-C.cmake

    @property
    def c99_flag(self):
        if self.real_version < spack.version.ver("4.0"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C99 standard", "c99_flag", "< 4.0"
            )
        return "-std=c99"

    @property
    def c11_flag(self):
        if self.real_version < spack.version.ver("4.0"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C11 standard", "c11_flag", "< 4.0"
            )
        return "-std=c11"

    @property
    def c17_flag(self):
        if self.real_version < spack.version.ver("11.0"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C17 standard", "c17_flag", "< 11.0"
            )
        return "-std=c17"

    @property
    def c23_flag(self):
        if self.real_version < spack.version.ver("11.0.3"):
            raise spack.compiler.UnsupportedCompilerFlag(
                self, "the C23 standard", "c23_flag", "< 11.0.3"
            )
        return "-std=c2x"
        return "-std=c++1z"

    def setup_custom_environment(self, pkg, env):
        """Set the DEVELOPER_DIR environment for the Xcode toolchain.

@@ -207,7 +154,10 @@ def setup_custom_environment(self, pkg, env):
            ),
        )

        real_dirs = ["Toolchains/XcodeDefault.xctoolchain/usr/bin", "usr/bin"]
        real_dirs = [
            "Toolchains/XcodeDefault.xctoolchain/usr/bin",
            "usr/bin",
        ]

        bins = ["c++", "c89", "c99", "cc", "clang", "clang++", "cpp"]

@@ -89,11 +89,6 @@ def cxx14_flag(self):
        return "-std=c++14"
        return "-h std=c++14"

    @property
    def cxx17_flag(self):
        if self.is_clang_based:
            return "-std=c++17"

    @property
    def c99_flag(self):
        if self.is_clang_based:

@@ -128,23 +128,10 @@ def c99_flag(self):

    @property
    def c11_flag(self):
        if self.real_version < ver("3.0"):
            raise UnsupportedCompilerFlag(self, "the C11 standard", "c11_flag", "< 3.0")
        if self.real_version < ver("3.1"):
            return "-std=c1x"
        return "-std=c11"

    @property
    def c17_flag(self):
        if self.real_version < ver("6.0"):
            raise UnsupportedCompilerFlag(self, "the C17 standard", "c17_flag", "< 6.0")
        return "-std=c17"

    @property
    def c23_flag(self):
        if self.real_version < ver("9.0"):
            raise UnsupportedCompilerFlag(self, "the C23 standard", "c23_flag", "< 9.0")
        return "-std=c2x"
        if self.real_version < ver("6.1.0"):
            raise UnsupportedCompilerFlag(self, "the C11 standard", "c11_flag", "< 6.1.0")
        else:
            return "-std=c11"

    @property
    def cc_pic_flag(self):

@@ -743,7 +743,9 @@ def _concretize_specs_together_new(*abstract_specs, **kwargs):
    import spack.solver.asp

    solver = spack.solver.asp.Solver()
    result = solver.solve(abstract_specs, tests=kwargs.get("tests", False))
    solver.tests = kwargs.get("tests", False)

    result = solver.solve(abstract_specs)
    result.raise_if_unsat()
    return [s.copy() for s in result.specs]


@@ -793,7 +793,7 @@ def _config():
    configuration_paths = [
        # Default configuration scope is the lowest-level scope. These are
        # versioned with Spack and can be overridden by systems, sites or users
        configuration_defaults_path
        configuration_defaults_path,
    ]

    disable_local_config = "SPACK_DISABLE_LOCAL_CONFIG" in os.environ

@@ -801,11 +801,15 @@ def _config():
    # System configuration is per machine.
    # This is disabled if user asks for no local configuration.
    if not disable_local_config:
        configuration_paths.append(("system", spack.paths.system_config_path))
        configuration_paths.append(
            ("system", spack.paths.system_config_path),
        )

    # Site configuration is per spack instance, for sites or projects
    # No site-level configs should be checked into spack by default.
    configuration_paths.append(("site", os.path.join(spack.paths.etc_path)))
    configuration_paths.append(
        ("site", os.path.join(spack.paths.etc_path)),
    )

    # User configuration can override both spack defaults and site config
    # This is disabled if user asks for no local configuration.

@@ -18,7 +18,10 @@
#: packages here.
default_path = "/opt/cray/pe/cpe-descriptive-manifest/"

compiler_name_translation = {"nvidia": "nvhpc", "rocm": "rocmcc"}
compiler_name_translation = {
    "nvidia": "nvhpc",
    "rocm": "rocmcc",
}


def translated_compiler_name(manifest_compiler_name):

@@ -46,7 +46,10 @@
import spack.store
import spack.util.lock as lk
import spack.util.spack_json as sjson
from spack.directory_layout import DirectoryLayoutError, InconsistentInstallDirectoryError
from spack.directory_layout import (
    DirectoryLayoutError,
    InconsistentInstallDirectoryError,
)
from spack.error import SpackError
from spack.util.crypto import bit_length
from spack.version import Version

@@ -105,7 +108,10 @@


def reader(version):
    reader_cls = {Version("5"): spack.spec.SpecfileV1, Version("6"): spack.spec.SpecfileV3}
    reader_cls = {
        Version("5"): spack.spec.SpecfileV1,
        Version("6"): spack.spec.SpecfileV3,
    }
    return reader_cls[version]


@@ -377,8 +377,7 @@ def compute_windows_user_path_for_package(pkg):
    install location, return list of potential locations based
    on common heuristics. For more info on Windows user specific
    installs see:
    https://learn.microsoft.com/en-us/dotnet/api/system.environment.specialfolder?view=netframework-4.8
    """
    https://learn.microsoft.com/en-us/dotnet/api/system.environment.specialfolder?view=netframework-4.8"""
    if not is_windows:
        return []

@@ -18,9 +18,7 @@

import llnl.util.filesystem as fs
import llnl.util.tty as tty
import llnl.util.tty.color as clr
from llnl.util.lang import dedupe
from llnl.util.link_tree import ConflictingSpecsError
from llnl.util.symlink import symlink

import spack.compilers

@@ -47,7 +45,11 @@
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
import spack.util.url
from spack.filesystem_view import SimpleFilesystemView, inverse_view_func_parser, view_func_parser
from spack.filesystem_view import (
    SimpleFilesystemView,
    inverse_view_func_parser,
    view_func_parser,
)
from spack.installer import PackageInstaller
from spack.spec import Spec
from spack.spec_list import InvalidSpecConstraintError, SpecList

@@ -324,7 +326,12 @@ def _write_yaml(data, str_or_file):
def _eval_conditional(string):
    """Evaluate conditional definitions using restricted variable scope."""
    valid_variables = spack.util.environment.get_host_environment()
    valid_variables.update({"re": re, "env": os.environ})
    valid_variables.update(
        {
            "re": re,
            "env": os.environ,
        }
    )
    return eval(string, valid_variables)


@@ -627,24 +634,7 @@ def regenerate(self, concretized_root_specs):
                os.unlink(tmp_symlink_name)
            except (IOError, OSError):
                pass

            # Give an informative error message for the typical error case: two specs, same package
            # project to same prefix.
            if isinstance(e, ConflictingSpecsError):
                spec_a = e.args[0].format(color=clr.get_color_when())
                spec_b = e.args[1].format(color=clr.get_color_when())
                raise SpackEnvironmentViewError(
                    f"The environment view in {self.root} could not be created, "
                    "because the following two specs project to the same prefix:\n"
                    f"    {spec_a}, and\n"
                    f"    {spec_b}.\n"
                    "    To resolve this issue:\n"
                    "        a. use `concretization:unify:true` to ensure there is only one "
                    "package per spec in the environment, or\n"
                    "        b. disable views with `view:false`, or\n"
                    "        c. create custom view projections."
                ) from e
            raise
            raise e

        # Remove the old root when it's in the same folder as the new root. This guards
        # against removal of an arbitrary path when the original symlink in self.root

@@ -1005,7 +995,9 @@ def included_config_scopes(self):
                    config_path = os.path.join(config_path, basename)
                else:
                    staged_path = spack.config.fetch_remote_configs(
                        config_path, self.config_stage_dir, skip_existing=True
                        config_path,
                        self.config_stage_dir,
                        skip_existing=True,
                    )
                    if not staged_path:
                        raise SpackEnvironmentError(

@@ -35,7 +35,13 @@

import llnl.util
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.filesystem import get_single_file, mkdirp, temp_cwd, temp_rename, working_dir
from llnl.util.filesystem import (
    get_single_file,
    mkdirp,
    temp_cwd,
    temp_rename,
    working_dir,
)
from llnl.util.symlink import symlink

import spack.config

@@ -89,6 +95,22 @@ def _ensure_one_stage_entry(stage_path):
    return os.path.join(stage_path, stage_entries[0])


def _filesummary(path, print_bytes=16):
    try:
        n = print_bytes
        with open(path, "rb") as f:
            size = os.fstat(f.fileno()).st_size
            if size <= 2 * n:
                short_contents = f.read(2 * n)
            else:
                short_contents = f.read(n)
                f.seek(-n, 2)
                short_contents += b"..." + f.read(n)
        return size, short_contents
    except OSError:
        return 0, b""


def fetcher(cls):
    """Decorator used to register fetch strategies."""
    all_strategies.append(cls)

@@ -497,7 +519,7 @@ def check(self):
        # On failure, provide some information about the file size and
        # contents, so that we can quickly see what the issue is (redirect
        # was not followed, empty file, text instead of binary, ...)
        size, contents = fs.filesummary(self.archive_file)
        size, contents = _filesummary(self.archive_file)
        raise ChecksumError(
            f"{checker.hash_name} checksum failed for {self.archive_file}",
            f"Expected {self.digest} but got {checker.sum}. "

@@ -1533,7 +1555,11 @@ def for_package_version(pkg, version):
    # performance hit for branches on older versions of git.
    # Branches cannot be cached, so we tell the fetcher not to cache tags/branches
    ref_type = "commit" if version.is_commit else "tag"
    kwargs = {"git": pkg.git, ref_type: version.ref, "no_cache": True}
    kwargs = {
        "git": pkg.git,
        ref_type: version.ref,
        "no_cache": True,
    }

    kwargs["submodules"] = getattr(pkg, "submodules", False)

@@ -20,7 +20,6 @@
)
from llnl.util.lang import index_by, match_predicate
from llnl.util.link_tree import (
    ConflictingSpecsError,
    DestinationMergeVisitor,
    LinkTree,
    MergeConflictSummary,

@@ -639,22 +638,6 @@ class SimpleFilesystemView(FilesystemView):
    def __init__(self, root, layout, **kwargs):
        super(SimpleFilesystemView, self).__init__(root, layout, **kwargs)

    def _sanity_check_view_projection(self, specs):
        """A very common issue is that we end up with two specs of the same
        package, that project to the same prefix. We want to catch that as
        early as possible and give a sensible error to the user. Here we use
        the metadata dir (.spack) projection as a quick test to see whether
        two specs in the view are going to clash. The metadata dir is used
        because it's always added by Spack with identical files, so a
        guaranteed clash that's easily verified."""
        seen = dict()
        for current_spec in specs:
            metadata_dir = self.relative_metadata_dir_for_spec(current_spec)
            conflicting_spec = seen.get(metadata_dir)
            if conflicting_spec:
                raise ConflictingSpecsError(current_spec, conflicting_spec)
            seen[metadata_dir] = current_spec

    def add_specs(self, *specs, **kwargs):
        assert all((s.concrete for s in specs))
        if len(specs) == 0:

@@ -669,8 +652,6 @@ def add_specs(self, *specs, **kwargs):
        if kwargs.get("exclude", None):
            specs = set(filter_exclude(specs, kwargs["exclude"]))

        self._sanity_check_view_projection(specs)

        # Ignore spack meta data folder.
        def skip_list(file):
            return os.path.basename(file) == spack.store.layout.metadata_dir

@@ -705,45 +686,32 @@ def skip_list(file):
        for dst in visitor.directories:
            os.mkdir(os.path.join(self._root, dst))

        # Link the files using a "merge map": full src => full dst
        merge_map_per_prefix = self._source_merge_visitor_to_merge_map(visitor)
        for spec in specs:
            merge_map = merge_map_per_prefix.get(spec.package.view_source(), None)
            if not merge_map:
                # Not every spec may have files to contribute.
                continue
        # Then group the files to be linked by spec...
        # For compatibility, we have to create a merge_map dict mapping
        # full_src => full_dst
        files_per_spec = itertools.groupby(visitor.files.items(), key=lambda item: item[1][0])

        for (spec, (src_root, rel_paths)) in zip(specs, files_per_spec):
            merge_map = dict()
            for dst_rel, (_, src_rel) in rel_paths:
                full_src = os.path.join(src_root, src_rel)
                full_dst = os.path.join(self._root, dst_rel)
                merge_map[full_src] = full_dst
            spec.package.add_files_to_view(self, merge_map, skip_if_exists=False)

        # Finally create the metadata dirs.
        self.link_metadata(specs)

    def _source_merge_visitor_to_merge_map(self, visitor: SourceMergeVisitor):
        # For compatibility with add_files_to_view, we have to create a
        # merge_map of the form join(src_root, src_rel) => join(dst_root, dst_rel),
        # but our visitor.files format is dst_rel => (src_root, src_rel).
        # We exploit that visitor.files is an ordered dict, and files per source
        # prefix are contiguous.
        source_root = lambda item: item[1][0]
        per_source = itertools.groupby(visitor.files.items(), key=source_root)
        return {
            src_root: {
                os.path.join(src_root, src_rel): os.path.join(self._root, dst_rel)
                for dst_rel, (_, src_rel) in group
            }
            for src_root, group in per_source
        }

    def relative_metadata_dir_for_spec(self, spec):
        return os.path.join(
            self.get_relative_projection_for_spec(spec), spack.store.layout.metadata_dir, spec.name
        )

    def link_metadata(self, specs):
        metadata_visitor = SourceMergeVisitor()

        for spec in specs:
            src_prefix = os.path.join(spec.package.view_source(), spack.store.layout.metadata_dir)
            proj = self.relative_metadata_dir_for_spec(spec)
            proj = os.path.join(
                self.get_relative_projection_for_spec(spec),
                spack.store.layout.metadata_dir,
                spec.name,
            )
            metadata_visitor.set_projection(proj)
            visit_directory_tree(src_prefix, metadata_visitor)

@@ -37,6 +37,7 @@ class SharedLibrariesVisitor(BaseDirectoryVisitor):
    exception of an exclude list."""

    def __init__(self, exclude_list):

        # List of file and directory names to be excluded
        self.exclude_list = frozenset(exclude_list)


@@ -212,7 +212,8 @@ def install_sbang():

    # copy over the fresh copy of `sbang`
    sbang_tmp_path = os.path.join(
        os.path.dirname(sbang_path), ".%s.tmp" % os.path.basename(sbang_path)
        os.path.dirname(sbang_path),
        ".%s.tmp" % os.path.basename(sbang_path),
    )
    shutil.copy(spack.paths.sbang_script, sbang_tmp_path)


@@ -423,7 +423,11 @@ def _try_install_from_binary_cache(pkg, explicit, unsigned=False, timer=timer.NU
    matches = binary_distribution.get_mirrors_for_spec(pkg.spec, index_only=True)

    return _process_binary_cache_tarball(
        pkg, explicit, unsigned, mirrors_for_spec=matches, timer=timer
        pkg,
        explicit,
        unsigned,
        mirrors_for_spec=matches,
        timer=timer,
    )


@@ -785,7 +789,7 @@ def _add_bootstrap_compilers(self, compiler, architecture, pkgs, request, all_de
            associated dependents
        """
        packages = _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs)
        for comp_pkg, is_compiler in packages:
        for (comp_pkg, is_compiler) in packages:
            pkgid = package_id(comp_pkg)
            if pkgid not in self.build_tasks:
                self._add_init_task(comp_pkg, request, is_compiler, all_deps)

@@ -809,7 +813,8 @@ def _modify_existing_task(self, pkgid, attr, value):
            key, task = tup
            if task.pkg_id == pkgid:
                tty.debug(
                    "Modifying task for {0} to treat it as a compiler".format(pkgid), level=2
                    "Modifying task for {0} to treat it as a compiler".format(pkgid),
                    level=2,
                )
                setattr(task, attr, value)
                self.build_pq[i] = (key, task)

@@ -1207,6 +1212,7 @@ def _add_tasks(self, request, all_deps):

        install_package = request.install_args.get("install_package")
        if install_package and request.pkg_id not in self.build_tasks:

            # Be sure to clear any previous failure
            spack.store.db.clear_failure(request.spec, force=True)

@@ -1755,16 +1761,14 @@ def install(self):
                raise

            except binary_distribution.NoChecksumException as exc:
                if task.cache_only:
                    raise

                # Checking hash on downloaded binary failed.
                err = "Failed to install {0} from binary cache due to {1}:"
                err += " Requeueing to install from source."
                tty.error(err.format(pkg.name, str(exc)))
                task.use_cache = False
                self._requeue_task(task)
                continue
                if not task.cache_only:
                    # Checking hash on downloaded binary failed.
                    err = "Failed to install {0} from binary cache due to {1}:"
                    err += " Requeueing to install from source."
                    tty.error(err.format(pkg.name, str(exc)))
                    task.use_cache = False
                    self._requeue_task(task)
                    continue

            except (Exception, SystemExit) as exc:
                self._update_failed(task, True, exc)

@@ -1944,7 +1948,11 @@ def run(self):
        # Run post install hooks before build stage is removed.
        spack.hooks.post_install(self.pkg.spec)

        _print_timer(pre=self.pre, pkg_id=self.pkg_id, timer=self.timer)
        _print_timer(
            pre=self.pre,
            pkg_id=self.pkg_id,
            timer=self.timer,
        )
        _print_installed_pkg(self.pkg.prefix)

        # Send final status that install is successful

@@ -249,7 +249,10 @@ def root_path(name, module_set_name):
    Returns:
        root folder for module file installation
    """
    defaults = {"lmod": "$spack/share/spack/lmod", "tcl": "$spack/share/spack/modules"}
    defaults = {
        "lmod": "$spack/share/spack/lmod",
        "tcl": "$spack/share/spack/modules",
    }
    # Root folders where the various module files should be written
    roots = spack.config.get("modules:%s:roots" % module_set_name, {})

@@ -399,19 +402,13 @@ def get_module(module_type, spec, get_full_path, module_set_name="default", requ
    else:
        writer = spack.modules.module_types[module_type](spec, module_set_name)
        if not os.path.isfile(writer.layout.filename):
            fmt_str = "{name}{@version}{/hash:7}"
            if not writer.conf.excluded:
                raise ModuleNotFoundError(
                    "The module for package {} should be at {}, but it does not exist".format(
                        spec.format(fmt_str), writer.layout.filename
                    )
                err_msg = "No module available for package {0} at {1}".format(
                    spec, writer.layout.filename
                )
                raise ModuleNotFoundError(err_msg)
            elif required:
                tty.debug(
                    "The module configuration has excluded {}: omitting it".format(
                        spec.format(fmt_str)
                    )
                )
                tty.debug("The module configuration has excluded {0}: " "omitting it".format(spec))
            else:
                return None

@@ -65,7 +65,9 @@
from spack.version import GitVersion, Version, VersionBase

FLAG_HANDLER_RETURN_TYPE = Tuple[
    Optional[Iterable[str]], Optional[Iterable[str]], Optional[Iterable[str]]
    Optional[Iterable[str]],
    Optional[Iterable[str]],
    Optional[Iterable[str]],
]
FLAG_HANDLER_TYPE = Callable[[str, Iterable[str]], FLAG_HANDLER_RETURN_TYPE]

@@ -1703,7 +1705,11 @@ def _has_make_target(self, target):
            "don't know how to make {0}. Stop",
        ]

        kwargs = {"fail_on_error": False, "output": os.devnull, "error": str}
        kwargs = {
            "fail_on_error": False,
            "output": os.devnull,
            "error": str,
        }

        stderr = make("-n", target, **kwargs)

@@ -2214,7 +2220,10 @@ def uninstall_by_spec(spec, force=False, deprecator=None):

        if not force:
            dependents = spack.store.db.installed_relatives(
                spec, direction="parents", transitive=True, deptype=("link", "run")
                spec,
                direction="parents",
                transitive=True,
                deptype=("link", "run"),
            )
            if dependents:
                raise PackageStillNeededError(spec, dependents)

@@ -2227,6 +2236,7 @@ def uninstall_by_spec(spec, force=False, deprecator=None):

        # Pre-uninstall hook runs first.
        with spack.store.db.prefix_write_lock(spec):

            if pkg is not None:
                try:
                    spack.hooks.pre_uninstall(spec)

@@ -2389,7 +2399,11 @@ def fetch_remote_versions(self, concurrency=128):

        try:
            return spack.util.web.find_versions_of_archive(
                self.all_urls, self.list_url, self.list_depth, concurrency, reference_package=self
                self.all_urls,
                self.list_url,
                self.list_depth,
                concurrency,
                reference_package=self,
            )
        except spack.util.web.NoNetworkConnectionError as e:
            tty.die("Package.fetch_versions couldn't connect to:", e.url, e.message)
@@ -409,7 +409,13 @@ def needs_text_relocation(m_type, m_subtype):


 def relocate_macho_binaries(
-    path_names, old_layout_root, new_layout_root, prefix_to_prefix, rel, old_prefix, new_prefix
+    path_names,
+    old_layout_root,
+    new_layout_root,
+    prefix_to_prefix,
+    rel,
+    old_prefix,
+    new_prefix,
 ):
     """
     Use macholib python package to get the rpaths, depedent libraries

@@ -823,7 +829,7 @@ def fixup_macos_rpath(root, filename):

     # Check for nonexistent rpaths (often added by spack linker overzealousness
     # with both lib/ and lib64/) and duplicate rpaths
-    for rpath, count in rpaths.items():
+    for (rpath, count) in rpaths.items():
         if rpath.startswith("@loader_path") or rpath.startswith("@executable_path"):
             # Allowable relative paths
             pass

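The loop being reverted here walks an rpath-to-count mapping, skipping `@loader_path` / `@executable_path` entries and treating the rest as candidates for cleanup. A toy reproduction with invented data (not real `otool` output):

```python
from collections import Counter

# Count rpaths the way fixup_macos_rpath's loop consumes them,
# flagging duplicates for deletion.
rpaths = Counter(["/opt/lib", "/opt/lib", "@loader_path/../lib"])
for rpath, count in rpaths.items():
    if rpath.startswith(("@loader_path", "@executable_path")):
        continue  # allowable relative paths
    if count > 1:
        print(f"duplicate rpath ({count}x): {rpath}")
```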
@@ -1066,6 +1066,7 @@ def dump_provenance(self, spec, path):
        # Install patch files needed by the package.
        fs.mkdirp(path)
        for patch in itertools.chain.from_iterable(spec.package.patches.values()):
+
            if patch.path:
                if os.path.exists(patch.path):
                    fs.install(patch.path, path)

@@ -113,6 +113,7 @@ def wrapper(instance, *args, **kwargs):

            start_time = time.time()
            try:
+
                value = wrapped_fn(instance, *args, **kwargs)
                package["stdout"] = self.fetch_log(pkg)
                package["installed_from_binary_cache"] = pkg.installed_from_binary_cache

@@ -233,7 +234,9 @@ def extract_package_from_signature(self, instance, *args, **kwargs):

 @contextlib.contextmanager
 def build_context_manager(
-    reporter: spack.reporters.Reporter, filename: str, specs: List[spack.spec.Spec]
+    reporter: spack.reporters.Reporter,
+    filename: str,
+    specs: List[spack.spec.Spec],
 ):
     """Decorate a package to generate a report after the installation function is executed.

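`build_context_manager` is a `@contextlib.contextmanager` generator: setup runs before the `yield`, control passes to the build, and the report is finalized on the way out. A minimal sketch of that shape (the names and the final `print` are placeholders, not Spack's reporting logic):

```python
import contextlib

@contextlib.contextmanager
def report_context(filename):
    records = []
    try:
        yield records  # caller appends results here while the build runs
    finally:
        # Finalization happens even if the build raised.
        print(f"writing {len(records)} record(s) to {filename}")

with report_context("report.xml") as records:
    records.append("ok")
```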
@@ -47,7 +47,8 @@


 CDashConfiguration = collections.namedtuple(
-    "CDashConfiguration", ["upload_url", "packages", "build", "site", "buildstamp", "track"]
+    "CDashConfiguration",
+    ["upload_url", "packages", "build", "site", "buildstamp", "track"],
 )

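Splitting the `namedtuple` call changes nothing semantically; constructing one still takes the same six fields. For instance (all values below are invented, not defaults from Spack's CDash options):

```python
import collections

CDashConfiguration = collections.namedtuple(
    "CDashConfiguration",
    ["upload_url", "packages", "build", "site", "buildstamp", "track"],
)

configuration = CDashConfiguration(
    upload_url="https://cdash.example.com/submit.php?project=Spack",
    packages=["zlib"],
    build="my-build",
    site="my-site",
    buildstamp=None,
    track="Experimental",
)
```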
@@ -335,7 +336,12 @@ def test_skipped_report(self, directory_name, spec, reason=None):
         if reason:
             output += "\n{0}".format(reason)

-        package = {"name": spec.name, "id": spec.dag_hash(), "result": "skipped", "stdout": output}
+        package = {
+            "name": spec.name,
+            "id": spec.dag_hash(),
+            "result": "skipped",
+            "stdout": output,
+        }
         self.test_report_for_package(directory_name, package, duration=0.0)

     def concretization_report(self, directory_name, msg):

@@ -10,7 +10,11 @@
 import llnl.util.tty as tty

 # The keys here represent the only recognized (ctest/cdash) status values
-completed = {"failed": "Completed", "passed": "Completed", "notrun": "No tests to run"}
+completed = {
+    "failed": "Completed",
+    "passed": "Completed",
+    "notrun": "No tests to run",
+}

 log_regexp = re.compile(r"^==> \[([0-9:.\-]*)(?:, [0-9]*)?\] (.*)")
 returns_regexp = re.compile(r"\[([0-9 ,]*)\]")

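`log_regexp` captures the timestamp and message from Spack's `==> [...]` log lines. A quick check against a fabricated line of that shape:

```python
import re

log_regexp = re.compile(r"^==> \[([0-9:.\-]*)(?:, [0-9]*)?\] (.*)")

line = "==> [2023-02-07-15:04:05.123456] Installing zlib"
match = log_regexp.match(line)
if match:
    timestamp, message = match.groups()
    print(timestamp)  # 2023-02-07-15:04:05.123456
    print(message)    # Installing zlib
```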
@@ -32,7 +32,10 @@
         },
         "binary_cache_checksum": {
             "type": "object",
-            "properties": {"hash_algorithm": {"type": "string"}, "hash": {"type": "string"}},
+            "properties": {
+                "hash_algorithm": {"type": "string"},
+                "hash": {"type": "string"},
+            },
         },
         "buildcache_layout_version": {"type": "number"},
     },

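The reflowed `binary_cache_checksum` fragment is ordinary JSON Schema, so it can be exercised directly with the third-party `jsonschema` package (the instance document below is made up):

```python
import jsonschema  # third-party: pip install jsonschema

schema = {
    "type": "object",
    "properties": {
        "hash_algorithm": {"type": "string"},
        "hash": {"type": "string"},
    },
}

# Passes silently; a non-string value would raise ValidationError.
jsonschema.validate({"hash_algorithm": "sha256", "hash": "deadbeef"}, schema)
```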
@@ -22,7 +22,7 @@
             r"project": {"type": "string"},
             r"site": {"type": "string"},
         },
-    }
+    },
 }

@@ -21,7 +21,7 @@
         "flags": {
             "type": "object",
             "properties": {
-                "keep_werror": {"type": "string", "enum": ["all", "specific", "none"]}
+                "keep_werror": {"type": "string", "enum": ["all", "specific", "none"]},
             },
         },
         "shared_linking": {

@@ -54,12 +54,12 @@
                     ),
                 },
                 {"type": "string"},  # deprecated
-            ]
+            ],
         },
         "install_hash_length": {"type": "integer", "minimum": 1},
         "install_path_scheme": {"type": "string"},  # deprecated
         "build_stage": {
-            "oneOf": [{"type": "string"}, {"type": "array", "items": {"type": "string"}}]
+            "oneOf": [{"type": "string"}, {"type": "array", "items": {"type": "string"}}],
         },
         "test_stage": {"type": "string"},
         "extensions": {"type": "array", "items": {"type": "string"}},

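Every one-character edit in these schema hunks adds a trailing comma, and with black that is load-bearing: a "magic trailing comma" pins a collection in its exploded, one-item-per-line form, while omitting it lets black collapse the collection whenever it fits on one line. For example:

```python
# black's "magic trailing comma": the comma after the last element pins
# this list in exploded form, one item per line.
exploded = [
    "red",
    "green",
    "blue",
]

# Without a trailing comma, black collapses it to one line when it fits.
collapsed = ["red", "green", "blue"]
```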
@@ -83,7 +83,7 @@
         "concretizer": {"type": "string", "enum": ["original", "clingo"]},
         "db_lock_timeout": {"type": "integer", "minimum": 1},
         "package_lock_timeout": {
-            "anyOf": [{"type": "integer", "minimum": 1}, {"type": "null"}]
+            "anyOf": [{"type": "integer", "minimum": 1}, {"type": "null"}],
         },
         "allow_sgid": {"type": "boolean"},
         "binary_index_root": {"type": "string"},

@@ -97,7 +97,7 @@
                 "modules:[module set]:roots and is ignored",
                 "error": False,
             },
-        }
+        },
     }

Some files were not shown because too many files have changed in this diff.