Compare commits


65 Commits

Author SHA1 Message Date
Gregory Becker
79e821a43e docs: consistently use 'managed environment' over 'named ...' 2023-02-21 15:53:34 -08:00
psakievich
c16f166dcc Update lib/spack/docs/environments.rst 2023-02-21 16:33:45 -07:00
psakiev
4ae9730ffe remove env's 2023-02-21 12:57:13 -07:00
psakiev
9debeaf4e7 Try to resolve uninstall failure 2023-02-21 12:01:10 -07:00
psakiev
47cdefcbe8 Try setting mutable_mock_env in each func 2023-02-21 11:18:26 -07:00
psakiev
2acc356ed4 Style 2023-02-21 10:09:22 -07:00
psakiev
36bf6c9009 Add unit-tests back 2023-02-21 10:00:50 -07:00
psakiev
29fbad20a2 Undo hoops for avoiding env include 2023-02-21 09:18:44 -07:00
psakiev
5f78703af8 Fixture function scope 2023-02-21 08:53:07 -07:00
psakiev
9b390bdc2c Add back mutable-config 2023-02-15 16:55:33 -07:00
psakiev
bc427e8435 Adjust vars in mutable_mock_env_path 2023-02-15 14:46:54 -07:00
psakiev
3c6e6e22be Undo all unit-tests to see if CI will pass 2023-02-15 13:08:19 -07:00
psakievich
4a8f755632 [@spackbot] updating style on behalf of psakievich 2023-02-13 18:52:06 +00:00
psakievich
51de7ed7ee Merge branch 'develop' into f/env-location 2023-02-13 10:50:53 -07:00
psakiev
047110c086 Debug print statement 2023-01-11 05:37:29 -07:00
psakiev
7fa16089fc Add entry to tmpconfig 2023-01-11 04:46:42 -07:00
Philip Sakievich
065eaa739f Remove accidental bleed over trilinos change 2023-01-10 09:38:51 -07:00
psakiev
8d8e88c177 Try renaming dir 2023-01-09 16:24:14 -07:00
psakiev
8cc69cecfc Add function scope 2023-01-09 15:27:27 -07:00
psakiev
9c690a1ef5 Revert "Try something else"
This reverts commit 3532d6ff16.
2023-01-09 14:20:28 -07:00
psakiev
3532d6ff16 Try something else 2023-01-09 13:22:13 -07:00
psakiev
7494893d3b Drop the hidden dir 2023-01-09 12:46:59 -07:00
psakievich
54e4a72b8e Merge branch 'develop' into f/env-location 2023-01-07 22:23:23 -07:00
psakiev
b86980461f Modify failing test 2023-01-07 20:59:44 -07:00
psakiev
23b5932f73 Make hidden dir for mock env location 2022-12-21 14:56:52 -07:00
psakiev
93760847e8 Use tmpdir for all mutable env paths 2022-12-19 21:18:37 -07:00
psakievich
a35d2f39af Merge branch 'develop' into f/env-location 2022-12-19 17:02:19 -07:00
psakiev
af17cc60a9 Add tmpdir 2022-12-19 16:13:01 -07:00
psakiev
b0528cae3f Improve robustness of env test in parallel 2022-12-19 16:07:34 -07:00
psakiev
372a18392a Add syntax for debugging CI 2022-12-19 13:54:45 -07:00
psakievich
70db49dfed Merge branch 'develop' into f/env-location 2022-12-19 13:50:43 -07:00
psakiev
1d24c196da Use proper import 2022-12-08 12:36:36 -07:00
psakievich
fa3d768947 Update lib/spack/spack/environment/environment.py 2022-12-08 12:34:57 -07:00
psakiev
9b6a109c7e Remove constraint on environments_root creation 2022-12-07 22:24:18 -07:00
psakievich
663967d984 Merge branch 'develop' into f/env-location 2022-12-07 15:03:51 -07:00
psakiev
b47ff2a2de Fix missing function 2022-12-07 14:32:36 -07:00
psakiev
f7b4993810 Address reviewer comments 2022-12-07 14:25:12 -07:00
psakiev
3e07eb8cf0 Fix scope detection issue 2022-12-01 22:21:10 -07:00
psakievich
e773396747 Merge branch 'develop' into f/env-location 2022-12-01 17:00:36 -07:00
psakiev
e19cc2385e Don't automatically create the default env path 2022-12-01 16:50:24 -07:00
psakievich
e867662e1b Merge branch 'develop' into f/env-location 2022-11-09 17:19:41 -07:00
psakiev
6aad926838 Fix unicode error 2022-11-09 15:50:16 -07:00
psakievich
41cf807804 Merge branch 'develop' into f/env-location 2022-11-09 15:09:22 -07:00
psakiev
cf8b919954 Remove accidental changes to bin/sbang 2022-11-09 13:39:56 -07:00
psakiev
47957dccf4 Remove shell driven command from env activation test 2022-11-09 13:19:17 -07:00
Philip Sakievich
38313cadf4 Revert "avoid using realpath, readlink -f is more portable"
This reverts commit 2cae95334c.
2022-11-08 23:40:03 -07:00
Philip Sakievich
730d005a56 Only create default path when used 2022-11-08 23:39:03 -07:00
psakiev
2f7c850a20 style 2022-11-08 23:08:19 -07:00
psakiev
5aa7a564d3 Why do we have multiple code paths for env activation? 2022-11-08 22:59:29 -07:00
psakiev
d50c8f1727 Add guard-rail and unit-test (not working) 2022-11-08 22:41:09 -07:00
psakievich
0ac6dfa8f3 Merge branch 'develop' into f/env-location 2022-10-12 08:45:01 -06:00
Philip Sakievich
0b0ffe645d Ugh python 2 🤦‍♂️ 2022-10-11 22:37:57 -06:00
Philip Sakievich
9984c838c8 Fix failing shell tests 2022-10-11 22:18:43 -06:00
psakievich
b1bd61321d Merge branch 'develop' into f/env-location 2022-10-11 21:53:49 -06:00
psakievich
c144558245 Merge branch 'develop' into f/env-location 2022-09-29 11:50:06 -06:00
psakiev
ef43044672 Change name of variable 2022-09-27 22:17:29 -06:00
psakievich
da7294cd90 Update lib/spack/spack/test/config.py
Co-authored-by: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com>
2022-09-27 21:08:32 -06:00
psakievich
addb891f42 Update lib/spack/docs/environments.rst
Co-authored-by: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com>
2022-09-27 21:08:25 -06:00
psakievich
bce2d38bfc Merge branch 'develop' into f/env-location 2022-09-27 14:40:57 -06:00
Tom Scogland
2cae95334c avoid using realpath, readlink -f is more portable 2022-09-27 18:33:52 +00:00
psakiev
076d60ce35 Add check that env_root dir exists 2022-09-27 12:02:43 -06:00
psakiev
9ecdafd8de Add some documentation 2022-09-27 11:25:31 -06:00
psakievich
76fde639e8 Update lib/spack/spack/environment/environment.py
Co-authored-by: Greg Becker <becker33@llnl.gov>

Add back path canonicalization
2022-09-27 11:17:47 -06:00
psakiev
490b5eef7c Rework path access strategy 2022-09-27 10:57:28 -06:00
psakiev
3f2e77e5fa Allow users to specify root env dir
Environments managed by spack have some advantages over anonymous Environments
but they are tucked away inside spack's directory tree. This PR gives
users the ability to specify where the environments should live.

See #32823
2022-09-26 22:31:29 -06:00
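
Illustrative sketch, not part of this diff: assuming the option introduced by this PR is named environments_root (as the commits above suggest) and lives in Spack's config section, a user could relocate managed environments roughly like this. The path and environment name are hypothetical.

    # Point Spack at a custom root for managed environments (hypothetical path).
    spack config add "config:environments_root:$HOME/my-spack-envs"
    # A managed environment created afterwards would live under that directory.
    spack env create demo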
925 changed files with 8181 additions and 9535 deletions


@@ -1,5 +1,3 @@
# .git-blame-ignore-revs
# Formatted entire codebase with black 23
603569e321013a1a63a637813c94c2834d0a0023
# Formatted entire codebase with black 22
# Formatted entire codebase with black
f52f6e99dbf1131886a80112b8c79dfc414afb7c

.gitattributes vendored

@@ -1,4 +1,3 @@
*.py diff=python
*.lp linguist-language=Prolog
lib/spack/external/* linguist-vendored
*.bat text eol=crlf


@@ -44,7 +44,7 @@ jobs:
cache: 'pip'
- name: Install Python packages
run: |
python3 -m pip install --upgrade pip six setuptools types-six black==23.1.0 mypy isort clingo flake8
python3 -m pip install --upgrade pip six setuptools types-six black==22.12.0 mypy isort clingo flake8
- name: Setup git configuration
run: |
# Need this for the git tests to succeed.


@@ -1,28 +1,3 @@
# v0.19.1 (2023-02-07)
### Spack Bugfixes
* `buildcache create`: make "file exists" less verbose (#35019)
* `spack mirror create`: don't change paths to urls (#34992)
* Improve error message for requirements (#33988)
* uninstall: fix accidental cubic complexity (#34005)
* scons: fix signature for `install_args` (#34481)
* Fix `combine_phase_logs` text encoding issues (#34657)
* Use a module-like object to propagate changes in the MRO, when setting build env (#34059)
* PackageBase should not define builder legacy attributes (#33942)
* Forward lookup of the "run_tests" attribute (#34531)
* Bugfix for timers (#33917, #33900)
* Fix path handling in prefix inspections (#35318)
* Fix libtool filter for Fujitsu compilers (#34916)
* Bug fix for duplicate rpath errors on macOS when creating build caches (#34375)
* FileCache: delete the new cache file on exception (#34623)
* Propagate exceptions from Spack python console (#34547)
* Tests: Fix a bug/typo in a `config_values.py` fixture (#33886)
* Various CI fixes (#33953, #34560, #34560, #34828)
* Docs: remove monitors and analyzers, typos (#34358, #33926)
* bump release version for tutorial command (#33859)
# v0.19.0 (2022-11-11)
`v0.19.0` is a major feature release.


@@ -226,7 +226,7 @@ for %%Z in ("%_pa_new_path%") do if EXIST %%~sZ\NUL (
exit /b 0
:: set module system roots
:_sp_multi_pathadd
:_sp_multi_pathadd
for %%I in (%~2) do (
for %%Z in (%_sp_compatible_sys_types%) do (
:pathadd "%~1" "%%I\%%Z"


@@ -20,10 +20,9 @@ concretizer:
# needed to reach a solution increases noticeably with the number of targets
# considered.
targets:
# Determine whether we want to target specific or generic
# microarchitectures. Valid values are: "microarchitectures" or "generic".
# An example of "microarchitectures" would be "skylake" or "bulldozer",
# while an example of "generic" would be "aarch64" or "x86_64_v4".
# Determine whether we want to target specific or generic microarchitectures.
# An example of the first kind might be for instance "skylake" or "bulldozer",
# while generic microarchitectures are for instance "aarch64" or "x86_64_v4".
granularity: microarchitectures
# If "false" allow targets that are incompatible with the current host (for
# instance concretize with target "icelake" while running on "haswell").
@@ -34,4 +33,4 @@ concretizer:
# environments can always be activated. When "false" perform concretization separately
# on each root spec, allowing different versions and variants of the same package in
# an environment.
unify: true
unify: true


@@ -185,7 +185,7 @@ config:
# when Spack needs to manage its own package metadata and all operations are
# expected to complete within the default time limit. The timeout should
# therefore generally be left untouched.
db_lock_timeout: 60
db_lock_timeout: 3
# How long to wait when attempting to modify a package (e.g. to install it).


@@ -5,4 +5,3 @@ llnl*.rst
_build
.spack-env
spack.lock
_spack_root


@@ -366,7 +366,7 @@ If the ``pyproject.toml`` lists ``mesonpy`` as the ``build-backend``,
it uses the meson build system. Meson uses the default
``pyproject.toml`` keys to list dependencies.
See https://meson-python.readthedocs.io/en/latest/tutorials/introduction.html
See https://meson-python.readthedocs.io/en/latest/usage/start.html
for more information.
"""


@@ -58,7 +58,9 @@ Testing
``WafPackage`` also provides ``test`` and ``installtest`` methods,
which are run after the ``build`` and ``install`` phases, respectively.
By default, these phases do nothing, but you can override them to
run package-specific unit tests.
run package-specific unit tests. For example, the
`py-py2cairo <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/py-py2cairo/package.py>`_
package uses:
.. code-block:: python


@@ -89,7 +89,6 @@
# Enable todo items
todo_include_todos = True
#
# Disable duplicate cross-reference warnings.
#
@@ -354,7 +353,9 @@ class SpackStyle(DefaultStyle):
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [("index", "Spack.tex", "Spack Documentation", "Todd Gamblin", "manual")]
latex_documents = [
("index", "Spack.tex", "Spack Documentation", "Todd Gamblin", "manual"),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
@@ -401,7 +402,7 @@ class SpackStyle(DefaultStyle):
"Spack",
"One line description of project.",
"Miscellaneous",
)
),
]
# Documents to append as an appendix to all manuals.
@@ -417,4 +418,6 @@ class SpackStyle(DefaultStyle):
# -- Extension configuration -------------------------------------------------
# sphinx.ext.intersphinx
intersphinx_mapping = {"python": ("https://docs.python.org/3", None)}
intersphinx_mapping = {
"python": ("https://docs.python.org/3", None),
}


@@ -222,7 +222,7 @@ and location. (See the *Configuration settings* section of ``man
ccache`` to learn more about the default settings and how to change
them). Please note that we currently disable ccache's ``hash_dir``
feature to avoid an issue with the stage directory (see
https://github.com/spack/spack/pull/3761#issuecomment-294352232).
https://github.com/LLNL/spack/pull/3761#issuecomment-294352232).
-----------------------
``shared_linking:type``


@@ -118,7 +118,7 @@ make another change, test that change, etc. We use `pytest
<http://pytest.org/>`_ as our tests framework, and these types of
arguments are just passed to the ``pytest`` command underneath. See `the
pytest docs
<https://doc.pytest.org/en/latest/how-to/usage.html#specifying-which-tests-to-run>`_
<http://doc.pytest.org/en/latest/usage.html#specifying-tests-selecting-tests>`_
for more details on test selection syntax.
``spack unit-test`` has a few special options that can help you
@@ -147,7 +147,7 @@ you want to know about. For example, to see just the tests in
You can also combine any of these options with a ``pytest`` keyword
search. See the `pytest usage docs
<https://doc.pytest.org/en/latest/how-to/usage.html#specifying-which-tests-to-run>`_
<https://docs.pytest.org/en/stable/usage.html#specifying-tests-selecting-tests>`_:
for more details on test selection syntax. For example, to see the names of all tests that have "spec"
or "concretize" somewhere in their names:


@@ -21,7 +21,7 @@ be present on the machine where Spack is run:
:header-rows: 1
These requirements can be easily installed on most modern Linux systems;
on macOS, the Command Line Tools package is required, and a full XCode suite
on macOS, the Command Line Tools package is required, and a full XCode suite
may be necessary for some packages such as Qt and apple-gl. Spack is designed
to run on HPC platforms like Cray. Not all packages should be expected
to work on all platforms.
@@ -1506,7 +1506,7 @@ Spack On Windows
Windows support for Spack is currently under development. While this work is still in an early stage,
it is currently possible to set up Spack and perform a few operations on Windows. This section will guide
you through the steps needed to install Spack and start running it on a fresh Windows machine.
you through the steps needed to install Spack and start running it on a fresh Windows machine.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Step 1: Install prerequisites
@@ -1516,7 +1516,7 @@ To use Spack on Windows, you will need the following packages:
Required:
* Microsoft Visual Studio
* Python
* Python
* Git
Optional:
@@ -1547,8 +1547,8 @@ Intel Fortran
"""""""""""""
For Fortran-based packages on Windows, we strongly recommend Intel's oneAPI Fortran compilers.
The suite is free to download from Intel's website, located at
https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/fortran-compiler.html.
The suite is free to download from Intel's website, located at
https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/fortran-compiler.html#gs.70t5tw.
The executable of choice for Spack will be Intel's Beta Compiler, ifx, which supports the classic
compiler's (ifort's) frontend and runtime libraries by using LLVM.

File diff suppressed because it is too large.

lib/spack/env/cc vendored

@@ -427,48 +427,6 @@ isystem_include_dirs_list=""
libs_list=""
other_args_list=""
# Global state for keeping track of -Wl,-rpath -Wl,/path
wl_expect_rpath=no
parse_Wl() {
# drop -Wl
shift
while [ $# -ne 0 ]; do
if [ "$wl_expect_rpath" = yes ]; then
rp="$1"
wl_expect_rpath=no
else
rp=""
case "$1" in
-rpath=*)
rp="${1#-rpath=}"
;;
--rpath=*)
rp="${1#--rpath=}"
;;
-rpath|--rpath)
wl_expect_rpath=yes
;;
"$dtags_to_strip")
;;
*)
append other_args_list "-Wl,$1"
;;
esac
fi
if [ -n "$rp" ]; then
if system_dir "$rp"; then
append system_rpath_dirs_list "$rp"
else
append rpath_dirs_list "$rp"
fi
fi
shift
done
# By lack of local variables, always set this to empty string.
rp=""
}
while [ $# -ne 0 ]; do
@@ -568,9 +526,54 @@ while [ $# -ne 0 ]; do
append other_args_list "-l$arg"
;;
-Wl,*)
IFS=,
parse_Wl $1
unset IFS
arg="${1#-Wl,}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
case "$arg" in
-rpath=*) rp="${arg#-rpath=}" ;;
--rpath=*) rp="${arg#--rpath=}" ;;
-rpath,*) rp="${arg#-rpath,}" ;;
--rpath,*) rp="${arg#--rpath,}" ;;
-rpath|--rpath)
shift; arg="$1"
case "$arg" in
-Wl,*)
rp="${arg#-Wl,}"
;;
*)
die "-Wl,-rpath was not followed by -Wl,*"
;;
esac
;;
"$dtags_to_strip")
: # We want to remove explicitly this flag
;;
*)
append other_args_list "-Wl,$arg"
;;
esac
;;
-Xlinker,*)
arg="${1#-Xlinker,}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
case "$arg" in
-rpath=*) rp="${arg#-rpath=}" ;;
--rpath=*) rp="${arg#--rpath=}" ;;
-rpath|--rpath)
shift; arg="$1"
case "$arg" in
-Xlinker,*)
rp="${arg#-Xlinker,}"
;;
*)
die "-Xlinker,-rpath was not followed by -Xlinker,*"
;;
esac
;;
*)
append other_args_list "-Xlinker,$arg"
;;
esac
;;
-Xlinker)
if [ "$2" = "-rpath" ]; then


@@ -16,6 +16,7 @@
import sys
import tempfile
from contextlib import contextmanager
from sys import platform as _platform
from typing import Callable, List, Match, Optional, Tuple, Union
from llnl.util import tty
@@ -25,7 +26,9 @@
from spack.util.executable import Executable, which
from spack.util.path import path_to_os_path, system_path_filter
if sys.platform != "win32":
is_windows = _platform == "win32"
if not is_windows:
import grp
import pwd
else:
@@ -81,77 +84,9 @@
"visit_directory_tree",
]
if sys.version_info < (3, 7, 4):
# monkeypatch shutil.copystat to fix PermissionError when copying read-only
# files on Lustre when using Python < 3.7.4
def copystat(src, dst, follow_symlinks=True):
"""Copy file metadata
Copy the permission bits, last access time, last modification time, and
flags from `src` to `dst`. On Linux, copystat() also copies the "extended
attributes" where possible. The file contents, owner, and group are
unaffected. `src` and `dst` are path names given as strings.
If the optional flag `follow_symlinks` is not set, symlinks aren't
followed if and only if both `src` and `dst` are symlinks.
"""
def _nop(args, ns=None, follow_symlinks=None):
pass
# follow symlinks (aka don't not follow symlinks)
follow = follow_symlinks or not (os.path.islink(src) and os.path.islink(dst))
if follow:
# use the real function if it exists
def lookup(name):
return getattr(os, name, _nop)
else:
# use the real function only if it exists
# *and* it supports follow_symlinks
def lookup(name):
fn = getattr(os, name, _nop)
if sys.version_info >= (3, 3):
if fn in os.supports_follow_symlinks: # novermin
return fn
return _nop
st = lookup("stat")(src, follow_symlinks=follow)
mode = stat.S_IMODE(st.st_mode)
lookup("utime")(dst, ns=(st.st_atime_ns, st.st_mtime_ns), follow_symlinks=follow)
# We must copy extended attributes before the file is (potentially)
# chmod()'ed read-only, otherwise setxattr() will error with -EACCES.
shutil._copyxattr(src, dst, follow_symlinks=follow)
try:
lookup("chmod")(dst, mode, follow_symlinks=follow)
except NotImplementedError:
# if we got a NotImplementedError, it's because
# * follow_symlinks=False,
# * lchown() is unavailable, and
# * either
# * fchownat() is unavailable or
# * fchownat() doesn't implement AT_SYMLINK_NOFOLLOW.
# (it returned ENOSUP.)
# therefore we're out of options--we simply cannot chown the
# symlink. give up, suppress the error.
# (which is what shutil always did in this circumstance.)
pass
if hasattr(st, "st_flags"):
try:
lookup("chflags")(dst, st.st_flags, follow_symlinks=follow)
except OSError as why:
for err in "EOPNOTSUPP", "ENOTSUP":
if hasattr(errno, err) and why.errno == getattr(errno, err):
break
else:
raise
shutil.copystat = copystat
def getuid():
if sys.platform == "win32":
if is_windows:
import ctypes
if ctypes.windll.shell32.IsUserAnAdmin() == 0:
@@ -164,7 +99,7 @@ def getuid():
@system_path_filter
def rename(src, dst):
# On Windows, os.rename will fail if the destination file already exists
if sys.platform == "win32":
if is_windows:
# Windows path existence checks will sometimes fail on junctions/links/symlinks
# so check for that case
if os.path.exists(dst) or os.path.islink(dst):
@@ -193,7 +128,7 @@ def _get_mime_type():
"""Generate method to call `file` system command to aquire mime type
for a specified path
"""
if sys.platform == "win32":
if is_windows:
# -h option (no-dereference) does not exist in Windows
return file_command("-b", "--mime-type")
else:
@@ -333,6 +268,7 @@ def groupid_to_group(x):
regex = re.escape(regex)
filenames = path_to_os_path(*filenames)
for filename in filenames:
msg = 'FILTER FILE: {0} [replacing "{1}"]'
tty.debug(msg.format(filename, regex))
@@ -548,7 +484,7 @@ def get_owner_uid(path, err_msg=None):
else:
p_stat = os.stat(path)
if sys.platform != "win32":
if _platform != "win32":
owner_uid = p_stat.st_uid
else:
sid = win32security.GetFileSecurity(
@@ -581,7 +517,7 @@ def group_ids(uid=None):
Returns:
(list of int): gids of groups the user is a member of
"""
if sys.platform == "win32":
if is_windows:
tty.warn("Function is not supported on Windows")
return []
@@ -601,7 +537,7 @@ def group_ids(uid=None):
@system_path_filter(arg_slice=slice(1))
def chgrp(path, group, follow_symlinks=True):
"""Implement the bash chgrp function on a single path"""
if sys.platform == "win32":
if is_windows:
raise OSError("Function 'chgrp' is not supported on Windows")
if isinstance(group, str):
@@ -1128,7 +1064,7 @@ def open_if_filename(str_or_file, mode="r"):
@system_path_filter
def touch(path):
"""Creates an empty file at the specified path."""
if sys.platform == "win32":
if is_windows:
perms = os.O_WRONLY | os.O_CREAT
else:
perms = os.O_WRONLY | os.O_CREAT | os.O_NONBLOCK | os.O_NOCTTY
@@ -1190,7 +1126,7 @@ def temp_cwd():
yield tmp_dir
finally:
kwargs = {}
if sys.platform == "win32":
if is_windows:
kwargs["ignore_errors"] = False
kwargs["onerror"] = readonly_file_handler(ignore_errors=True)
shutil.rmtree(tmp_dir, **kwargs)
@@ -1284,6 +1220,7 @@ def traverse_tree(
# target is relative to the link, then that may not resolve properly
# relative to our cwd - see resolve_link_target_relative_to_the_link
if os.path.isdir(source_child) and (follow_links or not os.path.islink(source_child)):
# When follow_nonexisting isn't set, don't descend into dirs
# in source that do not exist in dest
if follow_nonexisting or os.path.exists(dest_child):
@@ -1435,7 +1372,7 @@ def visit_directory_tree(root, visitor, rel_path="", depth=0):
try:
isdir = f.is_dir()
except OSError as e:
if sys.platform == "win32" and hasattr(e, "winerror") and e.winerror == 5 and islink:
if is_windows and hasattr(e, "winerror") and e.winerror == 5 and islink:
# if path is a symlink, determine destination and
# evaluate file vs directory
link_target = resolve_link_target_relative_to_the_link(f)
@@ -1544,11 +1481,11 @@ def readonly_file_handler(ignore_errors=False):
"""
def error_remove_readonly(func, path, exc):
if sys.platform != "win32":
if not is_windows:
raise RuntimeError("This method should only be invoked on Windows")
excvalue = exc[1]
if (
sys.platform == "win32"
is_windows
and func in (os.rmdir, os.remove, os.unlink)
and excvalue.errno == errno.EACCES
):
@@ -1578,7 +1515,7 @@ def remove_linked_tree(path):
# Windows readonly files cannot be removed by Python
# directly.
if sys.platform == "win32":
if is_windows:
kwargs["ignore_errors"] = False
kwargs["onerror"] = readonly_file_handler(ignore_errors=True)
@@ -1725,6 +1662,7 @@ def find(root, files, recursive=True):
@system_path_filter
def _find_recursive(root, search_files):
# The variable here is **on purpose** a defaultdict. The idea is that
# we want to poke the filesystem as little as possible, but still maintain
# stability in the order of the answer. Thus we are recording each library
@@ -2092,7 +2030,7 @@ def names(self):
# on non Windows platform
# Windows valid library extensions are:
# ['.dll', '.lib']
valid_exts = [".dll", ".lib"] if sys.platform == "win32" else [".dylib", ".so", ".a"]
valid_exts = [".dll", ".lib"] if is_windows else [".dylib", ".so", ".a"]
for ext in valid_exts:
i = name.rfind(ext)
if i != -1:
@@ -2240,7 +2178,7 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
message = message.format(find_libraries.__name__, type(libraries))
raise TypeError(message)
if sys.platform == "win32":
if is_windows:
static_ext = "lib"
# For linking (runtime=False) you need the .lib files regardless of
# whether you are doing a shared or static link
@@ -2272,7 +2210,7 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
# finally search all of root recursively. The search stops when the first
# match is found.
common_lib_dirs = ["lib", "lib64"]
if sys.platform == "win32":
if is_windows:
common_lib_dirs.extend(["bin", "Lib"])
for subdir in common_lib_dirs:
@@ -2407,7 +2345,7 @@ def _link(self, path, dest_dir):
# For py2 compatibility, we have to catch the specific Windows error code
# associate with trying to create a file that already exists (winerror 183)
except OSError as e:
if sys.platform == "win32" and e.winerror == 183:
if e.winerror == 183:
# We have either already symlinked or we are encoutering a naming clash
# either way, we don't want to overwrite existing libraries
already_linked = islink(dest_file)
@@ -2695,28 +2633,3 @@ def temporary_dir(
yield tmp_dir
finally:
remove_directory_contents(tmp_dir)
def filesummary(path, print_bytes=16) -> Tuple[int, bytes]:
"""Create a small summary of the given file. Does not error
when file does not exist.
Args:
print_bytes (int): Number of bytes to print from start/end of file
Returns:
Tuple of size and byte string containing first n .. last n bytes.
Size is 0 if file cannot be read."""
try:
n = print_bytes
with open(path, "rb") as f:
size = os.fstat(f.fileno()).st_size
if size <= 2 * n:
short_contents = f.read(2 * n)
else:
short_contents = f.read(n)
f.seek(-n, 2)
short_contents += b"..." + f.read(n)
return size, short_contents
except OSError:
return 0, b""


@@ -198,7 +198,7 @@ def _memoized_function(*args, **kwargs):
except TypeError as e:
# TypeError is raised when indexing into a dict if the key is unhashable.
raise UnhashableArguments(
"args + kwargs '{}' was not hashable for function '{}'".format(key, func.__name__)
"args + kwargs '{}' was not hashable for function '{}'".format(key, func.__name__),
) from e
return _memoized_function
@@ -237,7 +237,6 @@ def decorator_with_or_without_args(decorator):
@decorator
"""
# See https://stackoverflow.com/questions/653368 for more on this
@functools.wraps(decorator)
def new_dec(*args, **kwargs):
@@ -991,7 +990,8 @@ def enum(**kwargs):
def stable_partition(
input_iterable: Iterable, predicate_fn: Callable[[Any], bool]
input_iterable: Iterable,
predicate_fn: Callable[[Any], bool],
) -> Tuple[List[Any], List[Any]]:
"""Partition the input iterable according to a custom predicate.
@@ -1104,7 +1104,11 @@ def __enter__(self):
def __exit__(self, exc_type, exc_value, tb):
if exc_value is not None:
self._handler._receive_forwarded(self._context, exc_value, traceback.format_tb(tb))
self._handler._receive_forwarded(
self._context,
exc_value,
traceback.format_tb(tb),
)
# Suppress any exception from being re-raised:
# https://docs.python.org/3/reference/datamodel.html#object.__exit__.


@@ -75,7 +75,7 @@ def __init__(self, ignore=None):
# so that we have a fast lookup and can run mkdir in order.
self.directories = OrderedDict()
# Files to link. Maps dst_rel to (src_root, src_rel)
# Files to link. Maps dst_rel to (src_rel, src_root)
self.files = OrderedDict()
def before_visit_dir(self, root, rel_path, depth):
@@ -430,11 +430,6 @@ class MergeConflictError(Exception):
pass
class ConflictingSpecsError(MergeConflictError):
def __init__(self, spec_1, spec_2):
super(MergeConflictError, self).__init__(spec_1, spec_2)
class SingleMergeConflictError(MergeConflictError):
def __init__(self, path):
super(MergeConflictError, self).__init__("Package merge blocked by file: %s" % path)


@@ -18,7 +18,7 @@ class Barrier:
Python 2 doesn't have multiprocessing barriers so we implement this.
See https://greenteapress.com/semaphores/LittleBookOfSemaphores.pdf, p. 41.
See http://greenteapress.com/semaphores/downey08semaphores.pdf, p. 41.
"""
def __init__(self, n, timeout=None):


@@ -5,13 +5,15 @@
import errno
import os
import shutil
import sys
import tempfile
from os.path import exists, join
from sys import platform as _platform
from llnl.util import lang
if sys.platform == "win32":
is_windows = _platform == "win32"
if is_windows:
from win32file import CreateHardLink
@@ -21,7 +23,7 @@ def symlink(real_path, link_path):
On Windows, use junctions if os.symlink fails.
"""
if sys.platform != "win32":
if not is_windows:
os.symlink(real_path, link_path)
elif _win32_can_symlink():
# Windows requires target_is_directory=True when the target is a dir.
@@ -97,7 +99,7 @@ def _win32_is_junction(path):
if os.path.islink(path):
return False
if sys.platform == "win32":
if is_windows:
import ctypes.wintypes
GetFileAttributes = ctypes.windll.kernel32.GetFileAttributesW


@@ -108,6 +108,7 @@ class SuppressOutput:
"""Class for disabling output in a scope using 'with' keyword"""
def __init__(self, msg_enabled=True, warn_enabled=True, error_enabled=True):
self._msg_enabled_initial = _msg_enabled
self._warn_enabled_initial = _warn_enabled
self._error_enabled_initial = _error_enabled


@@ -11,7 +11,6 @@
import io
import os
import sys
from typing import IO, Any, List, Optional
from llnl.util.tty import terminal_size
from llnl.util.tty.color import cextra, clen
@@ -98,16 +97,7 @@ def config_uniform_cols(elts, console_width, padding, cols=0):
return config
def colify(
elts: List[Any],
cols: int = 0,
output: Optional[IO] = None,
indent: int = 0,
padding: int = 2,
tty: Optional[bool] = None,
method: str = "variable",
console_cols: Optional[int] = None,
):
def colify(elts, **options):
"""Takes a list of elements as input and finds a good columnization
of them, similar to how gnu ls does. This supports both
uniform-width and variable-width (tighter) columns.
@@ -116,21 +106,31 @@ def colify(
using ``str()``.
Keyword Arguments:
output: A file object to write to. Default is ``sys.stdout``
indent: Optionally indent all columns by some number of spaces
padding: Spaces between columns. Default is 2
width: Width of the output. Default is 80 if tty not detected
cols: Force number of columns. Default is to size to terminal, or
output (typing.IO): A file object to write to. Default is ``sys.stdout``
indent (int): Optionally indent all columns by some number of spaces
padding (int): Spaces between columns. Default is 2
width (int): Width of the output. Default is 80 if tty not detected
cols (int): Force number of columns. Default is to size to terminal, or
single-column if no tty
tty: Whether to attempt to write to a tty. Default is to autodetect a
tty (bool): Whether to attempt to write to a tty. Default is to autodetect a
tty. Set to False to force single-column output
method: Method to use to fit columns. Options are variable or uniform.
method (str): Method to use to fit columns. Options are variable or uniform.
Variable-width columns are tighter, uniform columns are all the same width
and fit less data on the screen
console_cols: number of columns on this console (default: autodetect)
"""
if output is None:
output = sys.stdout
# Get keyword arguments or set defaults
cols = options.pop("cols", 0)
output = options.pop("output", sys.stdout)
indent = options.pop("indent", 0)
padding = options.pop("padding", 2)
tty = options.pop("tty", None)
method = options.pop("method", "variable")
console_cols = options.pop("width", None)
if options:
raise TypeError(
"'%s' is an invalid keyword argument for this function." % next(options.iterkeys())
)
# elts needs to be an array of strings so we can count the elements
elts = [str(elt) for elt in elts]
@@ -153,11 +153,10 @@ def colify(
cols = 1
# Specify the number of character columns to use.
if console_cols is None:
if not console_cols:
console_rows, console_cols = terminal_size()
elif not isinstance(console_cols, int):
elif type(console_cols) != int:
raise ValueError("Number of columns must be an int")
console_cols = max(1, console_cols - indent)
# Choose a method. Variable-width colums vs uniform-width.
@@ -193,13 +192,7 @@ def colify(
return (config.cols, tuple(config.widths))
def colify_table(
table: List[List[Any]],
output: Optional[IO] = None,
indent: int = 0,
padding: int = 2,
console_cols: Optional[int] = None,
):
def colify_table(table, **options):
"""Version of ``colify()`` for data expressed in rows, (list of lists).
Same as regular colify but:
@@ -225,38 +218,20 @@ def transpose():
for row in table:
yield row[i]
colify(
transpose(),
cols=columns, # this is always the number of cols in the table
tty=True, # don't reduce to 1 column for non-tty
output=output,
indent=indent,
padding=padding,
console_cols=console_cols,
)
if "cols" in options:
raise ValueError("Cannot override columsn in colify_table.")
options["cols"] = columns
# don't reduce to 1 column for non-tty
options["tty"] = True
colify(transpose(), **options)
def colified(
elts: List[Any],
cols: int = 0,
output: Optional[IO] = None,
indent: int = 0,
padding: int = 2,
tty: Optional[bool] = None,
method: str = "variable",
console_cols: Optional[int] = None,
):
def colified(elts, **options):
"""Invokes the ``colify()`` function but returns the result as a string
instead of writing it to an output string."""
sio = io.StringIO()
colify(
elts,
cols=cols,
output=sio,
indent=indent,
padding=padding,
tty=tty,
method=method,
console_cols=console_cols,
)
options["output"] = sio
colify(elts, **options)
return sio.getvalue()


@@ -161,7 +161,10 @@ def _is_background(self):
def _get_canon_echo_flags(self):
"""Get current termios canonical and echo settings."""
cfg = termios.tcgetattr(self.stream)
return (bool(cfg[3] & termios.ICANON), bool(cfg[3] & termios.ECHO))
return (
bool(cfg[3] & termios.ICANON),
bool(cfg[3] & termios.ECHO),
)
def _enable_keyboard_input(self):
"""Disable canonical input and echoing on ``self.stream``."""


@@ -77,7 +77,10 @@ def __init__(self, pid, controller_fd, timeout=1, sleep_time=1e-1, debug=False):
def get_canon_echo_attrs(self):
"""Get echo and canon attributes of the terminal of controller_fd."""
cfg = termios.tcgetattr(self.controller_fd)
return (bool(cfg[3] & termios.ICANON), bool(cfg[3] & termios.ECHO))
return (
bool(cfg[3] & termios.ICANON),
bool(cfg[3] & termios.ECHO),
)
def horizontal_line(self, name):
"""Labled horizontal line for debugging."""
@@ -89,7 +92,11 @@ def status(self):
if self.debug:
canon, echo = self.get_canon_echo_attrs()
sys.stderr.write(
"canon: %s, echo: %s\n" % ("on" if canon else "off", "on" if echo else "off")
"canon: %s, echo: %s\n"
% (
"on" if canon else "off",
"on" if echo else "off",
)
)
sys.stderr.write("input: %s\n" % self.input_on())
sys.stderr.write("bg: %s\n" % self.background())


@@ -25,7 +25,7 @@ def architecture_compatible(self, target, constraint):
return (
not target.architecture
or not constraint.architecture
or target.architecture.intersects(constraint.architecture)
or target.architecture.satisfies(constraint.architecture)
)
@memoized
@@ -104,7 +104,7 @@ def compiler_compatible(self, parent, child, **kwargs):
for cversion in child.compiler.versions:
# For a few compilers use specialized comparisons.
# Otherwise match on version match.
if pversion.intersects(cversion):
if pversion.satisfies(cversion):
return True
elif parent.compiler.name == "gcc" and self._gcc_compiler_compare(
pversion, cversion


@@ -321,7 +321,8 @@ def _check_patch_urls(pkgs, error_cls):
errors.append(
error_cls(
"patch URL in package {0} must end with {1}".format(
pkg_cls.name, full_index_arg
pkg_cls.name,
full_index_arg,
),
[patch.url],
)
@@ -721,7 +722,7 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls
dependency_pkg_cls = None
try:
dependency_pkg_cls = spack.repo.path.get_pkg_class(s.name)
assert any(v.intersects(s.versions) for v in list(dependency_pkg_cls.versions))
assert any(v.satisfies(s.versions) for v in list(dependency_pkg_cls.versions))
except Exception:
summary = (
"{0}: dependency on {1} cannot be satisfied " "by known versions of {1.name}"


@@ -6,8 +6,6 @@
import codecs
import collections
import hashlib
import io
import itertools
import json
import multiprocessing.pool
import os
@@ -22,8 +20,7 @@
import urllib.parse
import urllib.request
import warnings
from contextlib import closing, contextmanager
from gzip import GzipFile
from contextlib import closing
from urllib.error import HTTPError, URLError
import ruamel.yaml as yaml
@@ -43,8 +40,6 @@
import spack.relocate as relocate
import spack.repo
import spack.store
import spack.traverse as traverse
import spack.util.crypto
import spack.util.file_cache as file_cache
import spack.util.gpg
import spack.util.spack_json as sjson
@@ -214,7 +209,10 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
break
else:
self._mirrors_for_spec[dag_hash].append(
{"mirror_url": mirror_url, "spec": indexed_spec}
{
"mirror_url": mirror_url,
"spec": indexed_spec,
}
)
finally:
shutil.rmtree(tmpdir)
@@ -297,7 +295,10 @@ def update_spec(self, spec, found_list):
break
else:
current_list.append(
{"mirror_url": new_entry["mirror_url"], "spec": new_entry["spec"]}
{
"mirror_url": new_entry["mirror_url"],
"spec": new_entry["spec"],
}
)
def update(self, with_cooldown=False):
@@ -365,7 +366,8 @@ def update(self, with_cooldown=False):
# May need to fetch the index and update the local caches
try:
needs_regen = self._fetch_and_cache_index(
cached_mirror_url, cache_entry=cache_entry
cached_mirror_url,
cache_entry=cache_entry,
)
self._last_fetch_times[cached_mirror_url] = (now, True)
all_methods_failed = False
@@ -557,12 +559,7 @@ class NoChecksumException(spack.error.SpackError):
Raised if file fails checksum verification.
"""
def __init__(self, path, size, contents, algorithm, expected, computed):
super(NoChecksumException, self).__init__(
f"{algorithm} checksum failed for {path}",
f"Expected {expected} but got {computed}. "
f"File size = {size} bytes. Contents = {contents!r}",
)
pass
class NewLayoutException(spack.error.SpackError):
@@ -742,31 +739,34 @@ def get_buildfile_manifest(spec):
return data
def prefixes_to_hashes(spec):
return {
str(s.prefix): s.dag_hash()
for s in itertools.chain(
spec.traverse(root=True, deptype="link"), spec.dependencies(deptype="run")
)
}
def get_buildinfo_dict(spec, rel=False):
"""Create metadata for a tarball"""
def write_buildinfo_file(spec, workdir, rel=False):
"""
Create a cache file containing information
required for the relocation
"""
manifest = get_buildfile_manifest(spec)
return {
"sbang_install_path": spack.hooks.sbang.sbang_install_path(),
"relative_rpaths": rel,
"buildpath": spack.store.layout.root,
"spackprefix": spack.paths.prefix,
"relative_prefix": os.path.relpath(spec.prefix, spack.store.layout.root),
"relocate_textfiles": manifest["text_to_relocate"],
"relocate_binaries": manifest["binary_to_relocate"],
"relocate_links": manifest["link_to_relocate"],
"hardlinks_deduped": manifest["hardlinks_deduped"],
"prefix_to_hash": prefixes_to_hashes(spec),
}
prefix_to_hash = dict()
prefix_to_hash[str(spec.package.prefix)] = spec.dag_hash()
deps = spack.build_environment.get_rpath_deps(spec.package)
for d in deps + spec.dependencies(deptype="run"):
prefix_to_hash[str(d.prefix)] = d.dag_hash()
# Create buildinfo data and write it to disk
buildinfo = {}
buildinfo["sbang_install_path"] = spack.hooks.sbang.sbang_install_path()
buildinfo["relative_rpaths"] = rel
buildinfo["buildpath"] = spack.store.layout.root
buildinfo["spackprefix"] = spack.paths.prefix
buildinfo["relative_prefix"] = os.path.relpath(spec.prefix, spack.store.layout.root)
buildinfo["relocate_textfiles"] = manifest["text_to_relocate"]
buildinfo["relocate_binaries"] = manifest["binary_to_relocate"]
buildinfo["relocate_links"] = manifest["link_to_relocate"]
buildinfo["hardlinks_deduped"] = manifest["hardlinks_deduped"]
buildinfo["prefix_to_hash"] = prefix_to_hash
filename = buildinfo_file_name(workdir)
with open(filename, "w") as outfile:
outfile.write(syaml.dump(buildinfo, default_flow_style=True))
def tarball_directory_name(spec):
@@ -1139,68 +1139,6 @@ def generate_key_index(key_prefix, tmpdir=None):
shutil.rmtree(tmpdir)
@contextmanager
def gzip_compressed_tarfile(path):
"""Create a reproducible, compressed tarfile"""
# Create gzip compressed tarball of the install prefix
# 1) Use explicit empty filename and mtime 0 for gzip header reproducibility.
# If the filename="" is dropped, Python will use fileobj.name instead.
# This should effectively mimick `gzip --no-name`.
# 2) On AMD Ryzen 3700X and an SSD disk, we have the following on compression speed:
# compresslevel=6 gzip default: llvm takes 4mins, roughly 2.1GB
# compresslevel=9 python default: llvm takes 12mins, roughly 2.1GB
# So we follow gzip.
with open(path, "wb") as fileobj, closing(
GzipFile(filename="", mode="wb", compresslevel=6, mtime=0, fileobj=fileobj)
) as gzip_file, tarfile.TarFile(name="", mode="w", fileobj=gzip_file) as tar:
yield tar
def deterministic_tarinfo(tarinfo: tarfile.TarInfo):
# We only add files, symlinks, hardlinks, and directories
# No character devices, block devices and FIFOs should ever enter a tarball.
if tarinfo.isdev():
return None
# For distribution, it makes no sense to user/group data; since (a) they don't exist
# on other machines, and (b) they lead to surprises as `tar x` run as root will change
# ownership if it can. We want to extract as the current user. By setting owner to root,
# root will extract as root, and non-privileged user will extract as themselves.
tarinfo.uid = 0
tarinfo.gid = 0
tarinfo.uname = ""
tarinfo.gname = ""
# Reset mtime to epoch time, our prefixes are not truly immutable, so files may get
# touched; as long as the content does not change, this ensures we get stable tarballs.
tarinfo.mtime = 0
# Normalize mode
if tarinfo.isfile() or tarinfo.islnk():
# If user can execute, use 0o755; else 0o644
# This is to avoid potentially unsafe world writable & exeutable files that may get
# extracted when Python or tar is run with privileges
tarinfo.mode = 0o644 if tarinfo.mode & 0o100 == 0 else 0o755
else: # symbolic link and directories
tarinfo.mode = 0o755
return tarinfo
def tar_add_metadata(tar: tarfile.TarFile, path: str, data: dict):
# Serialize buildinfo for the tarball
bstring = syaml.dump(data, default_flow_style=True).encode("utf-8")
tarinfo = tarfile.TarInfo(name=path)
tarinfo.size = len(bstring)
tar.addfile(deterministic_tarinfo(tarinfo), io.BytesIO(bstring))
def _do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo):
with gzip_compressed_tarfile(tarfile_path) as tar:
tar.add(name=binaries_dir, arcname=pkg_dir, filter=deterministic_tarinfo)
tar_add_metadata(tar, buildinfo_file_name(pkg_dir), buildinfo)
def _build_tarball(
spec,
out_url,
@@ -1260,45 +1198,50 @@ def _build_tarball(
):
raise NoOverwriteException(url_util.format(remote_specfile_path))
pkg_dir = os.path.basename(spec.prefix.rstrip(os.path.sep))
workdir = os.path.join(tmpdir, pkg_dir)
# TODO: We generally don't want to mutate any files, but when using relative
# mode, Spack unfortunately *does* mutate rpaths and links ahead of time.
# For now, we only make a full copy of the spec prefix when in relative mode.
if relative:
# tarfile is used because it preserves hardlink etc best.
binaries_dir = workdir
temp_tarfile_name = tarball_name(spec, ".tar")
temp_tarfile_path = os.path.join(tarfile_dir, temp_tarfile_name)
with closing(tarfile.open(temp_tarfile_path, "w")) as tar:
tar.add(name="%s" % spec.prefix, arcname=".")
with closing(tarfile.open(temp_tarfile_path, "r")) as tar:
tar.extractall(workdir)
os.remove(temp_tarfile_path)
else:
binaries_dir = spec.prefix
# make a copy of the install directory to work with
workdir = os.path.join(tmpdir, os.path.basename(spec.prefix))
# install_tree copies hardlinks
# create a temporary tarfile from prefix and exract it to workdir
# tarfile preserves hardlinks
temp_tarfile_name = tarball_name(spec, ".tar")
temp_tarfile_path = os.path.join(tarfile_dir, temp_tarfile_name)
with closing(tarfile.open(temp_tarfile_path, "w")) as tar:
tar.add(name="%s" % spec.prefix, arcname=".")
with closing(tarfile.open(temp_tarfile_path, "r")) as tar:
tar.extractall(workdir)
os.remove(temp_tarfile_path)
# create info for later relocation and create tar
buildinfo = get_buildinfo_dict(spec, relative)
write_buildinfo_file(spec, workdir, relative)
# optionally make the paths in the binaries relative to each other
# in the spack install tree before creating tarball
try:
if relative:
make_package_relative(workdir, spec, buildinfo, allow_root)
elif not allow_root:
ensure_package_relocatable(buildinfo, binaries_dir)
except Exception as e:
shutil.rmtree(tmpdir)
tty.die(e)
_do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo)
# remove copy of install directory
if relative:
shutil.rmtree(workdir)
try:
make_package_relative(workdir, spec, allow_root)
except Exception as e:
shutil.rmtree(workdir)
shutil.rmtree(tarfile_dir)
shutil.rmtree(tmpdir)
tty.die(e)
else:
try:
check_package_relocatable(workdir, spec, allow_root)
except Exception as e:
shutil.rmtree(workdir)
shutil.rmtree(tarfile_dir)
shutil.rmtree(tmpdir)
tty.die(e)
# create gzip compressed tarball of the install prefix
# On AMD Ryzen 3700X and an SSD disk, we have the following on compression speed:
# compresslevel=6 gzip default: llvm takes 4mins, roughly 2.1GB
# compresslevel=9 python default: llvm takes 12mins, roughly 2.1GB
# So we follow gzip.
with closing(tarfile.open(tarfile_path, "w:gz", compresslevel=6)) as tar:
tar.add(name="%s" % workdir, arcname="%s" % os.path.basename(spec.prefix))
# remove copy of install directory
shutil.rmtree(workdir)
# get the sha256 checksum of the tarball
checksum = checksum_tarball(tarfile_path)
@@ -1357,48 +1300,57 @@ def _build_tarball(
return None
def nodes_to_be_packaged(specs, root=True, dependencies=True):
def nodes_to_be_packaged(specs, include_root=True, include_dependencies=True):
"""Return the list of nodes to be packaged, given a list of specs.
Args:
specs (List[spack.spec.Spec]): list of root specs to be processed
root (bool): include the root of each spec in the nodes
dependencies (bool): include the dependencies of each
include_root (bool): include the root of each spec in the nodes
include_dependencies (bool): include the dependencies of each
spec in the nodes
"""
if not root and not dependencies:
return []
elif dependencies:
nodes = traverse.traverse_nodes(specs, root=root, deptype="all")
else:
nodes = set(specs)
if not include_root and not include_dependencies:
return set()
# Limit to installed non-externals.
packageable = lambda n: not n.external and n.installed
def skip_node(current_node):
if current_node.external or current_node.virtual:
return True
return spack.store.db.query_one(current_node) is None
# Mass install check
with spack.store.db.read_transaction():
return list(filter(packageable, nodes))
expanded_set = set()
for current_spec in specs:
if not include_dependencies:
nodes = [current_spec]
else:
nodes = [
n
for n in current_spec.traverse(
order="post", root=include_root, deptype=("link", "run")
)
]
for node in nodes:
if not skip_node(node):
expanded_set.add(node)
return expanded_set
def push(specs, push_url, include_root: bool = True, include_dependencies: bool = True, **kwargs):
def push(specs, push_url, specs_kwargs=None, **kwargs):
"""Create a binary package for each of the specs passed as input and push them
to a given push URL.
Args:
specs (List[spack.spec.Spec]): installed specs to be packaged
push_url (str): url where to push the binary package
include_root (bool): include the root of each spec in the nodes
include_dependencies (bool): include the dependencies of each
spec in the nodes
specs_kwargs (dict): dictionary with two possible boolean keys, "include_root"
and "include_dependencies", which determine which part of each spec is
packaged and pushed to the mirror
**kwargs: TODO
"""
# Be explicit about the arugment type
if type(include_root) != bool or type(include_dependencies) != bool:
raise ValueError("Expected include_root/include_dependencies to be True/False")
nodes = nodes_to_be_packaged(specs, root=include_root, dependencies=include_dependencies)
specs_kwargs = specs_kwargs or {"include_root": True, "include_dependencies": True}
nodes = nodes_to_be_packaged(specs, **specs_kwargs)
# TODO: This seems to be an easy target for task
# TODO: distribution using a parallel pool
@@ -1585,12 +1537,13 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
return None
def make_package_relative(workdir, spec, buildinfo, allow_root):
def make_package_relative(workdir, spec, allow_root):
"""
Change paths in binaries to relative paths. Change absolute symlinks
to relative symlinks.
"""
prefix = spec.prefix
buildinfo = read_buildinfo_file(workdir)
old_layout_root = buildinfo["buildpath"]
orig_path_names = list()
cur_path_names = list()
@@ -1614,10 +1567,16 @@ def make_package_relative(workdir, spec, buildinfo, allow_root):
relocate.make_link_relative(cur_path_names, orig_path_names)
def ensure_package_relocatable(buildinfo, binaries_dir):
"""Check if package binaries are relocatable."""
binaries = [os.path.join(binaries_dir, f) for f in buildinfo["relocate_binaries"]]
relocate.ensure_binaries_are_relocatable(binaries)
def check_package_relocatable(workdir, spec, allow_root):
"""
Check if package binaries are relocatable.
Change links to placeholder links.
"""
buildinfo = read_buildinfo_file(workdir)
cur_path_names = list()
for filename in buildinfo["relocate_binaries"]:
cur_path_names.append(os.path.join(workdir, filename))
allow_root or relocate.ensure_binaries_are_relocatable(cur_path_names)
def dedupe_hardlinks_if_necessary(root, buildinfo):
@@ -1820,15 +1779,14 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum
raise UnsignedPackageException(
"To install unsigned packages, use the --no-check-signature option."
)
# compute the sha256 checksum of the tarball
# get the sha256 checksum of the tarball
local_checksum = checksum_tarball(tarfile_path)
expected = remote_checksum["hash"]
# if the checksums don't match don't install
if local_checksum != expected:
size, contents = fsys.filesummary(tarfile_path)
raise NoChecksumException(tarfile_path, size, contents, "sha256", expected, local_checksum)
if local_checksum != remote_checksum["hash"]:
raise NoChecksumException(
"Package tarball failed checksum verification.\n" "It cannot be installed."
)
return tarfile_path
@@ -1886,14 +1844,12 @@ def extract_tarball(spec, download_result, allow_root=False, unsigned=False, for
# compute the sha256 checksum of the tarball
local_checksum = checksum_tarball(tarfile_path)
expected = bchecksum["hash"]
# if the checksums don't match don't install
if local_checksum != expected:
size, contents = fsys.filesummary(tarfile_path)
if local_checksum != bchecksum["hash"]:
_delete_staged_downloads(download_result)
raise NoChecksumException(
tarfile_path, size, contents, "sha256", expected, local_checksum
"Package tarball failed checksum verification.\n" "It cannot be installed."
)
new_relative_prefix = str(os.path.relpath(spec.prefix, spack.store.layout.root))
@@ -1984,11 +1940,8 @@ def install_root_node(spec, allow_root, unsigned=False, force=False, sha256=None
tarball_path = download_result["tarball_stage"].save_filename
msg = msg.format(tarball_path, sha256)
if not checker.check(tarball_path):
size, contents = fsys.filesummary(tarball_path)
_delete_staged_downloads(download_result)
raise NoChecksumException(
tarball_path, size, contents, checker.hash_name, sha256, checker.sum
)
raise spack.binary_distribution.NoChecksumException(msg)
tty.debug("Verified SHA256 checksum of the build cache")
# don't print long padded paths while extracting/relocating binaries
@@ -2062,7 +2015,12 @@ def try_direct_fetch(spec, mirrors=None):
fetched_spec = Spec.from_json(specfile_contents)
fetched_spec._mark_concrete()
found_specs.append({"mirror_url": mirror.fetch_url, "spec": fetched_spec})
found_specs.append(
{
"mirror_url": mirror.fetch_url,
"spec": fetched_spec,
}
)
return found_specs
@@ -2364,7 +2322,11 @@ def download_single_spec(concrete_spec, destination, mirror_url=None):
local_tarball_path = os.path.join(destination, tarball_dir_name)
files_to_fetch = [
{"url": [tarball_path_name], "path": local_tarball_path, "required": True},
{
"url": [tarball_path_name],
"path": local_tarball_path,
"required": True,
},
{
"url": [
tarball_name(concrete_spec, ".spec.json.sig"),
@@ -2485,7 +2447,12 @@ def conditional_fetch(self):
response.headers.get("Etag", None) or response.headers.get("etag", None)
)
return FetchIndexResult(etag=etag, hash=computed_hash, data=result, fresh=False)
return FetchIndexResult(
etag=etag,
hash=computed_hash,
data=result,
fresh=False,
)
class EtagIndexFetcher:


@@ -5,7 +5,11 @@
"""Function and classes needed to bootstrap Spack itself."""
from .config import ensure_bootstrap_configuration, is_bootstrapping
from .core import all_core_root_specs, ensure_core_dependencies, ensure_patchelf_in_path_or_raise
from .core import (
all_core_root_specs,
ensure_core_dependencies,
ensure_patchelf_in_path_or_raise,
)
from .environment import BootstrapEnvironment, ensure_environment_dependencies
from .status import status_message


@@ -59,7 +59,10 @@ def _try_import_from_store(module, query_spec, query_info=None):
# to be picked up and used, possibly depending on something in the store, first
# allows the bootstrap version to work when an incompatible version is in
# sys.path
orders = [module_paths + sys.path, sys.path + module_paths]
orders = [
module_paths + sys.path,
sys.path + module_paths,
]
for path in orders:
sys.path = path
try:


@@ -53,7 +53,12 @@
import spack.util.url
import spack.version
from ._common import _executables_in_store, _python_import, _root_spec, _try_import_from_store
from ._common import (
_executables_in_store,
_python_import,
_root_spec,
_try_import_from_store,
)
from .config import spack_python_interpreter, spec_for_current_python
#: Name of the file containing metadata about the bootstrapping source
@@ -208,7 +213,7 @@ def _install_and_test(self, abstract_spec, bincache_platform, bincache_data, tes
# This will be None for things that don't depend on python
python_spec = item.get("python", None)
# Skip specs which are not compatible
if not abstract_spec.intersects(candidate_spec):
if not abstract_spec.satisfies(candidate_spec):
continue
if python_spec is not None and python_spec not in abstract_spec:


@@ -171,7 +171,7 @@ def mypy_root_spec():
def black_root_spec():
"""Return the root spec used to bootstrap black"""
return _root_spec("py-black@:23.1.0")
return _root_spec("py-black@:22.12.0")
def flake8_root_spec():


@@ -69,13 +69,13 @@
from spack.installer import InstallError
from spack.util.cpus import cpus_available
from spack.util.environment import (
SYSTEM_DIRS,
EnvironmentModifications,
env_flag,
filter_system_paths,
get_path,
inspect_path,
is_system_path,
system_dirs,
validate,
)
from spack.util.executable import Executable
@@ -397,7 +397,7 @@ def set_compiler_environment_variables(pkg, env):
env.set("SPACK_COMPILER_SPEC", str(spec.compiler))
env.set("SPACK_SYSTEM_DIRS", ":".join(SYSTEM_DIRS))
env.set("SPACK_SYSTEM_DIRS", ":".join(system_dirs))
compiler.setup_custom_environment(pkg, env)
@@ -423,14 +423,6 @@ def set_wrapper_variables(pkg, env):
compiler = pkg.compiler
env.extend(spack.schema.environment.parse(compiler.environment))
# Before setting up PATH to Spack compiler wrappers, make sure compiler is in PATH
# This ensures that non-wrapped executables from the compiler bin directory are available
bindirs = dedupe(
[os.path.dirname(c) for c in [compiler.cc, compiler.cxx, compiler.fc, compiler.f77]]
)
for bindir in bindirs:
env.prepend_path("PATH", bindir)
if compiler.extra_rpaths:
extra_rpaths = ":".join(compiler.extra_rpaths)
env.set("SPACK_COMPILER_EXTRA_RPATHS", extra_rpaths)
@@ -493,13 +485,7 @@ def update_compiler_args_for_dep(dep):
query = pkg.spec[dep.name]
dep_link_dirs = list()
try:
# In some circumstances (particularly for externals) finding
# libraries packages can be time consuming, so indicate that
# we are performing this operation (and also report when it
# finishes).
tty.debug("Collecting libraries for {0}".format(dep.name))
dep_link_dirs.extend(query.libs.directories)
tty.debug("Libraries for {0} have been collected.".format(dep.name))
except NoLibrariesError:
tty.debug("No libraries found for {0}".format(dep.name))
@@ -786,9 +772,7 @@ def setup_package(pkg, dirty, context="build"):
set_compiler_environment_variables(pkg, env_mods)
set_wrapper_variables(pkg, env_mods)
tty.debug("setup_package: grabbing modifications from dependencies")
env_mods.extend(modifications_from_dependencies(pkg.spec, context, custom_mods_only=False))
tty.debug("setup_package: collected all modifications from dependencies")
# architecture specific setup
platform = spack.platforms.by_name(pkg.spec.architecture.platform)
@@ -796,7 +780,6 @@ def setup_package(pkg, dirty, context="build"):
platform.setup_platform_environment(pkg, env_mods)
if context == "build":
tty.debug("setup_package: setup build environment for root")
builder = spack.builder.create(pkg)
builder.setup_build_environment(env_mods)
@@ -807,7 +790,6 @@ def setup_package(pkg, dirty, context="build"):
" includes and omit it when invoked with '--cflags'."
)
elif context == "test":
tty.debug("setup_package: setup test environment for root")
env_mods.extend(
inspect_path(
pkg.spec.prefix,
@@ -824,7 +806,6 @@ def setup_package(pkg, dirty, context="build"):
# Load modules on an already clean environment, just before applying Spack's
# own environment modifications. This ensures Spack controls CC/CXX/... variables.
if need_compiler:
tty.debug("setup_package: loading compiler modules")
for mod in pkg.compiler.modules:
load_module(mod)
@@ -962,7 +943,6 @@ def default_modifications_for_dep(dep):
_make_runnable(dep, env)
def add_modifications_for_dep(dep):
tty.debug("Adding env modifications for {0}".format(dep.name))
# Some callers of this function only want the custom modifications.
# For callers that want both custom and default modifications, we want
# to perform the default modifications here (this groups custom
@@ -988,7 +968,6 @@ def add_modifications_for_dep(dep):
builder.setup_dependent_build_environment(env, spec)
else:
dpkg.setup_dependent_run_environment(env, spec)
tty.debug("Added env modifications for {0}".format(dep.name))
# Note that we want to perform environment modifications in a fixed order.
# The Spec.traverse method provides this: i.e. in addition to
@@ -1037,6 +1016,7 @@ def get_cmake_prefix_path(pkg):
def _setup_pkg_and_run(
serialized_pkg, function, kwargs, child_pipe, input_multiprocess_fd, jsfd1, jsfd2
):
context = kwargs.get("context", "build")
try:
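
For context, a small sketch of the EnvironmentModifications pattern this file relies on (the values and paths below are hypothetical): modifications are recorded first, then applied to the process environment in one pass.

from spack.util.environment import EnvironmentModifications

env = EnvironmentModifications()
env.set("SPACK_COMPILER_SPEC", "gcc@12.2.0")    # hypothetical compiler spec
env.prepend_path("PATH", "/opt/compiler/bin")   # hypothetical bin directory
env.apply_modifications()                       # mutates os.environ in one pass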

View File

@@ -110,7 +110,11 @@ class AutotoolsBuilder(BaseBuilder):
phases = ("autoreconf", "configure", "build", "install")
#: Names associated with package methods in the old build-system format
legacy_methods = ("configure_args", "check", "installcheck")
legacy_methods = (
"configure_args",
"check",
"installcheck",
)
#: Names associated with package attributes in the old build-system format
legacy_attributes = (

View File

@@ -31,6 +31,7 @@ def cmake_cache_option(name, boolean_value, comment=""):
class CachedCMakeBuilder(CMakeBuilder):
#: Phases of a Cached CMake package
#: Note: the initconfig phase is used for developer builds as a final phase to stop on
phases: Tuple[str, ...] = ("initconfig", "cmake", "build", "install")

View File

@@ -252,7 +252,10 @@ def std_args(pkg, generator=None):
if platform.mac_ver()[0]:
args.extend(
[define("CMAKE_FIND_FRAMEWORK", "LAST"), define("CMAKE_FIND_APPBUNDLE", "LAST")]
[
define("CMAKE_FIND_FRAMEWORK", "LAST"),
define("CMAKE_FIND_APPBUNDLE", "LAST"),
]
)
# Set up CMake rpath

View File

@@ -38,7 +38,10 @@ class GenericBuilder(BaseBuilder):
legacy_methods: Tuple[str, ...] = ()
#: Names associated with package attributes in the old build-system format
legacy_attributes: Tuple[str, ...] = ("archive_files", "install_time_test_callbacks")
legacy_attributes: Tuple[str, ...] = (
"archive_files",
"install_time_test_callbacks",
)
#: Callback names for post-install phase tests
install_time_test_callbacks = []

View File

@@ -857,7 +857,10 @@ def scalapack_libs(self):
raise_lib_error("Cannot find a BLACS library for the given MPI.")
int_suff = "_" + self.intel64_int_suffix
scalapack_libnames = ["libmkl_scalapack" + int_suff, blacs_lib + int_suff]
scalapack_libnames = [
"libmkl_scalapack" + int_suff,
blacs_lib + int_suff,
]
sca_libs = find_libraries(
scalapack_libnames, root=self.component_lib_dir("mkl"), shared=("+shared" in self.spec)
)
@@ -1158,7 +1161,9 @@ def _determine_license_type(self):
#
# Ideally, we just tell the installer to look around on the system.
# Thankfully, we neither need to care nor emulate where it looks:
license_type = {"ACTIVATION_TYPE": "exist_lic"}
license_type = {
"ACTIVATION_TYPE": "exist_lic",
}
# However (and only), if the spack-internal Intel license file has been
# populated beyond its templated explanatory comments, proffer it to
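
A hedged usage sketch of the find_libraries call above (the root directory is hypothetical); it returns a LibraryList whose directories and link flags can be queried:

from llnl.util.filesystem import find_libraries

libs = find_libraries(
    ["libmkl_scalapack_lp64", "libmkl_blacs_intelmpi_lp64"],  # names without suffix
    root="/opt/intel/mkl/lib/intel64",  # hypothetical component lib dir
    shared=True,  # search for shared (.so/.dylib) rather than static libraries
)
print(libs.directories)
print(libs.ld_flags)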

View File

@@ -68,7 +68,10 @@ def unpack(self, pkg, spec, prefix):
@staticmethod
def _generate_tree_line(name, prefix):
return """{{ name = "{name}", root = "{prefix}" }};""".format(name=name, prefix=prefix)
return """{{ name = "{name}", root = "{prefix}" }};""".format(
name=name,
prefix=prefix,
)
def generate_luarocks_config(self, pkg, spec, prefix):
spec = self.pkg.spec

View File

@@ -37,7 +37,11 @@ class IntelOneApiPackage(Package):
conflicts(c, msg="This package is only available for x86_64 and Linux")
# Add variant to toggle environment modifications from vars.sh
variant("envmods", default=True, description="Toggles environment modifications")
variant(
"envmods",
default=True,
description="Toggles environment modifications",
)
@staticmethod
def update_description(cls):

View File

@@ -21,7 +21,7 @@
import spack.package_base
import spack.spec
import spack.store
from spack.directives import build_system, depends_on, extends, maintainers
from spack.directives import build_system, depends_on, extends
from spack.error import NoHeadersError, NoLibrariesError, SpecError
from spack.version import Version
@@ -29,7 +29,7 @@
class PythonExtension(spack.package_base.PackageBase):
maintainers("adamjstewart", "pradyunsg")
maintainers = ["adamjstewart"]
@property
def import_modules(self):
@@ -113,9 +113,6 @@ def view_file_conflicts(self, view, merge_map):
return conflicts
def add_files_to_view(self, view, merge_map, skip_if_exists=True):
if not self.extendee_spec:
return super().add_files_to_view(view, merge_map, skip_if_exists)
bin_dir = self.spec.prefix.bin
python_prefix = self.extendee_spec.prefix
python_is_external = self.extendee_spec.external
@@ -187,6 +184,8 @@ class PythonPackage(PythonExtension):
#: Package name, version, and extension on PyPI
pypi: Optional[str] = None
maintainers = ["adamjstewart", "pradyunsg"]
# To be used in UI queries that require to know which
# build-system class we are using
build_system_class = "PythonPackage"
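
The hunks in this file revert the maintainers directive to a plain class attribute. A hedged side-by-side of the two forms as they appear in package code (class names hypothetical):

import spack.package_base
from spack.directives import maintainers

class NewStyle(spack.package_base.PackageBase):
    maintainers("adamjstewart", "pradyunsg")  # directive form (newer API)

class OldStyle(spack.package_base.PackageBase):
    maintainers = ["adamjstewart"]  # plain list attribute (older API)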

View File

@@ -7,7 +7,7 @@
import llnl.util.lang as lang
from spack.directives import extends, maintainers
from spack.directives import extends
from .generic import GenericBuilder, Package
@@ -71,7 +71,7 @@ class RPackage(Package):
GenericBuilder = RBuilder
maintainers("glennpj")
maintainers = ["glennpj"]
#: This attribute is used in UI queries that need to know the build
#: system base class

View File

@@ -11,7 +11,7 @@
import spack.builder
from spack.build_environment import SPACK_NO_PARALLEL_MAKE, determine_number_of_jobs
from spack.directives import build_system, extends, maintainers
from spack.directives import build_system, extends
from spack.package_base import PackageBase
from spack.util.environment import env_flag
from spack.util.executable import Executable, ProcessError
@@ -23,7 +23,7 @@ class RacketPackage(PackageBase):
"""
#: Package name, version, and extension on PyPI
maintainers("elfprince13")
maintainers = ["elfprince13"]
# To be used in UI queries that require to know which
# build-system class we are using
build_system_class = "RacketPackage"

View File

@@ -7,7 +7,7 @@
import spack.builder
import spack.package_base
from spack.directives import build_system, extends, maintainers
from spack.directives import build_system, extends
from ._checks import BaseBuilder
@@ -15,7 +15,7 @@
class RubyPackage(spack.package_base.PackageBase):
"""Specialized class for building Ruby gems."""
maintainers("Kerilk")
maintainers = ["Kerilk"]
#: This attribute is used in UI queries that need to know the build
#: system base class

View File

@@ -61,7 +61,10 @@ def import_modules(self):
list: list of strings of module names
"""
modules = []
root = os.path.join(self.prefix, self.spec["python"].package.platlib)
root = os.path.join(
self.prefix,
self.spec["python"].package.platlib,
)
# Some Python libraries are packages: collections of modules
# distributed in directories containing __init__.py files

View File

@@ -42,7 +42,9 @@
from spack.reporters import CDash, CDashConfiguration
from spack.reporters.cdash import build_stamp as cdash_build_stamp
JOB_RETRY_CONDITIONS = ["always"]
JOB_RETRY_CONDITIONS = [
"always",
]
TEMP_STORAGE_MIRROR_NAME = "ci_temporary_mirror"
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
@@ -127,7 +129,10 @@ def _remove_reserved_tags(tags):
def _get_spec_string(spec):
format_elements = ["{name}{@version}", "{%compiler}"]
format_elements = [
"{name}{@version}",
"{%compiler}",
]
if spec.architecture:
format_elements.append(" {arch=architecture}")
@@ -323,7 +328,12 @@ def _compute_spec_deps(spec_list, check_index_only=False, mirrors_to_check=None)
dependencies = []
def append_dep(s, d):
dependencies.append({"spec": s, "depends": d})
dependencies.append(
{
"spec": s,
"depends": d,
}
)
for spec in spec_list:
for s in spec.traverse(deptype=all):
@@ -336,7 +346,10 @@ def append_dep(s, d):
)
skey = _spec_deps_key(s)
spec_labels[skey] = {"spec": s, "needs_rebuild": not up_to_date_mirrors}
spec_labels[skey] = {
"spec": s,
"needs_rebuild": not up_to_date_mirrors,
}
for d in s.dependencies(deptype=all):
dkey = _spec_deps_key(d)
@@ -355,13 +368,16 @@ def append_dep(s, d):
}
)
deps_json_obj = {"specs": specs, "dependencies": dependencies}
deps_json_obj = {
"specs": specs,
"dependencies": dependencies,
}
return deps_json_obj
def _spec_matches(spec, match_string):
return spec.intersects(match_string)
return spec.satisfies(match_string)
def _remove_attributes(src_dict, dest_dict):
@@ -394,7 +410,14 @@ def _copy_attributes(attrs_list, src_dict, dest_dict):
def _find_matching_config(spec, gitlab_ci):
runner_attributes = {}
overridable_attrs = ["image", "tags", "variables", "before_script", "script", "after_script"]
overridable_attrs = [
"image",
"tags",
"variables",
"before_script",
"script",
"after_script",
]
_copy_attributes(overridable_attrs, gitlab_ci, runner_attributes)
@@ -490,28 +513,16 @@ def compute_affected_packages(rev1="HEAD^", rev2="HEAD"):
return spack.repo.get_all_package_diffs("ARC", rev1=rev1, rev2=rev2)
def get_spec_filter_list(env, affected_pkgs, dependent_traverse_depth=None):
def get_spec_filter_list(env, affected_pkgs):
"""Given a list of package names and an active/concretized
environment, return the set of all concrete specs from the
environment that could have been affected by changing the
list of packages.
If a ``dependent_traverse_depth`` is given, it is used to limit
upward (in the parent direction) traversal of specs of touched
packages. E.g. if 1 is provided, then only direct dependents
of touched package specs are traversed to produce specs that
could have been affected by changing the package, while if 0 is
provided, only the changed specs themselves are traversed. If ``None``
is given, upward traversal of touched package specs is done all
the way to the environment roots. Providing a negative number
results in no traversals at all, yielding an empty set.
Arguments:
env (spack.environment.Environment): Active concrete environment
affected_pkgs (List[str]): Affected package names
dependent_traverse_depth: Optional integer to limit dependent
traversal, or None to disable the limit.
Returns:
@@ -528,11 +539,10 @@ def get_spec_filter_list(env, affected_pkgs, dependent_traverse_depth=None):
visited = set()
dag_hash = lambda s: s.dag_hash()
for match in env_matches:
for dep_level, parent in match.traverse(direction="parents", key=dag_hash, depth=True):
if dependent_traverse_depth is None or dep_level <= dependent_traverse_depth:
affected_specs.update(
parent.traverse(direction="children", visited=visited, key=dag_hash)
)
for parent in match.traverse(direction="parents", key=dag_hash):
affected_specs.update(
parent.traverse(direction="children", visited=visited, key=dag_hash)
)
return affected_specs
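
The removed dependent_traverse_depth logic limited how far up the DAG a touched spec propagates. A hedged re-creation as a standalone helper (Spec.traverse with depth=True yields (depth, spec) pairs):

def limited_dependents(spec, max_depth=1):
    """Collect spec plus dependents up to max_depth levels above it."""
    key = lambda s: s.dag_hash()
    affected = set()
    for depth, parent in spec.traverse(direction="parents", key=key, depth=True):
        if depth <= max_depth:  # 0 = spec itself, 1 = direct dependents, ...
            affected.update(parent.traverse(direction="children", key=key))
    return affected
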
@@ -593,18 +603,6 @@ def generate_gitlab_ci_yaml(
cdash_handler = CDashHandler(yaml_root.get("cdash")) if "cdash" in yaml_root else None
build_group = cdash_handler.build_group if cdash_handler else None
dependent_depth = os.environ.get("SPACK_PRUNE_UNTOUCHED_DEPENDENT_DEPTH", None)
if dependent_depth is not None:
try:
dependent_depth = int(dependent_depth)
except (TypeError, ValueError):
tty.warn(
    "Unrecognized value ({0}) ".format(dependent_depth),
    "provided for SPACK_PRUNE_UNTOUCHED_DEPENDENT_DEPTH, ",
    "ignoring it.",
)
dependent_depth = None
prune_untouched_packages = False
spack_prune_untouched = os.environ.get("SPACK_PRUNE_UNTOUCHED", None)
if spack_prune_untouched is not None and spack_prune_untouched.lower() == "true":
@@ -620,9 +618,7 @@ def generate_gitlab_ci_yaml(
tty.debug("affected pkgs:")
for p in affected_pkgs:
tty.debug(" {0}".format(p))
affected_specs = get_spec_filter_list(
env, affected_pkgs, dependent_traverse_depth=dependent_depth
)
affected_specs = get_spec_filter_list(env, affected_pkgs)
tty.debug("all affected specs:")
for s in affected_specs:
tty.debug(" {0}/{1}".format(s.name, s.dag_hash()[:7]))
@@ -689,14 +685,28 @@ def generate_gitlab_ci_yaml(
except AttributeError:
phase_name = phase
strip_compilers = False
phases.append({"name": phase_name, "strip-compilers": strip_compilers})
phases.append(
{
"name": phase_name,
"strip-compilers": strip_compilers,
}
)
for bs in env.spec_lists[phase_name]:
bootstrap_specs.append(
{"spec": bs, "phase-name": phase_name, "strip-compilers": strip_compilers}
{
"spec": bs,
"phase-name": phase_name,
"strip-compilers": strip_compilers,
}
)
phases.append({"name": "specs", "strip-compilers": False})
phases.append(
{
"name": "specs",
"strip-compilers": False,
}
)
# If a remote mirror override (alternate buildcache destination) was
# specified, add it here in case it has already built hashes we might
@@ -965,7 +975,7 @@ def generate_gitlab_ci_yaml(
bs_arch = c_spec.architecture
bs_arch_family = bs_arch.target.microarchitecture.family
if (
c_spec.intersects(compiler_pkg_spec)
c_spec.satisfies(compiler_pkg_spec)
and bs_arch_family == spec_arch_family
):
# We found the bootstrap compiler this release spec
@@ -1099,9 +1109,15 @@ def generate_gitlab_ci_yaml(
"variables": variables,
"script": job_script,
"tags": tags,
"artifacts": {"paths": artifact_paths, "when": "always"},
"artifacts": {
"paths": artifact_paths,
"when": "always",
},
"needs": sorted(job_dependencies, key=lambda d: d["job"]),
"retry": {"max": 2, "when": JOB_RETRY_CONDITIONS},
"retry": {
"max": 2,
"when": JOB_RETRY_CONDITIONS,
},
"interruptible": True,
}
@@ -1119,7 +1135,10 @@ def generate_gitlab_ci_yaml(
if image_name:
job_object["image"] = image_name
if image_entry is not None:
job_object["image"] = {"name": image_name, "entrypoint": image_entry}
job_object["image"] = {
"name": image_name,
"entrypoint": image_entry,
}
output_object[job_name] = job_object
job_id += 1
@@ -1162,7 +1181,11 @@ def generate_gitlab_ci_yaml(
service_job_retries = {
"max": 2,
"when": ["runner_system_failure", "stuck_or_timeout_failure", "script_failure"],
"when": [
"runner_system_failure",
"stuck_or_timeout_failure",
"script_failure",
],
}
if job_id > 0:
@@ -1334,7 +1357,9 @@ def generate_gitlab_ci_yaml(
_copy_attributes(default_attrs, service_job_config, noop_job)
if "script" not in noop_job:
noop_job["script"] = ['echo "All specs already up to date, nothing to rebuild."']
noop_job["script"] = [
'echo "All specs already up to date, nothing to rebuild."',
]
noop_job["retry"] = service_job_retries
@@ -1464,8 +1489,9 @@ def _push_mirror_contents(env, specfile_path, sign_binaries, mirror_url):
hashes = env.all_hashes() if env else None
matches = spack.store.specfile_matches(specfile_path, hashes=hashes)
push_url = spack.mirror.Mirror.from_url(mirror_url).push_url
spec_kwargs = {"include_root": True, "include_dependencies": False}
kwargs = {"force": True, "allow_root": True, "unsigned": unsigned}
bindist.push(matches, push_url, include_root=True, include_dependencies=False, **kwargs)
bindist.push(matches, push_url, spec_kwargs, **kwargs)
def push_mirror_contents(env, specfile_path, mirror_url, sign_binaries):
@@ -1528,7 +1554,10 @@ def copy_files_to_artifacts(src, artifacts_dir):
try:
fs.copy(src, artifacts_dir)
except Exception as err:
tty.warn(f"Unable to copy files ({src}) to artifacts {artifacts_dir} due to: {err}")
msg = ("Unable to copy files ({0}) to artifacts {1} due to " "exception: {2}").format(
src, artifacts_dir, str(err)
)
tty.error(msg)
def copy_stage_logs_to_artifacts(job_spec, job_log_dir):
@@ -1591,7 +1620,9 @@ def download_and_extract_artifacts(url, work_dir):
"""
tty.msg("Fetching artifacts from: {0}\n".format(url))
headers = {"Content-Type": "application/zip"}
headers = {
"Content-Type": "application/zip",
}
token = os.environ.get("GITLAB_PRIVATE_TOKEN", None)
if token:
@@ -2050,7 +2081,10 @@ def write_broken_spec(url, pkg_name, stack_name, job_url, pipeline_url, spec_dic
with open(file_path, "w") as fd:
fd.write(syaml.dump(broken_spec_details))
web_util.push_to_url(
file_path, url, keep_original=False, extra_args={"ContentType": "text/plain"}
file_path,
url,
keep_original=False,
extra_args={"ContentType": "text/plain"},
)
except Exception as err:
# If there is an S3 error (e.g., access denied or connection
@@ -2128,7 +2162,14 @@ def run_standalone_tests(**kwargs):
tty.error("Reproduction directory is required for stand-alone tests")
return
test_args = ["spack", "--color=always", "--backtrace", "--verbose", "test", "run"]
test_args = [
"spack",
"--color=always",
"--backtrace",
"--verbose",
"test",
"run",
]
if fail_fast:
test_args.append("--fail-fast")
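
A hedged sketch of launching the stand-alone test command assembled above (assumes a spack executable on PATH):

import subprocess

test_args = ["spack", "--color=always", "--backtrace", "--verbose", "test", "run"]
fail_fast = True  # hypothetical toggle mirroring the flag handling above
if fail_fast:
    test_args.append("--fail-fast")
subprocess.run(test_args, check=False)
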
@@ -2278,9 +2319,19 @@ def populate_buildgroup(self, job_names):
opener = build_opener(HTTPHandler)
parent_group_id = self.create_buildgroup(opener, headers, url, self.build_group, "Daily")
parent_group_id = self.create_buildgroup(
opener,
headers,
url,
self.build_group,
"Daily",
)
group_id = self.create_buildgroup(
opener, headers, url, "Latest {0}".format(self.build_group), "Latest"
opener,
headers,
url,
"Latest {0}".format(self.build_group),
"Latest",
)
if not parent_group_id or not group_id:
@@ -2290,9 +2341,13 @@ def populate_buildgroup(self, job_names):
data = {
"dynamiclist": [
{"match": name, "parentgroupid": parent_group_id, "site": self.site}
{
"match": name,
"parentgroupid": parent_group_id,
"site": self.site,
}
for name in job_names
]
],
}
enc_data = json.dumps(data).encode("utf-8")

View File

@@ -43,6 +43,7 @@ def matches(obj, proto):
return all((key in obj and matches(obj[key], val)) for key, val in proto.items())
if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
if not (isinstance(proto, collections.abc.Sequence) and not isinstance(proto, str)):
return False
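
The matches helper above performs structural prototype matching. A hedged, self-contained re-creation of the idea (the sequence rule chosen here is one plausible reading, not necessarily the exact original):

import collections.abc

def matches(obj, proto):
    if isinstance(proto, collections.abc.Mapping):
        # every key of the prototype must exist in obj and match recursively
        return isinstance(obj, collections.abc.Mapping) and all(
            k in obj and matches(obj[k], v) for k, v in proto.items()
        )
    if isinstance(proto, collections.abc.Sequence) and not isinstance(proto, str):
        if not (isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str)):
            return False
        # every prototype element must match some element of obj
        return all(any(matches(o, p) for o in obj) for p in proto)
    return obj == proto

assert matches({"a": 1, "b": 2}, {"a": 1})
assert not matches([1, 2], "12")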

View File

@@ -161,7 +161,9 @@ class _UnquotedFlags(object):
"""
flags_arg_pattern = re.compile(
r'^({0})=([^\'"].*)$'.format("|".join(spack.spec.FlagMap.valid_compiler_flags()))
r'^({0})=([^\'"].*)$'.format(
"|".join(spack.spec.FlagMap.valid_compiler_flags()),
)
)
def __init__(self, all_unquoted_flag_pairs: List[Tuple[Match[str], str]]):
@@ -225,6 +227,7 @@ def parse_specs(args, **kwargs):
return specs
except spack.error.SpecError as e:
msg = e.message
if e.long_message:
msg += e.long_message
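
The _UnquotedFlags pattern above detects compiler flags passed without quotes. A self-contained illustration (the flag list is a stand-in for spack.spec.FlagMap.valid_compiler_flags()):

import re

valid_flags = ["cflags", "cxxflags", "fflags", "cppflags", "ldflags", "ldlibs"]
pattern = re.compile(r'^({0})=([^\'"].*)$'.format("|".join(valid_flags)))

assert pattern.match("cflags=-O3 -g")             # unquoted value: matches
assert pattern.match('cflags="-O3 -g"') is None   # quoted value: no match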

View File

@@ -53,6 +53,7 @@ def packages(parser, args):
def packages_https(parser, args):
# Since auditing packages takes a long time, --all is required when no name is given
if not args.check_all and not args.name:
tty.die("Please specify one or more packages to audit, or --all.")

View File

@@ -5,7 +5,7 @@
import spack.cmd.common.env_utility as env_utility
description = (
"run a command in a spec's install environment, or dump its environment to screen or file"
"run a command in a spec's install environment, " "or dump its environment to screen or file"
)
section = "build"
level = "long"

View File

@@ -103,7 +103,9 @@ def setup_parser(subparser):
help="Regenerate buildcache index after building package(s)",
)
create.add_argument(
"--spec-file", default=None, help="Create buildcache entry for spec from json or yaml file"
"--spec-file",
default=None,
help="Create buildcache entry for spec from json or yaml file",
)
create.add_argument(
"--only",
@@ -400,7 +402,7 @@ def _matching_specs(specs, spec_file):
return spack.store.find(constraints, hashes=hashes)
if env:
return [concrete for _, concrete in env.concretized_specs()]
return [env.specs_by_hash[h] for h in env.concretized_order]
tty.die(
"build cache file creation requires at least one"
@@ -459,6 +461,10 @@ def create_fn(args):
msg = "Pushing binary packages to {0}/build_cache".format(url)
tty.msg(msg)
specs_kwargs = {
"include_root": "package" in args.things_to_install,
"include_dependencies": "dependencies" in args.things_to_install,
}
kwargs = {
"key": args.key,
"force": args.force,
@@ -467,13 +473,7 @@ def create_fn(args):
"allow_root": args.allow_root,
"regenerate_index": args.rebuild_index,
}
bindist.push(
matches,
url,
include_root="package" in args.things_to_install,
include_dependencies="dependencies" in args.things_to_install,
**kwargs,
)
bindist.push(matches, url, specs_kwargs, **kwargs)
def install_fn(args):
@@ -498,11 +498,11 @@ def list_fn(args):
if not args.allarch:
arch = spack.spec.Spec.default_arch()
specs = [s for s in specs if s.intersects(arch)]
specs = [s for s in specs if s.satisfies(arch)]
if args.specs:
constraints = set(args.specs)
specs = [s for s in specs if any(s.intersects(c) for c in constraints)]
specs = [s for s in specs if any(s.satisfies(c) for c in constraints)]
if sys.stdout.isatty():
builds = len(specs)
tty.msg("%s." % plural(builds, "cached build"))

View File

@@ -20,7 +20,9 @@ def setup_parser(subparser):
help="name of the list to remove specs from",
)
subparser.add_argument(
"--match-spec", dest="match_spec", help="if name is ambiguous, supply a spec to match"
"--match-spec",
dest="match_spec",
help="if name is ambiguous, supply a spec to match",
)
subparser.add_argument(
"-a",

View File

@@ -530,28 +530,39 @@ def ci_rebuild(args):
if not verify_binaries:
install_args.append("--no-check-signature")
if cdash_handler:
# Add additional arguments to `spack install` for CDash reporting.
install_args.extend(cdash_handler.args())
slash_hash = "/{}".format(job_spec.dag_hash())
# Arguments when installing dependencies from cache
deps_install_args = install_args
# Arguments when installing the root from sources
root_install_args = install_args + [
"--keep-stage",
"--only=package",
"--use-buildcache=package:never,dependencies:only",
slash_hash,
]
if cdash_handler:
# Add additional arguments to `spack install` for CDash reporting.
root_install_args.extend(cdash_handler.args())
root_install_args.append(slash_hash)
# ["x", "y"] -> "'x' 'y'"
args_to_string = lambda args: " ".join("'{}'".format(arg) for arg in args)
commands = [
# apparently there's a race when spack bootstraps; do it up front once
[SPACK_COMMAND, "-e", env.path, "bootstrap", "now"],
[
SPACK_COMMAND,
"-e",
env.path,
"bootstrap",
"now",
],
[
SPACK_COMMAND,
"-e",
env.path,
"config",
"add",
"config:db_lock_timeout:120", # 2 minutes for processes to fight for a db lock
],
[
SPACK_COMMAND,
"-e",

View File

@@ -13,7 +13,11 @@
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.argparsewriter import ArgparseCompletionWriter, ArgparseRstWriter, ArgparseWriter
from llnl.util.argparsewriter import (
ArgparseCompletionWriter,
ArgparseRstWriter,
ArgparseWriter,
)
from llnl.util.tty.colify import colify
import spack.cmd
@@ -38,7 +42,7 @@
"format": "bash",
"header": os.path.join(spack.paths.share_path, "bash", "spack-completion.in"),
"update": os.path.join(spack.paths.share_path, "spack-completion.bash"),
}
},
}

View File

@@ -12,11 +12,7 @@
import spack.build_environment as build_environment
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.error
import spack.paths
import spack.spec
import spack.store
from spack import traverse
from spack.util.environment import dump_environment, pickle_environment
@@ -42,41 +38,6 @@ def setup_parser(subparser):
)
class AreDepsInstalledVisitor:
def __init__(self, context="build"):
if context not in ("build", "test"):
raise ValueError("context can only be build or test")
if context == "build":
self.direct_deps = ("build", "link", "run")
else:
self.direct_deps = ("build", "test", "link", "run")
self.has_uninstalled_deps = False
def accept(self, item):
# The root may be installed or uninstalled.
if item.depth == 0:
return True
# Early exit after we've seen an uninstalled dep.
if self.has_uninstalled_deps:
return False
spec = item.edge.spec
if not spec.external and not spec.installed:
self.has_uninstalled_deps = True
return False
return True
def neighbors(self, item):
# Direct deps: follow build & test edges.
# Transitive deps: follow link / run.
deptypes = self.direct_deps if item.depth == 0 else ("link", "run")
return item.edge.spec.edges_to_dependencies(deptype=deptypes)
def emulate_env_utility(cmd_name, context, args):
if not args.spec:
tty.die("spack %s requires a spec." % cmd_name)
@@ -104,27 +65,6 @@ def emulate_env_utility(cmd_name, context, args):
spec = spack.cmd.matching_spec_from_env(spec)
# Require that dependencies are installed.
visitor = AreDepsInstalledVisitor(context=context)
# Mass install check needs read transaction.
with spack.store.db.read_transaction():
traverse.traverse_breadth_first_with_visitor([spec], traverse.CoverNodesVisitor(visitor))
if visitor.has_uninstalled_deps:
raise spack.error.SpackError(
f"Not all dependencies of {spec.name} are installed. "
f"Cannot setup {context} environment:",
spec.tree(
status_fn=spack.spec.Spec.install_status,
hashlen=7,
hashes=True,
# This shows more than necessary, but we cannot dynamically change deptypes
# in Spec.tree(...).
deptypes="all" if context == "build" else ("build", "test", "link", "run"),
),
)
build_environment.setup_package(spec.package, args.dirty, context)
if args.dump:

View File

@@ -408,7 +408,13 @@ def config_prefer_upstream(args):
pkgs = {}
for spec in pref_specs:
# Collect all the upstream compilers and versions for this package.
pkg = pkgs.get(spec.name, {"version": [], "compiler": []})
pkg = pkgs.get(
spec.name,
{
"version": [],
"compiler": [],
},
)
pkgs[spec.name] = pkg
# We have no existing variant if this is our first added version.

View File

@@ -16,10 +16,19 @@
import spack.stage
import spack.util.web
from spack.spec import Spec
from spack.url import UndetectableNameError, UndetectableVersionError, parse_name, parse_version
from spack.url import (
UndetectableNameError,
UndetectableVersionError,
parse_name,
parse_version,
)
from spack.util.editor import editor
from spack.util.executable import ProcessError, which
from spack.util.naming import mod_to_class, simplify_name, valid_fully_qualified_module_name
from spack.util.naming import (
mod_to_class,
simplify_name,
valid_fully_qualified_module_name,
)
description = "create a new package file"
section = "packaging"

View File

@@ -96,5 +96,8 @@ def report(args):
def debug(parser, args):
action = {"create-db-tarball": create_db_tarball, "report": report}
action = {
"create-db-tarball": create_db_tarball,
"report": report,
}
action[args.debug_command](args)

View File

@@ -33,7 +33,12 @@
level = "long"
# Arguments for display_specs when we find ambiguity
display_args = {"long": True, "show_flags": True, "variants": True, "indent": 4}
display_args = {
"long": True,
"show_flags": True,
"variants": True,
"indent": 4,
}
def setup_parser(sp):

View File

@@ -80,12 +80,22 @@ def compare_specs(a, b, to_string=False, color=None):
# specs and to descend into dependency hashes so we include all facts.
a_facts = set(
shift(func)
for func in setup.spec_clauses(a, body=True, expand_hashes=True, concrete_build_deps=True)
for func in setup.spec_clauses(
a,
body=True,
expand_hashes=True,
concrete_build_deps=True,
)
if func.name == "attr"
)
b_facts = set(
shift(func)
for func in setup.spec_clauses(b, body=True, expand_hashes=True, concrete_build_deps=True)
for func in setup.spec_clauses(
b,
body=True,
expand_hashes=True,
concrete_build_deps=True,
)
if func.name == "attr"
)

View File

@@ -148,7 +148,8 @@ def env_activate(args):
if not args.shell:
spack.cmd.common.shell_init_instructions(
"spack env activate", " eval `spack env activate {sh_arg} [...]`"
"spack env activate",
" eval `spack env activate {sh_arg} [...]`",
)
return 1
@@ -237,7 +238,8 @@ def env_deactivate_setup_parser(subparser):
def env_deactivate(args):
if not args.shell:
spack.cmd.common.shell_init_instructions(
"spack env deactivate", " eval `spack env deactivate {sh_arg}`"
"spack env deactivate",
" eval `spack env deactivate {sh_arg}`",
)
return 1

View File

@@ -38,7 +38,11 @@ def setup_parser(subparser):
default=False,
help="packages with detected externals won't be built with Spack",
)
find_parser.add_argument("--exclude", action="append", help="packages to exclude from search")
find_parser.add_argument(
"--exclude",
action="append",
help="packages to exclude from search",
)
find_parser.add_argument(
"-p",
"--path",
@@ -183,6 +187,7 @@ def external_read_cray_manifest(args):
def _collect_and_consume_cray_manifest_files(
manifest_file=None, manifest_directory=None, dry_run=False, fail_on_error=False
):
manifest_files = []
if manifest_file:
manifest_files.append(manifest_file)

View File

@@ -25,7 +25,10 @@ def setup_parser(subparser):
help="fetch only missing (not yet installed) dependencies",
)
subparser.add_argument(
"-D", "--dependencies", action="store_true", help="also fetch all dependencies"
"-D",
"--dependencies",
action="store_true",
help="also fetch all dependencies",
)
arguments.add_common_arguments(subparser, ["specs"])
subparser.epilog = (

View File

@@ -9,7 +9,13 @@
import spack.config
import spack.environment as ev
import spack.store
from spack.graph import DAGWithDependencyTypes, SimpleDAG, graph_ascii, graph_dot, static_graph_dot
from spack.graph import (
DAGWithDependencyTypes,
SimpleDAG,
graph_ascii,
graph_dot,
static_graph_dot,
)
description = "generate graphs of package dependency relationships"
section = "basic"

View File

@@ -39,14 +39,19 @@
compiler flags:
@g{cflags="flags"} cppflags, cflags, cxxflags,
fflags, ldflags, ldlibs
@g{==} propagate flags to package dependencies
@g{cflags=="flags"} propagate flags to package dependencies
cppflags, cflags, cxxflags, fflags,
ldflags, ldlibs
variants:
@B{+variant} enable <variant>
@B{++variant} propagate enable <variant>
@r{-variant} or @r{~variant} disable <variant>
@r{--variant} or @r{~~variant} propagate disable <variant>
@B{variant=value} set non-boolean <variant> to <value>
@B{variant==value} propagate non-boolean <variant> to <value>
@B{variant=value1,value2,value3} set multi-value <variant> values
@B{++}, @r{--}, @r{~~}, @B{==} propagate variants to package dependencies
@B{variant==value1,value2,value3} propagate multi-value <variant> values
architecture variants:
@m{platform=platform} linux, darwin, cray, etc.
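
Hedged examples of the propagation syntax documented above, as it would appear in spec strings (the package name is hypothetical):

from spack.spec import Spec

s = Spec('hdf5 cflags=="-O3" ++shared')  # '==' and '++' propagate to dependencies
t = Spec('hdf5 cflags="-O3" +shared')    # plain '=' and '+' apply to hdf5 only
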
@@ -82,7 +87,9 @@
"""
guides = {"spec": spec_guide}
guides = {
"spec": spec_guide,
}
def setup_parser(subparser):

View File

@@ -283,7 +283,7 @@ def print_tests(pkg):
c_names = ("gcc", "intel", "intel-parallel-studio", "pgi")
if pkg.name in c_names:
v_names.extend(["c", "cxx", "fortran"])
if pkg.spec.intersects("llvm+clang"):
if pkg.spec.satisfies("llvm+clang"):
v_names.extend(["c", "cxx"])
# TODO Refactor END

View File

@@ -496,7 +496,9 @@ def reporter_factory(specs):
return None
context_manager = spack.report.build_context_manager(
reporter=args.reporter(), filename=report_filename(args, specs=specs), specs=specs
reporter=args.reporter(),
filename=report_filename(args, specs=specs),
specs=specs,
)
return context_manager

View File

@@ -58,7 +58,10 @@
#: licensed files that can have LGPL language in them
#: so far, just this command -- so it can find LGPL things elsewhere
lgpl_exceptions = [r"lib/spack/spack/cmd/license.py", r"lib/spack/spack/test/cmd/license.py"]
lgpl_exceptions = [
r"lib/spack/spack/cmd/license.py",
r"lib/spack/spack/test/cmd/license.py",
]
def _all_spack_files(root=spack.paths.prefix):
@@ -126,6 +129,7 @@ def error_messages(self):
def _check_license(lines, path):
found = []
for line in lines:

View File

@@ -98,7 +98,8 @@ def load(parser, args):
if not args.shell:
specs_str = " ".join(args.constraint) or "SPECS"
spack.cmd.common.shell_init_instructions(
"spack load", " eval `spack load {sh_arg} %s`" % specs_str
"spack load",
" eval `spack load {sh_arg} %s`" % specs_str,
)
return 1

View File

@@ -27,7 +27,12 @@
"""
# Arguments for display_specs when we find ambiguity
display_args = {"long": True, "show_flags": False, "variants": False, "indent": 4}
display_args = {
"long": True,
"show_flags": False,
"variants": False,
"indent": 4,
}
def setup_parser(subparser):

View File

@@ -335,7 +335,7 @@ def not_excluded_fn(args):
exclude_specs.extend(spack.cmd.parse_specs(str(args.exclude_specs).split()))
def not_excluded(x):
return not any(x.satisfies(y) for y in exclude_specs)
return not any(x.satisfies(y, strict=True) for y in exclude_specs)
return not_excluded
@@ -445,7 +445,9 @@ def mirror_create(args):
mirror_specs = concrete_specs_from_user(args)
create_mirror_for_individual_specs(
mirror_specs, path=path, skip_unstable_versions=args.skip_unstable_versions
mirror_specs,
path=path,
skip_unstable_versions=args.skip_unstable_versions,
)
@@ -465,7 +467,9 @@ def create_mirror_for_all_specs(path, skip_unstable_versions, selection_fn):
def create_mirror_for_all_specs_inside_environment(path, skip_unstable_versions, selection_fn):
mirror_specs = concrete_specs_from_environment(selection_fn=selection_fn)
create_mirror_for_individual_specs(
mirror_specs, path=path, skip_unstable_versions=skip_unstable_versions
mirror_specs,
path=path,
skip_unstable_versions=skip_unstable_versions,
)

View File

@@ -180,7 +180,10 @@ def loads(module_type, specs, args, out=None):
for spec in specs
)
module_commands = {"tcl": "module load ", "lmod": "module load "}
module_commands = {
"tcl": "module load ",
"lmod": "module load ",
}
d = {"command": "" if not args.shell else module_commands[module_type], "prefix": args.prefix}
@@ -365,14 +368,18 @@ def refresh(module_type, specs, args):
def modules_cmd(parser, args, module_type, callbacks=callbacks):
# Qualifiers to be used when querying the db for specs
constraint_qualifiers = {"refresh": {"installed": True, "known": True}}
constraint_qualifiers = {
"refresh": {"installed": True, "known": True},
}
query_args = constraint_qualifiers.get(args.subparser_name, {})
# Get the specs that match the query from the DB
specs = args.specs(**query_args)
try:
callbacks[args.subparser_name](module_type, specs, args)
except MultipleSpecsMatch:

View File

@@ -182,7 +182,11 @@ def solve(parser, args):
# set up solver parameters
# Note: reuse and other concretizer prefs are passed as configuration
result = solver.solve(
specs, out=output, timers=args.timers, stats=args.stats, setup_only=setup_only
specs,
out=output,
timers=args.timers,
stats=args.stats,
setup_only=setup_only,
)
if not setup_only:
_process_result(result, show, required_format, kwargs)

View File

@@ -110,7 +110,7 @@ def spec(parser, args):
else:
tty.die("spack spec requires at least one spec or an active environment")
for input, output in specs:
for (input, output) in specs:
# With -y, just print YAML to output.
if args.format:
if args.format == "yaml":

View File

@@ -30,13 +30,20 @@ def grouper(iterable, n, fillvalue=None):
#: List of directories to exclude from checks -- relative to spack root
exclude_directories = [os.path.relpath(spack.paths.external_path, spack.paths.prefix)]
exclude_directories = [
os.path.relpath(spack.paths.external_path, spack.paths.prefix),
]
#: Order in which tools should be run. flake8 is last so that it can
#: double-check the results of other tools (if, e.g., --fix was provided)
#: The list maps an executable name to a method to ensure the tool is
#: bootstrapped or present in the environment.
tool_names = ["isort", "black", "flake8", "mypy"]
tool_names = [
"isort",
"black",
"flake8",
"mypy",
]
#: tools we run in spack style
tools = {}
@@ -45,7 +52,7 @@ def grouper(iterable, n, fillvalue=None):
mypy_ignores = [
# same as `disable_error_code = "annotation-unchecked"` in pyproject.toml, which
# doesn't exist in mypy 0.971 for Python 3.6
"[annotation-unchecked]"
"[annotation-unchecked]",
]
@@ -143,7 +150,10 @@ def setup_parser(subparser):
help="branch to compare against to determine changed files (default: develop)",
)
subparser.add_argument(
"-a", "--all", action="store_true", help="check all files, not just changed files"
"-a",
"--all",
action="store_true",
help="check all files, not just changed files",
)
subparser.add_argument(
"-r",
@@ -168,7 +178,10 @@ def setup_parser(subparser):
help="format automatically if possible (e.g., with isort, black)",
)
subparser.add_argument(
"--root", action="store", default=None, help="style check a different spack instance"
"--root",
action="store",
default=None,
help="style check a different spack instance",
)
tool_group = subparser.add_mutually_exclusive_group()
@@ -198,7 +211,6 @@ def rewrite_and_print_output(
output, args, re_obj=re.compile(r"^(.+):([0-9]+):"), replacement=r"{0}:{1}:"
):
    """rewrite output with <file>:<line>: format to respect path args"""
# print results relative to current working directory
def translate(match):
return replacement.format(cwd_relative(match.group(1), args), *list(match.groups()[1:]))
@@ -269,10 +281,24 @@ def run_mypy(mypy_cmd, file_list, args):
os.path.join(spack.paths.prefix, "pyproject.toml"),
"--show-error-codes",
]
mypy_arg_sets = [common_mypy_args + ["--package", "spack", "--package", "llnl"]]
mypy_arg_sets = [
common_mypy_args
+ [
"--package",
"spack",
"--package",
"llnl",
]
]
if "SPACK_MYPY_CHECK_PACKAGES" in os.environ:
mypy_arg_sets.append(
common_mypy_args + ["--package", "packages", "--disable-error-code", "no-redef"]
common_mypy_args
+ [
"--package",
"packages",
"--disable-error-code",
"no-redef",
]
)
returncode = 0

View File

@@ -33,7 +33,9 @@ def setup_parser(subparser):
# Run
run_parser = sp.add_parser(
"run", description=test_run.__doc__, help=spack.cmd.first_line(test_run.__doc__)
"run",
description=test_run.__doc__,
help=spack.cmd.first_line(test_run.__doc__),
)
alias_help_msg = "Provide an alias for this test-suite"
@@ -78,7 +80,9 @@ def setup_parser(subparser):
# List
list_parser = sp.add_parser(
"list", description=test_list.__doc__, help=spack.cmd.first_line(test_list.__doc__)
"list",
description=test_list.__doc__,
help=spack.cmd.first_line(test_list.__doc__),
)
list_parser.add_argument(
"-a",
@@ -92,7 +96,9 @@ def setup_parser(subparser):
# Find
find_parser = sp.add_parser(
"find", description=test_find.__doc__, help=spack.cmd.first_line(test_find.__doc__)
"find",
description=test_find.__doc__,
help=spack.cmd.first_line(test_find.__doc__),
)
find_parser.add_argument(
"filter",
@@ -102,7 +108,9 @@ def setup_parser(subparser):
# Status
status_parser = sp.add_parser(
"status", description=test_status.__doc__, help=spack.cmd.first_line(test_status.__doc__)
"status",
description=test_status.__doc__,
help=spack.cmd.first_line(test_status.__doc__),
)
status_parser.add_argument(
"names", nargs=argparse.REMAINDER, help="Test suites for which to print status"
@@ -139,7 +147,9 @@ def setup_parser(subparser):
# Remove
remove_parser = sp.add_parser(
"remove", description=test_remove.__doc__, help=spack.cmd.first_line(test_remove.__doc__)
"remove",
description=test_remove.__doc__,
help=spack.cmd.first_line(test_remove.__doc__),
)
arguments.add_common_arguments(remove_parser, ["yes_to_all"])
remove_parser.add_argument(
@@ -179,7 +189,11 @@ def test_run(args):
specs = spack.cmd.parse_specs(args.specs) if args.specs else [None]
specs_to_test = []
for spec in specs:
matching = spack.store.db.query_local(spec, hashes=hashes, explicit=explicit)
matching = spack.store.db.query_local(
spec,
hashes=hashes,
explicit=explicit,
)
if spec and not matching:
tty.warn("No {0}installed packages match spec {1}".format(explicit_str, spec))
"""
@@ -214,7 +228,14 @@ def test_run(args):
def report_filename(args, test_suite):
return os.path.abspath(args.log_file or "test-{}".format(test_suite.name))
if args.log_file:
if os.path.isabs(args.log_file):
return args.log_file
else:
log_dir = os.getcwd()
return os.path.join(log_dir, args.log_file)
else:
return os.path.join(os.getcwd(), "test-%s" % test_suite.name)
def create_reporter(args, specs_to_test, test_suite):
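
The expanded report_filename above anchors relative log paths at the current working directory; a hedged standalone restatement:

import os

def report_filename(log_file, suite_name):
    if log_file:
        if os.path.isabs(log_file):
            return log_file
        return os.path.join(os.getcwd(), log_file)
    return os.path.join(os.getcwd(), "test-%s" % suite_name)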

View File

@@ -5,7 +5,7 @@
import spack.cmd.common.env_utility as env_utility
description = (
"run a command in a spec's test environment, or dump its environment to screen or file"
"run a command in a spec's test environment, " "or dump its environment to screen or file"
)
section = "admin"
level = "long"

View File

@@ -31,7 +31,12 @@
"""
# Arguments for display_specs when we find ambiguity
display_args = {"long": True, "show_flags": False, "variants": False, "indent": 4}
display_args = {
"long": True,
"show_flags": False,
"variants": False,
"indent": 4,
}
def setup_parser(subparser):
@@ -128,7 +133,7 @@ def find_matching_specs(env, specs, allow_multiple_matches=False, force=False, o
return specs_from_cli
def installed_runtime_dependents(specs, env):
def installed_dependents(specs, env):
"""Map each spec to a list of its installed dependents.
Args:
@@ -155,10 +160,10 @@ def installed_runtime_dependents(specs, env):
for spec in specs:
for dpt in traverse.traverse_nodes(
spec.dependents(deptype=("link", "run")),
spec.dependents(deptype="all"),
direction="parents",
visited=visited,
deptype=("link", "run"),
deptype="all",
root=True,
key=lambda s: s.dag_hash(),
):
@@ -231,7 +236,12 @@ def do_uninstall(specs, force=False):
hashes_to_remove = set(s.dag_hash() for s in specs)
for s in traverse.traverse_nodes(
specs, order="topo", direction="children", root=True, cover="nodes", deptype="all"
specs,
order="topo",
direction="children",
root=True,
cover="nodes",
deptype="all",
):
if s.dag_hash() in hashes_to_remove:
spack.package_base.PackageBase.uninstall_by_spec(s, force=force)
@@ -255,7 +265,7 @@ def get_uninstall_list(args, specs, env):
# args.all takes care of the case where '-a' is given in the cli
base_uninstall_specs = set(find_matching_specs(env, specs, args.all, args.force))
active_dpts, outside_dpts = installed_runtime_dependents(base_uninstall_specs, env)
active_dpts, outside_dpts = installed_dependents(base_uninstall_specs, env)
# It will be useful to track the unified set of specs with dependents, as
# well as to separately track specs in the current env with dependents
spec_to_dpts = {}

View File

@@ -26,6 +26,7 @@
description = "run spack's unit tests (wrapper around pytest)"
section = "developer"
level = "long"
is_windows = sys.platform == "win32"
def setup_parser(subparser):
@@ -211,7 +212,7 @@ def unit_test(parser, args, unknown_args):
# mock configuration used by unit tests
# Note: skip on Windows here because, for the moment,
# clingo is wholly unsupported by bootstrap
if sys.platform != "win32":
if not is_windows:
with spack.bootstrap.ensure_bootstrap_configuration():
spack.bootstrap.ensure_core_dependencies()
if pytest is None:

View File

@@ -77,7 +77,8 @@ def unload(parser, args):
specs_str = " ".join(args.specs) or "SPECS"
spack.cmd.common.shell_init_instructions(
"spack unload", " eval `spack unload {sh_arg}` %s" % specs_str
"spack unload",
" eval `spack unload {sh_arg}` %s" % specs_str,
)
return 1

View File

@@ -106,7 +106,12 @@ def setup_parser(subparser):
def url(parser, args):
action = {"parse": url_parse, "list": url_list, "summary": url_summary, "stats": url_stats}
action = {
"parse": url_parse,
"list": url_list,
"summary": url_summary,
"stats": url_stats,
}
action[args.subcommand](args)

View File

@@ -28,6 +28,8 @@
__all__ = ["Compiler"]
is_windows = sys.platform == "win32"
@llnl.util.lang.memoized
def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
@@ -596,7 +598,7 @@ def search_regexps(cls, language):
suffixes = [""]
# Windows compilers generally have an extension of some sort,
# as do most files on Windows; handle that case here
if sys.platform == "win32":
if is_windows:
ext = r"\.(?:exe|bat)"
cls_suf = [suf + ext for suf in cls.suffixes]
ext_suf = [ext]

View File

@@ -84,7 +84,7 @@ def _to_dict(compiler):
d = {}
d["spec"] = str(compiler.spec)
d["paths"] = dict((attr, getattr(compiler, attr, None)) for attr in _path_instance_vars)
d["flags"] = dict((fname, " ".join(fvals)) for fname, fvals in compiler.flags.items())
d["flags"] = dict((fname, fvals) for fname, fvals in compiler.flags)
d["flags"].update(
dict(
(attr, getattr(compiler, attr, None))
@@ -619,9 +619,11 @@ def _default(search_paths):
command_arguments = []
files_to_be_tested = fs.files_in(*search_paths)
for compiler_name in spack.compilers.supported_compilers():
compiler_cls = class_for_compiler_name(compiler_name)
for language in ("cc", "cxx", "f77", "fc"):
# Select only the files matching a regexp
for (file, full_path), regexp in itertools.product(
files_to_be_tested, compiler_cls.search_regexps(language)

View File

@@ -36,89 +36,36 @@ def extract_version_from_output(cls, output):
ver = match.group(match.lastindex)
return ver
# C++ flags based on CMake Modules/Compiler/AppleClang-CXX.cmake
@property
def cxx11_flag(self):
# Adapted from CMake's AppleClang-CXX rules
# Spack's AppleClang detection only valid from Xcode >= 4.6
if self.real_version < spack.version.ver("4.0"):
if self.real_version < spack.version.ver("4.0.0"):
raise spack.compiler.UnsupportedCompilerFlag(
self, "the C++11 standard", "cxx11_flag", "Xcode < 4.0"
self, "the C++11 standard", "cxx11_flag", "Xcode < 4.0.0"
)
return "-std=c++11"
@property
def cxx14_flag(self):
if self.real_version < spack.version.ver("5.1"):
# Adapted from CMake's rules for AppleClang
if self.real_version < spack.version.ver("5.1.0"):
raise spack.compiler.UnsupportedCompilerFlag(
self, "the C++14 standard", "cxx14_flag", "Xcode < 5.1"
self, "the C++14 standard", "cxx14_flag", "Xcode < 5.1.0"
)
elif self.real_version < spack.version.ver("6.1"):
elif self.real_version < spack.version.ver("6.1.0"):
return "-std=c++1y"
return "-std=c++14"
@property
def cxx17_flag(self):
if self.real_version < spack.version.ver("6.1"):
# Adapted from CMake's rules for AppleClang
if self.real_version < spack.version.ver("6.1.0"):
raise spack.compiler.UnsupportedCompilerFlag(
self, "the C++17 standard", "cxx17_flag", "Xcode < 6.1"
self, "the C++17 standard", "cxx17_flag", "Xcode < 6.1.0"
)
elif self.real_version < spack.version.ver("10.0"):
return "-std=c++1z"
return "-std=c++17"
@property
def cxx20_flag(self):
if self.real_version < spack.version.ver("10.0"):
raise spack.compiler.UnsupportedCompilerFlag(
self, "the C++20 standard", "cxx20_flag", "Xcode < 10.0"
)
elif self.real_version < spack.version.ver("13.0"):
return "-std=c++2a"
return "-std=c++20"
@property
def cxx23_flag(self):
if self.real_version < spack.version.ver("13.0"):
raise spack.compiler.UnsupportedCompilerFlag(
self, "the C++23 standard", "cxx23_flag", "Xcode < 13.0"
)
return "-std=c++2b"
# C flags based on CMake Modules/Compiler/AppleClang-C.cmake
@property
def c99_flag(self):
if self.real_version < spack.version.ver("4.0"):
raise spack.compiler.UnsupportedCompilerFlag(
self, "the C99 standard", "c99_flag", "< 4.0"
)
return "-std=c99"
@property
def c11_flag(self):
if self.real_version < spack.version.ver("4.0"):
raise spack.compiler.UnsupportedCompilerFlag(
self, "the C11 standard", "c11_flag", "< 4.0"
)
return "-std=c11"
@property
def c17_flag(self):
if self.real_version < spack.version.ver("11.0"):
raise spack.compiler.UnsupportedCompilerFlag(
self, "the C17 standard", "c17_flag", "< 11.0"
)
return "-std=c17"
@property
def c23_flag(self):
if self.real_version < spack.version.ver("11.0.3"):
raise spack.compiler.UnsupportedCompilerFlag(
self, "the C23 standard", "c23_flag", "< 11.0.3"
)
return "-std=c2x"
return "-std=c++1z"
def setup_custom_environment(self, pkg, env):
"""Set the DEVELOPER_DIR environment for the Xcode toolchain.
@@ -207,7 +154,10 @@ def setup_custom_environment(self, pkg, env):
),
)
real_dirs = ["Toolchains/XcodeDefault.xctoolchain/usr/bin", "usr/bin"]
real_dirs = [
"Toolchains/XcodeDefault.xctoolchain/usr/bin",
"usr/bin",
]
bins = ["c++", "c89", "c99", "cc", "clang", "clang++", "cpp"]

View File

@@ -89,11 +89,6 @@ def cxx14_flag(self):
return "-std=c++14"
return "-h std=c++14"
@property
def cxx17_flag(self):
if self.is_clang_based:
return "-std=c++17"
@property
def c99_flag(self):
if self.is_clang_based:

View File

@@ -128,23 +128,10 @@ def c99_flag(self):
@property
def c11_flag(self):
if self.real_version < ver("3.0"):
raise UnsupportedCompilerFlag(self, "the C11 standard", "c11_flag", "< 3.0")
if self.real_version < ver("3.1"):
return "-std=c1x"
return "-std=c11"
@property
def c17_flag(self):
if self.real_version < ver("6.0"):
raise UnsupportedCompilerFlag(self, "the C17 standard", "c17_flag", "< 6.0")
return "-std=c17"
@property
def c23_flag(self):
if self.real_version < ver("9.0"):
raise UnsupportedCompilerFlag(self, "the C23 standard", "c23_flag", "< 9.0")
return "-std=c2x"
if self.real_version < ver("6.1.0"):
raise UnsupportedCompilerFlag(self, "the C11 standard", "c11_flag", "< 6.1.0")
else:
return "-std=c11"
@property
def cc_pic_flag(self):

View File

@@ -134,7 +134,7 @@ def _valid_virtuals_and_externals(self, spec):
externals = spec_externals(cspec)
for ext in externals:
if ext.intersects(spec):
if ext.satisfies(spec):
usable.append(ext)
# If nothing is in the usable list now, it's because we aren't
@@ -200,7 +200,7 @@ def concretize_version(self, spec):
# List of versions we could consider, in sorted order
pkg_versions = spec.package_class.versions
usable = [v for v in pkg_versions if any(v.intersects(sv) for sv in spec.versions)]
usable = [v for v in pkg_versions if any(v.satisfies(sv) for sv in spec.versions)]
yaml_prefs = PackagePrefs(spec.name, "version")
@@ -344,7 +344,7 @@ def concretize_architecture(self, spec):
new_target_arch = spack.spec.ArchSpec((None, None, str(new_target)))
curr_target_arch = spack.spec.ArchSpec((None, None, str(curr_target)))
if not new_target_arch.intersects(curr_target_arch):
if not new_target_arch.satisfies(curr_target_arch):
# new_target is an incorrect guess based on preferences
# and/or default
valid_target_ranges = str(curr_target).split(",")
@@ -743,7 +743,9 @@ def _concretize_specs_together_new(*abstract_specs, **kwargs):
import spack.solver.asp
solver = spack.solver.asp.Solver()
result = solver.solve(abstract_specs, tests=kwargs.get("tests", False))
solver.tests = kwargs.get("tests", False)
result = solver.solve(abstract_specs)
result.raise_if_unsat()
return [s.copy() for s in result.specs]

View File

@@ -793,7 +793,7 @@ def _config():
configuration_paths = [
# Default configuration scope is the lowest-level scope. These are
# versioned with Spack and can be overridden by systems, sites or users
configuration_defaults_path
configuration_defaults_path,
]
disable_local_config = "SPACK_DISABLE_LOCAL_CONFIG" in os.environ
@@ -801,11 +801,15 @@ def _config():
# System configuration is per machine.
# This is disabled if user asks for no local configuration.
if not disable_local_config:
configuration_paths.append(("system", spack.paths.system_config_path))
configuration_paths.append(
("system", spack.paths.system_config_path),
)
# Site configuration is per spack instance, for sites or projects
# No site-level configs should be checked into spack by default.
configuration_paths.append(("site", os.path.join(spack.paths.etc_path)))
configuration_paths.append(
("site", os.path.join(spack.paths.etc_path)),
)
# User configuration can override both spack defaults and site config
# This is disabled if user asks for no local configuration.

View File

@@ -18,7 +18,10 @@
#: packages here.
default_path = "/opt/cray/pe/cpe-descriptive-manifest/"
compiler_name_translation = {"nvidia": "nvhpc", "rocm": "rocmcc"}
compiler_name_translation = {
"nvidia": "nvhpc",
"rocm": "rocmcc",
}
def translated_compiler_name(manifest_compiler_name):

View File

@@ -46,7 +46,10 @@
import spack.store
import spack.util.lock as lk
import spack.util.spack_json as sjson
from spack.directory_layout import DirectoryLayoutError, InconsistentInstallDirectoryError
from spack.directory_layout import (
DirectoryLayoutError,
InconsistentInstallDirectoryError,
)
from spack.error import SpackError
from spack.util.crypto import bit_length
from spack.version import Version
@@ -105,7 +108,10 @@
def reader(version):
reader_cls = {Version("5"): spack.spec.SpecfileV1, Version("6"): spack.spec.SpecfileV3}
reader_cls = {
Version("5"): spack.spec.SpecfileV1,
Version("6"): spack.spec.SpecfileV3,
}
return reader_cls[version]
@@ -1525,7 +1531,7 @@ def _query(
if not (start_date < inst_date < end_date):
continue
if query_spec is any or rec.spec.satisfies(query_spec):
if query_spec is any or rec.spec.satisfies(query_spec, strict=True):
results.append(rec.spec)
return results

View File

@@ -29,6 +29,7 @@
import spack.util.spack_yaml
import spack.util.windows_registry
is_windows = sys.platform == "win32"
#: Information on a package that has been detected
DetectedPackage = collections.namedtuple("DetectedPackage", ["spec", "prefix"])
@@ -183,7 +184,7 @@ def library_prefix(library_dir):
elif "lib" in lowered_components:
idx = lowered_components.index("lib")
return os.sep.join(components[:idx])
elif sys.platform == "win32" and "bin" in lowered_components:
elif is_windows and "bin" in lowered_components:
idx = lowered_components.index("bin")
return os.sep.join(components[:idx])
else:
@@ -259,13 +260,13 @@ def find_windows_compiler_bundled_packages():
class WindowsKitExternalPaths(object):
if sys.platform == "win32":
if is_windows:
plat_major_ver = str(winOs.windows_version()[0])
@staticmethod
def find_windows_kit_roots():
"""Return Windows kit root, typically %programfiles%\\Windows Kits\\10|11\\"""
if sys.platform != "win32":
if not is_windows:
return []
program_files = os.environ["PROGRAMFILES(x86)"]
kit_base = os.path.join(
@@ -358,7 +359,7 @@ def compute_windows_program_path_for_package(pkg):
pkg (spack.package_base.PackageBase): package for which
Program Files location is to be computed
"""
if sys.platform != "win32":
if not is_windows:
return []
# note windows paths are fine here as this method should only ever be invoked
# to interact with Windows
@@ -376,9 +377,8 @@ def compute_windows_user_path_for_package(pkg):
install location, return list of potential locations based
on common heuristics. For more info on Windows user specific
installs see:
https://learn.microsoft.com/en-us/dotnet/api/system.environment.specialfolder?view=netframework-4.8
"""
if sys.platform != "win32":
https://learn.microsoft.com/en-us/dotnet/api/system.environment.specialfolder?view=netframework-4.8"""
if not is_windows:
return []
# Current user directory

View File

@@ -31,6 +31,8 @@
path_to_dict,
)
is_windows = sys.platform == "win32"
def common_windows_package_paths():
paths = WindowsCompilerExternalPaths.find_windows_compiler_bundled_packages()
@@ -55,7 +57,7 @@ def executables_in_path(path_hints):
path_hints (list): list of paths to be searched. If None the list will be
constructed based on the PATH environment variable.
"""
if sys.platform == "win32":
if is_windows:
path_hints.extend(common_windows_package_paths())
search_paths = llnl.util.filesystem.search_paths_for_executables(*path_hints)
return path_to_dict(search_paths)
@@ -147,7 +149,7 @@ def by_library(packages_to_check, path_hints=None):
path_to_lib_name = (
libraries_in_ld_and_system_library_path(path_hints=path_hints)
if sys.platform != "win32"
if not is_windows
else libraries_in_windows_paths(path_hints)
)

View File

@@ -21,6 +21,7 @@
import spack.util.spack_json as sjson
from spack.error import SpackError
is_windows = sys.platform == "win32"
# Note: Posixpath is used here as opposed to
# os.path.join due to spack.spec.Spec.format
# requiring forward slash path separators at this stage
@@ -345,7 +346,7 @@ def remove_install_directory(self, spec, deprecated=False):
# Windows readonly files cannot be removed by Python
# directly, change permissions before attempting to remove
if sys.platform == "win32":
if is_windows:
kwargs = {
"ignore_errors": False,
"onerror": fs.readonly_file_handler(ignore_errors=False),

View File

@@ -18,9 +18,7 @@
import llnl.util.filesystem as fs
import llnl.util.tty as tty
import llnl.util.tty.color as clr
from llnl.util.lang import dedupe
from llnl.util.link_tree import ConflictingSpecsError
from llnl.util.symlink import symlink
import spack.compilers
@@ -47,7 +45,11 @@
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
import spack.util.url
-from spack.filesystem_view import SimpleFilesystemView, inverse_view_func_parser, view_func_parser
+from spack.filesystem_view import (
+    SimpleFilesystemView,
+    inverse_view_func_parser,
+    view_func_parser,
+)
from spack.installer import PackageInstaller
from spack.spec import Spec
from spack.spec_list import InvalidSpecConstraintError, SpecList
@@ -324,7 +326,12 @@ def _write_yaml(data, str_or_file):
def _eval_conditional(string):
"""Evaluate conditional definitions using restricted variable scope."""
valid_variables = spack.util.environment.get_host_environment()
-valid_variables.update({"re": re, "env": os.environ})
+valid_variables.update(
+    {
+        "re": re,
+        "env": os.environ,
+    }
+)
return eval(string, valid_variables)
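To make the restricted-scope evaluation concrete, here is a hypothetical conditional of the kind spack.yaml definitions may use; the variable names and values are invented stand-ins for what get_host_environment() would report:

    import os
    import re

    valid_variables = {"platform": "linux", "target": "x86_64"}  # assumed host data
    valid_variables.update({"re": re, "env": os.environ})

    # Only the names placed in valid_variables are visible to the expression.
    assert eval('platform == "linux"', valid_variables)
    assert eval('re.match(r"x86", target) is not None', valid_variables)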
@@ -349,8 +356,7 @@ def _is_dev_spec_and_has_changed(spec):
def _spec_needs_overwrite(spec, changed_dev_specs):
"""Check whether the current spec needs to be overwritten because either it has
-changed itself or one of its dependencies have changed
-"""
+changed itself or one of its dependencies have changed"""
# if it's not installed, we don't need to overwrite it
if not spec.installed:
return False
@@ -628,24 +634,7 @@ def regenerate(self, concretized_root_specs):
os.unlink(tmp_symlink_name)
except (IOError, OSError):
pass
-# Give an informative error message for the typical error case: two specs, same package
-# project to same prefix.
-if isinstance(e, ConflictingSpecsError):
-    spec_a = e.args[0].format(color=clr.get_color_when())
-    spec_b = e.args[1].format(color=clr.get_color_when())
-    raise SpackEnvironmentViewError(
-        f"The environment view in {self.root} could not be created, "
-        "because the following two specs project to the same prefix:\n"
-        f" {spec_a}, and\n"
-        f" {spec_b}.\n"
-        " To resolve this issue:\n"
-        " a. use `concretization:unify:true` to ensure there is only one "
-        "package per spec in the environment, or\n"
-        " b. disable views with `view:false`, or\n"
-        " c. create custom view projections."
-    ) from e
-raise
+raise e
# Remove the old root when it's in the same folder as the new root. This guards
# against removal of an arbitrary path when the original symlink in self.root
@@ -1006,7 +995,9 @@ def included_config_scopes(self):
config_path = os.path.join(config_path, basename)
else:
staged_path = spack.config.fetch_remote_configs(
-    config_path, self.config_stage_dir, skip_existing=True
+    config_path,
+    self.config_stage_dir,
+    skip_existing=True,
)
if not staged_path:
raise SpackEnvironmentError(
@@ -2314,7 +2305,7 @@ def _concretize_from_constraints(spec_constraints, tests=False):
invalid_deps = [
c
for c in spec_constraints
-if any(c.satisfies(invd) for invd in invalid_deps_string)
+if any(c.satisfies(invd, strict=True) for invd in invalid_deps_string)
]
if len(invalid_deps) != len(invalid_deps_string):
raise e

View File

@@ -28,13 +28,20 @@
import os.path
import re
import shutil
import sys
import urllib.parse
from typing import List, Optional
import llnl.util
import llnl.util.filesystem as fs
import llnl.util.tty as tty
-from llnl.util.filesystem import get_single_file, mkdirp, temp_cwd, temp_rename, working_dir
+from llnl.util.filesystem import (
+    get_single_file,
+    mkdirp,
+    temp_cwd,
+    temp_rename,
+    working_dir,
+)
from llnl.util.symlink import symlink
import spack.config
@@ -52,6 +59,7 @@
#: List of all fetch strategies, created by FetchStrategy metaclass.
all_strategies = []
+is_windows = sys.platform == "win32"
CONTENT_TYPE_MISMATCH_WARNING_TEMPLATE = (
"The contents of {subject} look like {content_type}. Either the URL"
@@ -87,6 +95,22 @@ def _ensure_one_stage_entry(stage_path):
return os.path.join(stage_path, stage_entries[0])
+def _filesummary(path, print_bytes=16):
+    try:
+        n = print_bytes
+        with open(path, "rb") as f:
+            size = os.fstat(f.fileno()).st_size
+            if size <= 2 * n:
+                short_contents = f.read(2 * n)
+            else:
+                short_contents = f.read(n)
+                f.seek(-n, 2)
+                short_contents += b"..." + f.read(n)
+        return size, short_contents
+    except OSError:
+        return 0, b""
def fetcher(cls):
"""Decorator used to register fetch strategies."""
all_strategies.append(cls)
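A worked example of what the _filesummary helper added above returns, assuming its definition is in scope (file contents invented): for anything larger than 2 * print_bytes it stitches the first and last n bytes around a literal b"..." marker.

    import os
    import tempfile

    with tempfile.NamedTemporaryFile(delete=False) as f:
        f.write(b"A" * 50 + b"B" * 50)  # 100 bytes, larger than 2 * 16

    size, summary = _filesummary(f.name, print_bytes=16)
    assert size == 100
    assert summary == b"A" * 16 + b"..." + b"B" * 16
    os.unlink(f.name)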
@@ -495,7 +519,7 @@ def check(self):
# On failure, provide some information about the file size and
# contents, so that we can quickly see what the issue is (redirect
# was not followed, empty file, text instead of binary, ...)
-size, contents = fs.filesummary(self.archive_file)
+size, contents = _filesummary(self.archive_file)
raise ChecksumError(
f"{checker.hash_name} checksum failed for {self.archive_file}",
f"Expected {self.digest} but got {checker.sum}. "
@@ -1501,7 +1525,7 @@ def _from_merged_attrs(fetcher, pkg, version):
return fetcher(**attrs)
-def for_package_version(pkg, version=None):
+def for_package_version(pkg, version):
"""Determine a fetch strategy based on the arguments supplied to
version() in the package description."""
@@ -1512,18 +1536,8 @@ def for_package_version(pkg, version=None):
check_pkg_attributes(pkg)
-if version is not None:
-    assert not pkg.spec.concrete, "concrete specs should not pass the 'version=' argument"
-    # Specs are initialized with the universe range, if no version information is given,
-    # so here we make sure we always match the version passed as argument
-    if not isinstance(version, spack.version.VersionBase):
-        version = spack.version.Version(version)
-    version_list = spack.version.VersionList()
-    version_list.add(version)
-    pkg.spec.versions = version_list
-else:
-    version = pkg.version
+if not isinstance(version, spack.version.VersionBase):
+    version = spack.version.Version(version)
# if it's a commit, we must use a GitFetchStrategy
if isinstance(version, spack.version.GitVersion):
@@ -1541,7 +1555,11 @@ def for_package_version(pkg, version=None):
# performance hit for branches on older versions of git.
# Branches cannot be cached, so we tell the fetcher not to cache tags/branches
ref_type = "commit" if version.is_commit else "tag"
kwargs = {"git": pkg.git, ref_type: version.ref, "no_cache": True}
kwargs = {
"git": pkg.git,
ref_type: version.ref,
"no_cache": True,
}
kwargs["submodules"] = getattr(pkg, "submodules", False)

View File

@@ -20,7 +20,6 @@
)
from llnl.util.lang import index_by, match_predicate
from llnl.util.link_tree import (
-ConflictingSpecsError,
DestinationMergeVisitor,
LinkTree,
MergeConflictSummary,
@@ -639,22 +638,6 @@ class SimpleFilesystemView(FilesystemView):
def __init__(self, root, layout, **kwargs):
super(SimpleFilesystemView, self).__init__(root, layout, **kwargs)
-def _sanity_check_view_projection(self, specs):
-    """A very common issue is that we end up with two specs of the same
-    package, that project to the same prefix. We want to catch that as
-    early as possible and give a sensible error to the user. Here we use
-    the metadata dir (.spack) projection as a quick test to see whether
-    two specs in the view are going to clash. The metadata dir is used
-    because it's always added by Spack with identical files, so a
-    guaranteed clash that's easily verified."""
-    seen = dict()
-    for current_spec in specs:
-        metadata_dir = self.relative_metadata_dir_for_spec(current_spec)
-        conflicting_spec = seen.get(metadata_dir)
-        if conflicting_spec:
-            raise ConflictingSpecsError(current_spec, conflicting_spec)
-        seen[metadata_dir] = current_spec
def add_specs(self, *specs, **kwargs):
assert all((s.concrete for s in specs))
if len(specs) == 0:
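The docstring of the removed _sanity_check_view_projection above describes the duplicate-projection problem well; here is a stripped-down sketch of the same detection idea, with invented spec names and assuming both versions project to the same .spack metadata path:

    seen = {}
    for name, metadata_dir in [("zlib@1.2.12", ".spack/zlib"), ("zlib@1.2.13", ".spack/zlib")]:
        conflicting = seen.get(metadata_dir)
        if conflicting:
            print(f"conflict: {name} and {conflicting} project to {metadata_dir}")
        seen[metadata_dir] = name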
@@ -669,8 +652,6 @@ def add_specs(self, *specs, **kwargs):
if kwargs.get("exclude", None):
specs = set(filter_exclude(specs, kwargs["exclude"]))
-self._sanity_check_view_projection(specs)
# Ignore spack meta data folder.
def skip_list(file):
return os.path.basename(file) == spack.store.layout.metadata_dir
@@ -705,45 +686,32 @@ def skip_list(file):
for dst in visitor.directories:
os.mkdir(os.path.join(self._root, dst))
-# Link the files using a "merge map": full src => full dst
-merge_map_per_prefix = self._source_merge_visitor_to_merge_map(visitor)
-for spec in specs:
-    merge_map = merge_map_per_prefix.get(spec.package.view_source(), None)
-    if not merge_map:
-        # Not every spec may have files to contribute.
-        continue
+# Then group the files to be linked by spec...
+# For compatibility, we have to create a merge_map dict mapping
+# full_src => full_dst
+files_per_spec = itertools.groupby(visitor.files.items(), key=lambda item: item[1][0])
+for (spec, (src_root, rel_paths)) in zip(specs, files_per_spec):
+    merge_map = dict()
+    for dst_rel, (_, src_rel) in rel_paths:
+        full_src = os.path.join(src_root, src_rel)
+        full_dst = os.path.join(self._root, dst_rel)
+        merge_map[full_src] = full_dst
spec.package.add_files_to_view(self, merge_map, skip_if_exists=False)
# Finally create the metadata dirs.
self.link_metadata(specs)
-def _source_merge_visitor_to_merge_map(self, visitor: SourceMergeVisitor):
-    # For compatibility with add_files_to_view, we have to create a
-    # merge_map of the form join(src_root, src_rel) => join(dst_root, dst_rel),
-    # but our visitor.files format is dst_rel => (src_root, src_rel).
-    # We exploit that visitor.files is an ordered dict, and files per source
-    # prefix are contiguous.
-    source_root = lambda item: item[1][0]
-    per_source = itertools.groupby(visitor.files.items(), key=source_root)
-    return {
-        src_root: {
-            os.path.join(src_root, src_rel): os.path.join(self._root, dst_rel)
-            for dst_rel, (_, src_rel) in group
-        }
-        for src_root, group in per_source
-    }
-def relative_metadata_dir_for_spec(self, spec):
-    return os.path.join(
-        self.get_relative_projection_for_spec(spec), spack.store.layout.metadata_dir, spec.name
-    )
def link_metadata(self, specs):
metadata_visitor = SourceMergeVisitor()
for spec in specs:
src_prefix = os.path.join(spec.package.view_source(), spack.store.layout.metadata_dir)
-proj = self.relative_metadata_dir_for_spec(spec)
+proj = os.path.join(
+    self.get_relative_projection_for_spec(spec),
+    spack.store.layout.metadata_dir,
+    spec.name,
+)
metadata_visitor.set_projection(proj)
visit_directory_tree(src_prefix, metadata_visitor)
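For orientation, both sides of this hunk ultimately build the same kind of mapping for add_files_to_view; with invented paths, one spec's merge_map has this shape:

    # The visitor records dst_rel => (src_root, src_rel)...
    visitor_files = {"bin/zlib-config": ("/spack/opt/zlib", "bin/zlib-config")}
    # ...which becomes full-source => full-destination pairs in the view root.
    merge_map = {"/spack/opt/zlib/bin/zlib-config": "/view/bin/zlib-config"}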

View File

@@ -37,6 +37,7 @@ class SharedLibrariesVisitor(BaseDirectoryVisitor):
exception of an exclude list."""
def __init__(self, exclude_list):
+# List of file and directory names to be excluded
self.exclude_list = frozenset(exclude_list)

View File

@@ -30,7 +30,8 @@
#: Groupdb does not exist on Windows, prevent imports
#: on supported systems
-if sys.platform != "win32":
+is_windows = sys.platform == "win32"
+if not is_windows:
import grp
#: Spack itself also limits the shebang line to at most 4KB, which should be plenty.
@@ -211,7 +212,8 @@ def install_sbang():
# copy over the fresh copy of `sbang`
sbang_tmp_path = os.path.join(
os.path.dirname(sbang_path), ".%s.tmp" % os.path.basename(sbang_path)
os.path.dirname(sbang_path),
".%s.tmp" % os.path.basename(sbang_path),
)
shutil.copy(spack.paths.sbang_script, sbang_tmp_path)

View File

@@ -84,6 +84,9 @@
#: queue invariants).
STATUS_REMOVED = "removed"
+is_windows = sys.platform == "win32"
+is_osx = sys.platform == "darwin"
class InstallAction(object):
#: Don't perform an install
@@ -166,9 +169,9 @@ def _do_fake_install(pkg):
if not pkg.name.startswith("lib"):
library = "lib" + library
plat_shared = ".dll" if sys.platform == "win32" else ".so"
plat_static = ".lib" if sys.platform == "win32" else ".a"
dso_suffix = ".dylib" if sys.platform == "darwin" else plat_shared
plat_shared = ".dll" if is_windows else ".so"
plat_static = ".lib" if is_windows else ".a"
dso_suffix = ".dylib" if is_osx else plat_shared
# Install fake command
fs.mkdirp(pkg.prefix.bin)
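The three suffix lines above reduce to a small platform table; a self-contained sketch (the package name is invented):

    import sys

    is_windows = sys.platform == "win32"
    is_osx = sys.platform == "darwin"

    plat_shared = ".dll" if is_windows else ".so"
    plat_static = ".lib" if is_windows else ".a"
    dso_suffix = ".dylib" if is_osx else plat_shared

    # Linux: libexample.so / libexample.a; macOS: libexample.dylib / libexample.a;
    # Windows: libexample.dll / libexample.lib
    print("libexample" + dso_suffix, "libexample" + plat_static)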
@@ -420,7 +423,11 @@ def _try_install_from_binary_cache(pkg, explicit, unsigned=False, timer=timer.NU
matches = binary_distribution.get_mirrors_for_spec(pkg.spec, index_only=True)
return _process_binary_cache_tarball(
-    pkg, explicit, unsigned, mirrors_for_spec=matches, timer=timer
+    pkg,
+    explicit,
+    unsigned,
+    mirrors_for_spec=matches,
+    timer=timer,
)
@@ -782,7 +789,7 @@ def _add_bootstrap_compilers(self, compiler, architecture, pkgs, request, all_de
associated dependents
"""
packages = _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs)
-for comp_pkg, is_compiler in packages:
+for (comp_pkg, is_compiler) in packages:
pkgid = package_id(comp_pkg)
if pkgid not in self.build_tasks:
self._add_init_task(comp_pkg, request, is_compiler, all_deps)
@@ -806,7 +813,8 @@ def _modify_existing_task(self, pkgid, attr, value):
key, task = tup
if task.pkg_id == pkgid:
tty.debug(
"Modifying task for {0} to treat it as a compiler".format(pkgid), level=2
"Modifying task for {0} to treat it as a compiler".format(pkgid),
level=2,
)
setattr(task, attr, value)
self.build_pq[i] = (key, task)
@@ -1204,6 +1212,7 @@ def _add_tasks(self, request, all_deps):
install_package = request.install_args.get("install_package")
if install_package and request.pkg_id not in self.build_tasks:
+# Be sure to clear any previous failure
spack.store.db.clear_failure(request.spec, force=True)
@@ -1752,16 +1761,14 @@ def install(self):
raise
except binary_distribution.NoChecksumException as exc:
-if task.cache_only:
-    raise
-# Checking hash on downloaded binary failed.
-err = "Failed to install {0} from binary cache due to {1}:"
-err += " Requeueing to install from source."
-tty.error(err.format(pkg.name, str(exc)))
-task.use_cache = False
-self._requeue_task(task)
-continue
+if not task.cache_only:
+    # Checking hash on downloaded binary failed.
+    err = "Failed to install {0} from binary cache due to {1}:"
+    err += " Requeueing to install from source."
+    tty.error(err.format(pkg.name, str(exc)))
+    task.use_cache = False
+    self._requeue_task(task)
+    continue
except (Exception, SystemExit) as exc:
self._update_failed(task, True, exc)
@@ -1941,7 +1948,11 @@ def run(self):
# Run post install hooks before build stage is removed.
spack.hooks.post_install(self.pkg.spec)
-_print_timer(pre=self.pre, pkg_id=self.pkg_id, timer=self.timer)
+_print_timer(
+    pre=self.pre,
+    pkg_id=self.pkg_id,
+    timer=self.timer,
+)
_print_installed_pkg(self.pkg.prefix)
# Send final status that install is successful

View File

@@ -575,7 +575,7 @@ def setup_main_options(args):
if args.debug:
spack.util.debug.register_interrupt_handler()
spack.config.set("config:debug", True, scope="command_line")
-spack.util.environment.TRACING_ENABLED = True
+spack.util.environment.tracing_enabled = True
if args.timestamp:
tty.set_timestamp(True)

Some files were not shown because too many files have changed in this diff.