Compare commits


4 Commits

Author           SHA1        Message                                       Date
Harmen Stoppels  eb77658b8f  manual changes                                2024-04-03 09:57:43 +02:00
Harmen Stoppels  430b41efc6  fix ordering                                  2024-04-02 19:10:01 +02:00
Harmen Stoppels  b628c989cb  undo: py-geopmdpy                             2024-04-02 17:40:37 +02:00
Harmen Stoppels  8f898a9cdc  python ecosystem: sdist -> universal wheels  2024-04-02 17:38:42 +02:00
1777 changed files with 25874 additions and 19813 deletions

View File

@@ -34,7 +34,7 @@ jobs:
run: |
. share/spack/setup-env.sh
coverage run $(which spack) audit packages
coverage run $(which spack) -d audit externals
coverage run $(which spack) audit externals
coverage combine
coverage xml
- name: Package audits (without coverage)
@@ -43,7 +43,7 @@ jobs:
. share/spack/setup-env.sh
$(which spack) audit packages
$(which spack) audit externals
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
- uses: codecov/codecov-action@c16abc29c95fcf9174b58eb7e1abf4c866893bc8
if: ${{ inputs.with_coverage == 'true' }}
with:
flags: unittests,audits

View File

@@ -50,9 +50,7 @@ jobs:
[rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
[rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
[fedora37, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:37'],
[fedora38, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:38'],
[fedora39, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:39'],
[fedora40, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:40']]
[fedora38, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:38']]
name: Build ${{ matrix.dockerfile[0] }}
if: github.repository == 'spack/spack'
steps:
@@ -98,7 +96,7 @@ jobs:
uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb
uses: docker/setup-buildx-action@2b51285047da1547ffb1b2203d8be4c0af6b1f20
- name: Log in to GitHub Container Registry
uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20

View File

@@ -1,4 +1,4 @@
black==24.4.0
black==24.3.0
clingo==5.7.1
flake8==7.0.0
isort==5.13.2

View File

@@ -91,7 +91,7 @@ jobs:
UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
- uses: codecov/codecov-action@c16abc29c95fcf9174b58eb7e1abf4c866893bc8
with:
flags: unittests,linux,${{ matrix.concretizer }}
token: ${{ secrets.CODECOV_TOKEN }}
@@ -124,7 +124,7 @@ jobs:
COVERAGE: true
run: |
share/spack/qa/run-shell-tests
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
- uses: codecov/codecov-action@c16abc29c95fcf9174b58eb7e1abf4c866893bc8
with:
flags: shelltests,linux
token: ${{ secrets.CODECOV_TOKEN }}
@@ -185,7 +185,7 @@ jobs:
SPACK_TEST_SOLVER: clingo
run: |
share/spack/qa/run-unit-tests
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
- uses: codecov/codecov-action@c16abc29c95fcf9174b58eb7e1abf4c866893bc8
with:
flags: unittests,linux,clingo
token: ${{ secrets.CODECOV_TOKEN }}
@@ -223,7 +223,7 @@ jobs:
$(which spack) solve zlib
common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
$(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
- uses: codecov/codecov-action@c16abc29c95fcf9174b58eb7e1abf4c866893bc8
with:
flags: unittests,macos
token: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -33,7 +33,7 @@ jobs:
./share/spack/qa/validate_last_exit.ps1
coverage combine -a
coverage xml
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
- uses: codecov/codecov-action@c16abc29c95fcf9174b58eb7e1abf4c866893bc8
with:
flags: unittests,windows
token: ${{ secrets.CODECOV_TOKEN }}
@@ -59,7 +59,7 @@ jobs:
./share/spack/qa/validate_last_exit.ps1
coverage combine -a
coverage xml
- uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
- uses: codecov/codecov-action@c16abc29c95fcf9174b58eb7e1abf4c866893bc8
with:
flags: unittests,windows
token: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -88,7 +88,7 @@ Resources:
[bridged](https://github.com/matrix-org/matrix-appservice-slack#matrix-appservice-slack) to Slack.
* [**Github Discussions**](https://github.com/spack/spack/discussions):
for Q&A and discussions. Note the pinned discussions for announcements.
* **X**: [@spackpm](https://twitter.com/spackpm). Be sure to
* **Twitter**: [@spackpm](https://twitter.com/spackpm). Be sure to
`@mention` us!
* **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack):
only for announcements. Please use other venues for discussions.

View File

@@ -15,7 +15,7 @@ concretizer:
# as possible, rather than building. If `false`, we'll always give you a fresh
# concretization. If `dependencies`, we'll only reuse dependencies but
# give you a fresh concretization for your root specs.
reuse: true
reuse: dependencies
# Options that tune which targets are considered for concretization. The
# concretization process is very sensitive to the number of targets, and the time
# needed to reach a solution increases noticeably with the number of targets
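
The three ``reuse`` modes described in the comment above can be read back programmatically. A minimal sketch, assuming Spack's ``spack.config.get`` API, which accepts colon-separated paths into the merged configuration:

import spack.config

# "concretizer:reuse" is True, False, or the string "dependencies"
reuse = spack.config.get("concretizer:reuse")
if reuse is True:
    print("reuse installed packages and build caches where possible")
elif reuse == "dependencies":
    print("reuse dependencies, concretize roots fresh")
else:
    print("always produce a fresh concretization")
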

View File

@@ -19,6 +19,7 @@ packages:
- apple-clang
- clang
- gcc
- intel
providers:
elf: [libelf]
fuse: [macfuse]

View File

@@ -15,7 +15,7 @@
# -------------------------------------------------------------------------
packages:
all:
compiler: [gcc, clang, oneapi, xl, nag, fj, aocc]
compiler: [gcc, intel, pgi, clang, xl, nag, fj, aocc]
providers:
awk: [gawk]
blas: [openblas, amdblis]
@@ -35,7 +35,6 @@ packages:
java: [openjdk, jdk, ibm-java]
jpeg: [libjpeg-turbo, libjpeg]
lapack: [openblas, amdlibflame]
libc: [glibc, musl]
libgfortran: [ gcc-runtime ]
libglx: [mesa+glx, mesa18+glx]
libifcore: [ intel-oneapi-runtime ]

View File

@@ -220,40 +220,6 @@ section of the configuration:
.. _binary_caches_oci:
---------------------------------
Automatic push to a build cache
---------------------------------
Sometimes it is convenient to push packages to a build cache as soon as they are installed. Spack can do this by setting the autopush flag when adding a mirror:
.. code-block:: console
$ spack mirror add --autopush <name> <url or path>
Or the autopush flag can be set for an existing mirror:
.. code-block:: console
$ spack mirror set --autopush <name> # enable automatic push for an existing mirror
$ spack mirror set --no-autopush <name> # disable automatic push for an existing mirror
Then after installing a package it is automatically pushed to all mirrors with ``autopush: true``. The command
.. code-block:: console
$ spack install <package>
will have the same effect as
.. code-block:: console
$ spack install <package>
$ spack buildcache push <cache> <package> # for all caches with autopush: true
.. note::
Packages are automatically pushed to a build cache only if they are built from source.
-----------------------------------------
OCI / Docker V2 registries as build cache
-----------------------------------------

View File

@@ -233,12 +233,6 @@ The OS that are currently supported are summarized in the table below:
* - Fedora Linux 38
- ``fedora:38``
- ``spack/fedora38``
* - Fedora Linux 39
- ``fedora:39``
- ``spack/fedora39``
* - Fedora Linux 40
- ``fedora:40``
- ``spack/fedora40``

View File

@@ -552,11 +552,11 @@ With either interpreter you can run a single command:
.. code-block:: console
$ spack python -c 'from spack.spec import Spec; Spec("python").concretized()'
...
$ spack python -c 'import distro; distro.linux_distribution()'
('Ubuntu', '18.04', 'Bionic Beaver')
$ spack python -i ipython -c 'from spack.spec import Spec; Spec("python").concretized()'
Out[1]: ...
$ spack python -i ipython -c 'import distro; distro.linux_distribution()'
Out[1]: ('Ubuntu', '18.04', 'Bionic Beaver')
or a file:
@@ -1071,9 +1071,9 @@ Announcing a release
We announce releases in all of the major Spack communication channels.
Publishing the release takes care of GitHub. The remaining channels are
X, Slack, and the mailing list. Here are the steps:
Twitter, Slack, and the mailing list. Here are the steps:
#. Announce the release on X.
#. Announce the release on Twitter.
* Compose the tweet on the ``@spackpm`` account per the
``spack-twitter`` slack channel.

View File

@@ -6435,12 +6435,9 @@ the ``paths`` attribute:
echo "Target: x86_64-pc-linux-gnu"
echo "Thread model: posix"
echo "InstalledDir: /usr/bin"
platforms: ["linux", "darwin"]
results:
- spec: 'llvm@3.9.1 +clang~lld~lldb'
If the ``platforms`` attribute is present, tests are run only if the current host
matches one of the listed platforms.
Each test is performed by first creating a temporary directory structure as
specified in the corresponding ``layout`` and by then running
package detection and checking that the outcome matches the expected
@@ -6474,10 +6471,6 @@ package detection and checking that the outcome matches the expected
- A spec that is expected from detection
- Any valid spec
- Yes
* - ``results:[0]:extra_attributes``
- Extra attributes expected on the associated Spec
- Nested dictionary with string as keys, and regular expressions as leaf values
- No
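
A small sketch of the ``platforms`` gating described above, using ``sys.platform`` as a stand-in for Spack's host platform name (``linux``, ``darwin``):

import sys

def should_run_detection_test(test: dict) -> bool:
    # Run only if the current host matches a listed platform; a missing
    # "platforms" attribute means the test runs everywhere.
    platforms = test.get("platforms")
    return platforms is None or sys.platform in platforms

print(should_run_detection_test({"platforms": ["linux", "darwin"]}))
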
"""""""""""""""""""""""""""""""
Reuse tests from other packages

View File

@@ -2,12 +2,12 @@ sphinx==7.2.6
sphinxcontrib-programoutput==0.17
sphinx_design==0.5.0
sphinx-rtd-theme==2.0.0
python-levenshtein==0.25.1
python-levenshtein==0.25.0
docutils==0.20.1
pygments==2.17.2
urllib3==2.2.1
pytest==8.1.1
isort==5.13.2
black==24.4.0
black==24.3.0
flake8==7.0.0
mypy==1.9.0

lib/spack/env/cc (vendored): 255 changed lines
View File

@@ -47,8 +47,7 @@ SPACK_F77_RPATH_ARG
SPACK_FC_RPATH_ARG
SPACK_LINKER_ARG
SPACK_SHORT_SPEC
SPACK_SYSTEM_DIRS
SPACK_MANAGED_DIRS"
SPACK_SYSTEM_DIRS"
# Optional parameters that aren't required to be set
@@ -174,6 +173,22 @@ preextend() {
unset IFS
}
# system_dir PATH
# test whether a path is a system directory
system_dir() {
IFS=':' # SPACK_SYSTEM_DIRS is colon-separated
path="$1"
for sd in $SPACK_SYSTEM_DIRS; do
if [ "${path}" = "${sd}" ] || [ "${path}" = "${sd}/" ]; then
# success if path starts with a system prefix
unset IFS
return 0
fi
done
unset IFS
return 1 # fail if path does not start with a system prefix
}
# Fail with a clear message if the input contains any bell characters.
if eval "[ \"\${*#*${lsep}}\" != \"\$*\" ]"; then
die "Compiler command line contains our separator ('${lsep}'). Cannot parse."
@@ -186,18 +201,6 @@ for param in $params; do
fi
done
# eval this because SPACK_MANAGED_DIRS and SPACK_SYSTEM_DIRS are inputs we don't wanna loop over.
# moving the eval inside the function would eval it every call.
eval "\
path_order() {
case \"\$1\" in
$SPACK_MANAGED_DIRS) return 0 ;;
$SPACK_SYSTEM_DIRS) return 2 ;;
/*) return 1 ;;
esac
}
"
# Check if optional parameters are defined
# If we aren't asking for debug flags, don't add them
if [ -z "${SPACK_ADD_DEBUG_FLAGS:-}" ]; then
@@ -245,7 +248,7 @@ case "$command" in
lang_flags=C
debug_flags="-g"
;;
c++|CC|g++|clang++|armclang++|icpc|icpx|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++|crayCC)
c++|CC|g++|clang++|armclang++|icpc|icpx|dpcpp|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++|crayCC)
command="$SPACK_CXX"
language="C++"
comp="CXX"
@@ -417,12 +420,11 @@ input_command="$*"
parse_Wl() {
while [ $# -ne 0 ]; do
if [ "$wl_expect_rpath" = yes ]; then
path_order "$1"
case $? in
0) append return_spack_store_rpath_dirs_list "$1" ;;
1) append return_rpath_dirs_list "$1" ;;
2) append return_system_rpath_dirs_list "$1" ;;
esac
if system_dir "$1"; then
append return_system_rpath_dirs_list "$1"
else
append return_rpath_dirs_list "$1"
fi
wl_expect_rpath=no
else
case "$1" in
@@ -430,25 +432,21 @@ parse_Wl() {
arg="${1#-rpath=}"
if [ -z "$arg" ]; then
shift; continue
elif system_dir "$arg"; then
append return_system_rpath_dirs_list "$arg"
else
append return_rpath_dirs_list "$arg"
fi
path_order "$arg"
case $? in
0) append return_spack_store_rpath_dirs_list "$arg" ;;
1) append return_rpath_dirs_list "$arg" ;;
2) append return_system_rpath_dirs_list "$arg" ;;
esac
;;
--rpath=*)
arg="${1#--rpath=}"
if [ -z "$arg" ]; then
shift; continue
elif system_dir "$arg"; then
append return_system_rpath_dirs_list "$arg"
else
append return_rpath_dirs_list "$arg"
fi
path_order "$arg"
case $? in
0) append return_spack_store_rpath_dirs_list "$arg" ;;
1) append return_rpath_dirs_list "$arg" ;;
2) append return_system_rpath_dirs_list "$arg" ;;
esac
;;
-rpath|--rpath)
wl_expect_rpath=yes
@@ -475,20 +473,12 @@ categorize_arguments() {
return_other_args_list=""
return_isystem_was_used=""
return_isystem_spack_store_include_dirs_list=""
return_isystem_system_include_dirs_list=""
return_isystem_include_dirs_list=""
return_spack_store_include_dirs_list=""
return_system_include_dirs_list=""
return_include_dirs_list=""
return_spack_store_lib_dirs_list=""
return_system_lib_dirs_list=""
return_lib_dirs_list=""
return_spack_store_rpath_dirs_list=""
return_system_rpath_dirs_list=""
return_rpath_dirs_list=""
@@ -536,7 +526,7 @@ categorize_arguments() {
continue
fi
replaced="$after$stripped"
# it matched, remove it
shift
@@ -556,32 +546,29 @@ categorize_arguments() {
arg="${1#-isystem}"
return_isystem_was_used=true
if [ -z "$arg" ]; then shift; arg="$1"; fi
path_order "$arg"
case $? in
0) append return_isystem_spack_store_include_dirs_list "$arg" ;;
1) append return_isystem_include_dirs_list "$arg" ;;
2) append return_isystem_system_include_dirs_list "$arg" ;;
esac
if system_dir "$arg"; then
append return_isystem_system_include_dirs_list "$arg"
else
append return_isystem_include_dirs_list "$arg"
fi
;;
-I*)
arg="${1#-I}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
path_order "$arg"
case $? in
0) append return_spack_store_include_dirs_list "$arg" ;;
1) append return_include_dirs_list "$arg" ;;
2) append return_system_include_dirs_list "$arg" ;;
esac
if system_dir "$arg"; then
append return_system_include_dirs_list "$arg"
else
append return_include_dirs_list "$arg"
fi
;;
-L*)
arg="${1#-L}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
path_order "$arg"
case $? in
0) append return_spack_store_lib_dirs_list "$arg" ;;
1) append return_lib_dirs_list "$arg" ;;
2) append return_system_lib_dirs_list "$arg" ;;
esac
if system_dir "$arg"; then
append return_system_lib_dirs_list "$arg"
else
append return_lib_dirs_list "$arg"
fi
;;
-l*)
# -loopopt=0 is generated erroneously in autoconf <= 2.69,
@@ -614,32 +601,29 @@ categorize_arguments() {
break
elif [ "$xlinker_expect_rpath" = yes ]; then
# Register the path of -Xlinker -rpath <other args> -Xlinker <path>
path_order "$1"
case $? in
0) append return_spack_store_rpath_dirs_list "$1" ;;
1) append return_rpath_dirs_list "$1" ;;
2) append return_system_rpath_dirs_list "$1" ;;
esac
if system_dir "$1"; then
append return_system_rpath_dirs_list "$1"
else
append return_rpath_dirs_list "$1"
fi
xlinker_expect_rpath=no
else
case "$1" in
-rpath=*)
arg="${1#-rpath=}"
path_order "$arg"
case $? in
0) append return_spack_store_rpath_dirs_list "$arg" ;;
1) append return_rpath_dirs_list "$arg" ;;
2) append return_system_rpath_dirs_list "$arg" ;;
esac
if system_dir "$arg"; then
append return_system_rpath_dirs_list "$arg"
else
append return_rpath_dirs_list "$arg"
fi
;;
--rpath=*)
arg="${1#--rpath=}"
path_order "$arg"
case $? in
0) append return_spack_store_rpath_dirs_list "$arg" ;;
1) append return_rpath_dirs_list "$arg" ;;
2) append return_system_rpath_dirs_list "$arg" ;;
esac
if system_dir "$arg"; then
append return_system_rpath_dirs_list "$arg"
else
append return_rpath_dirs_list "$arg"
fi
;;
-rpath|--rpath)
xlinker_expect_rpath=yes
@@ -677,25 +661,16 @@ categorize_arguments() {
}
categorize_arguments "$@"
spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
system_include_dirs_list="$return_system_include_dirs_list"
include_dirs_list="$return_include_dirs_list"
spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
system_lib_dirs_list="$return_system_lib_dirs_list"
lib_dirs_list="$return_lib_dirs_list"
spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
system_rpath_dirs_list="$return_system_rpath_dirs_list"
rpath_dirs_list="$return_rpath_dirs_list"
isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
isystem_include_dirs_list="$return_isystem_include_dirs_list"
isystem_was_used="$return_isystem_was_used"
other_args_list="$return_other_args_list"
include_dirs_list="$return_include_dirs_list"
lib_dirs_list="$return_lib_dirs_list"
rpath_dirs_list="$return_rpath_dirs_list"
system_include_dirs_list="$return_system_include_dirs_list"
system_lib_dirs_list="$return_system_lib_dirs_list"
system_rpath_dirs_list="$return_system_rpath_dirs_list"
isystem_was_used="$return_isystem_was_used"
isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
isystem_include_dirs_list="$return_isystem_include_dirs_list"
other_args_list="$return_other_args_list"
#
# Add flags from Spack's cppflags, cflags, cxxflags, fcflags, fflags, and
@@ -755,7 +730,7 @@ esac
# Linker flags
case "$mode" in
ccld)
ld|ccld)
extend spack_flags_list SPACK_LDFLAGS
;;
esac
@@ -763,25 +738,16 @@ esac
IFS="$lsep"
categorize_arguments $spack_flags_list
unset IFS
spack_flags_isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"
spack_flags_spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
spack_flags_include_dirs_list="$return_include_dirs_list"
spack_flags_spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
spack_flags_lib_dirs_list="$return_lib_dirs_list"
spack_flags_spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
spack_flags_isystem_was_used="$return_isystem_was_used"
spack_flags_other_args_list="$return_other_args_list"
spack_flags_include_dirs_list="$return_include_dirs_list"
spack_flags_lib_dirs_list="$return_lib_dirs_list"
spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
spack_flags_isystem_was_used="$return_isystem_was_used"
spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"
spack_flags_other_args_list="$return_other_args_list"
# On macOS insert headerpad_max_install_names linker flag
@@ -801,13 +767,11 @@ if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
# Append RPATH directories. Note that in the case of the
# top-level package these directories may not exist yet. For dependencies
# it is assumed that paths have already been confirmed.
extend spack_store_rpath_dirs_list SPACK_STORE_RPATH_DIRS
extend rpath_dirs_list SPACK_RPATH_DIRS
fi
fi
if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
extend spack_store_lib_dirs_list SPACK_STORE_LINK_DIRS
extend lib_dirs_list SPACK_LINK_DIRS
fi
@@ -834,50 +798,38 @@ case "$mode" in
;;
esac
case "$mode" in
cpp|cc|as|ccld)
if [ "$spack_flags_isystem_was_used" = "true" ] || [ "$isystem_was_used" = "true" ]; then
extend isystem_spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
extend isystem_include_dirs_list SPACK_INCLUDE_DIRS
else
extend spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
extend include_dirs_list SPACK_INCLUDE_DIRS
fi
;;
esac
#
# Finally, reassemble the command line.
#
args_list="$flags_list"
# Include search paths partitioned by (in store, non-system, system)
# Insert include directories just prior to any system include directories
# NOTE: adding ${lsep} to the prefix here turns every added element into two
extend args_list spack_flags_spack_store_include_dirs_list -I
extend args_list spack_store_include_dirs_list -I
extend args_list spack_flags_include_dirs_list -I
extend args_list include_dirs_list -I
extend args_list spack_flags_isystem_spack_store_include_dirs_list "-isystem${lsep}"
extend args_list isystem_spack_store_include_dirs_list "-isystem${lsep}"
extend args_list spack_flags_include_dirs_list "-I"
extend args_list include_dirs_list "-I"
extend args_list spack_flags_isystem_include_dirs_list "-isystem${lsep}"
extend args_list isystem_include_dirs_list "-isystem${lsep}"
case "$mode" in
cpp|cc|as|ccld)
if [ "$spack_flags_isystem_was_used" = "true" ]; then
extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
elif [ "$isystem_was_used" = "true" ]; then
extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
else
extend args_list SPACK_INCLUDE_DIRS "-I"
fi
;;
esac
extend args_list spack_flags_system_include_dirs_list -I
extend args_list system_include_dirs_list -I
extend args_list spack_flags_isystem_system_include_dirs_list "-isystem${lsep}"
extend args_list isystem_system_include_dirs_list "-isystem${lsep}"
# Library search paths partitioned by (in store, non-system, system)
extend args_list spack_flags_spack_store_lib_dirs_list "-L"
extend args_list spack_store_lib_dirs_list "-L"
# Library search paths
extend args_list spack_flags_lib_dirs_list "-L"
extend args_list lib_dirs_list "-L"
extend args_list spack_flags_system_lib_dirs_list "-L"
extend args_list system_lib_dirs_list "-L"
@@ -887,12 +839,8 @@ case "$mode" in
if [ -n "$dtags_to_add" ] ; then
append args_list "$linker_arg$dtags_to_add"
fi
extend args_list spack_flags_spack_store_rpath_dirs_list "$rpath"
extend args_list spack_store_rpath_dirs_list "$rpath"
extend args_list spack_flags_rpath_dirs_list "$rpath"
extend args_list rpath_dirs_list "$rpath"
extend args_list spack_flags_system_rpath_dirs_list "$rpath"
extend args_list system_rpath_dirs_list "$rpath"
;;
@@ -900,12 +848,8 @@ case "$mode" in
if [ -n "$dtags_to_add" ] ; then
append args_list "$dtags_to_add"
fi
extend args_list spack_flags_spack_store_rpath_dirs_list "-rpath${lsep}"
extend args_list spack_store_rpath_dirs_list "-rpath${lsep}"
extend args_list spack_flags_rpath_dirs_list "-rpath${lsep}"
extend args_list rpath_dirs_list "-rpath${lsep}"
extend args_list spack_flags_system_rpath_dirs_list "-rpath${lsep}"
extend args_list system_rpath_dirs_list "-rpath${lsep}"
;;
@@ -969,3 +913,4 @@ fi
# Execute the full command, preserving spaces with IFS set
# to the alarm bell separator.
IFS="$lsep"; exec $full_command_list
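
The wrapper hunks above contrast a boolean ``system_dir`` test with a three-way ``path_order`` classification (Spack store / other / system) that lets store paths be emitted first. A rough Python rendering of that classification, with hypothetical prefixes:

def path_order(path, managed_prefixes, system_dirs):
    """0 = Spack-managed store path, 1 = other path, 2 = system directory."""
    if any(path == p or path.startswith(p.rstrip("/") + "/") for p in managed_prefixes):
        return 0
    if path.rstrip("/") in system_dirs:
        return 2
    return 1

managed = ["/opt/spack/store"]                    # hypothetical store root
system = {"/usr", "/usr/lib", "/usr/include"}     # hypothetical system dirs
for p in ["/opt/spack/store/gcc-13/lib", "/home/me/lib", "/usr/lib"]:
    print(p, "->", path_order(p, managed, system))   # -> 0, 1, 2 respectively
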

View File

@@ -497,7 +497,7 @@ def copy_attributes(self, t, memo=None):
Tag.attrib, merge_attrib]:
if hasattr(self, a):
if memo is not None:
setattr(t, a, copy.deepcopy(getattr(self, a), memo))
setattr(t, a, copy.deepcopy(getattr(self, a, memo)))
else:
setattr(t, a, getattr(self, a))
# fmt: on
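
The one-argument difference above decides which call receives ``memo``: ``copy.deepcopy(getattr(self, a), memo)`` threads the memo dictionary through the copy, while ``getattr(self, a, memo)`` merely uses ``memo`` as a getattr default and deepcopies without one. A minimal illustration:

import copy

class Box:
    pass

obj, memo = Box(), {}
obj.payload = [1, 2, 3]

a = copy.deepcopy(getattr(obj, "payload"), memo)   # memo reaches deepcopy
b = copy.deepcopy(getattr(obj, "payload", memo))   # memo is only a default
print(a == b, bool(memo))  # True True -- only the first call populated memo
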

View File

@@ -1,13 +0,0 @@
diff --git a/lib/spack/external/_vendoring/ruamel/yaml/comments.py b/lib/spack/external/_vendoring/ruamel/yaml/comments.py
index 1badeda585..892c868af3 100644
--- a/lib/spack/external/_vendoring/ruamel/yaml/comments.py
+++ b/lib/spack/external/_vendoring/ruamel/yaml/comments.py
@@ -497,7 +497,7 @@ def copy_attributes(self, t, memo=None):
Tag.attrib, merge_attrib]:
if hasattr(self, a):
if memo is not None:
- setattr(t, a, copy.deepcopy(getattr(self, a, memo)))
+ setattr(t, a, copy.deepcopy(getattr(self, a), memo))
else:
setattr(t, a, getattr(self, a))
# fmt: on

View File

@@ -198,32 +198,15 @@ def getuid():
return os.getuid()
def _win_rename(src, dst):
# os.replace will still fail if on Windows (but not POSIX) if the dst
# is a symlink to a directory (all other cases have parity Windows <-> Posix)
if os.path.islink(dst) and os.path.isdir(os.path.realpath(dst)):
if os.path.samefile(src, dst):
# src and dst are the same
# do nothing and exit early
return
# If dst exists and is a symlink to a directory
# we need to remove dst and then perform rename/replace
# this is safe to do as there's no chance src == dst now
os.remove(dst)
os.replace(src, dst)
@system_path_filter
def rename(src, dst):
# On Windows, os.rename will fail if the destination file already exists
# os.replace is the same as os.rename on POSIX and is MoveFileExW w/
# the MOVEFILE_REPLACE_EXISTING flag on Windows
# Windows invocation is abstracted behind additional logic handling
# remaining cases of divergent behavior across platforms
if sys.platform == "win32":
_win_rename(src, dst)
else:
os.replace(src, dst)
# Windows path existence checks will sometimes fail on junctions/links/symlinks
# so check for that case
if os.path.exists(dst) or islink(dst):
os.remove(dst)
os.rename(src, dst)
@system_path_filter
@@ -1234,12 +1217,10 @@ def windows_sfn(path: os.PathLike):
import ctypes
k32 = ctypes.WinDLL("kernel32", use_last_error=True)
# Method with null values returns size of short path name
sz = k32.GetShortPathNameW(path, None, 0)
# stub Windows types TCHAR[LENGTH]
TCHAR_arr = ctypes.c_wchar * sz
TCHAR_arr = ctypes.c_wchar * len(path)
ret_str = TCHAR_arr()
k32.GetShortPathNameW(path, ctypes.byref(ret_str), sz)
k32.GetShortPathNameW(path, ret_str, len(path))
return ret_str.value
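
The short-path hunk above uses the classic Win32 two-call pattern: call once with a null buffer to learn the required size, then again with a buffer that large. A hedged sketch (Windows-only; ``GetShortPathNameW`` is the real kernel32 entry point):

import ctypes

def short_path_name(path: str) -> str:
    k32 = ctypes.WinDLL("kernel32", use_last_error=True)
    size = k32.GetShortPathNameW(path, None, 0)    # first call: size query
    buffer = ctypes.create_unicode_buffer(size)
    k32.GetShortPathNameW(path, buffer, size)      # second call: fill buffer
    return buffer.value
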

View File

@@ -12,7 +12,7 @@
import traceback
from datetime import datetime
from sys import platform as _platform
from typing import Any, NoReturn
from typing import NoReturn
if _platform != "win32":
import fcntl
@@ -158,22 +158,21 @@ def get_timestamp(force=False):
return ""
def msg(message: Any, *args: Any, newline: bool = True) -> None:
def msg(message, *args, **kwargs):
if not msg_enabled():
return
if isinstance(message, Exception):
message = f"{message.__class__.__name__}: {message}"
else:
message = str(message)
message = "%s: %s" % (message.__class__.__name__, str(message))
newline = kwargs.get("newline", True)
st_text = ""
if _stacktrace:
st_text = process_stacktrace(2)
nl = "\n" if newline else ""
cwrite(f"@*b{{{st_text}==>}} {get_timestamp()}{cescape(_output_filter(message))}{nl}")
if newline:
cprint("@*b{%s==>} %s%s" % (st_text, get_timestamp(), cescape(_output_filter(message))))
else:
cwrite("@*b{%s==>} %s%s" % (st_text, get_timestamp(), cescape(_output_filter(message))))
for arg in args:
print(indent + _output_filter(str(arg)))

View File

@@ -237,6 +237,7 @@ def transpose():
def colified(
elts: List[Any],
cols: int = 0,
output: Optional[IO] = None,
indent: int = 0,
padding: int = 2,
tty: Optional[bool] = None,

View File

@@ -62,7 +62,6 @@
import re
import sys
from contextlib import contextmanager
from typing import Optional
class ColorParseError(Exception):
@@ -96,7 +95,7 @@ def __init__(self, message):
} # white
# Regex to be used for color formatting
COLOR_RE = re.compile(r"@(?:(@)|(\.)|([*_])?([a-zA-Z])?(?:{((?:[^}]|}})*)})?)")
color_re = r"@(?:@|\.|([*_])?([a-zA-Z])?(?:{((?:[^}]|}})*)})?)"
# Mapping from color arguments to values for tty.set_color
color_when_values = {"always": True, "auto": None, "never": False}
@@ -204,64 +203,77 @@ def color_when(value):
set_color_when(old_value)
def _escape(s: str, color: bool, enclose: bool, zsh: bool) -> str:
"""Returns a TTY escape sequence for a color"""
if color:
if zsh:
result = rf"\e[0;{s}m"
class match_to_ansi:
def __init__(self, color=True, enclose=False, zsh=False):
self.color = _color_when_value(color)
self.enclose = enclose
self.zsh = zsh
def escape(self, s):
"""Returns a TTY escape sequence for a color"""
if self.color:
if self.zsh:
result = rf"\e[0;{s}m"
else:
result = f"\033[{s}m"
if self.enclose:
result = rf"\[{result}\]"
return result
else:
result = f"\033[{s}m"
return ""
if enclose:
result = rf"\[{result}\]"
def __call__(self, match):
"""Convert a match object generated by ``color_re`` into an ansi
color code. This can be used as a handler in ``re.sub``.
"""
style, color, text = match.groups()
m = match.group(0)
return result
else:
return ""
if m == "@@":
return "@"
elif m == "@.":
return self.escape(0)
elif m == "@":
raise ColorParseError("Incomplete color format: '%s' in %s" % (m, match.string))
string = styles[style]
if color:
if color not in colors:
raise ColorParseError(
"Invalid color specifier: '%s' in '%s'" % (color, match.string)
)
string += ";" + str(colors[color])
colored_text = ""
if text:
colored_text = text + self.escape(0)
return self.escape(string) + colored_text
def colorize(
string: str, color: Optional[bool] = None, enclose: bool = False, zsh: bool = False
) -> str:
def colorize(string, **kwargs):
"""Replace all color expressions in a string with ANSI control codes.
Args:
string: The string to replace
string (str): The string to replace
Returns:
The filtered string
str: The filtered string
Keyword Arguments:
color: If False, output will be plain text without control codes, for output to
non-console devices (default: automatically choose color or not)
enclose: If True, enclose ansi color sequences with
color (bool): If False, output will be plain text without control
codes, for output to non-console devices.
enclose (bool): If True, enclose ansi color sequences with
square brackets to prevent misestimation of terminal width.
zsh: If True, use zsh ansi codes instead of bash ones (for variables like PS1)
zsh (bool): If True, use zsh ansi codes instead of bash ones (for variables like PS1)
"""
color = color if color is not None else get_color_when()
def match_to_ansi(match):
"""Convert a match object generated by ``COLOR_RE`` into an ansi
color code. This can be used as a handler in ``re.sub``.
"""
escaped_at, dot, style, color_code, text = match.groups()
if escaped_at:
return "@"
elif dot:
return _escape(0, color, enclose, zsh)
elif not (style or color_code):
raise ColorParseError(
f"Incomplete color format: '{match.group(0)}' in '{match.string}'"
)
ansi_code = _escape(f"{styles[style]};{colors.get(color_code, '')}", color, enclose, zsh)
if text:
return f"{ansi_code}{text}{_escape(0, color, enclose, zsh)}"
else:
return ansi_code
return COLOR_RE.sub(match_to_ansi, string).replace("}}", "}")
color = _color_when_value(kwargs.get("color", get_color_when()))
zsh = kwargs.get("zsh", False)
string = re.sub(color_re, match_to_ansi(color, kwargs.get("enclose")), string, zsh)
string = string.replace("}}", "}")
return string
def clen(string):
@@ -293,7 +305,7 @@ def cprint(string, stream=None, color=None):
cwrite(string + "\n", stream, color)
def cescape(string: str) -> str:
def cescape(string):
"""Escapes special characters needed for color codes.
Replaces the following symbols with their equivalent literal forms:
@@ -309,7 +321,10 @@ def cescape(string: str) -> str:
Returns:
(str): the string with color codes escaped
"""
return string.replace("@", "@@").replace("}", "}}")
string = str(string)
string = string.replace("@", "@@")
string = string.replace("}", "}}")
return string
class ColorStream:
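
One subtlety in the ``colorize`` variant above that calls ``re.sub(color_re, match_to_ansi(color, kwargs.get("enclose")), string, zsh)``: the fourth positional parameter of ``re.sub`` is ``count``, not an option flag, so a value passed in that slot caps the number of substitutions:

import re

print(re.sub(r"a", "b", "aaa"))     # 'bbb' (count defaults to 0: replace all)
print(re.sub(r"a", "b", "aaa", 1))  # 'baa' (fourth positional is count)
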

View File

@@ -1111,76 +1111,4 @@ def _test_detection_by_executable(pkgs, error_cls):
details = [msg.format(s, idx) for s in sorted(not_expected)]
errors.append(error_cls(summary=summary, details=details))
matched_detection = []
for candidate in expected_specs:
try:
idx = specs.index(candidate)
except (AttributeError, ValueError):
pass
matched_detection.append((candidate, specs[idx]))
def _compare_extra_attribute(_expected, _detected, *, _spec):
result = []
# Check items are of the same type
if not isinstance(_detected, type(_expected)):
_summary = f'{pkg_name}: error when trying to detect "{_expected}"'
_details = [f"{_detected} was detected instead"]
return [error_cls(summary=_summary, details=_details)]
# If they are string expected is a regex
if isinstance(_expected, str):
try:
_regex = re.compile(_expected)
except re.error:
_summary = f'{pkg_name}: illegal regex in "{_spec}" extra attributes'
_details = [f"{_expected} is not a valid regex"]
return [error_cls(summary=_summary, details=_details)]
if not _regex.match(_detected):
_summary = (
f'{pkg_name}: error when trying to match "{_expected}" '
f"in extra attributes"
)
_details = [f"{_detected} does not match the regex"]
return [error_cls(summary=_summary, details=_details)]
if isinstance(_expected, dict):
_not_detected = set(_expected.keys()) - set(_detected.keys())
if _not_detected:
_summary = f"{pkg_name}: cannot detect some attributes for spec {_spec}"
_details = [
f'"{_expected}" was expected',
f'"{_detected}" was detected',
] + [f'attribute "{s}" was not detected' for s in sorted(_not_detected)]
result.append(error_cls(summary=_summary, details=_details))
_common = set(_expected.keys()) & set(_detected.keys())
for _key in _common:
result.extend(
_compare_extra_attribute(_expected[_key], _detected[_key], _spec=_spec)
)
return result
for expected, detected in matched_detection:
# We might not want to test all attributes, so avoid not_expected
not_detected = set(expected.extra_attributes) - set(detected.extra_attributes)
if not_detected:
summary = f"{pkg_name}: cannot detect some attributes for spec {expected}"
details = [
f'"{s}" was not detected [test_id={idx}]' for s in sorted(not_detected)
]
errors.append(error_cls(summary=summary, details=details))
common = set(expected.extra_attributes) & set(detected.extra_attributes)
for key in common:
errors.extend(
_compare_extra_attribute(
expected.extra_attributes[key],
detected.extra_attributes[key],
_spec=expected,
)
)
return errors
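
A compact sketch of the comparison rules implemented above: dictionaries are walked recursively, and string leaves of the expected attributes are treated as regular expressions matched against the detected values.

import re

def matches(expected, detected) -> bool:
    if isinstance(expected, dict):
        return all(
            key in detected and matches(value, detected[key])
            for key, value in expected.items()
        )
    return isinstance(detected, str) and re.match(expected, detected) is not None

expected = {"compilers": {"c": r".*/clang-\d+$"}}    # regex leaf value
detected = {"compilers": {"c": "/usr/bin/clang-15"}}
print(matches(expected, detected))  # True
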

View File

@@ -173,14 +173,35 @@ def _read_metadata(self, package_name: str) -> Any:
return data
def _install_by_hash(
self, pkg_hash: str, pkg_sha256: str, bincache_platform: spack.platforms.Platform
self,
pkg_hash: str,
pkg_sha256: str,
index: List[spack.spec.Spec],
bincache_platform: spack.platforms.Platform,
) -> None:
index_spec = next(x for x in index if x.dag_hash() == pkg_hash)
# Reconstruct the compiler that we need to use for bootstrapping
compiler_entry = {
"modules": [],
"operating_system": str(index_spec.os),
"paths": {
"cc": "/dev/null",
"cxx": "/dev/null",
"f77": "/dev/null",
"fc": "/dev/null",
},
"spec": str(index_spec.compiler),
"target": str(index_spec.target.family),
}
with spack.platforms.use_platform(bincache_platform):
query = spack.binary_distribution.BinaryCacheQuery(all_architectures=True)
for match in spack.store.find([f"/{pkg_hash}"], multiple=False, query_fn=query):
spack.binary_distribution.install_root_node(
match, unsigned=True, force=True, sha256=pkg_sha256
)
with spack.config.override("compilers", [{"compiler": compiler_entry}]):
spec_str = "/" + pkg_hash
query = spack.binary_distribution.BinaryCacheQuery(all_architectures=True)
matches = spack.store.find([spec_str], multiple=False, query_fn=query)
for match in matches:
spack.binary_distribution.install_root_node(
match, unsigned=True, force=True, sha256=pkg_sha256
)
def _install_and_test(
self,
@@ -211,7 +232,7 @@ def _install_and_test(
continue
for _, pkg_hash, pkg_sha256 in item["binaries"]:
self._install_by_hash(pkg_hash, pkg_sha256, bincache_platform)
self._install_by_hash(pkg_hash, pkg_sha256, index, bincache_platform)
info: ConfigDictionary = {}
if test_fn(query_spec=abstract_spec, query_info=info):

View File

@@ -43,7 +43,7 @@
from collections import defaultdict
from enum import Flag, auto
from itertools import chain
from typing import List, Set, Tuple
from typing import List, Tuple
import llnl.util.tty as tty
from llnl.string import plural
@@ -57,10 +57,8 @@
import spack.build_systems.meson
import spack.build_systems.python
import spack.builder
import spack.compilers
import spack.config
import spack.deptypes as dt
import spack.error
import spack.main
import spack.package_base
import spack.paths
@@ -68,7 +66,6 @@
import spack.repo
import spack.schema.environment
import spack.spec
import spack.stage
import spack.store
import spack.subprocess_context
import spack.user_environment
@@ -81,7 +78,7 @@
from spack.installer import InstallError
from spack.util.cpus import determine_number_of_jobs
from spack.util.environment import (
SYSTEM_DIR_CASE_ENTRY,
SYSTEM_DIRS,
EnvironmentModifications,
env_flag,
filter_system_paths,
@@ -104,13 +101,9 @@
# Spack's compiler wrappers.
#
SPACK_ENV_PATH = "SPACK_ENV_PATH"
SPACK_MANAGED_DIRS = "SPACK_MANAGED_DIRS"
SPACK_INCLUDE_DIRS = "SPACK_INCLUDE_DIRS"
SPACK_LINK_DIRS = "SPACK_LINK_DIRS"
SPACK_RPATH_DIRS = "SPACK_RPATH_DIRS"
SPACK_STORE_INCLUDE_DIRS = "SPACK_STORE_INCLUDE_DIRS"
SPACK_STORE_LINK_DIRS = "SPACK_STORE_LINK_DIRS"
SPACK_STORE_RPATH_DIRS = "SPACK_STORE_RPATH_DIRS"
SPACK_RPATH_DEPS = "SPACK_RPATH_DEPS"
SPACK_LINK_DEPS = "SPACK_LINK_DEPS"
SPACK_PREFIX = "SPACK_PREFIX"
@@ -423,7 +416,7 @@ def set_compiler_environment_variables(pkg, env):
env.set("SPACK_COMPILER_SPEC", str(spec.compiler))
env.set("SPACK_SYSTEM_DIRS", SYSTEM_DIR_CASE_ENTRY)
env.set("SPACK_SYSTEM_DIRS", ":".join(SYSTEM_DIRS))
compiler.setup_custom_environment(pkg, env)
@@ -551,26 +544,9 @@ def update_compiler_args_for_dep(dep):
include_dirs = list(dedupe(filter_system_paths(include_dirs)))
rpath_dirs = list(dedupe(filter_system_paths(rpath_dirs)))
# Spack managed directories include the stage, store and upstream stores. We extend this with
# their real paths to make it more robust (e.g. /tmp vs /private/tmp on macOS).
spack_managed_dirs: Set[str] = {
spack.stage.get_stage_root(),
spack.store.STORE.db.root,
*(db.root for db in spack.store.STORE.db.upstream_dbs),
}
spack_managed_dirs.update([os.path.realpath(p) for p in spack_managed_dirs])
env.set(SPACK_MANAGED_DIRS, "|".join(f'"{p}/"*' for p in sorted(spack_managed_dirs)))
is_spack_managed = lambda p: any(p.startswith(store) for store in spack_managed_dirs)
link_dirs_spack, link_dirs_system = stable_partition(link_dirs, is_spack_managed)
include_dirs_spack, include_dirs_system = stable_partition(include_dirs, is_spack_managed)
rpath_dirs_spack, rpath_dirs_system = stable_partition(rpath_dirs, is_spack_managed)
env.set(SPACK_LINK_DIRS, ":".join(link_dirs_system))
env.set(SPACK_INCLUDE_DIRS, ":".join(include_dirs_system))
env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs_system))
env.set(SPACK_STORE_LINK_DIRS, ":".join(link_dirs_spack))
env.set(SPACK_STORE_INCLUDE_DIRS, ":".join(include_dirs_spack))
env.set(SPACK_STORE_RPATH_DIRS, ":".join(rpath_dirs_spack))
env.set(SPACK_LINK_DIRS, ":".join(link_dirs))
env.set(SPACK_INCLUDE_DIRS, ":".join(include_dirs))
env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs))
def set_package_py_globals(pkg, context: Context = Context.BUILD):
@@ -607,22 +583,10 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
# Put spack compiler paths in module scope. (Some packages use it
# in setup_run_environment etc, so don't put it context == build)
link_dir = spack.paths.build_env_path
pkg_compiler = None
try:
pkg_compiler = pkg.compiler
except spack.compilers.NoCompilerForSpecError as e:
tty.debug(f"cannot set 'spack_cc': {str(e)}")
if pkg_compiler is not None:
module.spack_cc = os.path.join(link_dir, pkg_compiler.link_paths["cc"])
module.spack_cxx = os.path.join(link_dir, pkg_compiler.link_paths["cxx"])
module.spack_f77 = os.path.join(link_dir, pkg_compiler.link_paths["f77"])
module.spack_fc = os.path.join(link_dir, pkg_compiler.link_paths["fc"])
else:
module.spack_cc = None
module.spack_cxx = None
module.spack_f77 = None
module.spack_fc = None
module.spack_cc = os.path.join(link_dir, pkg.compiler.link_paths["cc"])
module.spack_cxx = os.path.join(link_dir, pkg.compiler.link_paths["cxx"])
module.spack_f77 = os.path.join(link_dir, pkg.compiler.link_paths["f77"])
module.spack_fc = os.path.join(link_dir, pkg.compiler.link_paths["fc"])
# Useful directories within the prefix are encapsulated in
# a Prefix object.
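
The managed-directories hunk above partitions each search-path list by whether a path lives under a Spack-managed prefix, then serializes the prefixes for the compiler wrapper. A sketch of both steps (Spack's real helper is ``llnl.util.lang.stable_partition``; this stand-in shows the behavior):

import os

def stable_partition(items, predicate):
    """Split items by predicate, preserving order in both halves."""
    true_items, false_items = [], []
    for item in items:
        (true_items if predicate(item) else false_items).append(item)
    return true_items, false_items

managed = {"/opt/spack/store", "/tmp/spack-stage"}    # hypothetical roots
managed |= {os.path.realpath(p) for p in managed}     # robust to symlinked /tmp
is_managed = lambda p: any(p.startswith(root) for root in managed)

store_dirs, other_dirs = stable_partition(
    ["/opt/spack/store/zlib/lib", "/usr/lib"], is_managed
)
print(store_dirs, other_dirs)  # ['/opt/spack/store/zlib/lib'] ['/usr/lib']

# Serialized as shell case patterns for the wrapper's eval'd path_order:
print("|".join(f'"{p}/"*' for p in sorted(managed)))
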

View File

@@ -434,6 +434,11 @@ def _do_patch_libtool(self):
r"crtendS\.o",
]:
x.filter(regex=(rehead + o), repl="")
elif self.pkg.compiler.name == "dpcpp":
# Hack to filter out spurious predep_objects when building with Intel dpcpp
# (see https://github.com/spack/spack/issues/32863):
x.filter(regex=r"^(predep_objects=.*)/tmp/conftest-[0-9A-Fa-f]+\.o", repl=r"\1")
x.filter(regex=r"^(predep_objects=.*)/tmp/a-[0-9A-Fa-f]+\.o", repl=r"\1")
elif self.pkg.compiler.name == "nag":
for tag in ["fc", "f77"]:
marker = markers[tag]

View File

@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections.abc
import os
import re
from typing import Tuple
import llnl.util.filesystem as fs
@@ -16,12 +15,6 @@
from .cmake import CMakeBuilder, CMakePackage
def spec_uses_toolchain(spec):
gcc_toolchain_regex = re.compile(".*gcc-toolchain.*")
using_toolchain = list(filter(gcc_toolchain_regex.match, spec.compiler_flags["cxxflags"]))
return using_toolchain
def cmake_cache_path(name, value, comment="", force=False):
"""Generate a string for a cmake cache variable"""
force_str = " FORCE" if force else ""
@@ -220,7 +213,7 @@ def initconfig_mpi_entries(self):
else:
# starting with cmake 3.10, FindMPI expects MPIEXEC_EXECUTABLE
# vs the older versions which expect MPIEXEC
if spec["cmake"].satisfies("@3.10:"):
if self.pkg.spec["cmake"].satisfies("@3.10:"):
entries.append(cmake_cache_path("MPIEXEC_EXECUTABLE", mpiexec))
else:
entries.append(cmake_cache_path("MPIEXEC", mpiexec))
@@ -255,17 +248,12 @@ def initconfig_hardware_entries(self):
# Include the deprecated CUDA_TOOLKIT_ROOT_DIR for supporting BLT packages
entries.append(cmake_cache_path("CUDA_TOOLKIT_ROOT_DIR", cudatoolkitdir))
# CUDA_FLAGS
cuda_flags = []
if not spec.satisfies("cuda_arch=none"):
cuda_archs = ";".join(spec.variants["cuda_arch"].value)
entries.append(cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", cuda_archs))
if spec_uses_toolchain(spec):
cuda_flags.append("-Xcompiler {}".format(spec_uses_toolchain(spec)[0]))
entries.append(cmake_cache_string("CMAKE_CUDA_FLAGS", " ".join(cuda_flags)))
archs = spec.variants["cuda_arch"].value
if archs[0] != "none":
arch_str = ";".join(archs)
entries.append(
cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", "{0}".format(arch_str))
)
if "+rocm" in spec:
entries.append("#------------------{0}".format("-" * 30))
@@ -274,6 +262,9 @@ def initconfig_hardware_entries(self):
# Explicitly setting HIP_ROOT_DIR may be a patch that is no longer necessary
entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
entries.append(
cmake_cache_path("HIP_CXX_COMPILER", "{0}".format(self.spec["hip"].hipcc))
)
llvm_bin = spec["llvm-amdgpu"].prefix.bin
llvm_prefix = spec["llvm-amdgpu"].prefix
# Some ROCm systems seem to point to /<path>/rocm-<ver>/ and
@@ -286,9 +277,11 @@ def initconfig_hardware_entries(self):
archs = self.spec.variants["amdgpu_target"].value
if archs[0] != "none":
arch_str = ";".join(archs)
entries.append(cmake_cache_string("CMAKE_HIP_ARCHITECTURES", arch_str))
entries.append(cmake_cache_string("AMDGPU_TARGETS", arch_str))
entries.append(cmake_cache_string("GPU_TARGETS", arch_str))
entries.append(
cmake_cache_string("CMAKE_HIP_ARCHITECTURES", "{0}".format(arch_str))
)
entries.append(cmake_cache_string("AMDGPU_TARGETS", "{0}".format(arch_str)))
entries.append(cmake_cache_string("GPU_TARGETS", "{0}".format(arch_str)))
return entries
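
The MPI hunk above keys the cache-entry name on the CMake version. A sketch of that gate, with a simplified stand-in for the ``cmake_cache_path`` helper (the real one also supports comments and FORCE):

def cmake_cache_path(name: str, value: str) -> str:
    return f'set({name} "{value}" CACHE PATH "")'

def mpiexec_entry(cmake_version: tuple, mpiexec: str) -> str:
    # FindMPI expects MPIEXEC_EXECUTABLE starting with CMake 3.10
    name = "MPIEXEC_EXECUTABLE" if cmake_version >= (3, 10) else "MPIEXEC"
    return cmake_cache_path(name, mpiexec)

print(mpiexec_entry((3, 18), "/usr/bin/mpiexec"))
# set(MPIEXEC_EXECUTABLE "/usr/bin/mpiexec" CACHE PATH "")
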

View File

@@ -16,7 +16,7 @@
class CargoPackage(spack.package_base.PackageBase):
"""Specialized class for packages built using cargo."""
"""Specialized class for packages built using a Makefiles."""
#: This attribute is used in UI queries that need to know the build
#: system base class

View File

@@ -21,7 +21,7 @@
class MakefilePackage(spack.package_base.PackageBase):
"""Specialized class for packages built using Makefiles."""
"""Specialized class for packages built using a Makefiles."""
#: This attribute is used in UI queries that need to know the build
#: system base class

View File

@@ -14,7 +14,7 @@
from llnl.util.link_tree import LinkTree
from spack.build_environment import dso_suffix
from spack.directives import conflicts, license, redistribute, variant
from spack.directives import conflicts, variant
from spack.package_base import InstallError
from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable
@@ -26,11 +26,10 @@ class IntelOneApiPackage(Package):
"""Base class for Intel oneAPI packages."""
homepage = "https://software.intel.com/oneapi"
license("https://intel.ly/393CijO")
# oneAPI license does not allow mirroring outside of the
# organization (e.g. University/Company).
redistribute(source=False, binary=False)
redistribute_source = False
for c in [
"target=ppc64:",

View File

@@ -80,7 +80,6 @@
import spack.variant
from spack.directives import conflicts, depends_on, variant
from spack.package_base import PackageBase
from spack.util.environment import EnvironmentModifications
class ROCmPackage(PackageBase):
@@ -157,23 +156,30 @@ def hip_flags(amdgpu_target):
archs = ",".join(amdgpu_target)
return "--amdgpu-target={0}".format(archs)
def asan_on(self, env: EnvironmentModifications):
llvm_path = self.spec["llvm-amdgpu"].prefix
# ASAN
@staticmethod
def asan_on(env, llvm_path):
env.set("CC", llvm_path + "/bin/clang")
env.set("CXX", llvm_path + "/bin/clang++")
env.set("ASAN_OPTIONS", "detect_leaks=0")
for root, _, files in os.walk(llvm_path):
for root, dirs, files in os.walk(llvm_path):
if "libclang_rt.asan-x86_64.so" in files:
asan_lib_path = root
env.prepend_path("LD_LIBRARY_PATH", asan_lib_path)
if "rhel" in self.spec.os or "sles" in self.spec.os:
SET_DWARF_VERSION_4 = "-gdwarf-5"
else:
SET_DWARF_VERSION_4 = ""
SET_DWARF_VERSION_4 = ""
try:
# This will throw an error if imported on a non-Linux platform.
import distro
env.set("CFLAGS", f"-fsanitize=address -shared-libasan -g {SET_DWARF_VERSION_4}")
env.set("CXXFLAGS", f"-fsanitize=address -shared-libasan -g {SET_DWARF_VERSION_4}")
distname = distro.id()
except ImportError:
distname = "unknown"
if "rhel" in distname or "sles" in distname:
SET_DWARF_VERSION_4 = "-gdwarf-5"
env.set("CFLAGS", "-fsanitize=address -shared-libasan -g " + SET_DWARF_VERSION_4)
env.set("CXXFLAGS", "-fsanitize=address -shared-libasan -g " + SET_DWARF_VERSION_4)
env.set("LDFLAGS", "-Wl,--enable-new-dtags -fuse-ld=lld -fsanitize=address -g -Wl,")
# HIP version vs Architecture

View File

@@ -16,8 +16,8 @@
import tempfile
import time
import zipfile
from collections import defaultdict, namedtuple
from typing import Dict, List, Optional, Set, Tuple
from collections import namedtuple
from typing import List, Optional
from urllib.error import HTTPError, URLError
from urllib.parse import urlencode
from urllib.request import HTTPHandler, Request, build_opener
@@ -113,24 +113,54 @@ def _remove_reserved_tags(tags):
return [tag for tag in tags if tag not in SPACK_RESERVED_TAGS]
def _spec_ci_label(s):
def _spec_deps_key(s):
return f"{s.name}/{s.dag_hash(7)}"
PlainNodes = Dict[str, spack.spec.Spec]
PlainEdges = Dict[str, Set[str]]
def _add_dependency(spec_label, dep_label, deps):
if spec_label == dep_label:
return
if spec_label not in deps:
deps[spec_label] = set()
deps[spec_label].add(dep_label)
def stage_spec_jobs(specs: List[spack.spec.Spec]) -> Tuple[PlainNodes, PlainEdges, List[Set[str]]]:
"""Turn a DAG into a list of stages (set of nodes), the list is ordered topologically, so that
each node in a stage has dependencies only in previous stages.
def _get_spec_dependencies(specs, deps, spec_labels):
spec_deps_obj = _compute_spec_deps(specs)
if spec_deps_obj:
dependencies = spec_deps_obj["dependencies"]
specs = spec_deps_obj["specs"]
for entry in specs:
spec_labels[entry["label"]] = entry["spec"]
for entry in dependencies:
_add_dependency(entry["spec"], entry["depends"], deps)
def stage_spec_jobs(specs):
"""Take a set of release specs and generate a list of "stages", where the
jobs in any stage are dependent only on jobs in previous stages. This
allows us to maximize build parallelism within the gitlab-ci framework.
Arguments:
specs: Specs to build
specs (Iterable): Specs to build
Returns: A tuple of information objects describing the specs, dependencies
and stages:
spec_labels: A dictionary mapping the spec labels (which are formatted
as pkg-name/hash-prefix) to concrete specs.
deps: A dictionary where the keys should also have appeared as keys in
the spec_labels dictionary, and the values are the set of
dependencies for that spec.
stages: An ordered list of sets, each of which contains all the jobs to
built in that stage. The jobs are expressed in the same format as
the keys in the spec_labels and deps objects.
Returns: A tuple (nodes, edges, stages) where ``nodes`` maps labels to specs, ``edges`` maps
labels to a set of labels of dependencies, and ``stages`` is a topologically ordered list
of sets of labels.
"""
# The convenience method below, "_remove_satisfied_deps()", does not modify
@@ -147,12 +177,17 @@ def _remove_satisfied_deps(deps, satisfied_list):
return new_deps
nodes, edges = _extract_dag(specs)
deps = {}
spec_labels = {}
# Save the original edges, as we need to return them at the end of the function. In the loop
# below, the "dependencies" variable is rebound rather than mutated, so "edges" is not mutated.
dependencies = edges
unstaged = set(nodes.keys())
_get_spec_dependencies(specs, deps, spec_labels)
# Save the original deps, as we need to return them at the end of the
# function. In the while loop below, the "dependencies" variable is
# overwritten rather than being modified each time through the loop,
# thus preserving the original value of "deps" saved here.
dependencies = deps
unstaged = set(spec_labels.keys())
stages = []
while dependencies:
@@ -168,7 +203,7 @@ def _remove_satisfied_deps(deps, satisfied_list):
if unstaged:
stages.append(unstaged.copy())
return nodes, edges, stages
return spec_labels, deps, stages
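
The staging loop above repeatedly peels off the labels whose dependencies are all satisfied, yielding topologically ordered stages. A minimal self-contained version over plain edge dicts, with hypothetical labels:

def stage(edges):
    """Order DAG labels into stages; edges maps label -> set of dep labels."""
    deps = {node: set(node_deps) for node, node_deps in edges.items()}
    unstaged = set(deps) | {d for node_deps in deps.values() for d in node_deps}
    stages = []
    while unstaged:
        ready = {n for n in unstaged if not deps.get(n)}
        if not ready:
            raise ValueError("dependency cycle detected")
        stages.append(ready)
        unstaged -= ready
        deps = {n: d - ready for n, d in deps.items() if n not in ready}
    return stages

print(stage({"readline/ip6aiun": {"ncurses/y43rifz"},
             "ncurses/y43rifz": {"pkgconf/eg355zb"}}))
# [{'pkgconf/eg355zb'}, {'ncurses/y43rifz'}, {'readline/ip6aiun'}]
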
def _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions):
@@ -200,22 +235,87 @@ def _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisi
tty.msg(msg)
def _extract_dag(specs: List[spack.spec.Spec]) -> Tuple[PlainNodes, PlainEdges]:
"""Extract a sub-DAG as plain old Python objects with external nodes removed."""
nodes: PlainNodes = {}
edges: PlainEdges = defaultdict(set)
def _compute_spec_deps(spec_list):
"""
Computes all the dependencies for the spec(s) and generates a JSON
object which provides both a list of unique spec names as well as a
comprehensive list of all the edges in the dependency graph. For
example, given a single spec like 'readline@7.0', this function
generates the following JSON object:
for edge in traverse.traverse_edges(specs, cover="edges"):
if (edge.parent and edge.parent.external) or edge.spec.external:
continue
child_id = _spec_ci_label(edge.spec)
nodes[child_id] = edge.spec
if edge.parent:
parent_id = _spec_ci_label(edge.parent)
nodes[parent_id] = edge.parent
edges[parent_id].add(child_id)
.. code-block:: JSON
return nodes, edges
{
"dependencies": [
{
"depends": "readline/ip6aiun",
"spec": "readline/ip6aiun"
},
{
"depends": "ncurses/y43rifz",
"spec": "readline/ip6aiun"
},
{
"depends": "ncurses/y43rifz",
"spec": "readline/ip6aiun"
},
{
"depends": "pkgconf/eg355zb",
"spec": "ncurses/y43rifz"
},
{
"depends": "pkgconf/eg355zb",
"spec": "readline/ip6aiun"
}
],
"specs": [
{
"spec": "readline@7.0%apple-clang@9.1.0 arch=darwin-highs...",
"label": "readline/ip6aiun"
},
{
"spec": "ncurses@6.1%apple-clang@9.1.0 arch=darwin-highsi...",
"label": "ncurses/y43rifz"
},
{
"spec": "pkgconf@1.5.4%apple-clang@9.1.0 arch=darwin-high...",
"label": "pkgconf/eg355zb"
}
]
}
"""
spec_labels = {}
specs = []
dependencies = []
def append_dep(s, d):
dependencies.append({"spec": s, "depends": d})
for spec in spec_list:
for s in spec.traverse(deptype="all"):
if s.external:
tty.msg(f"Will not stage external pkg: {s}")
continue
skey = _spec_deps_key(s)
spec_labels[skey] = s
for d in s.dependencies(deptype="all"):
dkey = _spec_deps_key(d)
if d.external:
tty.msg(f"Will not stage external dep: {d}")
continue
append_dep(skey, dkey)
for spec_label, concrete_spec in spec_labels.items():
specs.append({"label": spec_label, "spec": concrete_spec})
deps_json_obj = {"specs": specs, "dependencies": dependencies}
return deps_json_obj
def _spec_matches(spec, match_string):
@@ -227,7 +327,7 @@ def _format_job_needs(
):
needs_list = []
for dep_job in dep_jobs:
dep_spec_key = _spec_ci_label(dep_job)
dep_spec_key = _spec_deps_key(dep_job)
rebuild = rebuild_decisions[dep_spec_key].rebuild
if not prune_dag or rebuild:
@@ -1847,9 +1947,9 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
entrypoint_script.append(["echo", f"Re-run install script using:\n\t{install_mechanism}"])
# Allow interactive
if IS_WINDOWS:
entrypoint_script.append(["&", "($args -Join ' ')", "-NoExit"])
entrypoint_script.extend(["&", "($args -Join ' ')", "-NoExit"])
else:
entrypoint_script.append(["exec", "$@"])
entrypoint_script.extend(["exec", "$@"])
process_command(
"entrypoint", entrypoint_script, work_dir, run=False, exit_on_failure=False

View File

@@ -334,7 +334,8 @@ def display_specs(specs, args=None, **kwargs):
variants (bool): Show variants with specs
indent (int): indent each line this much
groups (bool): display specs grouped by arch/compiler (default True)
decorator (typing.Callable): function to call to decorate specs
decorators (dict): dictionary mapping specs to decorators
header_callback (typing.Callable): called at start of arch/compiler groups
all_headers (bool): show headers even when arch/compiler aren't defined
output (typing.IO): A file object to write to. Default is ``sys.stdout``
@@ -383,13 +384,15 @@ def get_arg(name, default=None):
vfmt = "{variants}" if variants else ""
format_string = nfmt + "{@version}" + ffmt + vfmt
transform = {"package": decorator, "fullpackage": decorator}
def fmt(s, depth=0):
"""Formatter function for all output specs"""
string = ""
if hashes:
string += gray_hash(s, hlen) + " "
string += depth * " "
string += decorator(s, s.cformat(format_string))
string += s.cformat(format_string, transform=transform)
return string
def format_list(specs):
@@ -448,7 +451,7 @@ def filter_loaded_specs(specs):
return [x for x in specs if x.dag_hash() in hashes]
def print_how_many_pkgs(specs, pkg_type="", suffix=""):
def print_how_many_pkgs(specs, pkg_type=""):
"""Given a list of specs, this will print a message about how many
specs are in that list.
@@ -459,7 +462,7 @@ def print_how_many_pkgs(specs, pkg_type="", suffix=""):
category, e.g. if pkg_type is "installed" then the message
would be "3 installed packages"
"""
tty.msg("%s" % llnl.string.plural(len(specs), pkg_type + " package") + suffix)
tty.msg("%s" % llnl.string.plural(len(specs), pkg_type + " package"))
def spack_is_git_repo():

View File

@@ -2,7 +2,6 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import os.path
import shutil
import sys
@@ -68,21 +67,13 @@
}
class AllSources:
"""Iterator for bootstrapping sources. Every __iter__ call uses current config, which is robust
in tests."""
def __iter__(self):
yield from (s["name"] for s in spack.bootstrap.core.bootstrapping_sources())
def _add_scope_option(parser):
parser.add_argument(
"--scope", action=arguments.ConfigScope, help="configuration scope to read/modify"
)
def setup_parser(subparser: argparse.ArgumentParser):
def setup_parser(subparser):
sp = subparser.add_subparsers(dest="subcommand")
now = sp.add_parser("now", help="Spack ready, right now!")
@@ -131,9 +122,7 @@ def setup_parser(subparser: argparse.ArgumentParser):
add.add_argument("metadata_dir", help="directory where to find metadata files")
remove = sp.add_parser("remove", help="remove a bootstrapping source")
remove.add_argument(
"name", nargs="+", choices=AllSources(), help=argparse.SUPPRESS, metavar="name"
)
remove.add_argument("name", help="name of the source to be removed")
mirror = sp.add_parser("mirror", help="create a local mirror to bootstrap Spack")
mirror.add_argument(
@@ -208,11 +197,6 @@ def _root(args):
def _list(args):
sources = spack.bootstrap.core.bootstrapping_sources(scope=args.scope)
if not sys.stdout.isatty():
for source in sources:
print(source["name"])
return
if not sources:
llnl.util.tty.msg("No method available for bootstrapping Spack's dependencies")
return
@@ -372,35 +356,31 @@ def _add(args):
def _remove(args):
configured = set(AllSources())
removable = set(args.name)
unknown = removable - configured
if unknown:
if not configured:
raise RuntimeError("no bootstrapping sources are configured")
unknown_str = ", ".join(f'"{name}"' for name in removable)
known_str = ", ".join(f'"{name}"' for name in configured)
raise RuntimeError(
f"cannot find any bootstrapping source named {unknown_str}. "
f"Choose from {known_str}."
initial_sources = spack.bootstrap.core.bootstrapping_sources()
names = [s["name"] for s in initial_sources]
if args.name not in names:
msg = (
'cannot find any bootstrapping source named "{0}". '
"Run `spack bootstrap list` to see available sources."
)
raise RuntimeError(msg.format(args.name))
for current_scope in spack.config.scopes():
sources = spack.config.get("bootstrap:sources", scope=current_scope) or []
if args.name in [s["name"] for s in sources]:
sources = [s for s in sources if s["name"] != args.name]
spack.config.set("bootstrap:sources", sources, scope=current_scope)
msg = (
'Removed the bootstrapping source named "{0}" from the '
'"{1}" configuration scope.'
)
llnl.util.tty.msg(msg.format(args.name, current_scope))
trusted = spack.config.get("bootstrap:trusted", scope=current_scope) or []
for name in removable:
if any(name == s["name"] for s in sources):
sources = [s for s in sources if s["name"] != name]
spack.config.set("bootstrap:sources", sources, scope=current_scope)
llnl.util.tty.msg(
f'Removed the bootstrapping source named "{name}" from the '
f'"{current_scope}" configuration scope.'
)
if name in trusted:
trusted.pop(name)
spack.config.set("bootstrap:trusted", trusted, scope=current_scope)
llnl.util.tty.msg(f'Deleting information on "{name}" from list of trusted sources')
if args.name in trusted:
trusted.pop(args.name)
spack.config.set("bootstrap:trusted", trusted, scope=current_scope)
msg = 'Deleting information on "{0}" from list of trusted sources'
llnl.util.tty.msg(msg.format(args.name))
def _mirror(args):
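The rewritten _remove validates every requested name up front with a set difference, so one bad name aborts before any scope is mutated. The check in isolation, with made-up data:

configured = {"github-actions-v0.5", "spack-install-test"}
removable = {"github-actions-v0.5", "no-such-source"}

unknown = removable - configured
if unknown:
    unknown_str = ", ".join(f'"{n}"' for n in sorted(unknown))
    known_str = ", ".join(f'"{n}"' for n in sorted(configured))
    print(f"cannot find any bootstrapping source named {unknown_str}. Choose from {known_str}.")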

View File

@@ -133,11 +133,6 @@ def setup_parser(subparser: argparse.ArgumentParser):
help="when pushing to an OCI registry, tag an image containing all root specs and their "
"runtime dependencies",
)
push.add_argument(
"--private",
action="store_true",
help="for a private mirror, include non-redistributable packages",
)
arguments.add_common_arguments(push, ["specs", "jobs"])
push.set_defaults(func=push_fn)
@@ -372,25 +367,6 @@ def _make_pool() -> MaybePool:
return NoPool()
def _skip_no_redistribute_for_public(specs):
remaining_specs = list()
removed_specs = list()
for spec in specs:
if spec.package.redistribute_binary:
remaining_specs.append(spec)
else:
removed_specs.append(spec)
if removed_specs:
colified_output = tty.colify.colified(list(s.name for s in removed_specs), indent=4)
tty.debug(
"The following specs will not be added to the binary cache"
" because they cannot be redistributed:\n"
f"{colified_output}\n"
"You can use `--private` to include them."
)
return remaining_specs
def push_fn(args):
"""create a binary package and push it to a mirror"""
if args.spec_file:
@@ -441,8 +417,6 @@ def push_fn(args):
root="package" in args.things_to_install,
dependencies="dependencies" in args.things_to_install,
)
if not args.private:
specs = _skip_no_redistribute_for_public(specs)
# When pushing multiple specs, print the url once ahead of time, as well as how
# many specs are being pushed.

View File

@@ -581,8 +581,6 @@ def positionals(
for idx, (args, choices, nargs, help) in enumerate(positionals):
# Make sure we always get same order of output
if not help: # this means SUPPRESS was used.
choices = None
if isinstance(choices, dict):
choices = sorted(choices.keys())
elif isinstance(choices, (set, frozenset)):

View File

@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import copy
import sys
import llnl.util.lang
@@ -13,7 +14,6 @@
import spack.cmd as cmd
import spack.environment as ev
import spack.repo
import spack.store
from spack.cmd.common import arguments
from spack.database import InstallStatuses
@@ -69,12 +69,6 @@ def setup_parser(subparser):
arguments.add_common_arguments(subparser, ["long", "very_long", "tags", "namespaces"])
subparser.add_argument(
"-r",
"--only-roots",
action="store_true",
help="don't show full list of installed specs in an environment",
)
subparser.add_argument(
"-c",
"--show-concretized",
@@ -195,22 +189,26 @@ def query_arguments(args):
return q_args
def make_env_decorator(env):
def setup_env(env):
"""Create a function for decorating specs when in an environment."""
roots = set(env.roots())
removed = set(env.removed_specs())
def strip_build(seq):
return set(s.copy(deps=("link", "run")) for s in seq)
added = set(strip_build(env.added_specs()))
roots = set(strip_build(env.roots()))
removed = set(strip_build(env.removed_specs()))
def decorator(spec, fmt):
# add +/-/* to show added/removed/root specs
if any(spec.dag_hash() == r.dag_hash() for r in roots):
return color.colorize(f"@*{{{fmt}}}")
return color.colorize("@*{%s}" % fmt)
elif spec in removed:
return color.colorize(f"@K{{{fmt}}}")
return color.colorize("@K{%s}" % fmt)
else:
return fmt
return "%s" % fmt
return decorator
return decorator, added, roots, removed
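make_env_decorator is a closure factory: it snapshots the environment's roots and removed specs once, then hands back a cheap per-spec function, rather than returning the intermediate sets the old setup_env exposed. The shape of the pattern, with invented hashes:

def make_decorator(root_hashes, removed_hashes):
    def decorate(dag_hash, text):
        if dag_hash in root_hashes:
            return f"@*{{{text}}}"  # highlight roots
        if dag_hash in removed_hashes:
            return f"@K{{{text}}}"  # gray out removed specs
        return text
    return decorate

decorate = make_decorator({"abc1234"}, {"def5678"})
print(decorate("abc1234", "zlib@1.3.1"))  # @*{zlib@1.3.1}
print(decorate("fff0000", "cmake@3.27"))  # cmake@3.27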
def display_env(env, args, decorator, results):
@@ -225,51 +223,28 @@ def display_env(env, args, decorator, results):
"""
tty.msg("In environment %s" % env.name)
num_roots = len(env.user_specs) or "No"
tty.msg(f"{num_roots} root specs")
if not env.user_specs:
tty.msg("No root specs")
else:
tty.msg("Root specs")
concrete_specs = {
root: concrete_root
for root, concrete_root in zip(env.concretized_user_specs, env.concrete_roots())
}
# Root specs cannot be displayed with prefixes, since those are not
# set for abstract specs. Same for hashes
root_args = copy.copy(args)
root_args.paths = False
def root_decorator(spec, string):
"""Decorate root specs with their install status if needed"""
concrete = concrete_specs.get(spec)
if concrete:
status = color.colorize(concrete.install_status().value)
hash = concrete.dag_hash()
else:
status = color.colorize(spack.spec.InstallStatus.absent.value)
hash = "-" * 32
# TODO: status has two extra spaces on the end of it, but fixing this and other spec
# TODO: space format idiosyncrasies is complicated. Fix this eventually
status = status[:-2]
if args.long or args.very_long:
hash = color.colorize(f"@K{{{hash[: 7 if args.long else None]}}}")
return f"{status} {hash} {string}"
else:
return f"{status} {string}"
with spack.store.STORE.db.read_transaction():
# Roots are displayed with variants, etc. so that we can see
# specifically what the user asked for.
cmd.display_specs(
env.user_specs,
args,
# these are overrides of CLI args
paths=False,
long=False,
very_long=False,
# these enforce details in the root specs to show what the user asked for
root_args,
decorator=lambda s, f: color.colorize("@*{%s}" % f),
namespaces=True,
show_flags=True,
show_full_compiler=True,
decorator=root_decorator,
variants=True,
)
print()
print()
if args.show_concretized:
tty.msg("Concretized roots")
@@ -279,7 +254,7 @@ def root_decorator(spec, string):
# Display a header for the installed packages section IF there are installed
# packages. If there aren't any, we'll just end up printing "0 installed packages"
# later.
if results and not args.only_roots:
if results:
tty.msg("Installed packages")
@@ -288,10 +263,9 @@ def find(parser, args):
results = args.specs(**q_args)
env = ev.active_environment()
if not env and args.only_roots:
tty.die("-r / --only-roots requires an active environment")
decorator = make_env_decorator(env) if env else lambda s, f: f
decorator = lambda s, f: f
if env:
decorator, _, roots, _ = setup_env(env)
# use groups by default except with format.
if args.groups is None:
@@ -318,12 +292,9 @@ def find(parser, args):
if env:
display_env(env, args, decorator, results)
count_suffix = " (not shown)"
if not args.only_roots:
cmd.display_specs(results, args, decorator=decorator, all_headers=True)
count_suffix = ""
cmd.display_specs(results, args, decorator=decorator, all_headers=True)
# print number of installed packages last (as the list may be long)
if sys.stdout.isatty() and args.groups:
pkg_type = "loaded" if args.loaded else "installed"
spack.cmd.print_how_many_pkgs(results, pkg_type, suffix=count_suffix)
spack.cmd.print_how_many_pkgs(results, pkg_type)

View File

@@ -263,8 +263,8 @@ def _fmt_name_and_default(variant):
return color.colorize(f"@c{{{variant.name}}} @C{{[{_fmt_value(variant.default)}]}}")
def _fmt_when(when: "spack.spec.Spec", indent: int):
return color.colorize(f"{indent * ' '}@B{{when}} {color.cescape(str(when))}")
def _fmt_when(when, indent):
return color.colorize(f"{indent * ' '}@B{{when}} {color.cescape(when)}")
def _fmt_variant_description(variant, width, indent):
@@ -441,7 +441,7 @@ def get_url(version):
return "No URL"
url = get_url(preferred) if pkg.has_code else ""
line = version(" {0}".format(pad(preferred))) + color.cescape(str(url))
line = version(" {0}".format(pad(preferred))) + color.cescape(url)
color.cwrite(line)
print()
@@ -464,7 +464,7 @@ def get_url(version):
continue
for v, url in vers:
line = version(" {0}".format(pad(v))) + color.cescape(str(url))
line = version(" {0}".format(pad(v))) + color.cescape(url)
color.cprint(line)
@@ -475,7 +475,10 @@ def print_virtuals(pkg, args):
color.cprint(section_title("Virtual Packages: "))
if pkg.provided:
for when, specs in reversed(sorted(pkg.provided.items())):
line = " %s provides %s" % (when.cformat(), ", ".join(s.cformat() for s in specs))
line = " %s provides %s" % (
when.colorized(),
", ".join(s.colorized() for s in specs),
)
print(line)
else:
@@ -494,9 +497,7 @@ def print_licenses(pkg, args):
pad = padder(pkg.licenses, 4)
for when_spec in pkg.licenses:
license_identifier = pkg.licenses[when_spec]
line = license(" {0}".format(pad(license_identifier))) + color.cescape(
str(when_spec)
)
line = license(" {0}".format(pad(license_identifier))) + color.cescape(when_spec)
color.cprint(line)

View File

@@ -420,9 +420,10 @@ def install_with_active_env(env: ev.Environment, args, install_kwargs, reporter_
with reporter_factory(specs_to_install):
env.install_specs(specs_to_install, **install_kwargs)
finally:
if env.views:
with env.write_transaction():
env.write(regenerate=True)
# TODO: this is doing way too much to trigger
# views and modules to be generated.
with env.write_transaction():
env.write(regenerate=True)
def concrete_specs_from_cli(args, install_kwargs):

View File

@@ -71,11 +71,6 @@ def setup_parser(subparser):
help="the number of versions to fetch for each spec, choose 'all' to"
" retrieve all versions of each package",
)
create_parser.add_argument(
"--private",
action="store_true",
help="for a private mirror, include non-redistributable packages",
)
arguments.add_common_arguments(create_parser, ["specs"])
arguments.add_concretizer_args(create_parser)
@@ -113,11 +108,6 @@ def setup_parser(subparser):
"and source use `--type binary --type source` (default)"
),
)
add_parser.add_argument(
"--autopush",
action="store_true",
help=("set mirror to push automatically after installation"),
)
add_parser_signed = add_parser.add_mutually_exclusive_group(required=False)
add_parser_signed.add_argument(
"--unsigned",
@@ -185,21 +175,6 @@ def setup_parser(subparser):
),
)
set_parser.add_argument("--url", help="url of mirror directory from 'spack mirror create'")
set_parser_autopush = set_parser.add_mutually_exclusive_group(required=False)
set_parser_autopush.add_argument(
"--autopush",
help="set mirror to push automatically after installation",
action="store_true",
default=None,
dest="autopush",
)
set_parser_autopush.add_argument(
"--no-autopush",
help="set mirror to not push automatically after installation",
action="store_false",
default=None,
dest="autopush",
)
set_parser_unsigned = set_parser.add_mutually_exclusive_group(required=False)
set_parser_unsigned.add_argument(
"--unsigned",
@@ -243,7 +218,6 @@ def mirror_add(args):
or args.type
or args.oci_username
or args.oci_password
or args.autopush
or args.signed is not None
):
connection = {"url": args.url}
@@ -260,8 +234,6 @@ def mirror_add(args):
if args.type:
connection["binary"] = "binary" in args.type
connection["source"] = "source" in args.type
if args.autopush:
connection["autopush"] = args.autopush
if args.signed is not None:
connection["signed"] = args.signed
mirror = spack.mirror.Mirror(connection, name=args.name)
@@ -298,8 +270,6 @@ def _configure_mirror(args):
changes["access_pair"] = [args.oci_username, args.oci_password]
if getattr(args, "signed", None) is not None:
changes["signed"] = args.signed
if getattr(args, "autopush", None) is not None:
changes["autopush"] = args.autopush
# argparse cannot distinguish between --binary and --no-binary when same dest :(
# notice that set-url does not have these args, so getattr
@@ -364,6 +334,7 @@ def concrete_specs_from_user(args):
specs = filter_externals(specs)
specs = list(set(specs))
specs.sort(key=lambda s: (s.name, s.version))
specs, _ = lang.stable_partition(specs, predicate_fn=not_excluded_fn(args))
return specs
@@ -408,50 +379,36 @@ def concrete_specs_from_cli_or_file(args):
return specs
class IncludeFilter:
def __init__(self, args):
self.exclude_specs = []
if args.exclude_file:
self.exclude_specs.extend(specs_from_text_file(args.exclude_file, concretize=False))
if args.exclude_specs:
self.exclude_specs.extend(spack.cmd.parse_specs(str(args.exclude_specs).split()))
self.private = args.private
def not_excluded_fn(args):
"""Return a predicate that evaluate to True if a spec was not explicitly
excluded by the user.
"""
exclude_specs = []
if args.exclude_file:
exclude_specs.extend(specs_from_text_file(args.exclude_file, concretize=False))
if args.exclude_specs:
exclude_specs.extend(spack.cmd.parse_specs(str(args.exclude_specs).split()))
def __call__(self, x):
return all([self._not_license_excluded(x), self._not_cmdline_excluded(x)])
def not_excluded(x):
return not any(x.satisfies(y) for y in exclude_specs)
def _not_license_excluded(self, x):
"""True if the spec is for a private mirror, or as long as the
package does not explicitly forbid redistributing source."""
if self.private:
return True
elif x.package_class.redistribute_source(x):
return True
else:
tty.debug(
"Skip adding {0} to mirror: the package.py file"
" indicates that a public mirror should not contain"
" it.".format(x.name)
)
return False
def _not_cmdline_excluded(self, x):
"""True if a spec was not explicitly excluded by the user."""
return not any(x.satisfies(y) for y in self.exclude_specs)
return not_excluded
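IncludeFilter folds the old not_excluded_fn closure and the new redistribution check into one callable object; __call__ admits a spec only when every sub-check passes. A self-contained sketch with hypothetical stand-ins for both checks:

class IncludeFilter:
    def __init__(self, exclude, private=False):
        self.exclude = set(exclude)
        self.private = private

    def _not_license_excluded(self, name):
        # stand-in for the real package redistribute_source() lookup
        return self.private or not name.startswith("no-redist-")

    def _not_cmdline_excluded(self, name):
        return name not in self.exclude

    def __call__(self, name):
        return all([self._not_license_excluded(name), self._not_cmdline_excluded(name)])

keep = IncludeFilter(exclude={"zlib"})
print([n for n in ("zlib", "cmake", "no-redist-tool") if keep(n)])  # ['cmake']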
def concrete_specs_from_environment():
def concrete_specs_from_environment(selection_fn):
env = ev.active_environment()
assert env, "an active environment is required"
mirror_specs = env.all_specs()
mirror_specs = filter_externals(mirror_specs)
mirror_specs, _ = lang.stable_partition(mirror_specs, predicate_fn=selection_fn)
return mirror_specs
def all_specs_with_all_versions():
def all_specs_with_all_versions(selection_fn):
specs = [spack.spec.Spec(n) for n in spack.repo.all_package_names()]
mirror_specs = spack.mirror.get_all_versions(specs)
mirror_specs.sort(key=lambda s: (s.name, s.version))
mirror_specs, _ = lang.stable_partition(mirror_specs, predicate_fn=selection_fn)
return mirror_specs
@@ -472,6 +429,12 @@ def versions_per_spec(args):
return num_versions
def create_mirror_for_individual_specs(mirror_specs, path, skip_unstable_versions):
present, mirrored, error = spack.mirror.create(path, mirror_specs, skip_unstable_versions)
tty.msg("Summary for mirror in {}".format(path))
process_mirror_stats(present, mirrored, error)
def process_mirror_stats(present, mirrored, error):
p, m, e = len(present), len(mirrored), len(error)
tty.msg(
@@ -517,28 +480,30 @@ def mirror_create(args):
# When no directory is provided, the source dir is used
path = args.directory or spack.caches.fetch_cache_location()
mirror_specs, mirror_fn = _specs_and_action(args)
mirror_fn(mirror_specs, path=path, skip_unstable_versions=args.skip_unstable_versions)
def _specs_and_action(args):
include_fn = IncludeFilter(args)
if args.all and not ev.active_environment():
mirror_specs = all_specs_with_all_versions()
mirror_fn = create_mirror_for_all_specs
elif args.all and ev.active_environment():
mirror_specs = concrete_specs_from_environment()
mirror_fn = create_mirror_for_individual_specs
else:
mirror_specs = concrete_specs_from_user(args)
mirror_fn = create_mirror_for_individual_specs
create_mirror_for_all_specs(
path=path,
skip_unstable_versions=args.skip_unstable_versions,
selection_fn=not_excluded_fn(args),
)
return
mirror_specs, _ = lang.stable_partition(mirror_specs, predicate_fn=include_fn)
return mirror_specs, mirror_fn
if args.all and ev.active_environment():
create_mirror_for_all_specs_inside_environment(
path=path,
skip_unstable_versions=args.skip_unstable_versions,
selection_fn=not_excluded_fn(args),
)
return
mirror_specs = concrete_specs_from_user(args)
create_mirror_for_individual_specs(
mirror_specs, path=path, skip_unstable_versions=args.skip_unstable_versions
)
def create_mirror_for_all_specs(mirror_specs, path, skip_unstable_versions):
def create_mirror_for_all_specs(path, skip_unstable_versions, selection_fn):
mirror_specs = all_specs_with_all_versions(selection_fn=selection_fn)
mirror_cache, mirror_stats = spack.mirror.mirror_cache_and_stats(
path, skip_unstable_versions=skip_unstable_versions
)
@@ -550,10 +515,11 @@ def create_mirror_for_all_specs(mirror_specs, path, skip_unstable_versions):
process_mirror_stats(*mirror_stats.stats())
def create_mirror_for_individual_specs(mirror_specs, path, skip_unstable_versions):
present, mirrored, error = spack.mirror.create(path, mirror_specs, skip_unstable_versions)
tty.msg("Summary for mirror in {}".format(path))
process_mirror_stats(present, mirrored, error)
def create_mirror_for_all_specs_inside_environment(path, skip_unstable_versions, selection_fn):
mirror_specs = concrete_specs_from_environment(selection_fn=selection_fn)
create_mirror_for_individual_specs(
mirror_specs, path=path, skip_unstable_versions=skip_unstable_versions
)
def mirror_destroy(args):
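After this change, every creation path funnels its spec list through a single lang.stable_partition call, which splits a sequence by a predicate while preserving order. A minimal equivalent for reference:

def stable_partition(iterable, predicate_fn):
    trues, falses = [], []
    for item in iterable:
        (trues if predicate_fn(item) else falses).append(item)
    return trues, falses

print(stable_partition(range(6), lambda n: n % 2 == 0))  # ([0, 2, 4], [1, 3, 5])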

View File

@@ -22,7 +22,6 @@
import spack.error
import spack.spec
import spack.util.executable
import spack.util.libc
import spack.util.module_cmd
import spack.version
from spack.util.environment import filter_system_paths
@@ -108,6 +107,7 @@ def _parse_link_paths(string):
"""
lib_search_paths = False
raw_link_dirs = []
tty.debug("parsing implicit link info")
for line in string.splitlines():
if lib_search_paths:
if line.startswith("\t"):
@@ -122,7 +122,7 @@ def _parse_link_paths(string):
continue
if _LINKER_LINE_IGNORE.match(line):
continue
tty.debug(f"implicit link dirs: link line: {line}")
tty.debug("linker line: %s" % line)
next_arg = False
for arg in line.split():
@@ -138,12 +138,15 @@ def _parse_link_paths(string):
link_dir_arg = _LINK_DIR_ARG.match(arg)
if link_dir_arg:
link_dir = link_dir_arg.group("dir")
tty.debug("linkdir: %s" % link_dir)
raw_link_dirs.append(link_dir)
link_dir_arg = _LIBPATH_ARG.match(arg)
if link_dir_arg:
link_dir = link_dir_arg.group("dir")
tty.debug("libpath: %s", link_dir)
raw_link_dirs.append(link_dir)
tty.debug("found raw link dirs: %s" % ", ".join(raw_link_dirs))
implicit_link_dirs = list()
visited = set()
@@ -153,7 +156,7 @@ def _parse_link_paths(string):
implicit_link_dirs.append(normalized_path)
visited.add(normalized_path)
tty.debug(f"implicit link dirs: result: {', '.join(implicit_link_dirs)}")
tty.debug("found link dirs: %s" % ", ".join(implicit_link_dirs))
return implicit_link_dirs
@@ -414,35 +417,17 @@ def real_version(self):
self._real_version = self.version
return self._real_version
def implicit_rpaths(self) -> List[str]:
def implicit_rpaths(self):
if self.enable_implicit_rpaths is False:
return []
output = self.compiler_verbose_output
if not output:
return []
link_dirs = _parse_non_system_link_dirs(output)
# Put CXX first since it has the most linking issues
# And because it has flags that affect linking
link_dirs = self._get_compiler_link_paths()
all_required_libs = list(self.required_libs) + Compiler._all_compiler_rpath_libraries
return list(paths_containing_libs(link_dirs, all_required_libs))
@property
def default_libc(self) -> Optional["spack.spec.Spec"]:
"""Determine libc targeted by the compiler from link line"""
output = self.compiler_verbose_output
if not output:
return None
dynamic_linker = spack.util.libc.parse_dynamic_linker(output)
if not dynamic_linker:
return None
return spack.util.libc.libc_from_dynamic_linker(dynamic_linker)
@property
def required_libs(self):
"""For executables created with this compiler, the compiler libraries
@@ -451,17 +436,17 @@ def required_libs(self):
# By default every compiler returns the empty list
return []
@property
def compiler_verbose_output(self) -> Optional[str]:
"""Verbose output from compiling a dummy C source file. Output is cached."""
if not hasattr(self, "_compile_c_source_output"):
self._compile_c_source_output = self._compile_dummy_c_source()
return self._compile_c_source_output
def _compile_dummy_c_source(self) -> Optional[str]:
def _get_compiler_link_paths(self):
cc = self.cc if self.cc else self.cxx
if not cc or not self.verbose_flag:
return None
# Cannot determine implicit link paths without a compiler / verbose flag
return []
# What flag types apply to first_compiler, in what order
if cc == self.cc:
flags = ["cflags", "cppflags", "ldflags"]
else:
flags = ["cxxflags", "cppflags", "ldflags"]
try:
tmpdir = tempfile.mkdtemp(prefix="spack-implicit-link-info")
@@ -473,19 +458,20 @@ def _compile_dummy_c_source(self) -> Optional[str]:
"int main(int argc, char* argv[]) { (void)argc; (void)argv; return 0; }\n"
)
cc_exe = spack.util.executable.Executable(cc)
for flag_type in ["cflags" if cc == self.cc else "cxxflags", "cppflags", "ldflags"]:
for flag_type in flags:
cc_exe.add_default_arg(*self.flags.get(flag_type, []))
with self.compiler_environment():
return cc_exe(self.verbose_flag, fin, "-o", fout, output=str, error=str)
output = cc_exe(self.verbose_flag, fin, "-o", fout, output=str, error=str)
return _parse_non_system_link_dirs(output)
except spack.util.executable.ProcessError as pe:
tty.debug("ProcessError: Command exited with non-zero status: " + pe.long_message)
return None
return []
finally:
shutil.rmtree(tmpdir, ignore_errors=True)
@property
def verbose_flag(self) -> Optional[str]:
def verbose_flag(self):
"""
This property should be overridden in the compiler subclass if a
verbose flag is available.
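compiler_verbose_output above is a hand-rolled memoized property: the expensive dummy compile runs at most once per Compiler instance, and even a None result (compile failed) is cached. The pattern in isolation:

class Probe:
    @property
    def verbose_output(self):
        if not hasattr(self, "_verbose_output"):
            self._verbose_output = self._compile_dummy_source()  # may be None
        return self._verbose_output

    def _compile_dummy_source(self):
        print("compiling probe...")  # expensive step, runs once
        return "collect2 version ..."

p = Probe()
p.verbose_output  # prints "compiling probe..."
p.verbose_output  # served from the cached attribute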

View File

@@ -10,7 +10,6 @@
import itertools
import multiprocessing.pool
import os
import warnings
from typing import Dict, List, Optional, Tuple
import archspec.cpu
@@ -110,33 +109,27 @@ def _to_dict(compiler):
return {"compiler": d}
def get_compiler_config(
configuration: "spack.config.Configuration",
*,
scope: Optional[str] = None,
init_config: bool = False,
) -> List[Dict]:
def get_compiler_config(scope=None, init_config=False):
"""Return the compiler configuration for the specified architecture."""
config = configuration.get("compilers", scope=scope) or []
config = spack.config.CONFIG.get("compilers", scope=scope) or []
if config or not init_config:
return config
merged_config = configuration.get("compilers")
merged_config = spack.config.CONFIG.get("compilers")
if merged_config:
# Config is empty for this scope
# Do not init config because there is a non-empty scope
return config
_init_compiler_config(configuration, scope=scope)
config = configuration.get("compilers", scope=scope)
_init_compiler_config(scope=scope)
config = spack.config.CONFIG.get("compilers", scope=scope)
return config
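The recurring change in this file is dependency injection: functions take the Configuration object explicitly instead of reaching for the global spack.config.CONFIG, which makes them trivially testable against a fake. The move in miniature (FakeConfiguration is invented here):

def get_compilers_section(configuration, *, scope=None):
    return configuration.get("compilers", scope=scope) or []

class FakeConfiguration:
    def __init__(self, data):
        self._data = data

    def get(self, section, scope=None):
        return self._data.get(section)

cfg = FakeConfiguration({"compilers": [{"compiler": {"spec": "gcc@12.3.0"}}]})
print(get_compilers_section(cfg))  # no global state touched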
def get_compiler_config_from_packages(
configuration: "spack.config.Configuration", *, scope: Optional[str] = None
) -> List[Dict]:
def get_compiler_config_from_packages(scope=None):
"""Return the compiler configuration from packages.yaml"""
config = configuration.get("packages", scope=scope)
config = spack.config.get("packages", scope=scope)
if not config:
return []
@@ -223,15 +216,13 @@ def _compiler_config_from_external(config):
return compiler_entry
def _init_compiler_config(
configuration: "spack.config.Configuration", *, scope: Optional[str]
) -> None:
def _init_compiler_config(*, scope):
"""Compiler search used when Spack has no compilers."""
compilers = find_compilers()
compilers_dict = []
for compiler in compilers:
compilers_dict.append(_to_dict(compiler))
configuration.set("compilers", compilers_dict, scope=scope)
spack.config.set("compilers", compilers_dict, scope=scope)
def compiler_config_files():
@@ -242,7 +233,7 @@ def compiler_config_files():
compiler_config = config.get("compilers", scope=name)
if compiler_config:
config_files.append(config.get_config_filename(name, "compilers"))
compiler_config_from_packages = get_compiler_config_from_packages(config, scope=name)
compiler_config_from_packages = get_compiler_config_from_packages(scope=name)
if compiler_config_from_packages:
config_files.append(config.get_config_filename(name, "packages"))
return config_files
@@ -255,9 +246,7 @@ def add_compilers_to_config(compilers, scope=None):
compilers: a list of Compiler objects.
scope: configuration scope to modify.
"""
compiler_config = get_compiler_config(
configuration=spack.config.CONFIG, scope=scope, init_config=False
)
compiler_config = get_compiler_config(scope, init_config=False)
for compiler in compilers:
if not compiler.cc:
tty.debug(f"{compiler.spec} does not have a C compiler")
@@ -306,9 +295,7 @@ def _remove_compiler_from_scope(compiler_spec, scope):
True if one or more compiler entries were actually removed, False otherwise
"""
assert scope is not None, "a specific scope is needed when calling this function"
compiler_config = get_compiler_config(
configuration=spack.config.CONFIG, scope=scope, init_config=False
)
compiler_config = get_compiler_config(scope, init_config=False)
filtered_compiler_config = [
compiler_entry
for compiler_entry in compiler_config
@@ -323,28 +310,21 @@ def _remove_compiler_from_scope(compiler_spec, scope):
# We need to preserve the YAML type for comments, hence we are copying the
# items in the list that has just been retrieved
compiler_config[:] = filtered_compiler_config
spack.config.CONFIG.set("compilers", compiler_config, scope=scope)
spack.config.set("compilers", compiler_config, scope=scope)
return True
def all_compilers_config(
configuration: "spack.config.Configuration",
*,
scope: Optional[str] = None,
init_config: bool = True,
) -> List["spack.compiler.Compiler"]:
def all_compilers_config(scope=None, init_config=True):
"""Return a set of specs for all the compiler versions currently
available to build with. These are instances of CompilerSpec.
"""
from_packages_yaml = get_compiler_config_from_packages(configuration, scope=scope)
from_packages_yaml = get_compiler_config_from_packages(scope)
if from_packages_yaml:
init_config = False
from_compilers_yaml = get_compiler_config(configuration, scope=scope, init_config=init_config)
from_compilers_yaml = get_compiler_config(scope, init_config)
result = from_compilers_yaml + from_packages_yaml
# Dedupe entries by the compiler they represent
# If the entry is invalid, treat it as unique for deduplication
key = lambda c: _compiler_from_config_entry(c["compiler"] or id(c))
key = lambda c: _compiler_from_config_entry(c["compiler"])
return list(llnl.util.lang.dedupe(result, key=key))
@@ -352,7 +332,7 @@ def all_compiler_specs(scope=None, init_config=True):
# Return compiler specs from the merged config.
return [
spack.spec.parse_with_version_concrete(s["compiler"]["spec"], compiler=True)
for s in all_compilers_config(spack.config.CONFIG, scope=scope, init_config=init_config)
for s in all_compilers_config(scope, init_config)
]
@@ -512,20 +492,11 @@ def find_specs_by_arch(compiler_spec, arch_spec, scope=None, init_config=True):
def all_compilers(scope=None, init_config=True):
return all_compilers_from(
configuration=spack.config.CONFIG, scope=scope, init_config=init_config
)
def all_compilers_from(configuration, scope=None, init_config=True):
compilers = []
for items in all_compilers_config(
configuration=configuration, scope=scope, init_config=init_config
):
config = all_compilers_config(scope, init_config=init_config)
compilers = list()
for items in config:
items = items["compiler"]
compiler = _compiler_from_config_entry(items) # can be None in error case
if compiler:
compilers.append(compiler)
compilers.append(_compiler_from_config_entry(items))
return compilers
@@ -536,7 +507,7 @@ def compilers_for_spec(
"""This gets all compilers that satisfy the supplied CompilerSpec.
Returns an empty list if none are found.
"""
config = all_compilers_config(spack.config.CONFIG, scope=scope, init_config=init_config)
config = all_compilers_config(scope, init_config)
matches = set(find(compiler_spec, scope, init_config))
compilers = []
@@ -546,7 +517,7 @@ def compilers_for_spec(
def compilers_for_arch(arch_spec, scope=None):
config = all_compilers_config(spack.config.CONFIG, scope=scope)
config = all_compilers_config(scope)
return list(get_compilers(config, arch_spec=arch_spec))
@@ -632,10 +603,7 @@ def _compiler_from_config_entry(items):
compiler = _compiler_cache.get(config_id, None)
if compiler is None:
try:
compiler = compiler_from_dict(items)
except UnknownCompilerError as e:
warnings.warn(e.message)
compiler = compiler_from_dict(items)
_compiler_cache[config_id] = compiler
return compiler
@@ -688,9 +656,7 @@ def get_compilers(config, cspec=None, arch_spec=None):
raise ValueError(msg)
continue
compiler = _compiler_from_config_entry(items)
if compiler:
compilers.append(compiler)
compilers.append(_compiler_from_config_entry(items))
return compilers
@@ -967,11 +933,10 @@ def _default_make_compilers(cmp_id, paths):
make_mixed_toolchain(flat_compilers)
# Finally, create the compiler list
compilers: List["spack.compiler.Compiler"] = []
compilers = []
for compiler_id, _, compiler in flat_compilers:
make_compilers = getattr(compiler_id.os, "make_compilers", _default_make_compilers)
candidates = make_compilers(compiler_id, compiler)
compilers.extend(x for x in candidates if x.cc is not None)
compilers.extend(make_compilers(compiler_id, compiler))
return compilers

View File

@@ -38,10 +38,10 @@ class Clang(Compiler):
cxx_names = ["clang++"]
# Subclasses use possible names of Fortran 77 compiler
f77_names = ["flang-new", "flang"]
f77_names = ["flang"]
# Subclasses use possible names of Fortran 90 compiler
fc_names = ["flang-new", "flang"]
fc_names = ["flang"]
version_argument = "--version"
@@ -171,11 +171,10 @@ def extract_version_from_output(cls, output):
match = re.search(
# Normal clang compiler versions are left as-is
r"(?:clang|flang-new) version ([^ )\n]+)-svn[~.\w\d-]*|"
r"clang version ([^ )\n]+)-svn[~.\w\d-]*|"
# Don't include hyphenated patch numbers in the version
# (see https://github.com/spack/spack/pull/14365 for details)
r"(?:clang|flang-new) version ([^ )\n]+?)-[~.\w\d-]*|"
r"(?:clang|flang-new) version ([^ )\n]+)",
r"clang version ([^ )\n]+?)-[~.\w\d-]*|" r"clang version ([^ )\n]+)",
output,
)
if match:

View File

@@ -0,0 +1,34 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import spack.compilers.oneapi
class Dpcpp(spack.compilers.oneapi.Oneapi):
"""This is the same as the oneAPI compiler but uses dpcpp instead of
icpx (for DPC++ source files). It explicitly refers to dpcpp, so that
CMake test files which check the compiler name (e.g. CMAKE_CXX_COMPILER)
detect it as dpcpp.
Ideally we could switch out icpx for dpcpp where needed in the oneAPI
compiler definition, but two things are needed for that: (a) a way to
tell the compiler that it should be using dpcpp and (b) a way to
customize the link_paths.
See also: https://www.intel.com/content/www/us/en/develop/documentation/oneapi-dpcpp-cpp-compiler-dev-guide-and-reference/top/compiler-setup/using-the-command-line/invoking-the-compiler.html
"""
# Subclasses use possible names of C++ compiler
cxx_names = ["dpcpp"]
# Named wrapper links within build_env_path
link_paths = {
"cc": os.path.join("oneapi", "icx"),
"cxx": os.path.join("oneapi", "dpcpp"),
"f77": os.path.join("oneapi", "ifx"),
"fc": os.path.join("oneapi", "ifx"),
}

View File

@@ -8,7 +8,7 @@
import subprocess
import sys
import tempfile
from typing import Dict, List
from typing import Dict, List, Set
import archspec.cpu
@@ -20,7 +20,15 @@
from spack.error import SpackError
from spack.version import Version, VersionRange
FC_PATH: Dict[str, str] = dict()
avail_fc_version: Set[str] = set()
fc_path: Dict[str, str] = dict()
fortran_mapping = {
"2021.3.0": "19.29.30133",
"2021.2.1": "19.28.29913",
"2021.2.0": "19.28.29334",
"2021.1.0": "19.28.29333",
}
class CmdCall:
@@ -107,13 +115,15 @@ def command_str(self):
return f"{script} {self.arch} {self.sdk_ver} {self.vcvars_ver}"
def get_valid_fortran_pth():
"""Assign maximum available fortran compiler version"""
# TODO (johnwparent): validate compatibility w/ try compiler
# functionality when added
def get_valid_fortran_pth(comp_ver):
cl_ver = str(comp_ver)
sort_fn = lambda fc_ver: Version(fc_ver)
sort_fc_ver = sorted(list(FC_PATH.keys()), key=sort_fn)
return FC_PATH[sort_fc_ver[-1]] if sort_fc_ver else None
sort_fc_ver = sorted(list(avail_fc_version), key=sort_fn)
for ver in sort_fc_ver:
if ver in fortran_mapping:
if Version(cl_ver) <= Version(fortran_mapping[ver]):
return fc_path[ver]
return None
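Both the old and new get_valid_fortran_pth sort version strings through a Version key rather than lexicographically, because as plain strings "2021.10.0" sorts before "2021.2.0". A stdlib-only illustration of why the key matters:

versions = ["2021.2.0", "2021.10.0", "2021.3.0"]
print(sorted(versions))
# ['2021.10.0', '2021.2.0', '2021.3.0']  -- string order, wrong
print(sorted(versions, key=lambda v: tuple(map(int, v.split(".")))))
# ['2021.2.0', '2021.3.0', '2021.10.0']  -- numeric order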
class Msvc(Compiler):
@@ -157,9 +167,11 @@ def __init__(self, *args, **kwargs):
# This positional argument "paths" is later parsed and process by the base class
# via the call to `super` later in this method
paths = args[3]
latest_fc = get_valid_fortran_pth()
new_pth = [pth if pth else latest_fc for pth in paths[2:]]
paths[2:] = new_pth
# This positional argument "cspec" is also parsed and handled by the base class
# constructor
cspec = args[0]
new_pth = [pth if pth else get_valid_fortran_pth(cspec.version) for pth in paths]
paths[:] = new_pth
# Initialize, deferring to base class but then adding the vcvarsallfile
# file based on compiler executable path.
super().__init__(*args, **kwargs)
@@ -171,7 +183,7 @@ def __init__(self, *args, **kwargs):
# and stores their path, but their respective VCVARS
# file must be invoked before usage.
env_cmds = []
compiler_root = os.path.join(os.path.dirname(self.cc), "../../../../../..")
compiler_root = os.path.join(self.cc, "../../../../../../..")
vcvars_script_path = os.path.join(compiler_root, "Auxiliary", "Build", "vcvars64.bat")
# get current platform architecture and format for vcvars argument
arch = spack.platforms.real_host().default.lower()
@@ -186,34 +198,11 @@ def __init__(self, *args, **kwargs):
# paths[2] refers to the fc path and is a generic check
# for a fortran compiler
if paths[2]:
def get_oneapi_root(pth: str):
"""From within a prefix known to be a oneAPI path
determine the oneAPI root path from arbitrary point
under root
Args:
pth: path prefixed within oneAPI root
"""
if not pth:
return ""
while os.path.basename(pth) and os.path.basename(pth) != "oneAPI":
pth = os.path.dirname(pth)
return pth
# If this found, it sets all the vars
oneapi_root = get_oneapi_root(self.fc)
if not oneapi_root:
raise RuntimeError(f"Non-oneAPI Fortran compiler {self.fc} assigned to MSVC")
oneapi_root = os.getenv("ONEAPI_ROOT")
oneapi_root_setvars = os.path.join(oneapi_root, "setvars.bat")
# some oneAPI exes return a version more precise than their
# install paths specify, so we determine path from
# the install path rather than the fc executable itself
numver = r"\d+\.\d+(?:\.\d+)?"
pattern = f"((?:{numver})|(?:latest))"
version_from_path = re.search(pattern, self.fc).group(1)
oneapi_version_setvars = os.path.join(
oneapi_root, "compiler", version_from_path, "env", "vars.bat"
oneapi_root, "compiler", str(self.ifx_version), "env", "vars.bat"
)
# order matters here, the specific version env must be invoked first,
# otherwise it will be ignored if the root setvars sets up the oneapi
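get_oneapi_root climbs with os.path.dirname until it reaches a component named oneAPI; the os.path.basename guard stops the loop at the filesystem root, where basename becomes empty. For instance (path invented):

import os

path = "/opt/intel/oneAPI/compiler/2024.0/bin/ifx"
while os.path.basename(path) and os.path.basename(path) != "oneAPI":
    path = os.path.dirname(path)
print(path)  # /opt/intel/oneAPI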
@@ -325,19 +314,23 @@ def setup_custom_environment(self, pkg, env):
@classmethod
def fc_version(cls, fc):
# We're using intel for the Fortran compilers, which exist if
# ONEAPI_ROOT is a meaningful variable
if not sys.platform == "win32":
return "unknown"
fc_ver = cls.default_version(fc)
FC_PATH[fc_ver] = fc
try:
sps = spack.operating_systems.windows_os.WindowsOs().compiler_search_paths
except AttributeError:
raise SpackError(
"Windows compiler search paths not established, "
"please report this behavior to github.com/spack/spack"
)
clp = spack.util.executable.which_string("cl", path=sps)
return cls.default_version(clp) if clp else fc_ver
avail_fc_version.add(fc_ver)
fc_path[fc_ver] = fc
if os.getenv("ONEAPI_ROOT"):
try:
sps = spack.operating_systems.windows_os.WindowsOs().compiler_search_paths
except AttributeError:
raise SpackError("Windows compiler search paths not established")
clp = spack.util.executable.which_string("cl", path=sps)
ver = cls.default_version(clp)
else:
ver = fc_ver
return ver
@classmethod
def f77_version(cls, f77):

View File

@@ -64,7 +64,7 @@ def verbose_flag(self):
#
# This way, we at least enable the implicit rpath detection, which is
# based on compilation of a C file (see method
# spack.compiler._compile_dummy_c_source): in the case of a mixed
# spack.compiler._get_compiler_link_paths): in the case of a mixed
# NAG/GCC toolchain, the flag will be passed to g++ (e.g.
# 'g++ -Wl,-v ./main.c'), otherwise, the flag will be passed to nagfor
# (e.g. 'nagfor -Wl,-v ./main.c' - note that nagfor recognizes '.c'

View File

@@ -1562,9 +1562,8 @@ def ensure_latest_format_fn(section: str) -> Callable[[YamlConfigDict], bool]:
def use_configuration(
*scopes_or_paths: Union[ConfigScope, str]
) -> Generator[Configuration, None, None]:
"""Use the configuration scopes passed as arguments within the context manager.
This function invalidates caches, and is therefore very slow.
"""Use the configuration scopes passed as arguments within the
context manager.
Args:
*scopes_or_paths: scope objects or paths to be used

View File

@@ -12,31 +12,9 @@
},
"os_package_manager": "yum_amazon"
},
"fedora:40": {
"bootstrap": {
"template": "container/fedora.dockerfile",
"image": "docker.io/fedora:40"
},
"os_package_manager": "dnf",
"build": "spack/fedora40",
"final": {
"image": "docker.io/fedora:40"
}
},
"fedora:39": {
"bootstrap": {
"template": "container/fedora.dockerfile",
"image": "docker.io/fedora:39"
},
"os_package_manager": "dnf",
"build": "spack/fedora39",
"final": {
"image": "docker.io/fedora:39"
}
},
"fedora:38": {
"bootstrap": {
"template": "container/fedora.dockerfile",
"template": "container/fedora_38.dockerfile",
"image": "docker.io/fedora:38"
},
"os_package_manager": "dnf",
@@ -47,7 +25,7 @@
},
"fedora:37": {
"bootstrap": {
"template": "container/fedora.dockerfile",
"template": "container/fedora_37.dockerfile",
"image": "docker.io/fedora:37"
},
"os_package_manager": "dnf",

View File

@@ -25,7 +25,6 @@
import socket
import sys
import time
from json import JSONDecoder
from typing import (
Any,
Callable,
@@ -819,8 +818,7 @@ def _read_from_file(self, filename):
"""
try:
with open(filename, "r") as f:
# In the future we may use a stream of JSON objects, hence `raw_decode` for compat.
fdata, _ = JSONDecoder().raw_decode(f.read())
fdata = sjson.load(f)
except Exception as e:
raise CorruptDatabaseError("error parsing database:", str(e)) from e
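raw_decode parses one JSON value off the front of the text and also returns the index where it stopped, so a database file that later becomes a stream of concatenated objects still reads cleanly:

from json import JSONDecoder

text = '{"database": {"version": "7"}}{"future": "object"}'
fdata, end = JSONDecoder().raw_decode(text)
print(fdata)       # {'database': {'version': '7'}}
print(text[end:])  # {"future": "object"}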
@@ -835,24 +833,27 @@ def check(cond, msg):
# High-level file checks
db = fdata["database"]
check("installs" in db, "no 'installs' in JSON DB.")
check("version" in db, "no 'version' in JSON DB.")
installs = db["installs"]
# TODO: better version checking semantics.
version = vn.Version(db["version"])
if version > _DB_VERSION:
raise InvalidDatabaseVersionError(self, _DB_VERSION, version)
elif version < _DB_VERSION and not any(
old == version and new == _DB_VERSION for old, new in _SKIP_REINDEX
):
tty.warn(f"Spack database version changed from {version} to {_DB_VERSION}. Upgrading.")
elif version < _DB_VERSION:
if not any(old == version and new == _DB_VERSION for old, new in _SKIP_REINDEX):
tty.warn(
"Spack database version changed from %s to %s. Upgrading."
% (version, _DB_VERSION)
)
self.reindex(spack.store.STORE.layout)
installs = dict(
(k, v.to_dict(include_fields=self._record_fields)) for k, v in self._data.items()
)
else:
check("installs" in db, "no 'installs' in JSON DB.")
installs = db["installs"]
self.reindex(spack.store.STORE.layout)
installs = dict(
(k, v.to_dict(include_fields=self._record_fields))
for k, v in self._data.items()
)
spec_reader = reader(version)

View File

@@ -83,15 +83,26 @@ def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
return path_to_dict(search_paths)
def get_elf_compat(path):
"""For ELF files, get a triplet (EI_CLASS, EI_DATA, e_machine) and see if
it is host-compatible."""
# On platforms supporting ELF, we try to be a bit smarter when it comes to shared
# libraries, by dropping those that are not host compatible.
with open(path, "rb") as f:
elf = elf_utils.parse_elf(f, only_header=True)
return (elf.is_64_bit, elf.is_little_endian, elf.elf_hdr.e_machine)
def accept_elf(path, host_compat):
"""Accept an ELF file if the header matches the given compat triplet. In case it's not an ELF
(e.g. static library, or some arbitrary file, fall back to is_readable_file)."""
"""Accept an ELF file if the header matches the given compat triplet,
obtained with :py:func:`get_elf_compat`. In case it's not an ELF (e.g.
static library, or some arbitrary file, fall back to is_readable_file)."""
# Fast path: assume libraries at least have .so in their basename.
# Note: don't replace with splitext, because of libsmth.so.1.2.3 file names.
if ".so" not in os.path.basename(path):
return llnl.util.filesystem.is_readable_file(path)
try:
return host_compat == elf_utils.get_elf_compat(path)
return host_compat == get_elf_compat(path)
except (OSError, elf_utils.ElfParsingError):
return llnl.util.filesystem.is_readable_file(path)
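The fast path keeps the ".so in basename" test instead of os.path.splitext because versioned sonames carry the extension mid-name, and splitext would report ".1" for them:

import os

for name in ("libz.so", "libz.so.1.3.1", "libz.a"):
    print(name, os.path.splitext(name)[1], ".so" in os.path.basename(name))
# libz.so .so True
# libz.so.1.3.1 .1 True
# libz.a .a False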
@@ -144,7 +155,7 @@ def libraries_in_ld_and_system_library_path(
search_paths = list(llnl.util.lang.dedupe(search_paths, key=file_identifier))
try:
host_compat = elf_utils.get_elf_compat(sys.executable)
host_compat = get_elf_compat(sys.executable)
accept = lambda path: accept_elf(path, host_compat)
except (OSError, elf_utils.ElfParsingError):
accept = llnl.util.filesystem.is_readable_file

View File

@@ -11,7 +11,6 @@
from llnl.util import filesystem
import spack.platforms
import spack.repo
import spack.spec
from spack.util import spack_yaml
@@ -33,8 +32,6 @@ class ExpectedTestResult(NamedTuple):
#: Spec to be detected
spec: str
#: Attributes expected in the external spec
extra_attributes: Dict[str, str]
class DetectionTest(NamedTuple):
@@ -103,10 +100,7 @@ def _create_executable_scripts(self, mock_executables: MockExecutables) -> List[
@property
def expected_specs(self) -> List[spack.spec.Spec]:
return [
spack.spec.Spec.from_detection(item.spec, extra_attributes=item.extra_attributes)
for item in self.test.results
]
return [spack.spec.Spec(r.spec) for r in self.test.results]
def detection_tests(pkg_name: str, repository: spack.repo.RepoPath) -> List[Runner]:
@@ -123,13 +117,9 @@ def detection_tests(pkg_name: str, repository: spack.repo.RepoPath) -> List[Runn
"""
result = []
detection_tests_content = read_detection_tests(pkg_name, repository)
current_platform = str(spack.platforms.host())
tests_by_path = detection_tests_content.get("paths", [])
for single_test_data in tests_by_path:
if current_platform not in single_test_data.get("platforms", [current_platform]):
continue
mock_executables = []
for layout in single_test_data["layout"]:
mock_executables.append(
@@ -137,11 +127,7 @@ def detection_tests(pkg_name: str, repository: spack.repo.RepoPath) -> List[Runn
)
expected_results = []
for assertion in single_test_data["results"]:
expected_results.append(
ExpectedTestResult(
spec=assertion["spec"], extra_attributes=assertion.get("extra_attributes", {})
)
)
expected_results.append(ExpectedTestResult(spec=assertion["spec"]))
current_test = DetectionTest(
pkg_name=pkg_name, layout=mock_executables, results=expected_results

View File

@@ -27,7 +27,6 @@ class OpenMpi(Package):
* ``variant``
* ``version``
* ``requires``
* ``redistribute``
"""
import collections
@@ -64,7 +63,6 @@ class OpenMpi(Package):
__all__ = [
"DirectiveError",
"DirectiveMeta",
"DisableRedistribute",
"version",
"conflicts",
"depends_on",
@@ -77,7 +75,6 @@ class OpenMpi(Package):
"resource",
"build_system",
"requires",
"redistribute",
]
#: These are variant names used by Spack internally; packages can't use them
@@ -601,64 +598,6 @@ def _execute_depends_on(pkg: "spack.package_base.PackageBase"):
return _execute_depends_on
#: Store whether a given Spec source/binary should not be redistributed.
class DisableRedistribute:
def __init__(self, source, binary):
self.source = source
self.binary = binary
@directive("disable_redistribute")
def redistribute(source=None, binary=None, when: WhenType = None):
"""Can be used inside a Package definition to declare that
the package source and/or compiled binaries should not be
redistributed.
By default, Packages allow source/binary distribution (i.e. in
mirrors). Because of this, and because overlapping enable/
disable specs are not allowed, this directive only allows users
to explicitly disable redistribution for specs.
"""
return lambda pkg: _execute_redistribute(pkg, source, binary, when)
def _execute_redistribute(
pkg: "spack.package_base.PackageBase", source=None, binary=None, when: WhenType = None
):
if source is None and binary is None:
return
elif (source is True) or (binary is True):
raise DirectiveError(
"Source/binary distribution are true by default, they can only "
"be explicitly disabled."
)
if source is None:
source = True
if binary is None:
binary = True
when_spec = _make_when_spec(when)
if not when_spec:
return
if source is False:
max_constraint = spack.spec.Spec(f"{pkg.name}@{when_spec.versions}")
if not max_constraint.satisfies(when_spec):
raise DirectiveError("Source distribution can only be disabled for versions")
if when_spec in pkg.disable_redistribute:
disable = pkg.disable_redistribute[when_spec]
if not source:
disable.source = True
if not binary:
disable.binary = True
else:
pkg.disable_redistribute[when_spec] = DisableRedistribute(
source=not source, binary=not binary
)
@directive(("extendees", "dependencies"))
def extends(spec, when=None, type=("build", "run"), patches=None):
"""Same as depends_on, but also adds this package to the extendee list.

View File

@@ -106,16 +106,17 @@ def environment_name(path: Union[str, pathlib.Path]) -> str:
return path_str
def ensure_no_disallowed_env_config_mods(scopes: List[spack.config.ConfigScope]) -> None:
def check_disallowed_env_config_mods(scopes):
for scope in scopes:
config = scope.get_section("config")
if config and "environments_root" in config["config"]:
raise SpackEnvironmentError(
"Spack environments are prohibited from modifying 'config:environments_root' "
"because it can make the definition of the environment ill-posed. Please "
"remove from your environment and place it in a permanent scope such as "
"defaults, system, site, etc."
)
with spack.config.use_configuration(scope):
if spack.config.get("config:environments_root"):
raise SpackEnvironmentError(
"Spack environments are prohibited from modifying 'config:environments_root' "
"because it can make the definition of the environment ill-posed. Please "
"remove from your environment and place it in a permanent scope such as "
"defaults, system, site, etc."
)
return scopes
def default_manifest_yaml():
@@ -1426,7 +1427,7 @@ def _concretize_separately(self, tests=False):
# Ensure we have compilers in compilers.yaml so that concurrent
# processes do not try to write the config file in parallel
_ = spack.compilers.get_compiler_config(spack.config.CONFIG, init_config=True)
_ = spack.compilers.get_compiler_config(init_config=True)
# Early return if there is nothing to do
if len(args) == 0:
@@ -2462,10 +2463,6 @@ def __init__(self, manifest_dir: Union[pathlib.Path, str]) -> None:
self.scope_name = f"env:{environment_name(self.manifest_dir)}"
self.config_stage_dir = os.path.join(env_subdir_path(manifest_dir), "config")
#: Configuration scopes associated with this environment. Note that these are not
#: invalidated by a re-read of the manifest file.
self._config_scopes: Optional[List[spack.config.ConfigScope]] = None
if not self.manifest_file.exists():
msg = f"cannot find '{manifest_name}' in {self.manifest_dir}"
raise SpackEnvironmentError(msg)
@@ -2811,19 +2808,16 @@ def included_config_scopes(self) -> List[spack.config.ConfigScope]:
@property
def env_config_scopes(self) -> List[spack.config.ConfigScope]:
"""A list of all configuration scopes for the environment manifest. On the first call this
instantiates all the scopes, on subsequent calls it returns the cached list."""
if self._config_scopes is not None:
return self._config_scopes
scopes: List[spack.config.ConfigScope] = [
*self.included_config_scopes,
spack.config.SingleFileScope(
self.scope_name, str(self.manifest_file), spack.schema.env.schema, [TOP_LEVEL_KEY]
),
]
ensure_no_disallowed_env_config_mods(scopes)
self._config_scopes = scopes
return scopes
"""A list of all configuration scopes for the environment manifest.
Returns: All configuration scopes associated with the environment
"""
config_name = self.scope_name
env_scope = spack.config.SingleFileScope(
config_name, str(self.manifest_file), spack.schema.env.schema, [TOP_LEVEL_KEY]
)
return check_disallowed_env_config_mods(self.included_config_scopes + [env_scope])
def prepare_config_scope(self) -> None:
"""Add the manifest's scopes to the global configuration search path."""

View File

@@ -662,6 +662,9 @@ def add_specs(self, *specs: spack.spec.Spec) -> None:
return
# Drop externals
for s in specs:
if s.external:
tty.warn("Skipping external package: " + s.short_spec)
specs = [s for s in specs if not s.external]
self._sanity_check_view_projection(specs)

View File

@@ -1,27 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import llnl.util.tty as tty
import spack.binary_distribution as bindist
import spack.mirror
def post_install(spec, explicit):
# Push package to all buildcaches with autopush==True
# Do nothing if package was not installed from source
pkg = spec.package
if pkg.installed_from_binary_cache:
return
# Push the package to all autopush mirrors
for mirror in spack.mirror.MirrorCollection(binary=True, autopush=True).values():
bindist.push_or_raise(
spec,
mirror.push_url,
bindist.PushOptions(force=True, regenerate_index=False, unsigned=not mirror.signed),
)
tty.msg(f"{spec.name}: Pushed to build cache: '{mirror.name}'")

View File

@@ -119,7 +119,7 @@ def __init__(self, pkg_count: int):
self.pkg_ids: Set[str] = set()
def next_pkg(self, pkg: "spack.package_base.PackageBase"):
pkg_id = package_id(pkg.spec)
pkg_id = package_id(pkg)
if pkg_id not in self.pkg_ids:
self.pkg_num += 1
@@ -221,12 +221,12 @@ def _handle_external_and_upstream(pkg: "spack.package_base.PackageBase", explici
# consists in module file generation and registration in the DB.
if pkg.spec.external:
_process_external_package(pkg, explicit)
_print_installed_pkg(f"{pkg.prefix} (external {package_id(pkg.spec)})")
_print_installed_pkg(f"{pkg.prefix} (external {package_id(pkg)})")
return True
if pkg.spec.installed_upstream:
tty.verbose(
f"{package_id(pkg.spec)} is installed in an upstream Spack instance at "
f"{package_id(pkg)} is installed in an upstream Spack instance at "
f"{pkg.spec.prefix}"
)
_print_installed_pkg(pkg.prefix)
@@ -403,7 +403,7 @@ def _install_from_cache(
return False
t.stop()
pkg_id = package_id(pkg.spec)
pkg_id = package_id(pkg)
tty.debug(f"Successfully extracted {pkg_id} from binary cache")
_write_timer_json(pkg, t, True)
@@ -484,14 +484,11 @@ def _process_binary_cache_tarball(
if download_result is None:
return False
tty.msg(f"Extracting {package_id(pkg.spec)} from binary cache")
tty.msg(f"Extracting {package_id(pkg)} from binary cache")
with timer.measure("install"), spack.util.path.filter_padding():
binary_distribution.extract_tarball(pkg.spec, download_result, force=False, timer=timer)
if hasattr(pkg, "_post_buildcache_install_hook"):
pkg._post_buildcache_install_hook()
pkg.installed_from_binary_cache = True
spack.store.STORE.db.add(pkg.spec, spack.store.STORE.layout, explicit=explicit)
return True
@@ -516,7 +513,7 @@ def _try_install_from_binary_cache(
if not spack.mirror.MirrorCollection(binary=True):
return False
tty.debug(f"Searching for binary cache of {package_id(pkg.spec)}")
tty.debug(f"Searching for binary cache of {package_id(pkg)}")
with timer.measure("search"):
matches = binary_distribution.get_mirrors_for_spec(pkg.spec, index_only=True)
@@ -613,7 +610,7 @@ def get_dependent_ids(spec: "spack.spec.Spec") -> List[str]:
Returns: list of package ids
"""
return [package_id(d) for d in spec.dependents()]
return [package_id(d.package) for d in spec.dependents()]
def install_msg(name: str, pid: int, install_status: InstallStatus) -> str:
@@ -723,7 +720,7 @@ def log(pkg: "spack.package_base.PackageBase") -> None:
dump_packages(pkg.spec, packages_dir)
def package_id(spec: "spack.spec.Spec") -> str:
def package_id(pkg: "spack.package_base.PackageBase") -> str:
"""A "unique" package identifier for installation purposes
The identifier is used to track build tasks, locks, install, and
@@ -735,10 +732,10 @@ def package_id(spec: "spack.spec.Spec") -> str:
Args:
pkg: the package from which the identifier is derived
"""
if not spec.concrete:
if not pkg.spec.concrete:
raise ValueError("Cannot provide a unique, readable id when the spec is not concretized.")
return f"{spec.name}-{spec.version}-{spec.dag_hash()}"
return f"{pkg.name}-{pkg.version}-{pkg.spec.dag_hash()}"
class BuildRequest:
@@ -768,7 +765,7 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
self.pkg.last_phase = install_args.pop("stop_at", None) # type: ignore[attr-defined]
# Cache the package id for convenience
self.pkg_id = package_id(pkg.spec)
self.pkg_id = package_id(pkg)
# Save off the original install arguments plus standard defaults
# since they apply to the requested package *and* dependencies.
@@ -783,9 +780,9 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
# are not able to return full dependents for all packages across
# environment specs.
self.dependencies = set(
package_id(d)
package_id(d.package)
for d in self.pkg.spec.dependencies(deptype=self.get_depflags(self.pkg))
if package_id(d) != self.pkg_id
if package_id(d.package) != self.pkg_id
)
def __repr__(self) -> str:
@@ -835,7 +832,7 @@ def get_depflags(self, pkg: "spack.package_base.PackageBase") -> int:
depflag = dt.LINK | dt.RUN
include_build_deps = self.install_args.get("include_build_deps")
if self.pkg_id == package_id(pkg.spec):
if self.pkg_id == package_id(pkg):
cache_only = self.install_args.get("package_cache_only")
else:
cache_only = self.install_args.get("dependencies_cache_only")
@@ -930,7 +927,7 @@ def __init__(
raise ValueError(f"{self.pkg.name} must have a concrete spec")
# The "unique" identifier for the task's package
self.pkg_id = package_id(self.pkg.spec)
self.pkg_id = package_id(self.pkg)
# The explicit build request associated with the package
if not isinstance(request, BuildRequest):
@@ -968,9 +965,9 @@ def __init__(
# if use traverse for transitive dependencies, then must remove
# transitive dependents on failure.
self.dependencies = set(
package_id(d)
package_id(d.package)
for d in self.pkg.spec.dependencies(deptype=self.request.get_depflags(self.pkg))
if package_id(d) != self.pkg_id
if package_id(d.package) != self.pkg_id
)
# Handle bootstrapped compiler
@@ -979,18 +976,14 @@ def __init__(
# a dependency of the build task. Here we add it to self.dependencies
compiler_spec = self.pkg.spec.compiler
arch_spec = self.pkg.spec.architecture
strict = spack.concretize.Concretizer().check_for_compiler_existence
if (
not spack.compilers.compilers_for_spec(compiler_spec, arch_spec=arch_spec)
and not strict
):
if not spack.compilers.compilers_for_spec(compiler_spec, arch_spec=arch_spec):
# The compiler is in the queue, identify it as dependency
dep = spack.compilers.pkg_spec_for_compiler(compiler_spec)
dep.constrain(f"platform={str(arch_spec.platform)}")
dep.constrain(f"os={str(arch_spec.os)}")
dep.constrain(f"target={arch_spec.target.microarchitecture.family.name}:")
dep.concretize()
dep_id = package_id(dep)
dep_id = package_id(dep.package)
self.dependencies.add(dep_id)
# List of uninstalled dependencies, which is used to establish
@@ -1201,7 +1194,7 @@ def _add_bootstrap_compilers(
"""
packages = _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs)
for comp_pkg, is_compiler in packages:
pkgid = package_id(comp_pkg.spec)
pkgid = package_id(comp_pkg)
if pkgid not in self.build_tasks:
self._add_init_task(comp_pkg, request, is_compiler, all_deps)
elif is_compiler:
@@ -1248,7 +1241,7 @@ def _add_init_task(
"""
task = BuildTask(pkg, request, is_compiler, 0, 0, STATUS_ADDED, self.installed)
for dep_id in task.dependencies:
all_deps[dep_id].add(package_id(pkg.spec))
all_deps[dep_id].add(package_id(pkg))
self._push_task(task)
@@ -1283,7 +1276,7 @@ def _check_deps_status(self, request: BuildRequest) -> None:
err = "Cannot proceed with {0}: {1}"
for dep in request.traverse_dependencies():
dep_pkg = dep.package
dep_id = package_id(dep)
dep_id = package_id(dep_pkg)
# Check for failure since a prefix lock is not required
if spack.store.STORE.failure_tracker.has_failed(dep):
@@ -1416,7 +1409,7 @@ def _cleanup_task(self, pkg: "spack.package_base.PackageBase") -> None:
Args:
pkg: the package being installed
"""
self._remove_task(package_id(pkg.spec))
self._remove_task(package_id(pkg))
# Ensure we have a read lock to prevent others from uninstalling the
# spec during our installation.
@@ -1430,7 +1423,7 @@ def _ensure_install_ready(self, pkg: "spack.package_base.PackageBase") -> None:
Args:
pkg: the package being locally installed
"""
pkg_id = package_id(pkg.spec)
pkg_id = package_id(pkg)
pre = f"{pkg_id} cannot be installed locally:"
# External packages cannot be installed locally.
@@ -1472,7 +1465,7 @@ def _ensure_locked(
"write",
], f'"{lock_type}" is not a supported package management lock type'
pkg_id = package_id(pkg.spec)
pkg_id = package_id(pkg)
ltype, lock = self.locks.get(pkg_id, (lock_type, None))
if lock and ltype == lock_type:
return ltype, lock
@@ -1608,7 +1601,7 @@ def _add_tasks(self, request: BuildRequest, all_deps):
for dep in request.traverse_dependencies():
dep_pkg = dep.package
dep_id = package_id(dep)
dep_id = package_id(dep_pkg)
if dep_id not in self.build_tasks:
self._add_init_task(dep_pkg, request, False, all_deps)
@@ -1920,7 +1913,7 @@ def _flag_installed(
dependent_ids: set of the package's dependent ids, or None if the dependent ids are
limited to those maintained in the package (dependency DAG)
"""
pkg_id = package_id(pkg.spec)
pkg_id = package_id(pkg)
if pkg_id in self.installed:
# Already determined the package has been installed
@@ -2316,7 +2309,7 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
# info/debug information
self.pre = _log_prefix(pkg.name)
self.pkg_id = package_id(pkg.spec)
self.pkg_id = package_id(pkg)
def run(self) -> bool:
"""Main entry point from ``build_process`` to kick off install in child."""

View File

@@ -137,12 +137,6 @@ def source(self):
def signed(self) -> bool:
return isinstance(self._data, str) or self._data.get("signed", True)
@property
def autopush(self) -> bool:
if isinstance(self._data, str):
return False
return self._data.get("autopush", False)
@property
def fetch_url(self):
"""Get the valid, canonicalized fetch URL"""
@@ -156,7 +150,7 @@ def push_url(self):
def _update_connection_dict(self, current_data: dict, new_data: dict, top_level: bool):
keys = ["url", "access_pair", "access_token", "profile", "endpoint_url"]
if top_level:
keys += ["binary", "source", "signed", "autopush"]
keys += ["binary", "source", "signed"]
changed = False
for key in keys:
if key in new_data and current_data.get(key) != new_data[key]:
@@ -292,7 +286,6 @@ def __init__(
scope=None,
binary: Optional[bool] = None,
source: Optional[bool] = None,
autopush: Optional[bool] = None,
):
"""Initialize a mirror collection.
@@ -304,27 +297,21 @@ def __init__(
If None, do not filter on binary mirrors.
source: If True, only include source mirrors.
If False, omit source mirrors.
If None, do not filter on source mirrors.
autopush: If True, only include mirrors that have autopush enabled.
If False, omit mirrors that have autopush enabled.
If None, do not filter on autopush."""
mirrors_data = (
mirrors.items()
if mirrors is not None
else spack.config.get("mirrors", scope=scope).items()
)
mirrors = (Mirror(data=mirror, name=name) for name, mirror in mirrors_data)
If None, do not filter on source mirrors."""
self._mirrors = {
name: Mirror(data=mirror, name=name)
for name, mirror in (
mirrors.items()
if mirrors is not None
else spack.config.get("mirrors", scope=scope).items()
)
}
def _filter(m: Mirror):
if source is not None and m.source != source:
return False
if binary is not None and m.binary != binary:
return False
if autopush is not None and m.autopush != autopush:
return False
return True
if source is not None:
self._mirrors = {k: v for k, v in self._mirrors.items() if v.source == source}
self._mirrors = {m.name: m for m in mirrors if _filter(m)}
if binary is not None:
self._mirrors = {k: v for k, v in self._mirrors.items() if v.binary == binary}
def __eq__(self, other):
return self._mirrors == other._mirrors
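A usage sketch for the predicate-based filtering above, assuming the constructor signature shown in this hunk and a Spack session with mirrors configured:

```python
# hypothetical usage of the filters added above
import spack.mirror

# only binary mirrors with autopush enabled (e.g. push-after-install targets)
autopush_mirrors = spack.mirror.MirrorCollection(binary=True, autopush=True)

# all source mirrors, regardless of autopush
source_mirrors = spack.mirror.MirrorCollection(source=True)
```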

View File

@@ -83,17 +83,6 @@ def configuration(module_set_name):
)
_FORMAT_STRING_RE = re.compile(r"({[^}]*})")
def _format_env_var_name(spec, var_name_fmt):
"""Format the variable name, but uppercase any formatted fields."""
fmt_parts = _FORMAT_STRING_RE.split(var_name_fmt)
return "".join(
spec.format(part).upper() if _FORMAT_STRING_RE.match(part) else part for part in fmt_parts
)
def _check_tokens_are_valid(format_string, message):
"""Checks that the tokens used in the format string are valid in
the context of module file and environment variable naming.
@@ -748,12 +737,20 @@ def environment_modifications(self):
exclude = self.conf.exclude_env_vars
# We may have tokens to substitute in environment commands
# Prepare a suitable transformation dictionary for the names
# of the environment variables, i.e., uppercase the valid
# tokens.
transform = {}
for token in _valid_tokens:
transform[token] = lambda s, string: str.upper(string)
for x in env:
# Ensure all the tokens are valid in this context
msg = "some tokens cannot be expanded in an environment variable name"
_check_tokens_are_valid(x.name, message=msg)
x.name = _format_env_var_name(self.spec, x.name)
# Transform them
x.name = self.spec.format(x.name, transform=transform)
if self.modification_needs_formatting(x):
try:
# Not every command has a value
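A worked sketch of the per-token uppercasing that `_format_env_var_name` performs in place of the old `transform` dictionary; the spec is replaced by a stand-in formatter so the example runs on its own:

```python
import re

_FORMAT_STRING_RE = re.compile(r"({[^}]*})")

def format_env_var_name(fmt, var_name_fmt):
    # uppercase only the expanded {token} fields; literal text is untouched
    parts = _FORMAT_STRING_RE.split(var_name_fmt)
    return "".join(fmt(p).upper() if _FORMAT_STRING_RE.match(p) else p for p in parts)

# stand-in for Spec.format on a hypothetical foo@1.2 spec
fake_format = lambda s: s.replace("{name}", "foo").replace("{version}", "1.2")
assert format_env_var_name(fake_format, "{name}_{version}_root") == "FOO_1.2_root"
```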

View File

@@ -73,24 +73,17 @@ def vs_install_paths(self):
def msvc_paths(self):
return [os.path.join(path, "VC", "Tools", "MSVC") for path in self.vs_install_paths]
@property
def oneapi_root(self):
root = os.environ.get("ONEAPI_ROOT", "") or os.path.join(
os.environ.get("ProgramFiles(x86)", ""), "Intel", "oneAPI"
)
if os.path.exists(root):
return root
@property
def compiler_search_paths(self):
# First Strategy: Find MSVC directories using vswhere
_compiler_search_paths = []
for p in self.msvc_paths:
_compiler_search_paths.extend(glob.glob(os.path.join(p, "*", "bin", "Hostx64", "x64")))
oneapi_root = self.oneapi_root
if oneapi_root:
if os.getenv("ONEAPI_ROOT"):
_compiler_search_paths.extend(
glob.glob(os.path.join(oneapi_root, "compiler", "**", "bin"), recursive=True)
glob.glob(
os.path.join(str(os.getenv("ONEAPI_ROOT")), "compiler", "*", "windows", "bin")
)
)
# Second strategy: Find MSVC via the registry

View File

@@ -468,41 +468,7 @@ def _names(when_indexed_dictionary):
return sorted(all_names)
class RedistributionMixin:
"""Logic for determining whether a Package is source/binary
redistributable.
"""
#: Store whether a given Spec source/binary should not be
#: redistributed.
disable_redistribute: Dict["spack.spec.Spec", "spack.directives.DisableRedistribute"]
# Source redistribution must be determined before concretization
# (because source mirrors work with un-concretized Specs).
@classmethod
def redistribute_source(cls, spec):
"""Whether it should be possible to add the source of this
package to a Spack mirror.
"""
for when_spec, disable_redistribute in cls.disable_redistribute.items():
if disable_redistribute.source and spec.satisfies(when_spec):
return False
return True
@property
def redistribute_binary(self):
"""Whether it should be possible to create a binary out of an
installed instance of this package.
"""
for when_spec, disable_redistribute in self.__class__.disable_redistribute.items():
if disable_redistribute.binary and self.spec.satisfies(when_spec):
return False
return True
class PackageBase(WindowsRPath, PackageViewMixin, RedistributionMixin, metaclass=PackageMeta):
class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
"""This is the superclass for all spack packages.
***The Package class***
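A sketch of how a package recipe might opt out of redistribution under the mixin above; the `redistribute()` directive name and its keyword arguments are inferred from the `disable_redistribute` mapping and should be treated as assumptions:

```python
from spack.package import *  # hypothetical package recipe

class MyProprietaryApp(Package):
    """Illustration only: directive names are assumed."""

    homepage = "https://example.com"
    url = "https://example.com/app-1.0.tar.gz"

    version("1.0", sha256="0" * 64)  # placeholder checksum

    redistribute(binary=False)               # never push binaries to build caches
    redistribute(source=False, when="@1.0")  # block source mirroring for 1.0 only
```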

View File

@@ -726,14 +726,14 @@ def first_repo(self):
"""Get the first repo in precedence order."""
return self.repos[0] if self.repos else None
@llnl.util.lang.memoized
def _all_package_names_set(self, include_virtuals):
return {name for repo in self.repos for name in repo.all_package_names(include_virtuals)}
@llnl.util.lang.memoized
def _all_package_names(self, include_virtuals):
"""Return all unique package names in all repositories."""
return sorted(self._all_package_names_set(include_virtuals), key=lambda n: n.lower())
all_pkgs = set()
for repo in self.repos:
for name in repo.all_package_names(include_virtuals):
all_pkgs.add(name)
return sorted(all_pkgs, key=lambda n: n.lower())
def all_package_names(self, include_virtuals=False):
return self._all_package_names(include_virtuals)
@@ -794,11 +794,7 @@ def patch_index(self):
@autospec
def providers_for(self, vpkg_spec):
providers = [
spec
for spec in self.provider_index.providers_for(vpkg_spec)
if spec.name in self._all_package_names_set(include_virtuals=False)
]
providers = self.provider_index.providers_for(vpkg_spec)
if not providers:
raise UnknownPackageError(vpkg_spec.fullname)
return providers
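A minimal sketch of the caching pattern above, with `functools.lru_cache` standing in for `llnl.util.lang.memoized`: the name set is computed once, then reused both for sorting and for O(1) membership checks when filtering providers:

```python
import functools

class RepoPathSketch:
    def __init__(self, repos):
        self.repos = repos  # each repo exposes all_package_names(include_virtuals)

    @functools.lru_cache(maxsize=None)  # stand-in for llnl.util.lang.memoized
    def _all_package_names_set(self, include_virtuals):
        return frozenset(
            n for repo in self.repos for n in repo.all_package_names(include_virtuals)
        )

    def all_package_names(self, include_virtuals=False):
        return sorted(self._all_package_names_set(include_virtuals), key=str.lower)

class DummyRepo:
    def all_package_names(self, include_virtuals):
        return ["Zlib", "abseil"] + (["mpi"] if include_virtuals else [])

paths = RepoPathSketch((DummyRepo(),))
assert paths.all_package_names() == ["abseil", "Zlib"]
assert "mpi" in paths.all_package_names(include_virtuals=True)
```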

View File

@@ -46,7 +46,6 @@
"signed": {"type": "boolean"},
"fetch": fetch_and_push,
"push": fetch_and_push,
"autopush": {"type": "boolean"},
**connection, # type: ignore
},
}

View File

@@ -15,7 +15,6 @@
import types
import typing
import warnings
from contextlib import contextmanager
from typing import Callable, Dict, Iterator, List, NamedTuple, Optional, Set, Tuple, Type, Union
import archspec.cpu
@@ -41,8 +40,6 @@
import spack.spec
import spack.store
import spack.util.crypto
import spack.util.elf
import spack.util.libc
import spack.util.path
import spack.util.timer
import spack.variant
@@ -122,17 +119,6 @@ def __str__(self):
return f"{self._name_.lower()}"
@contextmanager
def spec_with_name(spec, name):
"""Context manager to temporarily set the name of a spec"""
old_name = spec.name
spec.name = name
try:
yield spec
finally:
spec.name = old_name
class RequirementKind(enum.Enum):
"""Purpose / provenance of a requirement"""
@@ -281,36 +267,8 @@ def _create_counter(specs: List[spack.spec.Spec], tests: bool):
return NoDuplicatesCounter(specs, tests=tests)
def all_compilers_in_config(configuration):
return spack.compilers.all_compilers_from(configuration)
def all_libcs() -> Set[spack.spec.Spec]:
"""Return a set of all libc specs targeted by any configured compiler. If none, fall back to
libc determined from the current Python process if dynamically linked."""
libcs = {
c.default_libc for c in all_compilers_in_config(spack.config.CONFIG) if c.default_libc
}
if libcs:
return libcs
libc = spack.util.libc.libc_from_current_python_process()
return {libc} if libc else set()
def libc_is_compatible(lhs: spack.spec.Spec, rhs: spack.spec.Spec) -> List[spack.spec.Spec]:
return (
lhs.name == rhs.name
and lhs.external_path == rhs.external_path
and lhs.version >= rhs.version
)
def using_libc_compatibility() -> bool:
"""Returns True if we are currently using libc compatibility"""
return spack.platforms.host().name == "linux"
def all_compilers_in_config():
return spack.compilers.all_compilers()
def extend_flag_list(flag_list, new_flags):
@@ -596,23 +554,6 @@ def _spec_with_default_name(spec_str, name):
return spec
def _external_config_with_implicit_externals(configuration):
# Read packages.yaml and normalize it, so that it will not contain entries referring to
# virtual packages.
packages_yaml = _normalize_packages_yaml(configuration.get("packages"))
# Add externals for libc from compilers on Linux
if not using_libc_compatibility():
return packages_yaml
for compiler in all_compilers_in_config(configuration):
libc = compiler.default_libc
if libc:
entry = {"spec": f"{libc} %{compiler.spec}", "prefix": libc.external_path}
packages_yaml.setdefault(libc.name, {}).setdefault("externals", []).append(entry)
return packages_yaml
class ErrorHandler:
def __init__(self, model):
self.model = model
@@ -747,9 +688,8 @@ def on_model(model):
raise UnsatisfiableSpecError(msg)
#: Data class to collect information on a requirement
class RequirementRule(NamedTuple):
"""Data class to collect information on a requirement"""
pkg_name: str
policy: str
requirements: List["spack.spec.Spec"]
@@ -758,27 +698,6 @@ class RequirementRule(NamedTuple):
message: Optional[str]
class KnownCompiler(NamedTuple):
"""Data class to collect information on compilers"""
spec: "spack.spec.Spec"
os: str
target: str
available: bool
compiler_obj: Optional["spack.compiler.Compiler"]
def _key(self):
return self.spec, self.os, self.target
def __eq__(self, other: object):
if not isinstance(other, KnownCompiler):
return NotImplemented
return self._key() == other._key()
def __hash__(self):
return hash(self._key())
class PyclingoDriver:
def __init__(self, cores=True):
"""Driver for the Python clingo interface.
@@ -831,16 +750,10 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre
self.control.load(os.path.join(parent_dir, "heuristic.lp"))
if spack.config.CONFIG.get("concretizer:duplicates:strategy", "none") != "none":
self.control.load(os.path.join(parent_dir, "heuristic_separate.lp"))
self.control.load(os.path.join(parent_dir, "os_compatibility.lp"))
self.control.load(os.path.join(parent_dir, "display.lp"))
if not setup.concretize_everything:
self.control.load(os.path.join(parent_dir, "when_possible.lp"))
# Binary compatibility is based on libc on Linux, and on the os tag elsewhere
if using_libc_compatibility():
self.control.load(os.path.join(parent_dir, "libc_compatibility.lp"))
else:
self.control.load(os.path.join(parent_dir, "os_compatibility.lp"))
timer.stop("load")
# Grounding is the first step in the solve -- it turns our facts
@@ -1037,9 +950,6 @@ def __init__(self, tests: bool = False):
self.pkgs: Set[str] = set()
self.explicitly_required_namespaces: Dict[str, str] = {}
# list of unique libc specs targeted by compilers (or an educated guess if no compiler)
self.libcs: List[spack.spec.Spec] = []
def pkg_version_rules(self, pkg):
"""Output declared versions of a package.
@@ -1155,29 +1065,37 @@ def compiler_facts(self):
"""Facts about available compilers."""
self.gen.h2("Available compilers")
for compiler_id, compiler in enumerate(self.possible_compilers):
indexed_possible_compilers = list(enumerate(self.possible_compilers))
for compiler_id, compiler in indexed_possible_compilers:
self.gen.fact(fn.compiler_id(compiler_id))
self.gen.fact(fn.compiler_name(compiler_id, compiler.spec.name))
self.gen.fact(fn.compiler_version(compiler_id, compiler.spec.version))
if compiler.os:
self.gen.fact(fn.compiler_os(compiler_id, compiler.os))
if compiler.operating_system:
self.gen.fact(fn.compiler_os(compiler_id, compiler.operating_system))
if compiler.target == "any":
compiler.target = None
if compiler.target is not None:
self.gen.fact(fn.compiler_target(compiler_id, compiler.target))
if compiler.compiler_obj is not None:
c = compiler.compiler_obj
for flag_type, flags in c.flags.items():
for flag in flags:
self.gen.fact(fn.compiler_flag(compiler_id, flag_type, flag))
for flag_type, flags in compiler.flags.items():
for flag in flags:
self.gen.fact(fn.compiler_flag(compiler_id, flag_type, flag))
if compiler.available:
self.gen.fact(fn.compiler_available(compiler_id))
self.gen.fact(fn.compiler_weight(compiler_id, compiler_id))
self.gen.newline()
# Set compiler defaults, given a list of possible compilers
self.gen.h2("Default compiler preferences (CompilerID, Weight)")
ppk = spack.package_prefs.PackagePrefs("all", "compiler", all=False)
matches = sorted(indexed_possible_compilers, key=lambda x: ppk(x[1].spec))
for weight, (compiler_id, cspec) in enumerate(matches):
f = fn.compiler_weight(compiler_id, weight)
self.gen.fact(f)
def package_requirement_rules(self, pkg):
parser = RequirementParser(spack.config.CONFIG)
self.emit_facts_from_requirement_rules(parser.rules(pkg))
@@ -1391,39 +1309,34 @@ def condition(
Returns:
int: id of the condition created by this function
"""
name = required_spec.name or name
if not name:
raise ValueError(f"Must provide a name for anonymous condition: '{required_spec}'")
named_cond = required_spec.copy()
named_cond.name = named_cond.name or name
if not named_cond.name:
raise ValueError(f"Must provide a name for anonymous condition: '{named_cond}'")
with spec_with_name(required_spec, name):
# Check if we can emit the requirements before updating the condition ID counter.
# In this way, if a condition can't be emitted but the exception is handled in the caller,
# we won't emit partial facts.
# Check if we can emit the requirements before updating the condition ID counter.
# In this way, if a condition can't be emitted but the exception is handled in the
# caller, we won't emit partial facts.
condition_id = next(self._id_counter)
self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition(condition_id)))
self.gen.fact(fn.condition_reason(condition_id, msg))
condition_id = next(self._id_counter)
self.gen.fact(fn.pkg_fact(required_spec.name, fn.condition(condition_id)))
self.gen.fact(fn.condition_reason(condition_id, msg))
trigger_id = self._get_condition_id(
required_spec, cache=self._trigger_cache, body=True, transform=transform_required
)
self.gen.fact(
fn.pkg_fact(required_spec.name, fn.condition_trigger(condition_id, trigger_id))
)
if not imposed_spec:
return condition_id
effect_id = self._get_condition_id(
imposed_spec, cache=self._effect_cache, body=False, transform=transform_imposed
)
self.gen.fact(
fn.pkg_fact(required_spec.name, fn.condition_effect(condition_id, effect_id))
)
trigger_id = self._get_condition_id(
named_cond, cache=self._trigger_cache, body=True, transform=transform_required
)
self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition_trigger(condition_id, trigger_id)))
if not imposed_spec:
return condition_id
effect_id = self._get_condition_id(
imposed_spec, cache=self._effect_cache, body=False, transform=transform_imposed
)
self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition_effect(condition_id, effect_id)))
return condition_id
def impose(self, condition_id, imposed_spec, node=True, name=None, body=False):
imposed_constraints = self.spec_clauses(imposed_spec, body=body, required_from=name)
for pred in imposed_constraints:
@@ -1610,8 +1523,12 @@ def emit_facts_from_requirement_rules(self, rules: List[RequirementRule]):
requirement_weight += 1
def external_packages(self):
"""Facts on external packages, from packages.yaml and implicit externals."""
packages_yaml = _external_config_with_implicit_externals(spack.config.CONFIG)
"""Facts on external packages, as read from packages.yaml"""
# Read packages.yaml and normalize it, so that it
# will not contain entries referring to virtual
# packages.
packages_yaml = spack.config.get("packages")
packages_yaml = _normalize_packages_yaml(packages_yaml)
self.gen.h1("External packages")
for pkg_name, data in packages_yaml.items():
@@ -1662,7 +1579,6 @@ def external_imposition(input_spec, requirements):
self.gen.newline()
self.trigger_rules()
self.effect_rules()
def preferred_variants(self, pkg_name):
"""Facts on concretization preferences, as read from packages.yaml"""
@@ -1708,6 +1624,23 @@ def target_preferences(self):
for i, preferred in enumerate(package_targets):
self.gen.fact(fn.target_weight(str(preferred.architecture.target), i))
def flag_defaults(self):
self.gen.h2("Compiler flag defaults")
# types of flags that can be on specs
for flag in spack.spec.FlagMap.valid_compiler_flags():
self.gen.fact(fn.flag_type(flag))
self.gen.newline()
# flags from compilers.yaml
compilers = all_compilers_in_config()
for compiler in compilers:
for name, flags in compiler.flags.items():
for flag in flags:
self.gen.fact(
fn.compiler_version_flag(compiler.name, compiler.version, name, flag)
)
def spec_clauses(
self,
spec: spack.spec.Spec,
@@ -1884,16 +1817,6 @@ def _spec_clauses(
if dep.name == "gcc-runtime":
continue
# libc is also solved again by clingo, but in this case the compatibility
# is not encoded in the parent node - so we need to emit explicit facts
if "libc" in dspec.virtuals:
for libc in self.libcs:
if libc_is_compatible(libc, dep):
clauses.append(
fn.attr("compatible_libc", spec.name, libc.name, libc.version)
)
continue
# We know dependencies are real for concrete specs. For abstract
# specs they just mean the dep is somehow in the DAG.
for dtype in dt.ALL_FLAGS:
@@ -2123,16 +2046,9 @@ def target_defaults(self, specs):
candidate_targets.append(ancestor)
best_targets = {uarch.family.name}
for compiler_id, known_compiler in enumerate(self.possible_compilers):
if not known_compiler.available:
continue
compiler = known_compiler.compiler_obj
for compiler_id, compiler in enumerate(self.possible_compilers):
# Stub support for cross-compilation, to be expanded later
if known_compiler.target is not None and compiler.target not in (
str(uarch.family),
"any",
):
if compiler.target is not None and compiler.target != str(uarch.family):
self.gen.fact(fn.compiler_supports_target(compiler_id, compiler.target))
self.gen.newline()
continue
@@ -2188,6 +2104,58 @@ def virtual_providers(self):
self.gen.fact(fn.virtual(vspec))
self.gen.newline()
def generate_possible_compilers(self, specs):
compilers = all_compilers_in_config()
# Search for compilers that differ only by aspects that are
# not selectable by users via the spec syntax
seen, sanitized_list = set(), []
for compiler in compilers:
key = compiler.spec, compiler.operating_system, compiler.target
if key in seen:
warnings.warn(
f"duplicate found for {compiler.spec} on "
f"{compiler.operating_system}/{compiler.target}. "
f"Edit your compilers.yaml configuration to remove it."
)
continue
sanitized_list.append(compiler)
seen.add(key)
cspecs = set([c.spec for c in compilers])
# add compiler specs from the input line to possibilities if we
# don't require compilers to exist.
strict = spack.concretize.Concretizer().check_for_compiler_existence
for s in traverse.traverse_nodes(specs):
# we don't need to validate compilers for already-built specs
if s.concrete or not s.compiler:
continue
version = s.compiler.versions.concrete
if not version or any(c.satisfies(s.compiler) for c in cspecs):
continue
# Error when a compiler is not found and strict mode is enabled
if strict:
raise spack.concretize.UnavailableCompilerVersionError(s.compiler)
# Make up a compiler matching the input spec. This is for bootstrapping.
compiler_cls = spack.compilers.class_for_compiler_name(s.compiler.name)
compilers.append(
compiler_cls(s.compiler, operating_system=None, target=None, paths=[None] * 4)
)
self.gen.fact(fn.allow_compiler(s.compiler.name, version))
return list(
sorted(
compilers,
key=lambda compiler: (compiler.spec.name, compiler.spec.version),
reverse=True,
)
)
def define_version_constraints(self):
"""Define what version_satisfies(...) means in ASP logic."""
for pkg_name, versions in sorted(self.version_constraints):
@@ -2349,7 +2317,6 @@ def setup(
node_counter = _create_counter(specs, tests=self.tests)
self.possible_virtuals = node_counter.possible_virtuals()
self.pkgs = node_counter.possible_dependencies()
self.libcs = sorted(all_libcs()) # type: ignore[type-var]
# Fail if we already know an unreachable node is requested
for spec in specs:
@@ -2359,17 +2326,11 @@ def setup(
if missing_deps:
raise spack.spec.InvalidDependencyError(spec.name, missing_deps)
for node in traverse.traverse_nodes(specs):
for node in spack.traverse.traverse_nodes(specs):
if node.namespace is not None:
self.explicitly_required_namespaces[node.name] = node.namespace
self.gen = ProblemInstanceBuilder()
compiler_parser = CompilerParser(configuration=spack.config.CONFIG).with_input_specs(specs)
if using_libc_compatibility():
for libc in self.libcs:
self.gen.fact(fn.allowed_libc(libc.name, libc.version))
if not allow_deprecated:
self.gen.fact(fn.deprecated_versions_not_allowed())
@@ -2388,17 +2349,17 @@ def setup(
)
specs = tuple(specs) # ensure compatible types to add
# get possible compilers
self.possible_compilers = self.generate_possible_compilers(specs)
self.gen.h1("Reusable concrete specs")
self.define_concrete_input_specs(specs, self.pkgs)
if reuse:
self.gen.fact(fn.optimize_for_reuse())
for reusable_spec in reuse:
compiler_parser.add_compiler_from_concrete_spec(reusable_spec)
self.register_concrete_spec(reusable_spec, self.pkgs)
self.concrete_specs()
self.possible_compilers = compiler_parser.possible_compilers()
self.gen.h1("Generic statements on possible packages")
node_counter.possible_packages_facts(self.gen, fn)
@@ -2499,42 +2460,15 @@ def visit(node):
def define_runtime_constraints(self):
"""Define the constraints to be imposed on the runtimes"""
recorder = RuntimePropertyRecorder(self)
for compiler in self.possible_compilers:
compiler_with_different_cls_names = {
"oneapi": "intel-oneapi-compilers",
"clang": "llvm",
}
compiler_cls_name = compiler_with_different_cls_names.get(
compiler.spec.name, compiler.spec.name
)
compiler_with_different_cls_names = {"oneapi": "intel-oneapi-compilers"}
compiler_cls_name = compiler_with_different_cls_names.get(compiler.name, compiler.name)
try:
compiler_cls = spack.repo.PATH.get_pkg_class(compiler_cls_name)
if hasattr(compiler_cls, "runtime_constraints"):
compiler_cls.runtime_constraints(spec=compiler.spec, pkg=recorder)
except spack.repo.UnknownPackageError:
pass
# Inject libc from available compilers, on Linux
if not compiler.available:
continue
current_libc = compiler.compiler_obj.default_libc
# If this is a compiler yet to be built (config:install_missing_compilers:true)
# infer libc from the Python process
if not current_libc and compiler.compiler_obj.cc is None:
current_libc = spack.util.libc.libc_from_current_python_process()
if using_libc_compatibility() and current_libc:
recorder("*").depends_on(
"libc", when=f"%{compiler.spec}", type="link", description="Add libc"
)
recorder("*").depends_on(
str(current_libc),
when=f"%{compiler.spec}",
type="link",
description="Add libc",
)
if hasattr(compiler_cls, "runtime_constraints"):
compiler_cls.runtime_constraints(spec=compiler.spec, pkg=recorder)
recorder.consume_facts()
@@ -2906,97 +2840,6 @@ def reject_requirement_constraint(
return False
class CompilerParser:
"""Parses configuration files, and builds a list of possible compilers for the solve."""
def __init__(self, configuration) -> None:
self.compilers: Set[KnownCompiler] = set()
for c in all_compilers_in_config(configuration):
if using_libc_compatibility() and not c.default_libc:
warnings.warn(
f"cannot detect libc from {c.spec}. The compiler will not be used "
f"during concretization."
)
continue
target = c.target if c.target != "any" else None
candidate = KnownCompiler(
spec=c.spec, os=c.operating_system, target=target, available=True, compiler_obj=c
)
if candidate in self.compilers:
warnings.warn(
f"duplicate found for {c.spec} on {c.operating_system}/{c.target}. "
f"Edit your compilers.yaml configuration to remove it."
)
continue
self.compilers.add(candidate)
def with_input_specs(self, input_specs: List["spack.spec.Spec"]) -> "CompilerParser":
"""Accounts for input specs when building the list of possible compilers.
Args:
input_specs: specs to be concretized
"""
strict = spack.concretize.Concretizer().check_for_compiler_existence
default_os = str(spack.platforms.host().default_os)
default_target = str(archspec.cpu.host().family)
for s in traverse.traverse_nodes(input_specs):
# we don't need to validate compilers for already-built specs
if s.concrete or not s.compiler:
continue
version = s.compiler.versions.concrete
if not version or any(item.spec.satisfies(s.compiler) for item in self.compilers):
continue
# Error when a compiler is not found and strict mode is enabled
if strict:
raise spack.concretize.UnavailableCompilerVersionError(s.compiler)
# Make up a compiler matching the input spec. This is for bootstrapping.
compiler_cls = spack.compilers.class_for_compiler_name(s.compiler.name)
compiler_obj = compiler_cls(
s.compiler, operating_system=default_os, target=default_target, paths=[None] * 4
)
self.compilers.add(
KnownCompiler(
spec=s.compiler,
os=default_os,
target=default_target,
available=True,
compiler_obj=compiler_obj,
)
)
return self
def add_compiler_from_concrete_spec(self, spec: "spack.spec.Spec") -> None:
"""Account for compilers that are coming from concrete specs, through reuse.
Args:
spec: concrete spec to be reused
"""
assert spec.concrete, "the spec argument must be concrete"
candidate = KnownCompiler(
spec=spec.compiler,
os=str(spec.architecture.os),
target=str(spec.architecture.target.microarchitecture.family),
available=False,
compiler_obj=None,
)
self.compilers.add(candidate)
def possible_compilers(self) -> List[KnownCompiler]:
# Here we have to sort twice: first by name and ascending version
result = sorted(self.compilers, key=lambda x: (x.spec.name, x.spec.version), reverse=True)
# Then stable sort to prefer available compilers and account for preferences
ppk = spack.package_prefs.PackagePrefs("all", "compiler", all=False)
result.sort(key=lambda x: (not x.available, ppk(x.spec)))
return result
class RuntimePropertyRecorder:
"""An object of this class is injected in callbacks to compilers, to let them declare
properties of the runtimes they support and of the runtimes they provide, and to add
@@ -3283,8 +3126,12 @@ def no_flags(self, node, flag_type):
self._specs[node].compiler_flags[flag_type] = []
def external_spec_selected(self, node, idx):
"""This means that the external spec and index idx has been selected for this package."""
packages_yaml = _external_config_with_implicit_externals(spack.config.CONFIG)
"""This means that the external spec and index idx
has been selected for this package.
"""
packages_yaml = spack.config.get("packages")
packages_yaml = _normalize_packages_yaml(packages_yaml)
spec_info = packages_yaml[node.pkg]["externals"][int(idx)]
self._specs[node].external_path = spec_info.get("prefix", None)
self._specs[node].external_modules = spack.spec.Spec._format_module_list(
@@ -3332,9 +3179,7 @@ def reorder_flags(self):
imposes order afterwards.
"""
# reverse compilers so we get highest priority compilers that share a spec
compilers = dict(
(c.spec, c) for c in reversed(all_compilers_in_config(spack.config.CONFIG))
)
compilers = dict((c.spec, c) for c in reversed(all_compilers_in_config()))
cmd_specs = dict((s.name, s) for spec in self._command_line_specs for s in spec.traverse())
for spec in self._specs.values():
@@ -3526,7 +3371,7 @@ def _is_reusable(spec: spack.spec.Spec, packages, local: bool) -> bool:
return False
if not spec.external:
return _has_runtime_dependencies(spec)
return True
# Cray external manifest externals are always reusable
if local:
@@ -3551,19 +3396,6 @@ def _is_reusable(spec: spack.spec.Spec, packages, local: bool) -> bool:
return False
def _has_runtime_dependencies(spec: spack.spec.Spec) -> bool:
if not WITH_RUNTIME:
return True
if spec.compiler.name == "gcc" and not spec.dependencies("gcc-runtime"):
return False
if spec.compiler.name == "oneapi" and not spec.dependencies("intel-oneapi-runtime"):
return False
return True
class Solver:
"""This is the main external interface class for solving.
@@ -3582,7 +3414,7 @@ def __init__(self):
# These properties are settable via spack configuration, and overridable
# by setting them directly as properties.
self.reuse = spack.config.get("concretizer:reuse", True)
self.reuse = spack.config.get("concretizer:reuse", False)
@staticmethod
def _check_input_and_extract_concrete_specs(specs):
@@ -3599,7 +3431,7 @@ def _check_input_and_extract_concrete_specs(specs):
def _reusable_specs(self, specs):
reusable_specs = []
if self.reuse:
packages = _external_config_with_implicit_externals(spack.config.CONFIG)
packages = spack.config.get("packages")
# Specs from the local Database
with spack.store.STORE.db.read_transaction():
reusable_specs.extend(
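The `spec_with_name` helper added in this file replaces the copy-based renaming that `condition()` used before. A standalone sketch of the pattern (the `FakeSpec` stand-in is illustration only; the context manager body matches the hunk above):

```python
from contextlib import contextmanager

class FakeSpec:  # stand-in for spack.spec.Spec
    def __init__(self, name=None):
        self.name = name

@contextmanager
def spec_with_name(spec, name):
    """Temporarily set the name of a spec, restoring it afterwards."""
    old_name = spec.name
    spec.name = name
    try:
        yield spec
    finally:
        spec.name = old_name

anonymous = FakeSpec()                # e.g. an anonymous condition like "@1.0"
with spec_with_name(anonymous, "mypkg"):
    assert anonymous.name == "mypkg"  # facts get emitted under this name
assert anonymous.name is None         # the original (missing) name is restored
```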

View File

@@ -80,7 +80,6 @@ unification_set(SetID, VirtualNode)
#defined multiple_unification_sets/1.
#defined runtime/1.
%----
% Rules to break symmetry and speed-up searches
@@ -127,12 +126,10 @@ trigger_node(TriggerID, Node, Node) :-
trigger_condition_holds(TriggerID, Node),
literal(TriggerID).
% Since we trigger the existence of literal nodes from a condition, we need to construct the condition_set/2
% Since we trigger the existence of literal nodes from a condition, we need to construct
% the condition_set/2 manually below
mentioned_in_literal(Root, Mentioned) :- mentioned_in_literal(TriggerID, Root, Mentioned), solve_literal(TriggerID).
condition_set(node(min_dupe_id, Root), node(min_dupe_id, Root)) :- mentioned_in_literal(Root, Root).
1 { condition_set(node(min_dupe_id, Root), node(0..Y-1, Mentioned)) : max_dupes(Mentioned, Y) } 1 :-
mentioned_in_literal(Root, Mentioned), Mentioned != Root.
condition_set(node(min_dupe_id, Root), node(min_dupe_id, Mentioned)) :- mentioned_in_literal(Root, Mentioned).
% Discriminate between "roots" that have been explicitly requested, and roots that are deduced from "virtual roots"
explicitly_requested_root(node(min_dupe_id, Package)) :-
@@ -140,20 +137,6 @@ explicitly_requested_root(node(min_dupe_id, Package)) :-
trigger_and_effect(Package, TriggerID, EffectID),
imposed_constraint(EffectID, "root", Package).
% Keep track of which nodes are associated with which root DAG
associated_with_root(RootNode, RootNode) :- attr("root", RootNode).
associated_with_root(RootNode, ChildNode) :-
depends_on(ParentNode, ChildNode),
associated_with_root(RootNode, ParentNode).
% We cannot have a node in the root condition set, that is not associated with that root
:- attr("root", RootNode),
condition_set(RootNode, node(X, Package)),
not virtual(Package),
not associated_with_root(RootNode, node(X, Package)).
#defined concretize_everything/0.
#defined literal/1.
@@ -539,12 +522,6 @@ attr("virtual_on_edge", PackageNode, ProviderNode, Virtual)
provider(ProviderNode, node(_, Virtual)),
not external(PackageNode).
% If a virtual node is in the answer set, it must be either a virtual root,
% or used somewhere
:- attr("virtual_node", node(_, Virtual)),
not attr("virtual_on_incoming_edges", _, Virtual),
not attr("virtual_root", node(_, Virtual)).
attr("virtual_on_incoming_edges", ProviderNode, Virtual)
:- attr("virtual_on_edge", _, ProviderNode, Virtual).
@@ -907,8 +884,12 @@ error(100, "{0} variant '{1}' cannot have values '{2}' and '{3}' as they come fr
Set1 < Set2, % see[1]
build(node(ID, Package)).
:- attr("variant_set", node(ID, Package), Variant, Value),
not attr("variant_value", node(ID, Package), Variant, Value).
% variant_set is an explicitly set variant value. If it's not 'set',
% we revert to the default value. If it is set, we force the set value
attr("variant_value", PackageNode, Variant, Value)
:- attr("node", PackageNode),
node_has_variant(PackageNode, Variant),
attr("variant_set", PackageNode, Variant, Value).
% The rules below allow us to prefer default values for variants
% whenever possible. If a variant is set in a spec, or if it is
@@ -993,13 +974,14 @@ pkg_fact(Package, variant_single_value("dev_path"))
% Platform semantics
%-----------------------------------------------------------------------------
% NOTE: Currently we have a single allowed platform per DAG, therefore there is no
% need to have additional optimization criteria. If we ever add cross-platform dags,
% this needs to be changed.
:- 2 { allowed_platform(Platform) }, internal_error("More than one allowed platform detected").
% if no platform is set, fall back to the default
error(100, "platform '{0}' is not allowed on the current host", Platform)
:- attr("node_platform", _, Platform), not allowed_platform(Platform).
1 { attr("node_platform", PackageNode, Platform) : allowed_platform(Platform) } 1
:- attr("node", PackageNode).
attr("node_platform", PackageNode, Platform)
:- attr("node", PackageNode),
not attr("node_platform_set", PackageNode),
node_platform_default(Platform).
% setting platform on a node is a hard constraint
attr("node_platform", PackageNode, Platform)
@@ -1023,6 +1005,14 @@ error(100, "Cannot select '{0} os={1}' (operating system '{1}' is not buildable)
attr("node_os", node(X, Package), OS),
not buildable_os(OS).
% can't have dependencies on incompatible OS's
error(100, "{0} and dependency {1} have incompatible operating systems 'os={2}' and 'os={3}'", Package, Dependency, PackageNodeOS, DependencyOS)
:- depends_on(node(X, Package), node(Y, Dependency)),
attr("node_os", node(X, Package), PackageNodeOS),
attr("node_os", node(Y, Dependency), DependencyOS),
not os_compatible(PackageNodeOS, DependencyOS),
build(node(X, Package)).
% give OS choice weights according to os declarations
node_os_weight(PackageNode, Weight)
:- attr("node", PackageNode),
@@ -1035,6 +1025,13 @@ os_compatible(OS, OS) :- os(OS).
% Transitive compatibility among operating systems
os_compatible(OS1, OS3) :- os_compatible(OS1, OS2), os_compatible(OS2, OS3).
% We can select only operating systems compatible with the ones
% for which we can build software. We need a cardinality constraint
% since we might have more than one "buildable_os(OS)" fact.
:- not 1 { os_compatible(CurrentOS, ReusedOS) : buildable_os(CurrentOS) },
attr("node_os", Package, ReusedOS),
internal_error("Reused OS incompatible with build OS").
% If an OS is set explicitly respect the value
attr("node_os", PackageNode, OS) :- attr("node_os_set", PackageNode, OS), attr("node", PackageNode).
@@ -1082,9 +1079,6 @@ error(100, "{0} compiler '{2}@{3}' incompatible with 'target={1}'", Package, Tar
compiler_version(CompilerID, Version),
build(node(X, Package)).
#defined compiler_supports_target/2.
#defined compiler_available/1.
% if a target is set explicitly, respect it
attr("node_target", PackageNode, Target)
:- attr("node", PackageNode), attr("node_target_set", PackageNode, Target).
@@ -1115,7 +1109,7 @@ error(100, "'{0} target={1}' is not compatible with this machine", Package, Targ
% Compiler semantics
%-----------------------------------------------------------------------------
% There must be only one compiler set per built node.
{ node_compiler(PackageNode, CompilerID) : compiler_id(CompilerID), compiler_available(CompilerID) } :-
{ node_compiler(PackageNode, CompilerID) : compiler_id(CompilerID) } :-
attr("node", PackageNode),
build(PackageNode).
@@ -1132,7 +1126,6 @@ attr("node_compiler_version", PackageNode, CompilerName, CompilerVersion)
:- node_compiler(PackageNode, CompilerID),
compiler_name(CompilerID, CompilerName),
compiler_version(CompilerID, CompilerVersion),
compiler_available(CompilerID),
build(PackageNode).
attr("node_compiler", PackageNode, CompilerName)
@@ -1195,6 +1188,7 @@ error(100, "{0} compiler '%{1}@{2}' incompatible with 'os={3}'", Package, Compil
compiler_version(CompilerID, Version),
compiler_os(CompilerID, CompilerOS),
not os_compatible(CompilerOS, OS),
not allow_compiler(Compiler, Version),
build(node(X, Package)).
% If a package and one of its dependencies don't have the
@@ -1215,6 +1209,7 @@ compiler_mismatch_required(PackageNode, DependencyNode)
not compiler_match(PackageNode, DependencyNode).
#defined compiler_os/3.
#defined allow_compiler/2.
% compilers weighted by preference according to packages.yaml
node_compiler_weight(node(ID, Package), Weight)
@@ -1499,20 +1494,18 @@ opt_criterion(40, "compiler mismatches that are not from CLI").
#minimize{ 0@240: #true }.
#minimize{ 0@40: #true }.
#minimize{
1@40+Priority,PackageNode,node(ID, Dependency)
: compiler_mismatch(PackageNode, node(ID, Dependency)),
build_priority(node(ID, Dependency), Priority),
not runtime(Dependency)
1@40+Priority,PackageNode,DependencyNode
: compiler_mismatch(PackageNode, DependencyNode),
build_priority(PackageNode, Priority)
}.
opt_criterion(39, "compiler mismatches that are not from CLI").
#minimize{ 0@239: #true }.
#minimize{ 0@39: #true }.
#minimize{
1@39+Priority,PackageNode,node(ID, Dependency)
: compiler_mismatch_required(PackageNode, node(ID, Dependency)),
build_priority(node(ID, Dependency), Priority),
not runtime(Dependency)
1@39+Priority,PackageNode,DependencyNode
: compiler_mismatch_required(PackageNode, DependencyNode),
build_priority(PackageNode, Priority)
}.
opt_criterion(30, "non-preferred OS's").
@@ -1529,10 +1522,9 @@ opt_criterion(25, "version badness").
#minimize{ 0@225: #true }.
#minimize{ 0@25: #true }.
#minimize{
Weight@25+Priority,node(X, Package)
: version_weight(node(X, Package), Weight),
build_priority(node(X, Package), Priority),
not runtime(Package)
Weight@25+Priority,PackageNode
: version_weight(PackageNode, Weight),
build_priority(PackageNode, Priority)
}.
% Try to use all the default values of variants
@@ -1551,10 +1543,9 @@ opt_criterion(15, "non-preferred compilers").
#minimize{ 0@215: #true }.
#minimize{ 0@15: #true }.
#minimize{
Weight@15+Priority,node(X, Package)
: node_compiler_weight(node(X, Package), Weight),
build_priority(node(X, Package), Priority),
not runtime(Package)
Weight@15+Priority,PackageNode
: node_compiler_weight(PackageNode, Weight),
build_priority(PackageNode, Priority)
}.
% Minimize the number of mismatches for targets in the DAG, try
@@ -1563,55 +1554,18 @@ opt_criterion(10, "target mismatches").
#minimize{ 0@210: #true }.
#minimize{ 0@10: #true }.
#minimize{
1@10+Priority,PackageNode,node(ID, Dependency)
: node_target_mismatch(PackageNode, node(ID, Dependency)),
build_priority(node(ID, Dependency), Priority),
not runtime(Dependency)
1@10+Priority,PackageNode,Dependency
: node_target_mismatch(PackageNode, Dependency),
build_priority(PackageNode, Priority)
}.
opt_criterion(5, "non-preferred targets").
#minimize{ 0@205: #true }.
#minimize{ 0@5: #true }.
#minimize{
Weight@5+Priority,node(X, Package)
: node_target_weight(node(X, Package), Weight),
build_priority(node(X, Package), Priority),
not runtime(Package)
}.
% Minimize the number of compiler mismatches for runtimes
opt_criterion(4, "compiler mismatches (runtimes)").
#minimize{ 0@204: #true }.
#minimize{ 0@4: #true }.
#minimize{
1@4,PackageNode,node(ID, Dependency)
: compiler_mismatch(PackageNode, node(ID, Dependency)), runtime(Dependency)
}.
#minimize{
1@4,PackageNode,node(ID, Dependency)
: compiler_mismatch_required(PackageNode, node(ID, Dependency)), runtime(Dependency)
}.
% Choose more recent versions for runtimes
opt_criterion(3, "version badness (runtimes)").
#minimize{ 0@203: #true }.
#minimize{ 0@3: #true }.
#minimize{
Weight@3,node(X, Package)
: version_weight(node(X, Package), Weight),
runtime(Package)
}.
% Choose best target for runtimes
opt_criterion(2, "non-preferred targets (runtimes)").
#minimize{ 0@202: #true }.
#minimize{ 0@2: #true }.
#minimize{
Weight@2,node(X, Package)
: node_target_weight(node(X, Package), Weight),
runtime(Package)
Weight@5+Priority,PackageNode
: node_target_weight(PackageNode, Weight),
build_priority(PackageNode, Priority)
}.
% Choose more recent versions for nodes

View File

@@ -1,37 +0,0 @@
% Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
% Spack Project Developers. See the top-level COPYRIGHT file for details.
%
% SPDX-License-Identifier: (Apache-2.0 OR MIT)
%=============================================================================
% Libc compatibility rules for reusing solves.
%
% These rules are used on Linux
%=============================================================================
% A package cannot be reused if the libc is not compatible with it
:- provider(node(X, LibcPackage), node(0, "libc")),
attr("version", node(X, LibcPackage), LibcVersion),
attr("hash", node(R, ReusedPackage), Hash),
% Libc packages can be reused without the "compatible_libc" attribute
ReusedPackage != LibcPackage,
not attr("compatible_libc", node(R, ReusedPackage), LibcPackage, LibcVersion).
% Check whether the DAG has any built package
has_built_packages() :- build(X), not external(X).
% A libc is needed in the DAG
:- has_built_packages(), not provider(_, node(0, "libc")).
% The libc must be chosen among available ones
:- has_built_packages(),
provider(node(X, LibcPackage), node(0, "libc")),
attr("node", node(X, LibcPackage)),
attr("version", node(X, LibcPackage), LibcVersion),
not allowed_libc(LibcPackage, LibcVersion).
% A built node must depend on libc
:- build(PackageNode),
provider(LibcNode, node(0, "libc")),
not external(PackageNode),
not depends_on(PackageNode, LibcNode).

View File

@@ -7,26 +7,21 @@
% OS compatibility rules for reusing solves.
% os_compatible(RecentOS, OlderOS)
% OlderOS binaries can be used on RecentOS
%
% These rules are used on every platform, but Linux
%=============================================================================
% macOS
os_compatible("sonoma", "ventura").
os_compatible("ventura", "monterey").
os_compatible("monterey", "bigsur").
os_compatible("bigsur", "catalina").
% can't have dependencies on incompatible OS's
error(100, "{0} and dependency {1} have incompatible operating systems 'os={2}' and 'os={3}'", Package, Dependency, PackageNodeOS, DependencyOS)
:- depends_on(node(X, Package), node(Y, Dependency)),
attr("node_os", node(X, Package), PackageNodeOS),
attr("node_os", node(Y, Dependency), DependencyOS),
not os_compatible(PackageNodeOS, DependencyOS),
build(node(X, Package)).
% Ubuntu
os_compatible("ubuntu22.04", "ubuntu21.10").
os_compatible("ubuntu21.10", "ubuntu21.04").
os_compatible("ubuntu21.04", "ubuntu20.10").
os_compatible("ubuntu20.10", "ubuntu20.04").
os_compatible("ubuntu20.04", "ubuntu19.10").
os_compatible("ubuntu19.10", "ubuntu19.04").
os_compatible("ubuntu19.04", "ubuntu18.10").
os_compatible("ubuntu18.10", "ubuntu18.04").
% We can select only operating systems compatible with the ones
% for which we can build software. We need a cardinality constraint
% since we might have more than one "buildable_os(OS)" fact.
:- not 1 { os_compatible(CurrentOS, ReusedOS) : buildable_os(CurrentOS) },
attr("node_os", Package, ReusedOS).
%EL8
os_compatible("rhel8", "rocky8").

View File

@@ -51,6 +51,7 @@
import collections
import collections.abc
import enum
import io
import itertools
import os
import pathlib
@@ -58,7 +59,7 @@
import re
import socket
import warnings
from typing import Any, Callable, Dict, List, Match, Optional, Set, Tuple, Union
from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union
import llnl.path
import llnl.string
@@ -120,44 +121,36 @@
"SpecDeprecatedError",
]
SPEC_FORMAT_RE = re.compile(
r"(?:" # this is one big or, with matches ordered by priority
# OPTION 1: escaped character (needs to be first to catch opening \{)
# Note that an unterminated \ at the end of a string is left untouched
r"(?:\\(.))"
r"|" # or
# OPTION 2: an actual format string
r"{" # non-escaped open brace {
r"([%@/]|arch=)?" # optional sigil (to print sigil in color)
r"(?:\^([^}\.]+)\.)?" # optional ^depname. (to get attr from dependency)
# after the sigil or depname, we can have a hash expression or another attribute
r"(?:" # one of
r"(hash\b)(?:\:(\d+))?" # hash followed by :<optional length>
r"|" # or
r"([^}]*)" # another attribute to format
r")" # end one of
r"(})?" # finish format string with non-escaped close brace }, or missing if not present
r"|"
# OPTION 3: mismatched close brace (option 2 would consume a matched open brace)
r"(})" # brace
r")",
re.IGNORECASE,
)
#: Valid pattern for an identifier in Spack
IDENTIFIER_RE = r"\w[\w-]*"
# Coloring of specs when using color output. Fields are printed with
# different colors to enhance readability.
# See llnl.util.tty.color for descriptions of the color codes.
COMPILER_COLOR = "@g" #: color for highlighting compilers
VERSION_COLOR = "@c" #: color for highlighting versions
ARCHITECTURE_COLOR = "@m" #: color for highlighting architectures
VARIANT_COLOR = "@B" #: color for highlighting variants
ENABLED_VARIANT_COLOR = "@B" #: color for highlighting enabled variants
DISABLED_VARIANT_COLOR = "r" #: color for highlighting disabled variants
DEPENDENCY_COLOR = "@." #: color for highlighting dependencies
HASH_COLOR = "@K" #: color for highlighting package hashes
#: This map determines the coloring of specs when using color output.
#: We make the fields different colors to enhance readability.
#: See llnl.util.tty.color for descriptions of the color codes.
COLOR_FORMATS = {
"%": COMPILER_COLOR,
"@": VERSION_COLOR,
"=": ARCHITECTURE_COLOR,
"+": ENABLED_VARIANT_COLOR,
"~": DISABLED_VARIANT_COLOR,
"^": DEPENDENCY_COLOR,
"#": HASH_COLOR,
}
#: Regex used for splitting by spec field separators.
#: These need to be escaped to avoid metacharacters in
#: ``COLOR_FORMATS.keys()``.
_SEPARATORS = "[\\%s]" % "\\".join(COLOR_FORMATS.keys())
#: Default format for Spec.format(). This format can be round-tripped, so that:
#: Spec(Spec("string").format()) == Spec("string)"
DEFAULT_FORMAT = (
@@ -200,7 +193,26 @@ class InstallStatus(enum.Enum):
missing = "@r{[-]} "
# regexes used in spec formatting
def colorize_spec(spec):
"""Returns a spec colorized according to the colors specified in
COLOR_FORMATS."""
class insert_color:
def __init__(self):
self.last = None
def __call__(self, match):
# ignore compiler versions (color same as compiler)
sep = match.group(0)
if self.last == "%" and sep == "@":
return clr.cescape(sep)
self.last = sep
return "%s%s" % (COLOR_FORMATS[sep], clr.cescape(sep))
return clr.colorize(re.sub(_SEPARATORS, insert_color(), str(spec)) + "@.")
OLD_STYLE_FMT_RE = re.compile(r"\${[A-Z]+}")
@@ -899,9 +911,6 @@ def flags():
yield flags
def __str__(self):
if not self:
return ""
sorted_items = sorted((k, v) for k, v in self.items() if v)
result = ""
@@ -4283,7 +4292,10 @@ def deps():
yield deps
def format(self, format_string: str = DEFAULT_FORMAT, color: Optional[bool] = False) -> str:
def colorized(self):
return colorize_spec(self)
def format(self, format_string=DEFAULT_FORMAT, **kwargs):
r"""Prints out particular pieces of a spec, depending on what is
in the format string.
@@ -4346,65 +4358,79 @@ def format(self, format_string: str = DEFAULT_FORMAT, color: Optional[bool] = Fa
literal ``\`` character.
Args:
format_string: string containing the format to be expanded
color: True for colorized result; False for no color; None for auto color.
format_string (str): string containing the format to be expanded
Keyword Args:
color (bool): True if returned string is colored
transform (dict): maps full-string formats to a callable \
that accepts a string and returns another one
"""
ensure_modern_format_string(format_string)
color = kwargs.get("color", False)
transform = kwargs.get("transform", {})
def safe_color(sigil: str, string: str, color_fmt: Optional[str]) -> str:
# avoid colorizing if there is no color or the string is empty
if (color is False) or not color_fmt or not string:
return sigil + string
# escape and add the sigil here to avoid multiple concatenations
if sigil == "@":
sigil = "@@"
return clr.colorize(f"{color_fmt}{sigil}{clr.cescape(string)}@.", color=color)
out = io.StringIO()
def format_attribute(match_object: Match) -> str:
(esc, sig, dep, hash, hash_len, attribute, close_brace, unmatched_close_brace) = (
match_object.groups()
)
if esc:
return esc
elif unmatched_close_brace:
raise SpecFormatStringError(f"Unmatched close brace: '{format_string}'")
elif not close_brace:
raise SpecFormatStringError(f"Missing close brace: '{format_string}'")
def write(s, c=None):
f = clr.cescape(s)
if c is not None:
f = COLOR_FORMATS[c] + f + "@."
clr.cwrite(f, stream=out, color=color)
current = self if dep is None else self[dep]
def write_attribute(spec, attribute, color):
attribute = attribute.lower()
# Hash attributes can return early.
# NOTE: we currently treat abstract_hash like an attribute and ignore
# any length associated with it. We may want to change that.
if hash:
if sig and sig != "/":
raise SpecFormatSigilError(sig, "DAG hashes", hash)
try:
length = int(hash_len) if hash_len else None
except ValueError:
raise SpecFormatStringError(f"Invalid hash length: '{hash_len}'")
return safe_color(sig or "", current.dag_hash(length), HASH_COLOR)
sig = ""
if attribute.startswith(("@", "%", "/")):
# color sigils that are inside braces
sig = attribute[0]
attribute = attribute[1:]
elif attribute.startswith("arch="):
sig = " arch=" # include space as separator
attribute = attribute[5:]
current = spec
if attribute.startswith("^"):
attribute = attribute[1:]
dep, attribute = attribute.split(".", 1)
current = self[dep]
if attribute == "":
raise SpecFormatStringError("Format string attributes must be non-empty")
attribute = attribute.lower()
parts = attribute.split(".")
assert parts
# check that the sigil is valid for the attribute.
if not sig:
sig = ""
elif sig == "@" and parts[-1] not in ("versions", "version"):
if sig == "@" and parts[-1] not in ("versions", "version"):
raise SpecFormatSigilError(sig, "versions", attribute)
elif sig == "%" and attribute not in ("compiler", "compiler.name"):
raise SpecFormatSigilError(sig, "compilers", attribute)
elif sig == "/" and attribute != "abstract_hash":
elif sig == "/" and not re.match(r"(abstract_)?hash(:\d+)?$", attribute):
raise SpecFormatSigilError(sig, "DAG hashes", attribute)
elif sig == "arch=":
if attribute not in ("architecture", "arch"):
raise SpecFormatSigilError(sig, "the architecture", attribute)
sig = " arch=" # include space as separator
elif sig == " arch=" and attribute not in ("architecture", "arch"):
raise SpecFormatSigilError(sig, "the architecture", attribute)
# find the morph function for our attribute
morph = transform.get(attribute, lambda s, x: x)
# Special cases for non-spec attributes and hashes.
# These must be the only non-dep component of the format attribute
if attribute == "spack_root":
write(morph(spec, spack.paths.spack_root))
return
elif attribute == "spack_install":
write(morph(spec, spack.store.STORE.layout.root))
return
elif re.match(r"hash(:\d)?", attribute):
col = "#"
if ":" in attribute:
_, length = attribute.split(":")
write(sig + morph(spec, current.dag_hash(int(length))), col)
else:
write(sig + morph(spec, current.dag_hash()), col)
return
# Iterate over components using getattr to get next element
for idx, part in enumerate(parts):
@@ -4413,7 +4439,7 @@ def format_attribute(match_object: Match) -> str:
if part.startswith("_"):
raise SpecFormatStringError("Attempted to format private attribute")
else:
if part == "variants" and isinstance(current, vt.VariantMap):
if isinstance(current, vt.VariantMap):
# subscript instead of getattr for variant names
current = current[part]
else:
@@ -4437,31 +4463,62 @@ def format_attribute(match_object: Match) -> str:
raise SpecFormatStringError(m)
if isinstance(current, vn.VersionList):
if current == vn.any_version:
# don't print empty version lists
return ""
# We don't print empty version lists
return
if callable(current):
raise SpecFormatStringError("Attempted to format callable object")
if current is None:
# not printing anything
return ""
# We're not printing anything
return
# Set color codes for various attributes
color = None
col = None
if "variants" in parts:
color = VARIANT_COLOR
col = "+"
elif "architecture" in parts:
color = ARCHITECTURE_COLOR
col = "="
elif "compiler" in parts or "compiler_flags" in parts:
color = COMPILER_COLOR
col = "%"
elif "version" in parts or "versions" in parts:
color = VERSION_COLOR
col = "@"
# return colored output
return safe_color(sig, str(current), color)
# Finally, write the output
write(sig + morph(spec, str(current)), col)
return SPEC_FORMAT_RE.sub(format_attribute, format_string).strip()
attribute = ""
in_attribute = False
escape = False
for c in format_string:
if escape:
out.write(c)
escape = False
elif c == "\\":
escape = True
elif in_attribute:
if c == "}":
write_attribute(self, attribute, color)
attribute = ""
in_attribute = False
else:
attribute += c
else:
if c == "}":
raise SpecFormatStringError(
"Encountered closing } before opening { in %s" % format_string
)
elif c == "{":
in_attribute = True
else:
out.write(c)
if in_attribute:
raise SpecFormatStringError(
"Format string terminated while reading attribute." "Missing terminating }."
)
formatted_spec = out.getvalue()
return formatted_spec.strip()
def cformat(self, *args, **kwargs):
"""Same as format, but color defaults to auto instead of False."""
@@ -4469,16 +4526,6 @@ def cformat(self, *args, **kwargs):
kwargs.setdefault("color", None)
return self.format(*args, **kwargs)
@property
def spack_root(self):
"""Special field for using ``{spack_root}`` in Spec.format()."""
return spack.paths.spack_root
@property
def spack_install(self):
"""Special field for using ``{spack_install}`` in Spec.format()."""
return spack.store.STORE.layout.root
def format_path(
# self, format_string: str, _path_ctor: Optional[pathlib.PurePath] = None
self,
@@ -4504,27 +4551,18 @@ def format_path(
path_ctor = _path_ctor or pathlib.PurePath
format_string_as_path = path_ctor(format_string)
if format_string_as_path.is_absolute() or (
# Paths that begin with a single "\" on windows are relative, but we still
# want to preserve the initial "\\" to be consistent with PureWindowsPath.
# Ensure that this '\' is not passed to polite_filename() so it's not converted to '_'
(os.name == "nt" or path_ctor == pathlib.PureWindowsPath)
and format_string_as_path.parts[0] == "\\"
):
if format_string_as_path.is_absolute():
output_path_components = [format_string_as_path.parts[0]]
input_path_components = list(format_string_as_path.parts[1:])
else:
output_path_components = []
input_path_components = list(format_string_as_path.parts)
output_path_components += [
fs.polite_filename(self.format(part)) for part in input_path_components
fs.polite_filename(self.format(x)) for x in input_path_components
]
return str(path_ctor(*output_path_components))
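
format_path splits the template into path components first and formats each component separately, so whatever a spec attribute expands to can be sanitized per component while an absolute root is preserved. A sketch of that flow, where `fmt` and `sanitize` are hypothetical stand-ins for self.format and fs.polite_filename:

    import pathlib

    def format_path(template, fmt, sanitize):
        parts = pathlib.PurePath(template).parts
        if pathlib.PurePath(template).is_absolute():
            head, tail = parts[:1], parts[1:]   # keep the root verbatim
        else:
            head, tail = (), parts
        return str(pathlib.PurePath(*head, *(sanitize(fmt(p)) for p in tail)))

    # format_path("{name}/{version}", fmt=expand, sanitize=clean)
    #   == clean(expand("{name}")) + "/" + clean(expand("{version}"))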
def __str__(self):
if not self._dependencies:
return self.format()
root_str = [self.format()]
sorted_dependencies = sorted(
self.traverse(root=False), key=lambda x: (x.name, x.abstract_hash)

View File

@@ -142,7 +142,7 @@ def optimization_flags(self, compiler):
# custom spec.
compiler_version = compiler.version
version_number, suffix = archspec.cpu.version_components(compiler.version)
if not version_number or suffix:
if not version_number or suffix not in ("", "apple"):
# Try to deduce the underlying version of the compiler, regardless
# of its name in compilers.yaml. Depending on where this function
# is called we might get either a CompilerSpec or a fully fledged
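
One side of this hunk also trusts Apple's suffixed compiler versions instead of falling back to real-version detection for any suffix at all. A sketch of the split the check relies on, assuming archspec's version_components behaves as documented:

    import archspec.cpu

    for v in ("13.2.0", "13.2.0-apple", "13.2.0esp"):
        number, suffix = archspec.cpu.version_components(v)
        print(v, "->", number, repr(suffix))
    # 13.2.0       -> "13.2.0" ''       plain release: trusted as-is
    # 13.2.0-apple -> "13.2.0" 'apple'  no longer triggers detection
    # 13.2.0esp    -> "13.2.0" 'esp'    still triggers real-version detection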

View File

@@ -390,11 +390,11 @@ def test_built_spec_cache(mirror_dir):
assert any([r["spec"] == s for r in results])
def fake_dag_hash(spec, length=None):
def fake_dag_hash(spec):
# Generate an arbitrary hash that is intended to be different than
# whatever a Spec reported before (to test actions that trigger when
# the hash changes)
return "tal4c7h4z0gqmixb1eqa92mjoybxn5l6"[:length]
return "tal4c7h4z0gqmixb1eqa92mjoybxn5l6"
@pytest.mark.usefixtures(

View File

@@ -63,8 +63,7 @@ def build_environment(working_env):
os.environ["SPACK_LINKER_ARG"] = "-Wl,"
os.environ["SPACK_DTAGS_TO_ADD"] = "--disable-new-dtags"
os.environ["SPACK_DTAGS_TO_STRIP"] = "--enable-new-dtags"
os.environ["SPACK_SYSTEM_DIRS"] = "/usr/include|/usr/lib"
os.environ["SPACK_MANAGED_DIRS"] = f"{prefix}/opt/spack"
os.environ["SPACK_SYSTEM_DIRS"] = "/usr/include /usr/lib"
os.environ["SPACK_TARGET_ARGS"] = ""
if "SPACK_DEPENDENCIES" in os.environ:

View File

@@ -15,7 +15,7 @@
import spack.config
import spack.spec
from spack.paths import build_env_path
from spack.util.environment import SYSTEM_DIR_CASE_ENTRY, set_env
from spack.util.environment import SYSTEM_DIRS, set_env
from spack.util.executable import Executable, ProcessError
#
@@ -127,7 +127,7 @@
spack_cflags = ["-Wall"]
spack_cxxflags = ["-Werror"]
spack_fflags = ["-w"]
spack_ldflags = ["-Wl,--gc-sections", "-L", "foo"]
spack_ldflags = ["-L", "foo"]
spack_ldlibs = ["-lfoo"]
lheaderpad = ["-Wl,-headerpad_max_install_names"]
@@ -159,8 +159,7 @@ def wrapper_environment(working_env):
SPACK_DEBUG_LOG_ID="foo-hashabc",
SPACK_COMPILER_SPEC="gcc@4.4.7",
SPACK_SHORT_SPEC="foo@1.2 arch=linux-rhel6-x86_64 /hashabc",
SPACK_SYSTEM_DIRS=SYSTEM_DIR_CASE_ENTRY,
SPACK_MANAGED_DIRS="/path/to/spack-1/opt/spack/*|/path/to/spack-2/opt/spack/*",
SPACK_SYSTEM_DIRS=":".join(SYSTEM_DIRS),
SPACK_CC_RPATH_ARG="-Wl,-rpath,",
SPACK_CXX_RPATH_ARG="-Wl,-rpath,",
SPACK_F77_RPATH_ARG="-Wl,-rpath,",
@@ -279,6 +278,7 @@ def test_ld_flags(wrapper_environment, wrapper_flags):
test_args,
["ld"]
+ test_include_paths
+ [spack_ldflags[i] + spack_ldflags[i + 1] for i in range(0, len(spack_ldflags), 2)]
+ test_library_paths
+ ["--disable-new-dtags"]
+ test_rpaths
@@ -306,14 +306,13 @@ def test_cc_flags(wrapper_environment, wrapper_flags):
[real_cc]
+ target_args
+ test_include_paths
+ ["-Lfoo"]
+ [spack_ldflags[i] + spack_ldflags[i + 1] for i in range(0, len(spack_ldflags), 2)]
+ test_library_paths
+ ["-Wl,--disable-new-dtags"]
+ test_wl_rpaths
+ test_args_without_paths
+ spack_cppflags
+ spack_cflags
+ ["-Wl,--gc-sections"]
+ spack_ldlibs,
)
@@ -325,13 +324,12 @@ def test_cxx_flags(wrapper_environment, wrapper_flags):
[real_cc]
+ target_args
+ test_include_paths
+ ["-Lfoo"]
+ [spack_ldflags[i] + spack_ldflags[i + 1] for i in range(0, len(spack_ldflags), 2)]
+ test_library_paths
+ ["-Wl,--disable-new-dtags"]
+ test_wl_rpaths
+ test_args_without_paths
+ spack_cppflags
+ ["-Wl,--gc-sections"]
+ spack_ldlibs,
)
@@ -343,14 +341,13 @@ def test_fc_flags(wrapper_environment, wrapper_flags):
[real_cc]
+ target_args
+ test_include_paths
+ ["-Lfoo"]
+ [spack_ldflags[i] + spack_ldflags[i + 1] for i in range(0, len(spack_ldflags), 2)]
+ test_library_paths
+ ["-Wl,--disable-new-dtags"]
+ test_wl_rpaths
+ test_args_without_paths
+ spack_fflags
+ spack_cppflags
+ ["-Wl,--gc-sections"]
+ spack_ldlibs,
)
@@ -910,108 +907,3 @@ def test_linker_strips_loopopt(wrapper_environment, wrapper_flags):
result = cc(*(test_args + ["-loopopt=0", "-c", "x.c"]), output=str)
result = result.strip().split("\n")
assert "-loopopt=0" in result
def test_spack_managed_dirs_are_prioritized(wrapper_environment):
# We have two different stores with 5 packages divided over them
pkg1 = "/path/to/spack-1/opt/spack/linux-ubuntu22.04-zen2/gcc-13.2.0/pkg-1.0-abcdef"
pkg2 = "/path/to/spack-1/opt/spack/linux-ubuntu22.04-zen2/gcc-13.2.0/pkg-2.0-abcdef"
pkg3 = "/path/to/spack-2/opt/spack/linux-ubuntu22.04-zen2/gcc-13.2.0/pkg-3.0-abcdef"
pkg4 = "/path/to/spack-2/opt/spack/linux-ubuntu22.04-zen2/gcc-13.2.0/pkg-4.0-abcdef"
pkg5 = "/path/to/spack-2/opt/spack/linux-ubuntu22.04-zen2/gcc-13.2.0/pkg-5.0-abcdef"
variables = {
# cppflags, ldflags from the command line, config or package.py take highest priority
"SPACK_CPPFLAGS": f"-I/usr/local/include -I/external-1/include -I{pkg1}/include",
"SPACK_LDFLAGS": f"-L/usr/local/lib -L/external-1/lib -L{pkg1}/lib "
f"-Wl,-rpath,/usr/local/lib -Wl,-rpath,/external-1/lib -Wl,-rpath,{pkg1}/lib",
# automatic -L, -Wl,-rpath, -I flags from dependencies -- on the spack side they are
# already partitioned into "spack owned prefixes" and "non-spack owned prefixes"
"SPACK_STORE_LINK_DIRS": f"{pkg4}/lib:{pkg5}/lib",
"SPACK_STORE_RPATH_DIRS": f"{pkg4}/lib:{pkg5}/lib",
"SPACK_STORE_INCLUDE_DIRS": f"{pkg4}/include:{pkg5}/include",
"SPACK_LINK_DIRS": "/external-3/lib:/external-4/lib",
"SPACK_RPATH_DIRS": "/external-3/lib:/external-4/lib",
"SPACK_INCLUDE_DIRS": "/external-3/include:/external-4/include",
}
with set_env(SPACK_TEST_COMMAND="dump-args", **variables):
effective_call = (
cc(
# system paths
"-I/usr/include",
"-L/usr/lib",
"-Wl,-rpath,/usr/lib",
# some other externals
"-I/external-2/include",
"-L/external-2/lib",
"-Wl,-rpath,/external-2/lib",
# relative paths are considered "spack managed" since they are in the stage dir
"-I..",
"-L..",
"-Wl,-rpath,..", # pathological but simpler for the test.
# spack store paths
f"-I{pkg2}/include",
f"-I{pkg3}/include",
f"-L{pkg2}/lib",
f"-L{pkg3}/lib",
f"-Wl,-rpath,{pkg2}/lib",
f"-Wl,-rpath,{pkg3}/lib",
"hello.c",
"-o",
"hello",
output=str,
)
.strip()
.split("\n")
)
dash_I = [flag[2:] for flag in effective_call if flag.startswith("-I")]
dash_L = [flag[2:] for flag in effective_call if flag.startswith("-L")]
dash_Wl_rpath = [flag[11:] for flag in effective_call if flag.startswith("-Wl,-rpath")]
assert dash_I == [
# spack owned dirs from SPACK_*FLAGS
f"{pkg1}/include",
# spack owned dirs from command line & automatic flags for deps (in that order)
"..",
f"{pkg2}/include", # from command line
f"{pkg3}/include", # from command line
f"{pkg4}/include", # from SPACK_STORE_INCLUDE_DIRS
f"{pkg5}/include", # from SPACK_STORE_INCLUDE_DIRS
# non-system dirs from SPACK_*FLAGS
"/external-1/include",
# non-system dirs from command line & automatic flags for deps (in that order)
"/external-2/include", # from command line
"/external-3/include", # from SPACK_INCLUDE_DIRS
"/external-4/include", # from SPACK_INCLUDE_DIRS
# system dirs from SPACK_*FLAGS
"/usr/local/include",
# system dirs from command line
"/usr/include",
]
assert (
dash_L
== dash_Wl_rpath
== [
# spack owned dirs from SPACK_*FLAGS
f"{pkg1}/lib",
# spack owned dirs from command line & automatic flags for deps (in that order)
"..",
f"{pkg2}/lib", # from command line
f"{pkg3}/lib", # from command line
f"{pkg4}/lib", # from SPACK_STORE_LINK_DIRS
f"{pkg5}/lib", # from SPACK_STORE_LINK_DIRS
# non-system dirs from SPACK_*FLAGS
"/external-1/lib",
# non-system dirs from command line & automatic flags for deps (in that order)
"/external-2/lib", # from command line
"/external-3/lib", # from SPACK_LINK_DIRS
"/external-4/lib", # from SPACK_LINK_DIRS
# system dirs from SPACK_*FLAGS
"/usr/local/lib",
# system dirs from command line
"/usr/lib",
]
)
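
The deleted test pins down the wrapper's search-path ordering: Spack-owned prefixes (including relative paths) come first, then other non-system directories, then system directories, with the SPACK_*FLAGS entries leading each group. A rough sketch of the three-way partition, assuming glob patterns like the SPACK_MANAGED_DIRS values above:

    import fnmatch

    def partition_paths(paths, managed_patterns, system_dirs):
        # relative paths count as spack-managed: they live in the stage dir
        def is_managed(p):
            return not p.startswith("/") or any(
                fnmatch.fnmatch(p, pat) for pat in managed_patterns
            )

        managed = [p for p in paths if is_managed(p)]
        other = [p for p in paths if not is_managed(p) and p not in system_dirs]
        system = [p for p in paths if not is_managed(p) and p in system_dirs]
        return managed + other + system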

View File

@@ -169,25 +169,6 @@ def test_update_key_index(
assert "index.json" in key_dir_list
def test_buildcache_autopush(tmp_path, install_mockery, mock_fetch):
"""Test buildcache with autopush"""
mirror_dir = tmp_path / "mirror"
mirror_autopush_dir = tmp_path / "mirror_autopush"
mirror("add", "--unsigned", "mirror", mirror_dir.as_uri())
mirror("add", "--autopush", "--unsigned", "mirror-autopush", mirror_autopush_dir.as_uri())
s = Spec("libdwarf").concretized()
# Install and generate build cache index
s.package.do_install()
metadata_file = spack.binary_distribution.tarball_name(s, ".spec.json")
assert not (mirror_dir / "build_cache" / metadata_file).exists()
assert (mirror_autopush_dir / "build_cache" / metadata_file).exists()
def test_buildcache_sync(
mutable_mock_env_path,
install_mockery_mutable_config,
@@ -446,10 +427,3 @@ def test_push_and_install_with_mirror_marked_unsigned_does_not_require_extra_fla
spec.package.do_uninstall(force=True)
spec.package.do_install(**kwargs)
def test_skip_no_redistribute(mock_packages, config):
specs = list(Spec("no-redistribute-dependent").concretized().traverse())
filtered = spack.cmd.buildcache._skip_no_redistribute_for_public(specs)
assert not any(s.name == "no-redistribute" for s in filtered)
assert any(s.name == "no-redistribute-dependent" for s in filtered)

View File

@@ -117,13 +117,13 @@ def test_specs_staging(config, tmpdir):
with repo.use_repositories(builder.root):
spec_a = Spec("a").concretized()
spec_a_label = ci._spec_ci_label(spec_a)
spec_b_label = ci._spec_ci_label(spec_a["b"])
spec_c_label = ci._spec_ci_label(spec_a["c"])
spec_d_label = ci._spec_ci_label(spec_a["d"])
spec_e_label = ci._spec_ci_label(spec_a["e"])
spec_f_label = ci._spec_ci_label(spec_a["f"])
spec_g_label = ci._spec_ci_label(spec_a["g"])
spec_a_label = ci._spec_deps_key(spec_a)
spec_b_label = ci._spec_deps_key(spec_a["b"])
spec_c_label = ci._spec_deps_key(spec_a["c"])
spec_d_label = ci._spec_deps_key(spec_a["d"])
spec_e_label = ci._spec_deps_key(spec_a["e"])
spec_f_label = ci._spec_deps_key(spec_a["f"])
spec_g_label = ci._spec_deps_key(spec_a["g"])
spec_labels, dependencies, stages = ci.stage_spec_jobs([spec_a])

View File

@@ -123,18 +123,17 @@ def test_root_and_dep_match_returns_root(mock_packages, mutable_mock_env_path):
@pytest.mark.parametrize(
"arg,conf", [("--reuse", True), ("--fresh", False), ("--reuse-deps", "dependencies")]
"arg,config", [("--reuse", True), ("--fresh", False), ("--reuse-deps", "dependencies")]
)
def test_concretizer_arguments(mutable_config, mock_packages, arg, conf):
def test_concretizer_arguments(mutable_config, mock_packages, arg, config):
"""Ensure that ConfigSetAction is doing the right thing."""
spec = spack.main.SpackCommand("spec")
assert spack.config.get("concretizer:reuse", None, scope="command_line") is None
assert spack.config.get("concretizer:reuse", None) is None
spec(arg, "zlib")
assert spack.config.get("concretizer:reuse", None) == conf
assert spack.config.get("concretizer:reuse", None, scope="command_line") == conf
assert spack.config.get("concretizer:reuse", None) == config
def test_use_buildcache_type():

View File

@@ -112,10 +112,10 @@ def test_compiler_find_no_apple_gcc(no_compilers_yaml, working_env, mock_executa
@pytest.mark.regression("37996")
def test_compiler_remove(mutable_config, mock_packages):
"""Tests that we can remove a compiler from configuration."""
assert spack.spec.CompilerSpec("gcc@=9.4.0") in spack.compilers.all_compiler_specs()
args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@9.4.0", add_paths=[], scope=None)
assert spack.spec.CompilerSpec("gcc@=4.8.0") in spack.compilers.all_compiler_specs()
args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@4.8.0", add_paths=[], scope=None)
spack.cmd.compiler.compiler_remove(args)
assert spack.spec.CompilerSpec("gcc@=9.4.0") not in spack.compilers.all_compiler_specs()
assert spack.spec.CompilerSpec("gcc@=4.8.0") not in spack.compilers.all_compiler_specs()
@pytest.mark.regression("37996")
@@ -124,10 +124,10 @@ def test_removing_compilers_from_multiple_scopes(mutable_config, mock_packages):
site_config = spack.config.get("compilers", scope="site")
spack.config.set("compilers", site_config, scope="user")
assert spack.spec.CompilerSpec("gcc@=9.4.0") in spack.compilers.all_compiler_specs()
args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@9.4.0", add_paths=[], scope=None)
assert spack.spec.CompilerSpec("gcc@=4.8.0") in spack.compilers.all_compiler_specs()
args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@4.8.0", add_paths=[], scope=None)
spack.cmd.compiler.compiler_remove(args)
assert spack.spec.CompilerSpec("gcc@=9.4.0") not in spack.compilers.all_compiler_specs()
assert spack.spec.CompilerSpec("gcc@=4.8.0") not in spack.compilers.all_compiler_specs()
@pytest.mark.not_on_windows("Cannot execute bash script on Windows")
@@ -175,9 +175,7 @@ def test_compiler_find_mixed_suffixes(
assert "clang@11.0.0" in output
assert "gcc@8.4.0" in output
config = spack.compilers.get_compiler_config(
no_compilers_yaml, scope="site", init_config=False
)
config = spack.compilers.get_compiler_config("site", False)
clang = next(c["compiler"] for c in config if c["compiler"]["spec"] == "clang@=11.0.0")
gcc = next(c["compiler"] for c in config if c["compiler"]["spec"] == "gcc@=8.4.0")
@@ -212,9 +210,7 @@ def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, compiler
assert "clang@11.0.0" in output
assert "gcc@8.4.0" in output
config = spack.compilers.get_compiler_config(
no_compilers_yaml, scope="site", init_config=False
)
config = spack.compilers.get_compiler_config("site", False)
clang = next(c["compiler"] for c in config if c["compiler"]["spec"] == "clang@=11.0.0")
assert clang["paths"]["cc"] == str(compilers_dir / "clang")
@@ -233,9 +229,7 @@ def test_compiler_find_path_order(no_compilers_yaml, working_env, compilers_dir)
compiler("find", "--scope=site")
config = spack.compilers.get_compiler_config(
no_compilers_yaml, scope="site", init_config=False
)
config = spack.compilers.get_compiler_config("site", False)
gcc = next(c["compiler"] for c in config if c["compiler"]["spec"] == "gcc@=8.4.0")
assert gcc["paths"] == {
"cc": str(new_dir / "gcc-8"),

View File

@@ -858,7 +858,8 @@ def test_with_config_bad_include_activate(environment_from_manifest, tmpdir):
"""
)
with ev.Environment(env_root) as e:
e = ev.Environment(env_root)
with e:
e.concretize()
# we've created an environment with some included config files (which do
@@ -868,7 +869,7 @@ def test_with_config_bad_include_activate(environment_from_manifest, tmpdir):
os.remove(abs_include_path)
os.remove(include1)
with pytest.raises(spack.config.ConfigFileError) as exc:
ev.activate(ev.Environment(env_root))
ev.activate(e)
err = exc.value.message
assert "missing include" in err
@@ -1062,7 +1063,8 @@ def test_config_change_new(mutable_mock_env_path, tmp_path, mock_packages, mutab
"""
)
with ev.Environment(tmp_path):
e = ev.Environment(tmp_path)
with e:
config("change", "packages:mpich:require:~debug")
with pytest.raises(spack.solver.asp.UnsatisfiableSpecError):
spack.spec.Spec("mpich+debug").concretized()
@@ -1079,7 +1081,7 @@ def test_config_change_new(mutable_mock_env_path, tmp_path, mock_packages, mutab
require: "@3.0.3"
"""
)
with ev.Environment(tmp_path):
with e:
assert spack.spec.Spec("mpich").concretized().satisfies("@3.0.3")
with pytest.raises(spack.config.ConfigError, match="not a list"):
config("change", "packages:mpich:require:~debug")

View File

@@ -88,7 +88,6 @@ def __init__(
exclude_file=None,
exclude_specs=None,
directory=None,
private=False,
):
self.specs = specs or []
self.all = all
@@ -97,7 +96,6 @@ def __init__(
self.dependencies = dependencies
self.exclude_file = exclude_file
self.exclude_specs = exclude_specs
self.private = private
self.directory = directory
@@ -106,7 +104,7 @@ def test_exclude_specs(mock_packages, config):
specs=["mpich"], versions_per_spec="all", exclude_specs="mpich@3.0.1:3.0.2 mpich@1.0"
)
mirror_specs, _ = spack.cmd.mirror._specs_and_action(args)
mirror_specs = spack.cmd.mirror.concrete_specs_from_user(args)
expected_include = set(
spack.spec.Spec(x).concretized() for x in ["mpich@3.0.3", "mpich@3.0.4", "mpich@3.0"]
)
@@ -115,19 +113,6 @@ def test_exclude_specs(mock_packages, config):
assert not any(spec.satisfies(y) for spec in mirror_specs for y in expected_exclude)
def test_exclude_specs_public_mirror(mock_packages, config):
args = MockMirrorArgs(
specs=["no-redistribute-dependent"],
versions_per_spec="all",
dependencies=True,
private=False,
)
mirror_specs, _ = spack.cmd.mirror._specs_and_action(args)
assert not any(s.name == "no-redistribute" for s in mirror_specs)
assert any(s.name == "no-redistribute-dependent" for s in mirror_specs)
def test_exclude_file(mock_packages, tmpdir, config):
exclude_path = os.path.join(str(tmpdir), "test-exclude.txt")
with open(exclude_path, "w") as exclude_file:
@@ -140,7 +125,7 @@ def test_exclude_file(mock_packages, tmpdir, config):
args = MockMirrorArgs(specs=["mpich"], versions_per_spec="all", exclude_file=exclude_path)
mirror_specs, _ = spack.cmd.mirror._specs_and_action(args)
mirror_specs = spack.cmd.mirror.concrete_specs_from_user(args)
expected_include = set(
spack.spec.Spec(x).concretized() for x in ["mpich@3.0.3", "mpich@3.0.4", "mpich@3.0"]
)
@@ -277,9 +262,11 @@ def test_mirror_destroy(
class TestMirrorCreate:
@pytest.mark.regression("31736", "31985")
def test_all_specs_with_all_versions_dont_concretize(self):
args = MockMirrorArgs(all=True, exclude_file=None, exclude_specs=None)
mirror_specs, _ = spack.cmd.mirror._specs_and_action(args)
assert all(not s.concrete for s in mirror_specs)
args = MockMirrorArgs(exclude_file=None, exclude_specs=None)
specs = spack.cmd.mirror.all_specs_with_all_versions(
selection_fn=spack.cmd.mirror.not_excluded_fn(args)
)
assert all(not s.concrete for s in specs)
@pytest.mark.parametrize(
"cli_args,error_str",
@@ -337,8 +324,8 @@ def test_error_conditions(self, cli_args, error_str):
],
)
def test_exclude_specs_from_user(self, cli_args, not_expected, config):
mirror_specs, _ = spack.cmd.mirror._specs_and_action(MockMirrorArgs(**cli_args))
assert not any(s.satisfies(y) for s in mirror_specs for y in not_expected)
specs = spack.cmd.mirror.concrete_specs_from_user(MockMirrorArgs(**cli_args))
assert not any(s.satisfies(y) for s in specs for y in not_expected)
@pytest.mark.parametrize("abstract_specs", [("bowtie", "callpath")])
def test_specs_from_cli_are_the_same_as_from_file(self, abstract_specs, config, tmpdir):
@@ -420,27 +407,3 @@ def test_mirror_add_set_signed(mutable_config):
assert spack.config.get("mirrors:example") == {"url": "http://example.com", "signed": False}
mirror("set", "--signed", "example")
assert spack.config.get("mirrors:example") == {"url": "http://example.com", "signed": True}
def test_mirror_add_set_autopush(mutable_config):
# Add mirror without autopush
mirror("add", "example", "http://example.com")
assert spack.config.get("mirrors:example") == "http://example.com"
mirror("set", "--no-autopush", "example")
assert spack.config.get("mirrors:example") == {"url": "http://example.com", "autopush": False}
mirror("set", "--autopush", "example")
assert spack.config.get("mirrors:example") == {"url": "http://example.com", "autopush": True}
mirror("set", "--no-autopush", "example")
assert spack.config.get("mirrors:example") == {"url": "http://example.com", "autopush": False}
mirror("remove", "example")
# Add mirror with autopush
mirror("add", "--autopush", "example", "http://example.com")
assert spack.config.get("mirrors:example") == {"url": "http://example.com", "autopush": True}
mirror("set", "--autopush", "example")
assert spack.config.get("mirrors:example") == {"url": "http://example.com", "autopush": True}
mirror("set", "--no-autopush", "example")
assert spack.config.get("mirrors:example") == {"url": "http://example.com", "autopush": False}
mirror("set", "--autopush", "example")
assert spack.config.get("mirrors:example") == {"url": "http://example.com", "autopush": True}
mirror("remove", "example")

View File

@@ -31,7 +31,7 @@ def test_spec():
@pytest.mark.only_clingo("Known failure of the original concretizer")
def test_spec_concretizer_args(mutable_config, mutable_database, do_not_check_runtimes_on_reuse):
def test_spec_concretizer_args(mutable_config, mutable_database):
"""End-to-end test of CLI concretizer prefs.
It's here to make sure that everything works from CLI

View File

@@ -14,7 +14,6 @@
import spack.compilers
import spack.spec
import spack.util.environment
import spack.util.module_cmd
from spack.compiler import Compiler
from spack.util.executable import Executable, ProcessError
@@ -138,6 +137,14 @@ def __init__(self):
environment={},
)
def _get_compiler_link_paths(self):
# Mock os.path.isdir so the link paths don't have to exist
old_isdir = os.path.isdir
os.path.isdir = lambda x: True
ret = super()._get_compiler_link_paths()
os.path.isdir = old_isdir
return ret
@property
def name(self):
return "mockcompiler"
@@ -155,25 +162,34 @@ def verbose_flag(self):
required_libs = ["libgfortran"]
@pytest.mark.not_on_windows("Not supported on Windows (yet)")
def test_implicit_rpaths(dirs_with_libfiles):
def test_implicit_rpaths(dirs_with_libfiles, monkeypatch):
lib_to_dirs, all_dirs = dirs_with_libfiles
def try_all_dirs(*args):
return all_dirs
monkeypatch.setattr(MockCompiler, "_get_compiler_link_paths", try_all_dirs)
expected_rpaths = set(lib_to_dirs["libstdc++"] + lib_to_dirs["libgfortran"])
compiler = MockCompiler()
compiler._compile_c_source_output = "ld " + " ".join(f"-L{d}" for d in all_dirs)
retrieved_rpaths = compiler.implicit_rpaths()
assert set(retrieved_rpaths) == set(lib_to_dirs["libstdc++"] + lib_to_dirs["libgfortran"])
assert set(retrieved_rpaths) == expected_rpaths
without_flag_output = "ld -L/path/to/first/lib -L/path/to/second/lib64"
with_flag_output = "ld -L/path/to/first/with/flag/lib -L/path/to/second/lib64"
no_flag_dirs = ["/path/to/first/lib", "/path/to/second/lib64"]
no_flag_output = "ld -L%s -L%s" % tuple(no_flag_dirs)
flag_dirs = ["/path/to/first/with/flag/lib", "/path/to/second/lib64"]
flag_output = "ld -L%s -L%s" % tuple(flag_dirs)
def call_compiler(exe, *args, **kwargs):
# This method can replace Executable.__call__ to emulate a compiler that
# changes libraries depending on a flag.
if "--correct-flag" in exe.exe:
return with_flag_output
return without_flag_output
return flag_output
return no_flag_output
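
These helpers fake a compiler whose verbose link line changes with a flag; the link-path logic under test reduces to scraping `-L` arguments out of that line. A throwaway sketch of the scrape (not Spack's actual parser):

    def parse_link_dirs(verbose_link_line):
        # keep the payload of every -L token on the linker invocation
        return [tok[2:] for tok in verbose_link_line.split() if tok.startswith("-L")]

    assert parse_link_dirs("ld -L/path/to/first/lib -L/path/to/second/lib64") == [
        "/path/to/first/lib",
        "/path/to/second/lib64",
    ]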
@pytest.mark.not_on_windows("Not supported on Windows (yet)")
@@ -187,8 +203,8 @@ def call_compiler(exe, *args, **kwargs):
("cc", "cppflags"),
],
)
@pytest.mark.enable_compiler_execution
def test_compile_dummy_c_source_adds_flags(monkeypatch, exe, flagname):
@pytest.mark.enable_compiler_link_paths
def test_get_compiler_link_paths(monkeypatch, exe, flagname):
# create fake compiler that emits mock verbose output
compiler = MockCompiler()
monkeypatch.setattr(Executable, "__call__", call_compiler)
@@ -205,38 +221,40 @@ def test_compile_dummy_c_source_adds_flags(monkeypatch, exe, flagname):
assert False
# Test without flags
assert compiler._compile_dummy_c_source() == without_flag_output
assert compiler._get_compiler_link_paths() == no_flag_dirs
if flagname:
# set flags and test
compiler.flags = {flagname: ["--correct-flag"]}
assert compiler._compile_dummy_c_source() == with_flag_output
assert compiler._get_compiler_link_paths() == flag_dirs
@pytest.mark.enable_compiler_execution
def test_compile_dummy_c_source_no_path():
def test_get_compiler_link_paths_no_path():
compiler = MockCompiler()
compiler.cc = None
compiler.cxx = None
assert compiler._compile_dummy_c_source() is None
compiler.f77 = None
compiler.fc = None
assert compiler._get_compiler_link_paths() == []
@pytest.mark.enable_compiler_execution
def test_compile_dummy_c_source_no_verbose_flag():
def test_get_compiler_link_paths_no_verbose_flag():
compiler = MockCompiler()
compiler._verbose_flag = None
assert compiler._compile_dummy_c_source() is None
assert compiler._get_compiler_link_paths() == []
@pytest.mark.not_on_windows("Not supported on Windows (yet)")
@pytest.mark.enable_compiler_execution
def test_compile_dummy_c_source_load_env(working_env, monkeypatch, tmpdir):
@pytest.mark.enable_compiler_link_paths
def test_get_compiler_link_paths_load_env(working_env, monkeypatch, tmpdir):
gcc = str(tmpdir.join("gcc"))
with open(gcc, "w") as f:
f.write(
f"""#!/bin/sh
"""#!/bin/sh
if [ "$ENV_SET" = "1" ] && [ "$MODULE_LOADED" = "1" ]; then
printf '{without_flag_output}'
echo '"""
+ no_flag_output
+ """'
fi
"""
)
@@ -256,7 +274,7 @@ def module(*args):
compiler.environment = {"set": {"ENV_SET": "1"}}
compiler.modules = ["turn_on"]
assert compiler._compile_dummy_c_source() == without_flag_output
assert compiler._get_compiler_link_paths() == no_flag_dirs
# Get the desired flag from the specified compiler spec.
@@ -682,7 +700,7 @@ def test_raising_if_compiler_target_is_over_specific(config):
]
arch_spec = spack.spec.ArchSpec(("linux", "ubuntu18.04", "haswell"))
with spack.config.override("compilers", compilers):
cfg = spack.compilers.get_compiler_config(config)
cfg = spack.compilers.get_compiler_config()
with pytest.raises(ValueError):
spack.compilers.get_compilers(cfg, spack.spec.CompilerSpec("gcc@9.0.1"), arch_spec)
@@ -894,52 +912,3 @@ def prepare_executable(name):
# Test that null entries don't fail
compiler.cc = None
compiler.verify_executables()
@pytest.mark.parametrize(
"detected_versions,expected_length",
[
# If we detect a C compiler we expect the result to be valid
(
[
spack.compilers.DetectVersionArgs(
id=spack.compilers.CompilerID(
os="ubuntu20.04", compiler_name="clang", version="12.0.0"
),
variation=spack.compilers.NameVariation(prefix="", suffix="-12"),
language="cc",
path="/usr/bin/clang-12",
),
spack.compilers.DetectVersionArgs(
id=spack.compilers.CompilerID(
os="ubuntu20.04", compiler_name="clang", version="12.0.0"
),
variation=spack.compilers.NameVariation(prefix="", suffix="-12"),
language="cxx",
path="/usr/bin/clang++-12",
),
],
1,
),
# If we detect only a C++ compiler we expect the result to be discarded
(
[
spack.compilers.DetectVersionArgs(
id=spack.compilers.CompilerID(
os="ubuntu20.04", compiler_name="clang", version="12.0.0"
),
variation=spack.compilers.NameVariation(prefix="", suffix="-12"),
language="cxx",
path="/usr/bin/clang++-12",
)
],
0,
),
],
)
def test_detection_requires_c_compiler(detected_versions, expected_length):
"""Tests that compilers automatically added to the configuration have
at least a C compiler.
"""
result = spack.compilers.make_compiler_list(detected_versions)
assert len(result) == expected_length
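
The removed parametrization encodes a detection policy: a toolchain found on the system is only added to the configuration if a C compiler was detected for it, so a lone clang++ is discarded. A compact sketch of that grouping-and-filtering over simplified (compiler_id, language, path) records:

    def compilers_with_cc(detected):
        languages = {}
        for compiler_id, language, _path in detected:
            languages.setdefault(compiler_id, set()).add(language)
        return [cid for cid, langs in languages.items() if "cc" in langs]

    found = [
        (("ubuntu20.04", "clang", "12.0.0"), "cc", "/usr/bin/clang-12"),
        (("ubuntu20.04", "clang", "12.0.0"), "cxx", "/usr/bin/clang++-12"),
    ]
    assert len(compilers_with_cc(found)) == 1      # C + C++: kept
    assert len(compilers_with_cc(found[1:])) == 0  # C++ only: discarded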

View File

@@ -13,7 +13,6 @@
import llnl.util.lang
import spack.compiler
import spack.compilers
import spack.concretize
import spack.config
@@ -24,7 +23,6 @@
import spack.platforms
import spack.repo
import spack.solver.asp
import spack.util.libc
import spack.variant as vt
from spack.concretize import find_spec
from spack.spec import CompilerSpec, Spec
@@ -69,24 +67,6 @@ def check_concretize(abstract_spec):
return concrete
@pytest.fixture(scope="function", autouse=True)
def binary_compatibility(monkeypatch, request):
"""Selects whether we use OS compatibility for binaries, or libc compatibility."""
if spack.platforms.real_host().name != "linux":
return
if "mock_packages" not in request.fixturenames:
# Only builtin.mock has a mock glibc package
return
if "database" in request.fixturenames or "mutable_database" in request.fixturenames:
# Databases have been created without glibc support
return
monkeypatch.setattr(spack.solver.asp, "using_libc_compatibility", lambda: True)
monkeypatch.setattr(spack.compiler.Compiler, "default_libc", Spec("glibc@=2.28"))
@pytest.fixture(
params=[
# no_deps
@@ -140,16 +120,14 @@ def current_host(request, monkeypatch):
# is_preference is not empty if we want to supply the
# preferred target via packages.yaml
cpu, _, is_preference = request.param.partition("-")
target = archspec.cpu.TARGETS[cpu]
monkeypatch.setattr(spack.platforms.Test, "default", cpu)
monkeypatch.setattr(spack.platforms.Test, "front_end", cpu)
if not is_preference:
target = archspec.cpu.TARGETS[cpu]
monkeypatch.setattr(archspec.cpu, "host", lambda: target)
yield target
else:
target = archspec.cpu.TARGETS["sapphirerapids"]
monkeypatch.setattr(archspec.cpu, "host", lambda: target)
with spack.config.override("packages:all", {"target": [cpu]}):
yield target
@@ -276,7 +254,7 @@ def gcc11_with_flags(compiler_factory):
# This must use the mutable_config fixture because the test
# adjusting_default_target_based_on_compiler uses the current_host fixture,
# which changes the config.
@pytest.mark.usefixtures("mutable_config", "mock_packages", "do_not_check_runtimes_on_reuse")
@pytest.mark.usefixtures("mutable_config", "mock_packages")
class TestConcretize:
def test_concretize(self, spec):
check_concretize(spec)
@@ -636,7 +614,7 @@ def test_my_dep_depends_on_provider_of_my_virtual_dep(self):
spec.normalize()
spec.concretize()
@pytest.mark.parametrize("compiler_str", ["clang", "gcc", "gcc@10.2.1", "clang@:15.0.0"])
@pytest.mark.parametrize("compiler_str", ["clang", "gcc", "gcc@10.2.1", "clang@:12.0.0"])
def test_compiler_inheritance(self, compiler_str):
spec_str = "mpileaks %{0}".format(compiler_str)
spec = Spec(spec_str).concretized()
@@ -896,10 +874,10 @@ def test_concretize_anonymous_dep(self, spec_str):
@pytest.mark.parametrize(
"spec_str,expected_str",
[
# Unconstrained versions select default compiler (gcc@10.2.1)
# Unconstrained versions select default compiler (gcc@4.5.0)
("bowtie@1.4.0", "%gcc@10.2.1"),
# Version with conflicts and no valid gcc select another compiler
("bowtie@1.3.0", "%clang@15.0.0"),
("bowtie@1.3.0", "%clang@12.0.0"),
# If a higher gcc is available still prefer that
("bowtie@1.2.2 os=redhat6", "%gcc@11.1.0"),
],
@@ -1034,7 +1012,7 @@ def test_working_around_conflicting_defaults(self, spec_str, expected):
[("cmake", ["%clang"]), ("cmake %gcc", ["%gcc"]), ("cmake %clang", ["%clang"])],
)
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_external_package_and_compiler_preferences(self, spec_str, expected, mutable_config):
def test_external_package_and_compiler_preferences(self, spec_str, expected):
packages_yaml = {
"all": {"compiler": ["clang", "gcc"]},
"cmake": {
@@ -1042,7 +1020,7 @@ def test_external_package_and_compiler_preferences(self, spec_str, expected, mut
"buildable": False,
},
}
mutable_config.set("packages", packages_yaml)
spack.config.set("packages", packages_yaml)
s = Spec(spec_str).concretized()
assert s.external
@@ -1347,9 +1325,6 @@ def mock_fn(*args, **kwargs):
def test_reuse_installed_packages_when_package_def_changes(
self, context, mutable_database, repo_with_changing_recipe
):
# test applies only with reuse turned off in concretizer
spack.config.set("concretizer:reuse", False)
# Install a spec
root = Spec("root").concretized()
dependency = root["changing"].copy()
@@ -1373,22 +1348,6 @@ def test_reuse_installed_packages_when_package_def_changes(
# Structure and package hash will be different without reuse
assert root.dag_hash() != new_root_without_reuse.dag_hash()
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
@pytest.mark.regression("43663")
def test_no_reuse_when_variant_condition_does_not_hold(self, mutable_database, mock_packages):
spack.config.set("concretizer:reuse", True)
# Install a spec for which the `version_based` variant condition does not hold
old = Spec("conditional-variant-pkg @1").concretized()
old.package.do_install(fake=True, explicit=True)
# Then explicitly require a spec with `+version_based`, which shouldn't reuse previous spec
new1 = Spec("conditional-variant-pkg +version_based").concretized()
assert new1.satisfies("@2 +version_based")
new2 = Spec("conditional-variant-pkg +two_whens").concretized()
assert new2.satisfies("@2 +two_whens +version_based")
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_reuse_with_flags(self, mutable_database, mutable_config):
spack.config.set("concretizer:reuse", True)
@@ -1475,14 +1434,12 @@ def test_os_selection_when_multiple_choices_are_possible(
):
s = Spec(spec_str).concretized()
for node in s.traverse():
if node.name == "glibc":
continue
assert node.satisfies(expected_os)
@pytest.mark.regression("22718")
@pytest.mark.parametrize(
"spec_str,expected_compiler",
[("mpileaks", "%gcc@10.2.1"), ("mpileaks ^mpich%clang@15.0.0", "%clang@15.0.0")],
[("mpileaks", "%gcc@10.2.1"), ("mpileaks ^mpich%clang@12.0.0", "%clang@12.0.0")],
)
def test_compiler_is_unique(self, spec_str, expected_compiler):
s = Spec(spec_str).concretized()
@@ -1770,7 +1727,7 @@ def test_reuse_with_unknown_package_dont_raise(self, tmpdir, temporary_store, mo
[
(["libelf", "libelf@0.8.10"], 1),
(["libdwarf%gcc", "libelf%clang"], 2),
(["libdwarf%gcc", "libdwarf%clang"], 3),
(["libdwarf%gcc", "libdwarf%clang"], 4),
(["libdwarf^libelf@0.8.12", "libdwarf^libelf@0.8.13"], 4),
(["hdf5", "zmpi"], 3),
(["hdf5", "mpich"], 2),
@@ -1789,8 +1746,7 @@ def test_best_effort_coconcretize(self, specs, expected):
for s in result.specs:
concrete_specs.update(s.traverse())
libc_offset = 1 if spack.solver.asp.using_libc_compatibility() else 0
assert len(concrete_specs) == expected + libc_offset
assert len(concrete_specs) == expected
@pytest.mark.parametrize(
"specs,expected_spec,occurances",
@@ -1910,16 +1866,32 @@ def test_version_weight_and_provenance(self):
result_spec = result.specs[0]
num_specs = len(list(result_spec.traverse()))
libc_offset = 1 if spack.solver.asp.using_libc_compatibility() else 0
criteria = [
(num_specs - 1 - libc_offset, None, "number of packages to build (vs. reuse)"),
(num_specs - 1, None, "number of packages to build (vs. reuse)"),
(2, 0, "version badness"),
]
for criterion in criteria:
assert criterion in result.criteria, result_spec
assert criterion in result.criteria
assert result_spec.satisfies("^b@1.0")
@pytest.mark.regression("31169")
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_not_reusing_incompatible_os_or_compiler(self):
root_spec = Spec("b")
s = root_spec.concretized()
wrong_compiler, wrong_os = s.copy(), s.copy()
wrong_compiler.compiler = spack.spec.CompilerSpec("gcc@12.1.0")
wrong_os.architecture = spack.spec.ArchSpec("test-ubuntu2204-x86_64")
reusable_specs = [wrong_compiler, wrong_os]
with spack.config.override("concretizer:reuse", True):
solver = spack.solver.asp.Solver()
setup = spack.solver.asp.SpackSolverSetup()
result, _, _ = solver.driver.solve(setup, [root_spec], reuse=reusable_specs)
concrete_spec = result.specs[0]
assert concrete_spec.satisfies("%{}".format(s.compiler))
assert concrete_spec.satisfies("os={}".format(s.architecture.os))
@pytest.mark.only_clingo("Use case not supported by the original concretizer")
def test_reuse_succeeds_with_config_compatible_os(self):
root_spec = Spec("b")
@@ -2124,11 +2096,7 @@ def test_external_python_extension_find_dependency_from_installed(self, monkeypa
# install python external
python = Spec("python").concretized()
def query(*args, **kwargs):
return [python]
monkeypatch.setattr(spack.store.STORE.db, "query", query)
monkeypatch.setattr(spack.store.STORE.db, "query", lambda x: [python])
# ensure that we can't be faking this by getting it from config
external_conf.pop("python")
@@ -2392,62 +2360,6 @@ def test_select_lower_priority_package_from_repository_stack(
assert s[name].concrete
assert s[name].namespace == namespace
@pytest.mark.only_clingo("Old concretizer cannot reuse")
def test_reuse_specs_from_non_available_compilers(self, mutable_config, mutable_database):
"""Tests that we can reuse specs with compilers that are not configured locally."""
# All the specs in the mutable DB have been compiled with %gcc@=10.2.1
specs = mutable_database.query_local()
assert all(s.satisfies("%gcc@=10.2.1") for s in specs)
spack.compilers.remove_compiler_from_config("gcc@=10.2.1")
assert not spack.compilers.compilers_for_spec("gcc@=10.2.1")
mutable_config.set("concretizer:reuse", True)
# mpileaks is in the database, it will be reused with gcc@=10.2.1
root = Spec("mpileaks").concretized()
for s in root.traverse():
assert s.satisfies("%gcc@10.2.1")
# fftw is not in the database, therefore the root will be compiled with gcc@=9.4.0,
# while the mpi is reused from the database and is compiled with gcc@=10.2.1
root = Spec("fftw").concretized()
assert root.satisfies("%gcc@=9.4.0")
for s in root.traverse(root=False):
assert s.satisfies("%gcc@10.2.1")
@pytest.mark.regression("43406")
def test_externals_with_platform_explicitly_set(self, tmp_path):
"""Tests that users can specify platform=xxx in an external spec"""
external_conf = {
"mpich": {
"buildable": False,
"externals": [{"spec": "mpich@=2.0.0 platform=test", "prefix": str(tmp_path)}],
}
}
spack.config.set("packages", external_conf)
s = Spec("mpich").concretized()
assert s.external
@pytest.mark.regression("43875")
def test_concretize_missing_compiler(self, mutable_config, monkeypatch):
"""Tests that Spack can concretize a spec with a missing compiler when the
option is active.
"""
def _default_libc(self):
if self.cc is None:
return None
return Spec("glibc@=2.28")
monkeypatch.setattr(spack.concretize.Concretizer, "check_for_compiler_existence", False)
monkeypatch.setattr(spack.compiler.Compiler, "default_libc", property(_default_libc))
monkeypatch.setattr(
spack.util.libc, "libc_from_current_python_process", lambda: Spec("glibc@=2.28")
)
mutable_config.set("config:install_missing_compilers", True)
s = Spec("a %gcc@=13.2.0").concretized()
assert s.satisfies("%gcc@13.2.0")
@pytest.fixture()
def duplicates_test_repository():
@@ -2586,29 +2498,6 @@ def test_no_multiple_solutions_with_different_edges_same_nodes(self):
assert len(edges) == 1
assert edges[0].spec.satisfies("@=60")
@pytest.mark.regression("43647")
def test_specifying_different_versions_build_deps(self):
"""Tests that we can concretize a spec with nodes using the same build
dependency pinned at different versions, when the constraint is specified
in the root spec.
o hdf5@1.0
|\
o | pinned-gmake@1.0
o | gmake@3.0
/
o gmake@4.1
"""
hdf5_str = "hdf5@1.0 ^gmake@4.1"
pinned_str = "pinned-gmake@1.0 ^gmake@3.0"
input_specs = [Spec(hdf5_str), Spec(pinned_str)]
solver = spack.solver.asp.Solver()
result = solver.solve(input_specs)
assert any(x.satisfies(hdf5_str) for x in result.specs)
assert any(x.satisfies(pinned_str) for x in result.specs)
@pytest.mark.parametrize(
"v_str,v_opts,checksummed",

View File

@@ -7,8 +7,6 @@
import pytest
import archspec.cpu
import spack.paths
import spack.repo
import spack.solver.asp
@@ -26,7 +24,9 @@ def _concretize_with_reuse(*, root_str, reused_str):
reused_spec = spack.spec.Spec(reused_str).concretized()
setup = spack.solver.asp.SpackSolverSetup(tests=False)
driver = spack.solver.asp.PyclingoDriver()
result, _, _ = driver.solve(setup, [spack.spec.Spec(f"{root_str}")], reuse=[reused_spec])
result, _, _ = driver.solve(
setup, [spack.spec.Spec(f"{root_str} ^{reused_str}")], reuse=[reused_spec]
)
root = result.specs[0]
return root, reused_spec
@@ -47,7 +47,7 @@ def enable_runtimes():
def test_correct_gcc_runtime_is_injected_as_dependency(runtime_repo):
s = spack.spec.Spec("a%gcc@10.2.1 ^b%gcc@9.4.0").concretized()
s = spack.spec.Spec("a%gcc@10.2.1 ^b%gcc@4.8.0").concretized()
a, b = s["a"], s["b"]
# Both a and b should depend on the same gcc-runtime directly
@@ -78,28 +78,9 @@ def test_external_nodes_do_not_have_runtimes(runtime_repo, mutable_config, tmp_p
"root_str,reused_str,expected,nruntime",
[
# The reused runtime is older than we need, thus we'll add a more recent one for a
("a%gcc@10.2.1", "b%gcc@9.4.0", {"a": "gcc-runtime@10.2.1", "b": "gcc-runtime@9.4.0"}, 2),
("a%gcc@10.2.1", "b%gcc@4.8.0", {"a": "gcc-runtime@10.2.1", "b": "gcc-runtime@4.8.0"}, 2),
# The root is compiled with an older compiler, thus we'll reuse the runtime from b
("a%gcc@9.4.0", "b%gcc@10.2.1", {"a": "gcc-runtime@10.2.1", "b": "gcc-runtime@10.2.1"}, 1),
# Same as before, but tests that we can reuse from a more generic target
pytest.param(
"a%gcc@9.4.0",
"b%gcc@10.2.1 target=x86_64",
{"a": "gcc-runtime@10.2.1 target=x86_64", "b": "gcc-runtime@10.2.1 target=x86_64"},
1,
marks=pytest.mark.skipif(
str(archspec.cpu.host().family) != "x86_64", reason="test data is x86_64 specific"
),
),
pytest.param(
"a%gcc@10.2.1",
"b%gcc@9.4.0 target=x86_64",
{"a": "gcc-runtime@10.2.1 target=x86_64", "b": "gcc-runtime@9.4.0 target=x86_64"},
2,
marks=pytest.mark.skipif(
str(archspec.cpu.host().family) != "x86_64", reason="test data is x86_64 specific"
),
),
("a%gcc@4.8.0", "b%gcc@10.2.1", {"a": "gcc-runtime@10.2.1", "b": "gcc-runtime@10.2.1"}, 1),
],
)
def test_reusing_specs_with_gcc_runtime(root_str, reused_str, expected, nruntime, runtime_repo):
@@ -123,8 +104,8 @@ def test_reusing_specs_with_gcc_runtime(root_str, reused_str, expected, nruntime
[
# Ensure that, whether we have multiple runtimes in the DAG or not,
# we always link only the latest version
("a%gcc@10.2.1", "b%gcc@9.4.0", ["gcc-runtime@10.2.1"], ["gcc-runtime@9.4.0"]),
("a%gcc@9.4.0", "b%gcc@10.2.1", ["gcc-runtime@10.2.1"], ["gcc-runtime@9.4.0"]),
("a%gcc@10.2.1", "b%gcc@4.8.0", ["gcc-runtime@10.2.1"], ["gcc-runtime@4.8.0"]),
("a%gcc@4.8.0", "b%gcc@10.2.1", ["gcc-runtime@10.2.1"], ["gcc-runtime@4.8.0"]),
],
)
def test_views_can_handle_duplicate_runtime_nodes(

View File

@@ -105,7 +105,7 @@ def test_preferred_variants_from_wildcard(self):
@pytest.mark.parametrize(
"compiler_str,spec_str",
[("gcc@=9.4.0", "mpileaks"), ("clang@=15.0.0", "mpileaks"), ("gcc@=9.4.0", "openmpi")],
[("gcc@=4.8.0", "mpileaks"), ("clang@=12.0.0", "mpileaks"), ("gcc@=4.8.0", "openmpi")],
)
def test_preferred_compilers(self, compiler_str, spec_str):
"""Test preferred compilers are applied correctly"""

View File

@@ -977,7 +977,7 @@ def test_single_file_scope(config, env_yaml):
# from the single-file config
assert spack.config.get("config:verify_ssl") is False
assert spack.config.get("config:dirty") is False
assert spack.config.get("packages:all:compiler") == ["gcc@4.5.3", "gcc", "clang"]
assert spack.config.get("packages:all:compiler") == ["gcc@4.5.3"]
# from the lower config scopes
assert spack.config.get("config:checksum") is True
@@ -1276,7 +1276,7 @@ def test_user_config_path_is_default_when_env_var_is_empty(working_env):
def test_default_install_tree(monkeypatch, default_config):
s = spack.spec.Spec("nonexistent@x.y.z %none@a.b.c arch=foo-bar-baz")
monkeypatch.setattr(s, "dag_hash", lambda length: "abc123")
monkeypatch.setattr(s, "dag_hash", lambda: "abc123")
_, _, projections = spack.store.parse_install_tree(spack.config.get("config"))
assert s.format(projections["all"]) == "foo-bar-baz/none-a.b.c/nonexistent-x.y.z-abc123"

View File

@@ -34,7 +34,6 @@
import spack.binary_distribution
import spack.caches
import spack.cmd.buildcache
import spack.compiler
import spack.compilers
import spack.config
import spack.database
@@ -56,7 +55,6 @@
import spack.util.gpg
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
import spack.version
from spack.fetch_strategy import URLFetchStrategy
from spack.util.pattern import Bunch
@@ -270,6 +268,10 @@ def clean_test_environment():
ev.deactivate()
def _verify_executables_noop(*args):
return None
def _host():
"""Mock archspec host so there is no inconsistency on the Windows platform
This function cannot be local as it needs to be pickleable"""
@@ -295,7 +297,9 @@ def mock_compiler_executable_verification(request, monkeypatch):
If a test is marked in that way this is a no-op."""
if "enable_compiler_verification" not in request.keywords:
monkeypatch.setattr(spack.compiler.Compiler, "verify_executables", _return_none)
monkeypatch.setattr(
spack.compiler.Compiler, "verify_executables", _verify_executables_noop
)
# Hooks to add command line options or set other custom behaviors.
@@ -786,7 +790,6 @@ def no_compilers_yaml(mutable_config):
compilers_yaml = os.path.join(local_config.path, "compilers.yaml")
if os.path.exists(compilers_yaml):
os.remove(compilers_yaml)
return mutable_config
@pytest.fixture()
@@ -929,16 +932,26 @@ def dirs_with_libfiles(tmpdir_factory):
yield lib_to_dirs, all_dirs
def _return_none(*args):
return None
def _compiler_link_paths_noop(*args):
return []
@pytest.fixture(scope="function", autouse=True)
def disable_compiler_execution(monkeypatch, request):
"""Disable compiler execution to determine implicit link paths and libc flavor and version.
To re-enable use `@pytest.mark.enable_compiler_execution`"""
if "enable_compiler_execution" not in request.keywords:
monkeypatch.setattr(spack.compiler.Compiler, "_compile_dummy_c_source", _return_none)
"""
This fixture can be disabled for tests of the compiler link path
functionality by::
@pytest.mark.enable_compiler_link_paths
If a test is marked in that way this is a no-op."""
if "enable_compiler_link_paths" not in request.keywords:
# Compiler.determine_implicit_rpaths actually runs the compiler. So
# replace that function with a noop that simulates finding no implicit
# RPATHs
monkeypatch.setattr(
spack.compiler.Compiler, "_get_compiler_link_paths", _compiler_link_paths_noop
)
@pytest.fixture(scope="function")
@@ -1426,15 +1439,6 @@ def mock_git_repository(git, tmpdir_factory):
yield t
@pytest.fixture(scope="function")
def mock_git_test_package(mock_git_repository, mutable_mock_repo, monkeypatch):
# install a fake git version in the package class
pkg_class = spack.repo.PATH.get_pkg_class("git-test")
monkeypatch.delattr(pkg_class, "git")
monkeypatch.setitem(pkg_class.versions, spack.version.Version("git"), mock_git_repository.url)
return pkg_class
@pytest.fixture(scope="session")
def mock_hg_repository(tmpdir_factory):
"""Creates a very simple hg repository with two commits."""
@@ -1966,24 +1970,17 @@ def mock_modules_root(tmp_path, monkeypatch):
monkeypatch.setattr(spack.modules.common, "root_path", fn)
_repo_name_id = 0
def create_test_repo(tmpdir, pkg_name_content_tuples):
global _repo_name_id
repo_path = str(tmpdir)
repo_yaml = tmpdir.join("repo.yaml")
with open(str(repo_yaml), "w") as f:
f.write(
f"""\
"""\
repo:
namespace: testrepo{str(_repo_name_id)}
namespace: testcfgrequirements
"""
)
_repo_name_id += 1
packages_dir = tmpdir.join("packages")
for pkg_name, pkg_str in pkg_name_content_tuples:
pkg_dir = packages_dir.ensure(pkg_name, dir=True)
@@ -2016,12 +2013,3 @@ def _factory(*, spec, operating_system):
def host_architecture_str():
"""Returns the broad architecture family (x86_64, aarch64, etc.)"""
return str(archspec.cpu.host().family)
def _true(x):
return True
@pytest.fixture()
def do_not_check_runtimes_on_reuse(monkeypatch):
monkeypatch.setattr(spack.solver.asp, "_has_runtime_dependencies", _true)

View File

@@ -1,6 +1,6 @@
compilers:
- compiler:
spec: gcc@=9.4.0
spec: gcc@=4.8.0
operating_system: {linux_os.name}{linux_os.version}
paths:
cc: /path/to/gcc
@@ -10,7 +10,7 @@ compilers:
modules: []
target: {target}
- compiler:
spec: gcc@=9.4.0
spec: gcc@=4.8.0
operating_system: redhat6
paths:
cc: /path/to/gcc
@@ -20,7 +20,7 @@ compilers:
modules: []
target: {target}
- compiler:
spec: clang@=15.0.0
spec: clang@=12.0.0
operating_system: {linux_os.name}{linux_os.version}
paths:
cc: /path/to/clang

View File

@@ -1,5 +1,5 @@
concretizer:
reuse: True
# reuse is missing on purpose, see "test_concretizer_arguments"
targets:
granularity: microarchitectures
host_compatible: false

View File

@@ -1,6 +1,5 @@
packages:
all:
compiler: [gcc, clang]
providers:
mpi: [openmpi, mpich, zmpi]
lapack: [openblas-with-lapack]
@@ -17,7 +16,7 @@ packages:
externalvirtual:
buildable: False
externals:
- spec: externalvirtual@2.0%clang@15.0.0
- spec: externalvirtual@2.0%clang@12.0.0
prefix: /path/to/external_virtual_clang
- spec: externalvirtual@1.0%gcc@10.2.1
prefix: /path/to/external_virtual_gcc

View File

@@ -4,7 +4,7 @@ lmod:
hash_length: 0
core_compilers:
- 'clang@15.0.0'
- 'clang@12.0.0'
core_specs:
- 'mpich@3.0.1'

View File

@@ -2,4 +2,4 @@ enable:
- lmod
lmod:
core_compilers:
- 'clang@15.0.0'
- 'clang@12.0.0'

View File

@@ -2,4 +2,4 @@ enable:
- lmod
lmod:
core_compilers:
- 'clang@=15.0.0'
- 'clang@=12.0.0'

View File

@@ -1106,31 +1106,3 @@ def test_database_construction_doesnt_use_globals(tmpdir, config, nullify_global
lock_cfg = lock_cfg or spack.database.lock_configuration(config)
db = spack.database.Database(str(tmpdir), lock_cfg=lock_cfg)
assert os.path.exists(db.database_directory)
def test_database_read_works_with_trailing_data(tmp_path, default_mock_concretization):
# Populate a database
root = str(tmp_path)
db = spack.database.Database(root)
spec = default_mock_concretization("a")
db.add(spec, directory_layout=None)
specs_in_db = db.query_local()
assert spec in specs_in_db
# Append anything to the end of the database file
with open(db._index_path, "a") as f:
f.write(json.dumps({"hello": "world"}))
# Read the database and check that it ignores the trailing data
assert spack.database.Database(root).query_local() == specs_in_db
def test_database_errors_with_just_a_version_key(tmp_path):
root = str(tmp_path)
db = spack.database.Database(root)
next_version = f"{spack.database._DB_VERSION}.next"
with open(db._index_path, "w") as f:
f.write(json.dumps({"database": {"version": next_version}}))
with pytest.raises(spack.database.InvalidDatabaseVersionError):
spack.database.Database(root).query_local()
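
The first removed test asserts that a database file stays readable when junk is appended after the JSON document. One common way to implement that tolerance, shown here as a standalone sketch rather than Spack's actual reader, is json.JSONDecoder.raw_decode, which stops at the end of the first document:

    import json

    def read_leading_json(text):
        # return the first JSON document and ignore anything trailing it
        obj, _end = json.JSONDecoder().raw_decode(text)
        return obj

    doc = json.dumps({"database": {"version": "7"}}) + json.dumps({"hello": "world"})
    assert read_leading_json(doc) == {"database": {"version": "7"}}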

View File

@@ -10,7 +10,6 @@
import spack.repo
import spack.spec
import spack.version
from spack.test.conftest import create_test_repo
def test_false_directives_do_not_exist(mock_packages):
@@ -143,86 +142,3 @@ def test_version_type_validation():
# Try passing a bogus type; it's just that we want a nice error message
with pytest.raises(spack.version.VersionError, match=msg):
spack.directives._execute_version(package(name="python"), {})
_pkgx = (
"x",
"""\
class X(Package):
version("1.3")
version("1.2")
version("1.1")
version("1.0")
variant("foo", default=False)
redistribute(binary=False, when="@1.1")
redistribute(binary=False, when="@1.0:1.2+foo")
redistribute(source=False, when="@1.0:1.2")
""",
)
_pkgy = (
"y",
"""\
class Y(Package):
version("2.1")
version("2.0")
variant("bar", default=False)
redistribute(binary=False, source=False)
""",
)
@pytest.fixture
def _create_test_repo(tmpdir, mutable_config):
yield create_test_repo(tmpdir, [_pkgx, _pkgy])
@pytest.fixture
def test_repo(_create_test_repo, monkeypatch, mock_stage):
with spack.repo.use_repositories(_create_test_repo) as mock_repo_path:
yield mock_repo_path
@pytest.mark.parametrize(
"spec_str,distribute_src,distribute_bin",
[
("x@1.1~foo", False, False),
("x@1.2+foo", False, False),
("x@1.2~foo", False, True),
("x@1.0~foo", False, True),
("x@1.3+foo", True, True),
("y@2.0", False, False),
("y@2.1+bar", False, False),
],
)
def test_redistribute_directive(test_repo, spec_str, distribute_src, distribute_bin):
spec = spack.spec.Spec(spec_str)
assert spec.package_class.redistribute_source(spec) == distribute_src
concretized_spec = spec.concretized()
assert concretized_spec.package.redistribute_binary == distribute_bin
def test_redistribute_override_when():
"""Allow a user to call `redistribute` twice to separately disable
source and binary distribution for the same when spec.
The second call should not undo the effect of the first.
"""
class MockPackage:
name = "mock"
disable_redistribute = {}
cls = MockPackage
spack.directives._execute_redistribute(cls, source=False, when="@1.0")
spec_key = spack.directives._make_when_spec("@1.0")
assert not cls.disable_redistribute[spec_key].binary
assert cls.disable_redistribute[spec_key].source
spack.directives._execute_redistribute(cls, binary=False, when="@1.0")
assert cls.disable_redistribute[spec_key].binary
assert cls.disable_redistribute[spec_key].source
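
The last removed test fixes the directive's merge rule: two `redistribute` calls with the same `when` spec must accumulate, so disabling source first and binary second leaves both disabled. A sketch of that accumulation with a hypothetical record type:

    class DisableRedistribute:
        def __init__(self):
            self.source = False  # True means source redistribution is disabled
            self.binary = False  # True means binary redistribution is disabled

    def execute_redistribute(cls, when, source=True, binary=True):
        # source=False / binary=False turn that restriction on; restrictions
        # already recorded for `when` are never undone by a later call
        entry = cls.disable_redistribute.setdefault(when, DisableRedistribute())
        if not source:
            entry.source = True
        if not binary:
            entry.binary = True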

View File

@@ -136,7 +136,7 @@ def test_get_dependent_ids(install_mockery, mock_packages):
spec.concretize()
assert spec.concrete
pkg_id = inst.package_id(spec)
pkg_id = inst.package_id(spec.package)
# Grab the sole dependency of 'a', which is 'b'
dep = spec.dependencies()[0]
@@ -385,7 +385,7 @@ def test_ensure_locked_have(install_mockery, tmpdir, capsys):
const_arg = installer_args(["trivial-install-test-package"], {})
installer = create_installer(const_arg)
spec = installer.build_requests[0].pkg.spec
pkg_id = inst.package_id(spec)
pkg_id = inst.package_id(spec.package)
with tmpdir.as_cwd():
# Test "downgrade" of a read lock (to a read lock)
@@ -456,15 +456,17 @@ def _pl(db, spec, timeout):
def test_package_id_err(install_mockery):
s = spack.spec.Spec("trivial-install-test-package")
pkg_cls = spack.repo.PATH.get_pkg_class(s.name)
with pytest.raises(ValueError, match="spec is not concretized"):
inst.package_id(s)
inst.package_id(pkg_cls(s))
def test_package_id_ok(install_mockery):
spec = spack.spec.Spec("trivial-install-test-package")
spec.concretize()
assert spec.concrete
assert spec.name in inst.package_id(spec)
pkg = spec.package
assert pkg.name in inst.package_id(pkg)
def test_fake_install(install_mockery):
@@ -724,7 +726,7 @@ def test_check_deps_status_external(install_mockery, monkeypatch):
installer._check_deps_status(request)
for dep in request.spec.traverse(root=False):
assert inst.package_id(dep) in installer.installed
assert inst.package_id(dep.package) in installer.installed
def test_check_deps_status_upstream(install_mockery, monkeypatch):
@@ -737,7 +739,7 @@ def test_check_deps_status_upstream(install_mockery, monkeypatch):
installer._check_deps_status(request)
for dep in request.spec.traverse(root=False):
assert inst.package_id(dep) in installer.installed
assert inst.package_id(dep.package) in installer.installed
def test_add_bootstrap_compilers(install_mockery, monkeypatch):
@@ -1109,7 +1111,7 @@ def test_install_fail_fast_on_detect(install_mockery, monkeypatch, capsys):
const_arg = installer_args(["b"], {"fail_fast": False})
const_arg.extend(installer_args(["c"], {"fail_fast": True}))
installer = create_installer(const_arg)
pkg_ids = [inst.package_id(spec) for spec, _ in const_arg]
pkg_ids = [inst.package_id(spec.package) for spec, _ in const_arg]
# Make sure all packages are identified as failed
#
@@ -1184,7 +1186,7 @@ def test_install_lock_installed_requeue(install_mockery, monkeypatch, capfd):
const_arg = installer_args(["b"], {})
b, _ = const_arg[0]
installer = create_installer(const_arg)
b_pkg_id = inst.package_id(b)
b_pkg_id = inst.package_id(b.package)
def _prep(installer, task):
installer.installed.add(b_pkg_id)
@@ -1194,7 +1196,7 @@ def _prep(installer, task):
monkeypatch.setattr(inst.PackageInstaller, "_ensure_locked", _not_locked)
def _requeued(installer, task, install_status):
tty.msg("requeued {0}".format(inst.package_id(task.pkg.spec)))
tty.msg("requeued {0}".format(inst.package_id(task.pkg)))
# Flag the package as installed
monkeypatch.setattr(inst.PackageInstaller, "_prepare_for_install", _prep)
@@ -1260,7 +1262,7 @@ def test_install_skip_patch(install_mockery, mock_fetch):
installer.install()
spec, install_args = const_arg[0]
assert inst.package_id(spec) in installer.installed
assert inst.package_id(spec.package) in installer.installed
def test_install_implicit(install_mockery, mock_fetch):

View File

@@ -9,7 +9,6 @@
import shutil
import stat
import sys
from contextlib import contextmanager
import pytest
@@ -909,129 +908,3 @@ def test_find_first_file(tmpdir, bfs_depth):
# Should find first dir
assert os.path.samefile(fs.find_first(root, "a", bfs_depth=bfs_depth), os.path.join(root, "a"))
def test_rename_dest_exists(tmpdir):
@contextmanager
def setup_test_files():
a = tmpdir.join("a", "file1")
b = tmpdir.join("a", "file2")
fs.touchp(a)
fs.touchp(b)
with open(a, "w") as oa, open(b, "w") as ob:
oa.write("I am A")
ob.write("I am B")
yield a, b
shutil.rmtree(tmpdir.join("a"))
@contextmanager
def setup_test_dirs():
a = tmpdir.join("d", "a")
b = tmpdir.join("d", "b")
fs.mkdirp(a)
fs.mkdirp(b)
yield a, b
shutil.rmtree(tmpdir.join("d"))
# test standard behavior of rename
# smoke test
with setup_test_files() as files:
a, b = files
fs.rename(str(a), str(b))
assert os.path.exists(b)
assert not os.path.exists(a)
with open(b, "r") as ob:
content = ob.read()
assert content == "I am A"
# test relative paths
# another sanity check/smoke test
with setup_test_files() as files:
a, b = files
with fs.working_dir(str(tmpdir)):
fs.rename(os.path.join("a", "file1"), os.path.join("a", "file2"))
assert os.path.exists(b)
assert not os.path.exists(a)
with open(b, "r") as ob:
content = ob.read()
assert content == "I am A"
# Test rename symlinks to same file
c = tmpdir.join("a", "file1")
a = tmpdir.join("a", "link1")
b = tmpdir.join("a", "link2")
fs.touchp(c)
symlink(c, a)
symlink(c, b)
fs.rename(str(a), str(b))
assert os.path.exists(b)
assert not os.path.exists(a)
assert os.path.realpath(b) == c
shutil.rmtree(tmpdir.join("a"))
# test rename onto itself
a = tmpdir.join("a", "file1")
b = a
fs.touchp(a)
with open(a, "w") as oa:
oa.write("I am A")
fs.rename(str(a), str(b))
# check a, or b, doesn't matter, same file
assert os.path.exists(a)
# ensure original file was not duplicated
assert len(os.listdir(tmpdir.join("a"))) == 1
with open(a, "r") as oa:
assert oa.read()
shutil.rmtree(tmpdir.join("a"))
# test rename onto symlink
# to directory from symlink to directory
# (this is something spack does when regenerating views)
with setup_test_dirs() as dirs:
a, b = dirs
link1 = tmpdir.join("f", "link1")
link2 = tmpdir.join("f", "link2")
fs.mkdirp(tmpdir.join("f"))
symlink(a, link1)
symlink(b, link2)
fs.rename(str(link1), str(link2))
assert os.path.exists(link2)
assert os.path.realpath(link2) == a
shutil.rmtree(tmpdir.join("f"))
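
The removed tests above pinned down fs.rename's overwrite semantics. A hedged, POSIX-only sketch consistent with those assertions (the real helper also contains Windows-specific handling not reproduced here):

    import os

    def rename(src, dst):
        # os.replace overwrites an existing dst atomically; renaming a path
        # onto itself leaves the file in place, and renaming one symlink over
        # another moves the link itself, so its target is preserved.
        os.replace(src, dst)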
@pytest.mark.skipif(sys.platform != "win32", reason="No-op on non Windows")
def test_windows_sfn(tmpdir):
# first check some standard Windows locations
# that we know require sfn names
# this is basically a smoke test
# ensure spaces are replaced + path abbreviated
assert fs.windows_sfn("C:\\Program Files (x86)") == "C:\\PROGRA~2"
# ensure path without spaces is still properly shortened
assert fs.windows_sfn("C:\\ProgramData") == "C:\\PROGRA~3"
# test user created paths
# ensure longer path with spaces is properly abbreviated
a = tmpdir.join("d", "this is a test", "a", "still test")
# ensure longer path is properly abbreviated
b = tmpdir.join("d", "long_path_with_no_spaces", "more_long_path")
# ensure path not in need of abbreviation is properly roundtripped
c = tmpdir.join("d", "this", "is", "short")
# ensure paths that are the same in the first six letters
# are incremented post tilde
d = tmpdir.join("d", "longerpath1")
e = tmpdir.join("d", "longerpath2")
fs.mkdirp(a)
fs.mkdirp(b)
fs.mkdirp(c)
fs.mkdirp(d)
fs.mkdirp(e)
# check only the part of the path we can control,
# pytest prefix may or may not be mangled by windows_sfn
# based on user/pytest config
assert "d\\THISIS~1\\a\\STILLT~1" in fs.windows_sfn(a)
assert "d\\LONG_P~1\\MORE_L~1" in fs.windows_sfn(b)
assert "d\\this\\is\\short" in fs.windows_sfn(c)
assert "d\\LONGER~1" in fs.windows_sfn(d)
assert "d\\LONGER~2" in fs.windows_sfn(e)
shutil.rmtree(tmpdir.join("d"))
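
For reference, the 8.3 short names asserted above come from Win32. A hedged sketch of how such a helper can be written with ctypes and GetShortPathNameW (not the verbatim implementation):

    import ctypes

    def windows_sfn(path: str) -> str:
        kernel32 = ctypes.windll.kernel32
        # With a zero-length buffer, GetShortPathNameW returns the required
        # buffer size (including the terminating NUL); call again to fill it.
        needed = kernel32.GetShortPathNameW(path, None, 0)
        buffer = ctypes.create_unicode_buffer(needed)
        kernel32.GetShortPathNameW(path, buffer, needed)
        return buffer.value or path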

View File

@@ -29,7 +29,7 @@
]
@pytest.fixture(params=["clang@=15.0.0", "gcc@=10.2.1"])
@pytest.fixture(params=["clang@=12.0.0", "gcc@=10.2.1"])
def compiler(request):
return request.param
@@ -59,7 +59,7 @@ def test_layout_for_specs_compiled_with_core_compilers(
we can use both ``compiler@version`` and ``compiler@=version`` to specify a core compiler.
"""
module_configuration(modules_config)
module, spec = factory("libelf%clang@15.0.0")
module, spec = factory("libelf%clang@12.0.0")
assert "Core" in module.layout.available_path_parts
def test_file_layout(self, compiler, provider, factory, module_configuration):
@@ -78,7 +78,7 @@ def test_file_layout(self, compiler, provider, factory, module_configuration):
# is transformed to r"Core" if the compiler is listed among core
# compilers
# Check that specs listed as core_specs are transformed to "Core"
if compiler == "clang@=15.0.0" or spec_string == "mpich@3.0.1":
if compiler == "clang@=12.0.0" or spec_string == "mpich@3.0.1":
assert "Core" in layout.available_path_parts
else:
assert compiler.replace("@=", "/") in layout.available_path_parts
@@ -146,6 +146,9 @@ def test_autoload_all(self, modulefile_content, module_configuration):
assert len([x for x in content if "depends_on(" in x]) == 5
@pytest.mark.skipif(
str(archspec.cpu.host().family) != "x86_64", reason="test data is specific for x86_64"
)
def test_alter_environment(self, modulefile_content, module_configuration):
"""Tests modifications to run-time environment."""

View File

@@ -114,6 +114,9 @@ def test_prerequisites_all(
assert len([x for x in content if "prereq" in x]) == 5
@pytest.mark.skipif(
str(archspec.cpu.host().family) != "x86_64", reason="test data is specific for x86_64"
)
def test_alter_environment(self, modulefile_content, module_configuration):
"""Tests modifications to run-time environment."""

View File

@@ -1,51 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Unit tests for objects turning configuration into an intermediate format used by the solver."""
import pytest
import spack.compilers
import spack.config
import spack.spec
from spack.concretize import UnavailableCompilerVersionError
from spack.solver import asp
class TestCompilerParser:
def test_expected_order_mock_config(self, config):
"""Tests the expected preference order in the mock compiler configuration"""
parser = asp.CompilerParser(config)
expected_order = ["gcc@=10.2.1", "gcc@=9.4.0", "gcc@=9.4.0", "clang@=15.0.0"]
for c, expected in zip(parser.possible_compilers(), expected_order):
assert c.spec.satisfies(expected)
@pytest.mark.parametrize("spec_str", ["a %gcc@=13.2.0", "a ^b %gcc@=13.2.0"])
def test_compiler_from_input_raise(self, spec_str, config):
"""Tests that having an unknown compiler in the input spec raises an exception, if we
don't allow bootstrapping missing compilers.
"""
spec = spack.spec.Spec(spec_str)
with pytest.raises(UnavailableCompilerVersionError):
asp.CompilerParser(config).with_input_specs([spec])
def test_compilers_inferred_from_concrete_specs(self, mutable_config, mutable_database):
"""Test that compilers inferred from concrete specs, that are not in the local
configuration too, are last in the preference order.
"""
spack.compilers.remove_compiler_from_config("gcc@=10.2.1")
assert not spack.compilers.compilers_for_spec("gcc@=10.2.1")
parser = asp.CompilerParser(mutable_config)
for reuse_spec in mutable_database.query():
parser.add_compiler_from_concrete_spec(reuse_spec)
expected_order = [
("gcc@=9.4.0", True),
("gcc@=9.4.0", True),
("clang@=15.0.0", True),
("gcc@=10.2.1", False),
]
for c, (expected, available) in zip(parser.possible_compilers(), expected_order):
assert c.spec.satisfies(expected)
assert c.available is available

View File

@@ -703,25 +703,22 @@ def check_prop(check_spec, fmt_str, prop, getter):
actual = spec.format(named_str)
assert expected == actual
@pytest.mark.parametrize(
"fmt_str",
[
def test_spec_formatting_escapes(self, default_mock_concretization):
spec = default_mock_concretization("multivalue-variant cflags=-O2")
sigil_mismatches = [
"{@name}",
"{@version.concrete}",
"{%compiler.version}",
"{/hashd}",
"{arch=architecture.os}",
],
)
def test_spec_formatting_sigil_mismatches(self, default_mock_concretization, fmt_str):
spec = default_mock_concretization("multivalue-variant cflags=-O2")
]
with pytest.raises(SpecFormatSigilError):
spec.format(fmt_str)
for fmt_str in sigil_mismatches:
with pytest.raises(SpecFormatSigilError):
spec.format(fmt_str)
@pytest.mark.parametrize(
"fmt_str",
[
bad_formats = [
r"{}",
r"name}",
r"\{name}",
@@ -731,12 +728,11 @@ def test_spec_formatting_sigil_mismatches(self, default_mock_concretization, fmt
r"{dag_hash}",
r"{foo}",
r"{+variants.debug}",
],
)
def test_spec_formatting_bad_formats(self, default_mock_concretization, fmt_str):
spec = default_mock_concretization("multivalue-variant cflags=-O2")
with pytest.raises(SpecFormatStringError):
spec.format(fmt_str)
]
for fmt_str in bad_formats:
with pytest.raises(SpecFormatStringError):
spec.format(fmt_str)
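
For context, a hedged illustration of the two error classes these loops exercise, assuming the concretized spec from the fixture above:

    spec.format("{name}-{version}")  # fine: name and version are attributes
    spec.format("{@name}")           # SpecFormatSigilError: '@' applies to
                                     # versions/compilers, not names
    spec.format("{foo}")             # SpecFormatStringError: unknown attribute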
def test_combination_of_wildcard_or_none(self):
# Test that using 'none' and another value raises
@@ -1100,22 +1096,22 @@ def test_unsatisfiable_virtual_deps_bindings(self, spec_str):
@pytest.mark.parametrize(
"spec_str,format_str,expected",
[
("git-test@git.foo/bar", "{name}-{version}", str(pathlib.Path("git-test-git.foo_bar"))),
("git-test@git.foo/bar", "{name}-{version}-{/hash}", None),
("git-test@git.foo/bar", "{name}/{version}", str(pathlib.Path("git-test", "git.foo_bar"))),
("zlib@git.foo/bar", "{name}-{version}", str(pathlib.Path("zlib-git.foo_bar"))),
("zlib@git.foo/bar", "{name}-{version}-{/hash}", None),
("zlib@git.foo/bar", "{name}/{version}", str(pathlib.Path("zlib", "git.foo_bar"))),
(
"git-test@{0}=1.0%gcc".format("a" * 40),
"zlib@{0}=1.0%gcc".format("a" * 40),
"{name}/{version}/{compiler}",
str(pathlib.Path("git-test", "{0}_1.0".format("a" * 40), "gcc")),
str(pathlib.Path("zlib", "{0}_1.0".format("a" * 40), "gcc")),
),
(
"git-test@git.foo/bar=1.0%gcc",
"zlib@git.foo/bar=1.0%gcc",
"{name}/{version}/{compiler}",
str(pathlib.Path("git-test", "git.foo_bar_1.0", "gcc")),
str(pathlib.Path("zlib", "git.foo_bar_1.0", "gcc")),
),
],
)
def test_spec_format_path(spec_str, format_str, expected, mock_git_test_package):
def test_spec_format_path(spec_str, format_str, expected):
_check_spec_format_path(spec_str, format_str, expected)
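
A hedged reading of the rows above (assuming the helper forwards to Spec.format_path): when a version is a git ref, path formatting must not let the ref's '/' introduce an extra directory component, so it is replaced, and a None expectation marks a format that should fail in a path context:

    Spec("zlib@git.foo/bar").format_path("{name}/{version}")
    #   -> "zlib/git.foo_bar": '/' inside the ref becomes '_', while the '/'
    #      written in the format string still separates directories.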
@@ -1133,57 +1129,45 @@ def _check_spec_format_path(spec_str, format_str, expected, path_ctor=None):
"spec_str,format_str,expected",
[
(
"git-test@git.foo/bar",
"zlib@git.foo/bar",
r"C:\\installroot\{name}\{version}",
r"C:\installroot\git-test\git.foo_bar",
r"C:\installroot\zlib\git.foo_bar",
),
(
"git-test@git.foo/bar",
"zlib@git.foo/bar",
r"\\hostname\sharename\{name}\{version}",
r"\\hostname\sharename\git-test\git.foo_bar",
),
# leading '/' is preserved on windows but converted to '\'
# note that it's still not "absolute" -- absolute windows paths start with a drive.
(
"git-test@git.foo/bar",
r"/installroot/{name}/{version}",
r"\installroot\git-test\git.foo_bar",
r"\\hostname\sharename\zlib\git.foo_bar",
),
# Windows doesn't attribute any significance to a leading
# "/" so it is discarded
("zlib@git.foo/bar", r"/installroot/{name}/{version}", r"installroot\zlib\git.foo_bar"),
],
)
def test_spec_format_path_windows(spec_str, format_str, expected, mock_git_test_package):
def test_spec_format_path_windows(spec_str, format_str, expected):
_check_spec_format_path(spec_str, format_str, expected, path_ctor=pathlib.PureWindowsPath)
@pytest.mark.parametrize(
"spec_str,format_str,expected",
[
(
"git-test@git.foo/bar",
r"/installroot/{name}/{version}",
"/installroot/git-test/git.foo_bar",
),
(
"git-test@git.foo/bar",
r"//installroot/{name}/{version}",
"//installroot/git-test/git.foo_bar",
),
("zlib@git.foo/bar", r"/installroot/{name}/{version}", "/installroot/zlib/git.foo_bar"),
("zlib@git.foo/bar", r"//installroot/{name}/{version}", "//installroot/zlib/git.foo_bar"),
# This is likely unintentional on Linux: Firstly, "\" is not a
# path separator for POSIX, so this is treated as a single path
# component (containing literal "\" characters); secondly,
# Spec.format treats "\" as an escape character, so is
# discarded (unless directly following another "\")
(
"git-test@git.foo/bar",
"zlib@git.foo/bar",
r"C:\\installroot\package-{name}-{version}",
r"C__installrootpackage-git-test-git.foo_bar",
r"C__installrootpackage-zlib-git.foo_bar",
),
# "\" is not a POSIX separator, and Spec.format treats "\{" as a literal
# "{", which means that the resulting format string is invalid
("git-test@git.foo/bar", r"package\{name}\{version}", None),
("zlib@git.foo/bar", r"package\{name}\{version}", None),
],
)
def test_spec_format_path_posix(spec_str, format_str, expected, mock_git_test_package):
def test_spec_format_path_posix(spec_str, format_str, expected):
_check_spec_format_path(spec_str, format_str, expected, path_ctor=pathlib.PurePosixPath)

View File

@@ -551,12 +551,12 @@ def _specfile_for(spec_str, filename):
"^[deptypes=build,link] zlib",
),
(
"git-test@git.foo/bar",
"zlib@git.foo/bar",
[
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, "git-test"),
Token(TokenType.UNQUALIFIED_PACKAGE_NAME, "zlib"),
Token(TokenType.GIT_VERSION, "@git.foo/bar"),
],
"git-test@git.foo/bar",
"zlib@git.foo/bar",
),
# Variant propagation
(
@@ -585,7 +585,7 @@ def _specfile_for(spec_str, filename):
),
],
)
def test_parse_single_spec(spec_str, tokens, expected_roundtrip, mock_git_test_package):
def test_parse_single_spec(spec_str, tokens, expected_roundtrip):
parser = SpecParser(spec_str)
assert tokens == parser.tokens()
assert expected_roundtrip == str(parser.next_spec())

View File

@@ -1,26 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import pytest
from spack.util import libc
@pytest.mark.parametrize(
"libc_prefix,startfile_prefix,expected",
[
# Ubuntu
("/usr", "/usr/lib/x86_64-linux-gnu", "/usr/include/x86_64-linux-gnu"),
("/usr", "/usr/lib/x86_64-linux-musl", "/usr/include/x86_64-linux-musl"),
("/usr", "/usr/lib/aarch64-linux-gnu", "/usr/include/aarch64-linux-gnu"),
("/usr", "/usr/lib/aarch64-linux-musl", "/usr/include/aarch64-linux-musl"),
# rhel-like
("/usr", "/usr/lib64", "/usr/include"),
("/usr", "/usr/lib", "/usr/include"),
],
)
@pytest.mark.not_on_windows("The unit test deals with unix-like paths")
def test_header_dir_computation(libc_prefix, startfile_prefix, expected):
"""Tests that we compute the correct header directory from the prefix of the libc startfiles"""
assert libc.libc_include_dir_from_startfile_prefix(libc_prefix, startfile_prefix) == expected
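
A sketch reconstructed purely from the deleted expectations above (hedged; the real spack.util.libc implementation may differ):

    import os

    def libc_include_dir_from_startfile_prefix(libc_prefix, startfile_prefix):
        # Multiarch layouts keep startfiles in <prefix>/lib/<triplet> and
        # headers in <prefix>/include/<triplet>; plain lib/lib64 layouts
        # put headers directly in <prefix>/include.
        triplet = os.path.basename(startfile_prefix)
        if triplet in ("lib", "lib64"):
            return os.path.join(libc_prefix, "include")
        return os.path.join(libc_prefix, "include", triplet)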

View File

@@ -641,30 +641,6 @@ def substitute_rpath_and_pt_interp_in_place_or_raise(
return False
def pt_interp(path: str) -> Optional[str]:
"""Retrieve the interpreter of an executable at `path`."""
try:
with open(path, "rb") as f:
elf = parse_elf(f, interpreter=True)
except (OSError, ElfParsingError):
return None
if not elf.has_pt_interp:
return None
return elf.pt_interp_str.decode("utf-8")
def get_elf_compat(path):
"""Get a triplet (EI_CLASS, EI_DATA, e_machine) from an ELF file, which can be used to see if
two ELF files are compatible."""
# On platforms supporting ELF, we try to be a bit smarter when it comes to shared
# libraries, by dropping those that are not host compatible.
with open(path, "rb") as f:
elf = parse_elf(f, only_header=True)
return (elf.is_64_bit, elf.is_little_endian, elf.elf_hdr.e_machine)
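
Hypothetical usage of the two helpers removed above (paths and the returned interpreter are illustrative):

    # pt_interp("/usr/bin/ls")   -> "/lib64/ld-linux-x86-64.so.2" on x86_64 glibc
    # get_elf_compat(a) == get_elf_compat(b) means a and b agree on word size,
    # endianness and machine type, so b's libraries are candidates for a.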
class ElfCStringUpdatesFailed(Exception):
def __init__(
self, rpath: Optional[UpdateCStringAction], pt_interp: Optional[UpdateCStringAction]

Some files were not shown because too many files have changed in this diff.