Compare commits

226 commits from `dag-ordere…` to `features/o…`:
```text
a542b0cfc0 0d02262bbc 9beac03142 9551312e9d b282b66579 226a9b0e7f 7aeee3339c 38512d18e9 e75a07d155 9b3c4e0696
54f783e656 34441c9eaa 932a9dfc57 3430c55b0a 1a69d436e4 c5d7ea04d0 1bc425ddad 471684add4 973d33a8f1 d0387cbbaf
97d59c2efb b69378c8cb f1b004a0d3 1f7c59eb06 e1341d70ce 38c321abb3 82041ac5a3 ecf93c77ae 34e42d5540 cdcbf7dc46
e25501f76c 7fec7cd013 3d881dbad7 c818e36d79 4fbbb23933 b35af7d9e7 f7de22eb14 72f57ffede 12f43380b9 ffdc85e8ce
259a32e5e4 8c0b8c785f 02d3bd782d b6906be846 a0ce6f7890 09cf265ff4 0d72b29193 7f1467e795 f89cd29054 89720583c3
7e78efcc44 126accfce1 27c2ff6c64 9bde77199c f5ed18f6a3 ccd11666c6 df80cffafa b52d4b8abf 96624d1490 1648968514
8358f430a4 ec045f993b 7fe2039b01 25cb55ccd9 d4e075f667 ae98d2ba2f 8e49bf0c5b 1bb119dbd7 a7f39da5db 1d3a74d926
0448f18ab2 2516ed181a 7740b37923 358cc5ed1a 30b8cfad98 383a343412 3714d3443b 42a452d54c a913ed229d 7c122da48b
fdaa54941d 135832650f 29d710fdec 13c4f92907 7a2c9601e8 31959b72b0 8109877424 91243ecb5b 1fc2bf846d 848344d9a5
e08da4e2b6 75a72766ff 797e230498 1c6993145e 00573d6ea2 4c0116bd64 f0d8355248 01c21d0496 43057e2edd ba8d9f22ef
07d7c32d79 017a15988c 1ad290e5a2 f6fa64f979 1826a41cdd 509a8ea5e2 ca202ba11e 0b1d51e450 2936573fc6 5d4c250354
b0913b1bf8 c49e2e5620 60624265f8 79aa9e9c87 f2b0c1deab 7d50680d9c defa4a2340 1e1d1ec43b 0e41788812 ddecf07045
9865a42b20 2432be5911 002bd8d20b eac04af0e7 448bd31c87 46466302a9 5e39acea16 cfdf0b6987 6be6935671 68233db9f6
aea2c73b04 5dc5db6679 51702a725b 931c0edaf4 689bdd6f36 a426db06e7 4b12d015e1 ecd4eac184 4d502c8ff7 227c6061e5
3453f59ba3 3201b4e2a1 80d26168b5 67040e79c5 a116775ff3 de2aaeb26f 830e3211e5 6b3b7f8b7f 99f3716346 4f6ef3b698
2dc020af31 17f2d66285 32f480936a 481b598963 4b186df5b4 f4dac7cd4c 48a63719b2 f576b4b6c5 9f5d9266e6 28c4809a8f
57d6b70226 508fcd8240 bec79d9ee1 27775163ca b1cf512d78 0bfd06d0b6 367bd4d670 8f359df2d3 cc2ae9f270 75f1077b4b
b1e6507060 2981b4e5ee 6847d73504 eeba92e788 84917cfa79 14e327be23 c329f7de33 f686a90779 918bb63c3a 8d0cbb9812
95a76de7d5 0b388ff930 d25ac66a5d 682ffd30ac af3dba8db6 08a24b5e03 65fbc5a461 80f3888cc8 5845750a10 c529a0fddf
d7265d7ddc df0be87ada af2aeb275f 4fef0bac20 b32a07bbe1 8709dbc232 54b9e3ce07 d35c24a3ed d4d200952e 902a40bf72
a2d51cd4db f8e433689a 5f8c09fd33 8eb4807615 047a481e48 051abfb894 0447ba1213 ff3dbe9394 90d00f8a59 fdc6bd3f1c
d17aaf8729 ef6699b874 f5418ac344 909a5b1d83 9a0884bfed a9d5db572c 16dbbb9b26 3695200a3c a1b2ba412b c81f1235a9
eaa16338de 115b6b2a51 cd2d6a6397 c5086a5d0e b5fc217dc2 a4b8753456
```
`.github/workflows/build-containers.yml` (4 changes, vendored):

```diff
@@ -89,7 +89,7 @@ jobs:
         uses: docker/setup-qemu-action@e81a89b1732b9c48d79cd809d8d81d79c4647a18 # @v1

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@8c0edbc76e98fa90f69d9a2c020dcb50019dc325 # @v1
+        uses: docker/setup-buildx-action@f03ac48505955848960e80bbb68046aa35c7b9e7 # @v1

       - name: Log in to GitHub Container Registry
         uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
@@ -106,7 +106,7 @@ jobs:
           password: ${{ secrets.DOCKERHUB_TOKEN }}

       - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-        uses: docker/build-push-action@37abcedcc1da61a57767b7588cb9d03eb57e28b3 # @v2
+        uses: docker/build-push-action@3b5e8027fcad23fda98b2e3ac259d8d67585f671 # @v2
         with:
          context: dockerfiles/${{ matrix.dockerfile[0] }}
          platforms: ${{ matrix.dockerfile[1] }}
```
`.github/workflows/valid-style.yml` (2 changes, vendored):

```diff
@@ -44,7 +44,7 @@ jobs:
          cache: 'pip'
      - name: Install Python packages
        run: |
-          python3 -m pip install --upgrade pip six setuptools types-six black mypy isort clingo flake8
+          python3 -m pip install --upgrade pip six setuptools types-six black==22.12.0 mypy isort clingo flake8
      - name: Setup git configuration
        run: |
          # Need this for the git tests to succeed.
```
```diff
@@ -83,6 +83,16 @@ if defined _sp_flags (
     exit /B 0
   )
 )
+if not defined _sp_subcommand (
+  if not defined _sp_args (
+    if not defined _sp_flags (
+      python "%spack%" --help
+      exit /B 0
+    )
+  )
+)
+
+
 :: pass parsed variables outside of local scope. Need to do
 :: this because delayedexpansion can only be set by setlocal
 echo %_sp_flags%>flags
@@ -92,24 +102,24 @@ endlocal
 set /p _sp_subcommand=<subcmd
 set /p _sp_flags=<flags
 set /p _sp_args=<args
-set str_subcommand=%_sp_subcommand:"='%
-set str_flags=%_sp_flags:"='%
-set str_args=%_sp_args:"='%
-if "%str_subcommand%"=="ECHO is off." (set "_sp_subcommand=")
-if "%str_flags%"=="ECHO is off." (set "_sp_flags=")
-if "%str_args%"=="ECHO is off." (set "_sp_args=")
+if "%_sp_subcommand%"=="ECHO is off." (set "_sp_subcommand=")
+if "%_sp_subcommand%"=="ECHO is on." (set "_sp_subcommand=")
+if "%_sp_flags%"=="ECHO is off." (set "_sp_flags=")
+if "%_sp_flags%"=="ECHO is on." (set "_sp_flags=")
+if "%_sp_args%"=="ECHO is off." (set "_sp_args=")
+if "%_sp_args%"=="ECHO is on." (set "_sp_args=")
 del subcmd
 del flags
 del args

 :: Filter out some commands. For any others, just run the command.
-if "%_sp_subcommand%" == "cd" (
+if %_sp_subcommand% == "cd" (
   goto :case_cd
-) else if "%_sp_subcommand%" == "env" (
+) else if %_sp_subcommand% == "env" (
   goto :case_env
-) else if "%_sp_subcommand%" == "load" (
+) else if %_sp_subcommand% == "load" (
   goto :case_load
-) else if "%_sp_subcommand%" == "unload" (
+) else if %_sp_subcommand% == "unload" (
   goto :case_load
 ) else (
   goto :default_case
@@ -143,19 +153,21 @@ goto :end_switch
 :: If no args or args contain --bat or -h/--help: just execute.
 if NOT defined _sp_args (
   goto :default_case
-)else if NOT "%_sp_args%"=="%_sp_args:--help=%" (
+)
+set args_no_quote=%_sp_args:"=%
+if NOT "%args_no_quote%"=="%args_no_quote:--help=%" (
   goto :default_case
-) else if NOT "%_sp_args%"=="%_sp_args: -h=%" (
+) else if NOT "%args_no_quote%"=="%args_no_quote: -h=%" (
   goto :default_case
-) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
+) else if NOT "%args_no_quote%"=="%args_no_quote:--bat=%" (
   goto :default_case
-) else if NOT "%_sp_args%"=="%_sp_args:deactivate=%" (
+) else if NOT "%args_no_quote%"=="%args_no_quote:deactivate=%" (
   for /f "tokens=* USEBACKQ" %%I in (
-    `call python "%spack%" %_sp_flags% env deactivate --bat %_sp_args:deactivate=%`
+    `call python %spack% %_sp_flags% env deactivate --bat %args_no_quote:deactivate=%`
   ) do %%I
-) else if NOT "%_sp_args%"=="%_sp_args:activate=%" (
+) else if NOT "%args_no_quote%"=="%args_no_quote:activate=%" (
   for /f "tokens=* USEBACKQ" %%I in (
-    `call python "%spack%" %_sp_flags% env activate --bat %_sp_args:activate=%`
+    `python %spack% %_sp_flags% env activate --bat %args_no_quote:activate=%`
   ) do %%I
 ) else (
   goto :default_case
@@ -220,4 +232,4 @@ for %%I in (%~2) do (
     :pathadd "%~1" "%%I\%%Z"
   )
 )
-exit /B %ERRORLEVEL%
+exit /B %ERRORLEVEL%
```
```diff
@@ -346,7 +346,7 @@ the Environment and then install the concretized specs.
 (see :ref:`build-jobs`). To speed up environment builds further, independent
 packages can be installed in parallel by launching more Spack instances. For
 example, the following will build at most four packages in parallel using
-three background jobs:
+three background jobs:

 .. code-block:: console

@@ -394,7 +394,7 @@ version (and other constraints) passed as the spec argument to the

 For packages with ``git`` attributes, git branches, tags, and commits can
 also be used as valid concrete versions (see :ref:`version-specifier`).
-This means that for a package ``foo``, ``spack develop foo@git.main`` will clone
+This means that for a package ``foo``, ``spack develop foo@git.main`` will clone
 the ``main`` branch of the package, and ``spack install`` will install from
 that git clone if ``foo`` is in the environment.
 Further development on ``foo`` can be tested by reinstalling the environment,
@@ -630,6 +630,35 @@ The following two Environment manifests are identical:
 Spec matrices can be used to install swaths of software across various
 toolchains.

+Note that ordering of matrices is important. For example, the
+following environments are identical:
+
+.. code-block:: yaml
+
+   spack:
+     specs:
+       - matrix:
+           - [hdf5@1.10.2+mpi]
+           - [^mpich, ^openmpi]
+           - ['%gcc']
+       - matrix:
+           - [hdf5@1.12.1+mpi]
+           - ['%gcc']
+           - [^mpich, ^openmpi]
+
+   spack:
+     specs:
+       - hdf5@1.10.2+mpi ^mpich%gcc
+       - hdf5@1.10.2+mpi ^openmpi%gcc
+       - hdf5@1.12.1+mpi %gcc ^mpich
+       - hdf5@1.12.1+mpi %gcc ^openmpi
+
+Notice how the first matrix applies the compiler constraints to the
+mpi dependencies, whereas the second matrix applies the compiler
+constraints directly to the root hdf5 node. This gives users the full
+breadth of expressiveness of the spec syntax through the matrix
+interface.
+
 ^^^^^^^^^^^^^^^^^^^^
 Spec List References
 ^^^^^^^^^^^^^^^^^^^^
@@ -1120,19 +1149,19 @@ index once every package is pushed. Note how this target uses the generated
 SPACK ?= spack
 BUILDCACHE_DIR = $(CURDIR)/tarballs

 .PHONY: all

 all: push

 include env.mk

 example/push/%: example/install/%
 	@mkdir -p $(dir $@)
 	$(info About to push $(SPEC) to a buildcache)
 	$(SPACK) -e . buildcache create --allow-root --only=package --directory $(BUILDCACHE_DIR) /$(HASH)
 	@touch $@

 push: $(addprefix example/push/,$(example/SPACK_PACKAGE_IDS))
 	$(info Updating the buildcache index)
 	$(SPACK) -e . buildcache update-index --directory $(BUILDCACHE_DIR)
```
```diff
@@ -116,7 +116,7 @@ creates a simple python file:

     # FIXME: Add a list of GitHub accounts to
     # notify when the package is updated.
-    # maintainers = ["github_user1", "github_user2"]
+    # maintainers("github_user1", "github_user2")

     version("0.8.13", sha256="591a9b4ec81c1f2042a97aa60564e0cb79d041c52faa7416acb38bc95bd2c76d")

@@ -268,7 +268,7 @@ generates a boilerplate template for your package, and opens up the new

     # FIXME: Add a list of GitHub accounts to
     # notify when the package is updated.
-    # maintainers = ["github_user1", "github_user2"]
+    # maintainers("github_user1", "github_user2")

     version("6.2.1", sha256="eae9326beb4158c386e39a356818031bd28f3124cf915f8c5b1dc4c7a36b4d7c")

@@ -319,14 +319,8 @@ The rest of the tasks you need to do are as follows:

 #. Add a comma-separated list of maintainers.

-   The ``maintainers`` field is a list of GitHub accounts of people
-   who want to be notified any time the package is modified. When a
-   pull request is submitted that updates the package, these people
-   will be requested to review the PR. This is useful for developers
-   who maintain a Spack package for their own software, as well as
-   users who rely on a piece of software and want to ensure that the
-   package doesn't break. It also gives users a list of people to
-   contact for help when someone reports a build error with the package.
+   Add a list of Github accounts of people who want to be notified
+   any time the package is modified. See :ref:`package_maintainers`.

 #. Add ``depends_on()`` calls for the package's dependencies.

@@ -497,6 +491,31 @@ some examples:
 In general, you won't have to remember this naming convention because
 :ref:`cmd-spack-create` and :ref:`cmd-spack-edit` handle the details for you.

+.. _package_maintainers:
+
+-----------
+Maintainers
+-----------
+
+Each package in Spack may have one or more maintainers, i.e. one or more
+GitHub accounts of people who want to be notified any time the package is
+modified.
+
+When a pull request is submitted that updates the package, these people will
+be requested to review the PR. This is useful for developers who maintain a
+Spack package for their own software, as well as users who rely on a piece of
+software and want to ensure that the package doesn't break. It also gives users
+a list of people to contact for help when someone reports a build error with
+the package.
+
+To add maintainers to a package, simply declare them with the ``maintainers`` directive:
+
+.. code-block:: python
+
+   maintainers("user1", "user2")
+
+The list of maintainers is additive, and includes all the accounts eventually declared in base classes.
+
 -----------------
 Trusted Downloads
 -----------------
```
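Since the ``maintainers`` directive documented above is additive across the class hierarchy, a schematic sketch may help. The package names here are hypothetical and directives normally live in a package repository's `package.py`; this only illustrates the merging behavior of the directive added in this diff:

```python
from spack.package import *  # brings the maintainers() directive into scope


class CustomBasePackage(Package):
    # Declared once in a base class...
    maintainers("alice")


class ExamplePackage(CustomBasePackage):
    # ...and extended in the subclass. Per the directive's implementation
    # (see directives.py below), the result is the sorted union of both
    # declarations: ["alice", "bob"].
    maintainers("bob")
```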
```diff
@@ -23,7 +23,7 @@
 from llnl.util.lang import dedupe, memoized
 from llnl.util.symlink import islink, symlink

-from spack.util.executable import CommandNotFoundError, Executable, which
+from spack.util.executable import Executable, which
 from spack.util.path import path_to_os_path, system_path_filter

 is_windows = _platform == "win32"
@@ -117,13 +117,7 @@ def path_contains_subdirectory(path, root):
 @memoized
 def file_command(*args):
     """Creates entry point to `file` system command with provided arguments"""
-    try:
-        file_cmd = which("file", required=True)
-    except CommandNotFoundError as e:
-        if is_windows:
-            raise CommandNotFoundError("`file` utility is not available on Windows")
-        else:
-            raise e
+    file_cmd = which("file", required=True)
     for arg in args:
         file_cmd.add_default_arg(arg)
     return file_cmd
@@ -134,7 +128,11 @@ def _get_mime_type():
     """Generate method to call `file` system command to aquire mime type
     for a specified path
     """
-    return file_command("-b", "-h", "--mime-type")
+    if is_windows:
+        # -h option (no-dereference) does not exist in Windows
+        return file_command("-b", "--mime-type")
+    else:
+        return file_command("-b", "-h", "--mime-type")


 @memoized
```
```diff
@@ -47,7 +47,7 @@
 import spack.util.url as url_util
 import spack.util.web as web_util
 from spack.caches import misc_cache_location
-from spack.relocate import utf8_paths_to_single_binary_regex
+from spack.relocate_text import utf8_paths_to_single_binary_regex
 from spack.spec import Spec
 from spack.stage import Stage
 from spack.util.executable import which
@@ -294,10 +294,12 @@ def update_spec(self, spec, found_list):
                 cur_entry["spec"] = new_entry["spec"]
                 break
         else:
-            current_list.append = {
-                "mirror_url": new_entry["mirror_url"],
-                "spec": new_entry["spec"],
-            }
+            current_list.append(
+                {
+                    "mirror_url": new_entry["mirror_url"],
+                    "spec": new_entry["spec"],
+                }
+            )

     def update(self, with_cooldown=False):
         """Make sure local cache of buildcache index files is up to date.
@@ -1730,16 +1732,16 @@ def is_backup_file(file):

         # For all buildcaches
         # relocate the install prefixes in text files including dependencies
-        relocate.unsafe_relocate_text(text_names, prefix_to_prefix_text)
+        relocate.relocate_text(text_names, prefix_to_prefix_text)

         # relocate the install prefixes in binary files including dependencies
-        relocate.unsafe_relocate_text_bin(files_to_relocate, prefix_to_prefix_bin)
+        relocate.relocate_text_bin(files_to_relocate, prefix_to_prefix_bin)

     # If we are installing back to the same location
     # relocate the sbang location if the spack directory changed
     else:
         if old_spack_prefix != new_spack_prefix:
-            relocate.unsafe_relocate_text(text_names, prefix_to_prefix_text)
+            relocate.relocate_text(text_names, prefix_to_prefix_text)


 def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum):
```
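The `update_spec` hunk above fixes a real bug: `current_list.append = {...}` assigns to the list's `append` attribute instead of calling it, so the entry could never be recorded. A quick standalone illustration of why the old line fails:

```python
current_list = []
try:
    # The buggy form: assigning to a built-in list's method attribute.
    current_list.append = {"mirror_url": "https://mirror.example", "spec": "zlib"}
except AttributeError as e:
    print(e)  # 'list' object attribute 'append' is read-only

# The fixed form calls the method instead:
current_list.append({"mirror_url": "https://mirror.example", "spec": "zlib"})
assert len(current_list) == 1
```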
```diff
@@ -171,7 +171,7 @@ def mypy_root_spec():

 def black_root_spec():
     """Return the root spec used to bootstrap black"""
-    return _root_spec("py-black")
+    return _root_spec("py-black@:22.12.0")


 def flake8_root_spec():
```
```diff
@@ -8,7 +8,7 @@
 import spack.directives
 import spack.package_base

-from ._checks import BaseBuilder, apply_macos_rpath_fixups
+from ._checks import BaseBuilder, apply_macos_rpath_fixups, execute_install_time_tests


 class Package(spack.package_base.PackageBase):
@@ -38,7 +38,16 @@ class GenericBuilder(BaseBuilder):
     legacy_methods: Tuple[str, ...] = ()

     #: Names associated with package attributes in the old build-system format
-    legacy_attributes: Tuple[str, ...] = ("archive_files",)
+    legacy_attributes: Tuple[str, ...] = (
+        "archive_files",
+        "install_time_test_callbacks",
+    )
+
+    #: Callback names for post-install phase tests
+    install_time_test_callbacks = []

     # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
     spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
+
+    # unconditionally perform any post-install phase tests
+    spack.builder.run_after("install")(execute_install_time_tests)
```
```diff
@@ -120,6 +120,7 @@ def std_meson_args(self):
         of package writers.
         """
         # standard Meson arguments
+
         std_meson_args = MesonBuilder.std_args(self.pkg)
         std_meson_args += getattr(self, "meson_flag_args", [])
         return std_meson_args
@@ -182,7 +183,10 @@ def meson_args(self):

     def meson(self, pkg, spec, prefix):
         """Run ``meson`` in the build directory"""
-        options = [os.path.abspath(self.root_mesonlists_dir)]
+        options = []
+        if self.spec["meson"].satisfies("@0.64:"):
+            options.append("setup")
+        options.append(os.path.abspath(self.root_mesonlists_dir))
         options += self.std_meson_args
         options += self.meson_args()
         with fs.working_dir(self.build_directory, create=True):
```
```diff
@@ -77,7 +77,7 @@ def toolchain_version(self):
         Override this method to select a specific version of the toolchain or change
         selection heuristics.
         Default is whatever version of msvc has been selected by concretization"""
-        return self.compiler.msvc_version
+        return "v" + self.pkg.compiler.platform_toolset_ver

     @property
     def std_msbuild_args(self):
```
```diff
@@ -92,7 +92,7 @@ def makefile_root(self):
         This path is relative to the root of the extracted tarball,
         not to the ``build_directory``. Defaults to the current directory.
         """
-        return self.stage.source_dir
+        return self.stage.source_path

     @property
     def nmakefile_name(self):
```
```diff
@@ -267,7 +267,7 @@ def update_external_dependencies(self, extendee_spec=None):

         python.external_path = self.spec.external_path
         python._mark_concrete()
-        self.spec.add_dependency_edge(python, ("build", "link", "run"))
+        self.spec.add_dependency_edge(python, deptypes=("build", "link", "run"))

     def get_external_python_for_prefix(self):
         """
```
```diff
@@ -138,7 +138,7 @@ class ROCmPackage(PackageBase):

     depends_on("llvm-amdgpu", when="+rocm")
     depends_on("hsa-rocr-dev", when="+rocm")
-    depends_on("hip", when="+rocm")
+    depends_on("hip +rocm", when="+rocm")

     conflicts("^blt@:0.3.6", when="+rocm")
```
```diff
@@ -530,10 +530,9 @@ def ci_rebuild(args):
     if not verify_binaries:
         install_args.append("--no-check-signature")

-    cdash_args = []
     if cdash_handler:
         # Add additional arguments to `spack install` for CDash reporting.
-        cdash_args.extend(cdash_handler.args())
+        install_args.extend(cdash_handler.args())

     slash_hash = "/{}".format(job_spec.dag_hash())
     deps_install_args = install_args
```
```diff
@@ -6,6 +6,7 @@

 import argparse
 import os.path
+import textwrap

 from llnl.util.lang import stable_partition

@@ -415,6 +416,40 @@ def add_cdash_args(subparser, add_help):
     cdash_subgroup.add_argument("--cdash-buildstamp", default=None, help=cdash_help["buildstamp"])


+def print_cdash_help():
+    parser = argparse.ArgumentParser(
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+        epilog=textwrap.dedent(
+            """\
+environment variables:
+  SPACK_CDASH_AUTH_TOKEN
+                        authentication token to present to CDash
+                        """
+        ),
+    )
+    add_cdash_args(parser, True)
+    parser.print_help()
+
+
+def sanitize_reporter_options(namespace: argparse.Namespace):
+    """Sanitize options that affect generation and configuration of reports, like
+    CDash or JUnit.
+
+    Args:
+        namespace: options parsed from cli
+    """
+    has_any_cdash_option = (
+        namespace.cdash_upload_url or namespace.cdash_build or namespace.cdash_site
+    )
+    if namespace.log_format == "junit" and has_any_cdash_option:
+        raise argparse.ArgumentTypeError("cannot pass any cdash option when --log-format=junit")
+
+    # If any CDash option is passed, assume --log-format=cdash is implied
+    if namespace.log_format is None and has_any_cdash_option:
+        namespace.log_format = "cdash"
+        namespace.reporter = _cdash_reporter(namespace)
+
+
 class ConfigSetAction(argparse.Action):
     """Generic action for setting spack config options from CLI.
```
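To make the contract of the new `sanitize_reporter_options` concrete, here is a minimal standalone sketch of the same decision table, reimplemented from the hunk above for illustration (the real function additionally builds a CDash reporter via the private `_cdash_reporter` helper, omitted here):

```python
import argparse


def sanitize(ns: argparse.Namespace) -> None:
    # Mirrors the logic added above: CDash options conflict with JUnit,
    # and imply --log-format=cdash when no format was given.
    has_cdash = bool(ns.cdash_upload_url or ns.cdash_build or ns.cdash_site)
    if ns.log_format == "junit" and has_cdash:
        raise argparse.ArgumentTypeError("cannot pass any cdash option when --log-format=junit")
    if ns.log_format is None and has_cdash:
        ns.log_format = "cdash"


ns = argparse.Namespace(
    log_format=None, cdash_upload_url="https://cdash.example", cdash_build=None, cdash_site=None
)
sanitize(ns)
assert ns.log_format == "cdash"
```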
```diff
@@ -70,7 +70,7 @@ class {class_name}({base_class_name}):

     # FIXME: Add a list of GitHub accounts to
     # notify when the package is updated.
-    # maintainers = ["github_user1", "github_user2"]
+    # maintainers("github_user1", "github_user2")

     {versions}
```
```diff
@@ -39,12 +39,19 @@
 compiler flags:
     @g{cflags="flags"}                   cppflags, cflags, cxxflags,
                                          fflags, ldflags, ldlibs
+    @g{cflags=="flags"}                  propagate flags to package dependencies
+                                         cppflags, cflags, cxxflags, fflags,
+                                         ldflags, ldlibs

 variants:
     @B{+variant}                         enable <variant>
+    @B{++variant}                        propagate enable <variant>
     @r{-variant} or @r{~variant}         disable <variant>
+    @r{--variant} or @r{~~variant}       propagate disable <variant>
     @B{variant=value}                    set non-boolean <variant> to <value>
+    @B{variant==value}                   propagate non-boolean <variant> to <value>
     @B{variant=value1,value2,value3}     set multi-value <variant> values
+    @B{variant==value1,value2,value3}    propagate multi-value <variant> values

 architecture variants:
     @m{platform=platform}                linux, darwin, cray, etc.
@@ -68,6 +75,8 @@
     hdf5 @c{@1.8:} @g{%gcc}                       hdf5 1.8 or higher built with gcc
     hdf5 @B{+mpi}                                 hdf5 with mpi enabled
     hdf5 @r{~mpi}                                 hdf5 with mpi disabled
+    hdf5 @B{++mpi}                                hdf5 with mpi enabled and propagates
+    hdf5 @r{~~mpi}                                hdf5 with mpi disabled and propagates
     hdf5 @B{+mpi} ^mpich                          hdf5 with mpi, using mpich
     hdf5 @B{+mpi} ^openmpi@c{@1.7}                hdf5 with mpi, using openmpi 1.7
     boxlib @B{dim=2}                              boxlib built for 2 dimensions
```
```diff
@@ -7,7 +7,6 @@
 import os
 import shutil
 import sys
-import textwrap
 from typing import List

 import llnl.util.filesystem as fs
@@ -260,7 +259,7 @@ def default_log_file(spec):

 def report_filename(args: argparse.Namespace, specs: List[spack.spec.Spec]) -> str:
     """Return the filename to be used for reporting to JUnit or CDash format."""
-    result = args.log_file or args.cdash_upload_url or default_log_file(specs[0])
+    result = args.log_file or default_log_file(specs[0])
     return result

@@ -348,21 +347,6 @@ def install_specs_outside_environment(specs, install_kwargs):
     builder.install()


-def print_cdash_help():
-    parser = argparse.ArgumentParser(
-        formatter_class=argparse.RawDescriptionHelpFormatter,
-        epilog=textwrap.dedent(
-            """\
-environment variables:
-  SPACK_CDASH_AUTH_TOKEN
-                        authentication token to present to CDash
-                        """
-        ),
-    )
-    arguments.add_cdash_args(parser, True)
-    parser.print_help()
-
-
 def install_all_specs_from_active_environment(
     install_kwargs, only_concrete, cli_test_arg, reporter_factory
 ):
@@ -496,7 +480,7 @@ def install(parser, args):
     tty.set_verbose(args.verbose or args.install_verbose)

     if args.help_cdash:
-        print_cdash_help()
+        spack.cmd.common.arguments.print_cdash_help()
         return

     if args.no_checksum:
@@ -505,6 +489,8 @@ def install(parser, args):
     if args.deprecated:
         spack.config.set("config:deprecated", True, scope="command_line")

+    spack.cmd.common.arguments.sanitize_reporter_options(args)
+
     def reporter_factory(specs):
         if args.log_format is None:
             return None
```
```diff
@@ -11,7 +11,6 @@
 import re
 import shutil
 import sys
-import textwrap

 from llnl.util import lang, tty
 from llnl.util.tty import colify
@@ -171,20 +170,11 @@ def test_run(args):

     # cdash help option
     if args.help_cdash:
-        parser = argparse.ArgumentParser(
-            formatter_class=argparse.RawDescriptionHelpFormatter,
-            epilog=textwrap.dedent(
-                """\
-environment variables:
-  SPACK_CDASH_AUTH_TOKEN
-                        authentication token to present to CDash
-                        """
-            ),
-        )
-        arguments.add_cdash_args(parser, True)
-        parser.print_help()
+        arguments.print_cdash_help()
         return

+    arguments.sanitize_reporter_options(args)
+
     # set config option for fail-fast
     if args.fail_fast:
         spack.config.set("config:fail_fast", True, scope="command_line")
@@ -237,22 +227,22 @@ def test_run(args):
     )


+def report_filename(args, test_suite):
+    if args.log_file:
+        if os.path.isabs(args.log_file):
+            return args.log_file
+        else:
+            log_dir = os.getcwd()
+            return os.path.join(log_dir, args.log_file)
+    else:
+        return os.path.join(os.getcwd(), "test-%s" % test_suite.name)
+
+
 def create_reporter(args, specs_to_test, test_suite):
     if args.log_format is None:
         return None

     filename = args.cdash_upload_url
     if not filename:
-        if args.log_file:
-            if os.path.isabs(args.log_file):
-                log_file = args.log_file
-            else:
-                log_dir = os.getcwd()
-                log_file = os.path.join(log_dir, args.log_file)
-        else:
-            log_file = os.path.join(os.getcwd(), "test-%s" % test_suite.name)
-        filename = log_file
+        filename = report_filename(args, test_suite)

     context_manager = spack.report.test_context_manager(
         reporter=args.reporter(),
         filename=filename,
```
```diff
@@ -63,7 +63,7 @@ def tutorial(parser, args):
     if not tty.get_yes_or_no("Are you sure you want to proceed?"):
         tty.die("Aborted")

-    rm_cmds = ["rm -f %s" % f for f in rm_configs]
+    rm_cmds = [f"rm -f {f}" for f in rm_configs]
     tty.msg("Reverting compiler and repository configuration", *rm_cmds)
     for path in rm_configs:
         if os.path.exists(path):
@@ -71,19 +71,19 @@ def tutorial(parser, args):

     tty.msg(
         "Ensuring that the tutorial binary mirror is configured:",
-        "spack mirror add tutorial %s" % tutorial_mirror,
+        f"spack mirror add tutorial {tutorial_mirror}",
     )
     mirror_config = syaml_dict()
     mirror_config["tutorial"] = tutorial_mirror
     spack.config.set("mirrors", mirror_config, scope="user")

-    tty.msg("Ensuring that we trust tutorial binaries", "spack gpg trust %s" % tutorial_key)
+    tty.msg("Ensuring that we trust tutorial binaries", f"spack gpg trust {tutorial_key}")
     spack.util.gpg.trust(tutorial_key)

     # Note that checkout MUST be last. It changes Spack under our feet.
     # If you don't put this last, you'll get import errors for the code
     # that follows (exacerbated by the various lazy singletons we use)
-    tty.msg("Ensuring we're on the releases/v{0}.{1} branch".format(*spack.spack_version_info[:2]))
+    tty.msg(f"Ensuring we're on the {tutorial_branch} branch")
     git = spack.util.git.git(required=True)
     with working_dir(spack.paths.prefix):
         git("checkout", tutorial_branch)
```
```diff
@@ -89,6 +89,11 @@ def cxx14_flag(self):
             return "-std=c++14"
         return "-h std=c++14"

+    @property
+    def cxx17_flag(self):
+        if self.is_clang_based:
+            return "-std=c++17"
+
     @property
     def c99_flag(self):
         if self.is_clang_based:
```
```diff
@@ -103,11 +103,22 @@ def short_msvc_version(self):
         """
         This is the shorthand VCToolset version of form
         MSVC<short-ver> *NOT* the full version, for that see
-        Msvc.msvc_version
+        Msvc.msvc_version or MSVC.platform_toolset_ver for the
+        raw platform toolset version
         """
-        ver = self.msvc_version[:2].joined.string[:3]
+        ver = self.platform_toolset_ver
         return "MSVC" + ver

+    @property
+    def platform_toolset_ver(self):
+        """
+        This is the platform toolset version of current MSVC compiler
+        i.e. 142.
+        This is different from the VC toolset version as established
+        by `short_msvc_version`
+        """
+        return self.msvc_version[:2].joined.string[:3]
+
     @property
     def cl_version(self):
         """Cl toolset version"""
```
```diff
@@ -162,7 +162,7 @@ def entries_to_specs(entries):
             continue
         parent_spec = spec_dict[entry["hash"]]
         dep_spec = spec_dict[dep_hash]
-        parent_spec._add_dependency(dep_spec, deptypes)
+        parent_spec._add_dependency(dep_spec, deptypes=deptypes)

     return spec_dict
```
```diff
@@ -107,6 +107,14 @@
 ]


+def reader(version):
+    reader_cls = {
+        Version("5"): spack.spec.SpecfileV1,
+        Version("6"): spack.spec.SpecfileV3,
+    }
+    return reader_cls[version]
+
+
 def _now():
     """Returns the time since the epoch"""
     return time.time()
@@ -674,7 +682,7 @@ def _write_to_file(self, stream):
         except (TypeError, ValueError) as e:
             raise sjson.SpackJSONError("error writing JSON database:", str(e))

-    def _read_spec_from_dict(self, hash_key, installs, hash=ht.dag_hash):
+    def _read_spec_from_dict(self, spec_reader, hash_key, installs, hash=ht.dag_hash):
         """Recursively construct a spec from a hash in a YAML database.

         Does not do any locking.
@@ -692,7 +700,7 @@ def _read_spec_from_dict(self, hash_key, installs, hash=ht.dag_hash):
             spec_dict[hash.name] = hash_key

         # Build spec from dict first.
-        spec = spack.spec.Spec.from_node_dict(spec_dict)
+        spec = spec_reader.from_node_dict(spec_dict)
         return spec

     def db_for_spec_hash(self, hash_key):
@@ -732,7 +740,7 @@ def query_local_by_spec_hash(self, hash_key):
         with self.read_transaction():
             return self._data.get(hash_key, None)

-    def _assign_dependencies(self, hash_key, installs, data):
+    def _assign_dependencies(self, spec_reader, hash_key, installs, data):
         # Add dependencies from other records in the install DB to
         # form a full spec.
         spec = data[hash_key].spec
@@ -742,7 +750,7 @@ def _assign_dependencies(self, hash_key, installs, data):
             spec_node_dict = spec_node_dict[spec.name]
             if "dependencies" in spec_node_dict:
                 yaml_deps = spec_node_dict["dependencies"]
-                for dname, dhash, dtypes, _ in spack.spec.Spec.read_yaml_dep_specs(yaml_deps):
+                for dname, dhash, dtypes, _ in spec_reader.read_specfile_dep_specs(yaml_deps):
                     # It is important that we always check upstream installations
                     # in the same order, and that we always check the local
                     # installation first: if a downstream Spack installs a package
@@ -765,7 +773,7 @@ def _assign_dependencies(self, hash_key, installs, data):
                     tty.warn(msg)
                     continue

-                spec._add_dependency(child, dtypes)
+                spec._add_dependency(child, deptypes=dtypes)

     def _read_from_file(self, filename):
         """Fill database from file, do not maintain old data.
@@ -797,6 +805,7 @@ def check(cond, msg):

         # TODO: better version checking semantics.
         version = Version(db["version"])
+        spec_reader = reader(version)
         if version > _db_version:
             raise InvalidDatabaseVersionError(_db_version, version)
         elif version < _db_version:
@@ -832,7 +841,7 @@ def invalid_record(hash_key, error):
         for hash_key, rec in installs.items():
             try:
                 # This constructs a spec DAG from the list of all installs
-                spec = self._read_spec_from_dict(hash_key, installs)
+                spec = self._read_spec_from_dict(spec_reader, hash_key, installs)

                 # Insert the brand new spec in the database. Each
                 # spec has its own copies of its dependency specs.
@@ -848,7 +857,7 @@ def invalid_record(hash_key, error):
         # Pass 2: Assign dependencies once all specs are created.
         for hash_key in data:
             try:
-                self._assign_dependencies(hash_key, installs, data)
+                self._assign_dependencies(spec_reader, hash_key, installs, data)
             except MissingDependenciesError:
                 raise
             except Exception as e:
@@ -1167,7 +1176,7 @@ def _add(
         for dep in spec.edges_to_dependencies(deptype=_tracked_deps):
             dkey = dep.spec.dag_hash()
             upstream, record = self.query_by_spec_hash(dkey)
-            new_spec._add_dependency(record.spec, dep.deptypes)
+            new_spec._add_dependency(record.spec, deptypes=dep.deptypes)
             if not upstream:
                 record.ref_count += 1
```
```diff
@@ -54,6 +54,7 @@ class OpenMpi(Package):
     "conflicts",
     "depends_on",
     "extends",
+    "maintainers",
     "provides",
     "patch",
     "variant",
@@ -767,6 +768,22 @@ def build_system(*values, **kwargs):
     )


+@directive(dicts=())
+def maintainers(*names: str):
+    """Add a new maintainer directive, to specify maintainers in a declarative way.
+
+    Args:
+        names: GitHub username for the maintainer
+    """
+
+    def _execute_maintainer(pkg):
+        maintainers_from_base = getattr(pkg, "maintainers", [])
+        # Here it is essential to copy, otherwise we might add to an empty list in the parent
+        pkg.maintainers = list(sorted(set(maintainers_from_base + list(names))))
+
+    return _execute_maintainer
+
+
 class DirectiveError(spack.error.SpackError):
     """This is raised when something is wrong with a package directive."""
```
```diff
@@ -104,6 +104,15 @@ def default_manifest_yaml():
 #: version of the lockfile format. Must increase monotonically.
 lockfile_format_version = 4


+READER_CLS = {
+    1: spack.spec.SpecfileV1,
+    2: spack.spec.SpecfileV1,
+    3: spack.spec.SpecfileV2,
+    4: spack.spec.SpecfileV3,
+}
+
+
 # Magic names
 # The name of the standalone spec list in the manifest yaml
 user_speclist_name = "specs"
@@ -1436,7 +1445,7 @@ def _concretize_separately(self, tests=False):
                     if test_dependency in current_spec[node.name]:
                         continue
                     current_spec[node.name].add_dependency_edge(
-                        test_dependency.copy(), deptype="test"
+                        test_dependency.copy(), deptypes="test"
                     )

         results = [
@@ -1942,7 +1951,7 @@ def _to_lockfile_dict(self):
             "_meta": {
                 "file-type": "spack-lockfile",
                 "lockfile-version": lockfile_format_version,
-                "specfile-version": spack.spec.specfile_format_version,
+                "specfile-version": spack.spec.SPECFILE_FORMAT_VERSION,
             },
             # users specs + hashes are the 'roots' of the environment
             "roots": [{"hash": h, "spec": str(s)} for h, s in hash_spec_list],
@@ -1975,10 +1984,19 @@ def _read_lockfile_dict(self, d):

         # Track specs by their DAG hash, allows handling DAG hash collisions
         first_seen = {}
+        current_lockfile_format = d["_meta"]["lockfile-version"]
+        try:
+            reader = READER_CLS[current_lockfile_format]
+        except KeyError:
+            msg = (
+                f"Spack {spack.__version__} cannot read environment lockfiles using the "
+                f"v{current_lockfile_format} format"
+            )
+            raise RuntimeError(msg)

         # First pass: Put each spec in the map ignoring dependencies
         for lockfile_key, node_dict in json_specs_by_hash.items():
-            spec = Spec.from_node_dict(node_dict)
+            spec = reader.from_node_dict(node_dict)
             if not spec._hash:
                 # in v1 lockfiles, the hash only occurs as a key
                 spec._hash = lockfile_key
@@ -1987,8 +2005,11 @@ def _read_lockfile_dict(self, d):
         # Second pass: For each spec, get its dependencies from the node dict
         # and add them to the spec
         for lockfile_key, node_dict in json_specs_by_hash.items():
-            for _, dep_hash, deptypes, _ in Spec.dependencies_from_node_dict(node_dict):
-                specs_by_hash[lockfile_key]._add_dependency(specs_by_hash[dep_hash], deptypes)
+            name, data = reader.name_and_data(node_dict)
+            for _, dep_hash, deptypes, _ in reader.dependencies_from_node_dict(data):
+                specs_by_hash[lockfile_key]._add_dependency(
+                    specs_by_hash[dep_hash], deptypes=deptypes
+                )

         # Traverse the root specs one at a time in the order they appear.
         # The first time we see each DAG hash, that's the one we want to
@@ -2244,15 +2265,12 @@ def _concretize_from_constraints(spec_constraints, tests=False):
         m += "concretization target. all specs must have a single name "
         m += "constraint for concretization."
         raise InvalidSpecConstraintError(m)
     spec_constraints.remove(root_spec[0])

     invalid_constraints = []
     while True:
-        # Attach all anonymous constraints to one named spec
-        s = root_spec[0].copy()
-        for c in spec_constraints:
-            if c not in invalid_constraints:
-                s.constrain(c)
+        # Combine constraints into a single spec
+        s = Spec(" ".join([str(c) for c in spec_constraints if c not in invalid_constraints]))

         try:
             return s.concretized(tests=tests)
         except spack.spec.InvalidDependencyError as e:
```
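The lockfile handling above follows a simple dispatch pattern: a version-to-reader map plus a `KeyError` guard that turns unknown (usually newer) formats into a friendly error instead of a mis-parse. A minimal standalone sketch of that pattern, with illustrative placeholder classes rather than Spack's actual readers:

```python
class SpecfileV1:
    @staticmethod
    def from_node_dict(node):  # placeholder reader
        return ("v1", node)


class SpecfileV3(SpecfileV1):
    pass


# Map every supported on-disk format to the class that can parse it.
READER_CLS = {1: SpecfileV1, 2: SpecfileV1, 3: SpecfileV3}


def reader_for(lockfile_version: int):
    try:
        return READER_CLS[lockfile_version]
    except KeyError:
        raise RuntimeError(f"cannot read lockfiles using the v{lockfile_version} format")


assert reader_for(2) is SpecfileV1
```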
```diff
@@ -95,6 +95,22 @@ def _ensure_one_stage_entry(stage_path):
     return os.path.join(stage_path, stage_entries[0])


+def _filesummary(path, print_bytes=16):
+    try:
+        n = print_bytes
+        with open(path, "rb") as f:
+            size = os.fstat(f.fileno()).st_size
+            if size <= 2 * n:
+                short_contents = f.read(2 * n)
+            else:
+                short_contents = f.read(n)
+                f.seek(-n, 2)
+                short_contents += b"..." + f.read(n)
+        return size, short_contents
+    except OSError:
+        return 0, b""
+
+
 def fetcher(cls):
     """Decorator used to register fetch strategies."""
     all_strategies.append(cls)
@@ -500,9 +516,14 @@ def check(self):

         checker = crypto.Checker(self.digest)
         if not checker.check(self.archive_file):
+            # On failure, provide some information about the file size and
+            # contents, so that we can quickly see what the issue is (redirect
+            # was not followed, empty file, text instead of binary, ...)
+            size, contents = _filesummary(self.archive_file)
             raise ChecksumError(
-                "%s checksum failed for %s" % (checker.hash_name, self.archive_file),
-                "Expected %s but got %s" % (self.digest, checker.sum),
+                f"{checker.hash_name} checksum failed for {self.archive_file}",
+                f"Expected {self.digest} but got {checker.sum}. "
+                f"File size = {size} bytes. Contents = {contents!r}",
             )

     @_needs_stage
```
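The `_filesummary` helper added above reads at most `2 * print_bytes` bytes: the whole file when it is small, otherwise the first and last `print_bytes` joined by `b"..."`. A self-contained demo of that behavior, using a copy of the same logic against a temporary file (the original helper is private to the module):

```python
import os
import tempfile


def filesummary(path, print_bytes=16):
    # Same behavior as the _filesummary helper in the hunk above.
    try:
        n = print_bytes
        with open(path, "rb") as f:
            size = os.fstat(f.fileno()).st_size
            if size <= 2 * n:
                short_contents = f.read(2 * n)
            else:
                short_contents = f.read(n)
                f.seek(-n, 2)  # seek n bytes back from the end of the file
                short_contents += b"..." + f.read(n)
        return size, short_contents
    except OSError:
        return 0, b""


with tempfile.NamedTemporaryFile(delete=False) as tmp:
    tmp.write(b"0123456789" * 5)  # 50 bytes, larger than 2 * 16

print(filesummary(tmp.name))
# (50, b'0123456789012345...4567890123456789')
```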
```diff
@@ -90,11 +90,11 @@ def view_copy(src, dst, view, spec=None):
             prefix_to_projection[dep.prefix] = view.get_projection_for_spec(dep)

         if spack.relocate.is_binary(dst):
-            spack.relocate.unsafe_relocate_text_bin(binaries=[dst], prefixes=prefix_to_projection)
+            spack.relocate.relocate_text_bin(binaries=[dst], prefixes=prefix_to_projection)
         else:
             prefix_to_projection[spack.store.layout.root] = view._root
             prefix_to_projection[orig_sbang] = new_sbang
-            spack.relocate.unsafe_relocate_text(files=[dst], prefixes=prefix_to_projection)
+            spack.relocate.relocate_text(files=[dst], prefixes=prefix_to_projection)
         try:
             stat = os.stat(src)
             os.chown(dst, stat.st_uid, stat.st_gid)
```
```diff
@@ -283,7 +283,7 @@ def next_spec(self, initial_spec: Optional[spack.spec.Spec] = None) -> spack.spec.Spec:
                 if root_spec.concrete:
                     raise spack.spec.RedundantSpecError(root_spec, "^" + str(dependency))

-                root_spec._add_dependency(dependency, ())
+                root_spec._add_dependency(dependency, deptypes=())

             else:
                 break
```
```diff
@@ -292,8 +292,8 @@ def from_json(stream, repository):
     index.providers = _transform(
         providers,
         lambda vpkg, plist: (
-            spack.spec.Spec.from_node_dict(vpkg),
-            set(spack.spec.Spec.from_node_dict(p) for p in plist),
+            spack.spec.SpecfileV3.from_node_dict(vpkg),
+            set(spack.spec.SpecfileV3.from_node_dict(p) for p in plist),
         ),
     )
     return index
```
```diff
@@ -4,7 +4,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import collections
 import itertools
 import multiprocessing.pool
 import os
 import re
 import shutil
@@ -27,6 +26,8 @@
 import spack.util.elf as elf
 import spack.util.executable as executable

+from .relocate_text import BinaryFilePrefixReplacer, TextFilePrefixReplacer
+
 is_macos = str(spack.platforms.real_host()) == "darwin"

@@ -46,49 +47,6 @@ def __init__(self, file_path, root_path):
         )


-class BinaryStringReplacementError(spack.error.SpackError):
-    def __init__(self, file_path, old_len, new_len):
-        """The size of the file changed after binary path substitution
-
-        Args:
-            file_path (str): file with changing size
-            old_len (str): original length of the file
-            new_len (str): length of the file after substitution
-        """
-        super(BinaryStringReplacementError, self).__init__(
-            "Doing a binary string replacement in %s failed.\n"
-            "The size of the file changed from %s to %s\n"
-            "when it should have remanined the same." % (file_path, old_len, new_len)
-        )
-
-
-class BinaryTextReplaceError(spack.error.SpackError):
-    def __init__(self, msg):
-        msg += (
-            " To fix this, compile with more padding "
-            "(config:install_tree:padded_length), or install to a shorter prefix."
-        )
-        super(BinaryTextReplaceError, self).__init__(msg)
-
-
-class CannotGrowString(BinaryTextReplaceError):
-    def __init__(self, old, new):
-        msg = "Cannot replace {!r} with {!r} because the new prefix is longer.".format(old, new)
-        super(CannotGrowString, self).__init__(msg)
-
-
-class CannotShrinkCString(BinaryTextReplaceError):
-    def __init__(self, old, new, full_old_string):
-        # Just interpolate binary string to not risk issues with invalid
-        # unicode, which would be really bad user experience: error in error.
-        # We have no clue if we actually deal with a real C-string nor what
-        # encoding it has.
-        msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format(
-            old, new, full_old_string
-        )
-        super(CannotShrinkCString, self).__init__(msg)
-
-
 @memoized
 def _patchelf():
     """Return the full path to the patchelf binary, if available, else None."""
@@ -450,108 +408,6 @@ def needs_text_relocation(m_type, m_subtype):
     return m_type == "text"


-def apply_binary_replacements(f, prefix_to_prefix, suffix_safety_size=7):
-    """
-    Given a file opened in rb+ mode, apply the string replacements as
-    specified by an ordered dictionary of prefix to prefix mappings. This
-    method takes special care of null-terminated C-strings. C-string constants
-    are problematic because compilers and linkers optimize readonly strings for
-    space by aliasing those that share a common suffix (only suffix since all
-    of them are null terminated). See https://github.com/spack/spack/pull/31739
-    and https://github.com/spack/spack/pull/32253 for details. Our logic matches
-    the original prefix with a ``suffix_safety_size + 1`` lookahead for null bytes.
-    If no null terminator is found, we simply pad with leading /, assuming that
-    it's a long C-string; the full C-string after replacement has a large suffix
-    in common with its original value.
-    If there *is* a null terminator we can do the same as long as the replacement
-    has a sufficiently long common suffix with the original prefix.
-    As a last resort when the replacement does not have a long enough common suffix,
-    we can try to shorten the string, but this only works if the new length is
-    sufficiently short (typically the case when going from large padding -> normal path)
-    If the replacement string is longer, or all of the above fails, we error out.
-
-    Arguments:
-        f: file opened in rb+ mode
-        prefix_to_prefix (OrderedDict): OrderedDictionary where the keys are
-            bytes representing the old prefixes and the values are the new
-        suffix_safety_size (int): in case of null terminated strings, what size
-            of the suffix should remain to avoid aliasing issues?
-    """
-    assert suffix_safety_size >= 0
-    assert f.tell() == 0
-
-    # Look for exact matches of our paths, and also look if there's a null terminator
-    # soon after (this covers the case where we search for /abc but match /abc/ with
-    # a trailing dir seperator).
-    regex = re.compile(
-        b"("
-        + b"|".join(re.escape(p) for p in prefix_to_prefix.keys())
-        + b")([^\0]{0,%d}\0)?" % suffix_safety_size
-    )
-
-    # We *could* read binary data in chunks to avoid loading all in memory,
-    # but it's nasty to deal with matches across boundaries, so let's stick to
-    # something simple.
-
-    for match in regex.finditer(f.read()):
-        # The matching prefix (old) and its replacement (new)
-        old = match.group(1)
-        new = prefix_to_prefix[old]
-
-        # Did we find a trailing null within a N + 1 bytes window after the prefix?
-        null_terminated = match.end(0) > match.end(1)
-
-        # Suffix string length, excluding the null byte
-        # Only makes sense if null_terminated
-        suffix_strlen = match.end(0) - match.end(1) - 1
-
-        # How many bytes are we shrinking our string?
-        bytes_shorter = len(old) - len(new)
-
-        # We can't make strings larger.
-        if bytes_shorter < 0:
-            raise CannotGrowString(old, new)
-
-        # If we don't know whether this is a null terminated C-string (we're looking
-        # only N + 1 bytes ahead), or if it is and we have a common suffix, we can
-        # simply pad with leading dir separators.
-        elif (
-            not null_terminated
-            or suffix_strlen >= suffix_safety_size  # == is enough, but let's be defensive
-            or old[-suffix_safety_size + suffix_strlen :]
-            == new[-suffix_safety_size + suffix_strlen :]
-        ):
-            replacement = b"/" * bytes_shorter + new
-
-        # If it *was* null terminated, all that matters is that we can leave N bytes
-        # of old suffix in place. Note that > is required since we also insert an
-        # additional null terminator.
-        elif bytes_shorter > suffix_safety_size:
-            replacement = new + match.group(2)  # includes the trailing null
-
-        # Otherwise... we can't :(
-        else:
-            raise CannotShrinkCString(old, new, match.group()[:-1])
-
-        f.seek(match.start())
-        f.write(replacement)
-
-
-def _replace_prefix_bin(filename, prefix_to_prefix):
-    """Replace all the occurrences of the old prefix with a new prefix in binary
-    files. See :func:`~spack.relocate.apply_binary_replacements` for details.
-
-    Args:
-        filename (str): target binary file
-        byte_prefixes (OrderedDict): ordered dictionary where the keys are
-            bytes representing the old prefixes and the values are the new
-            prefixes (all bytes utf-8 encoded)
-    """
-
-    with open(filename, "rb+") as f:
-        apply_binary_replacements(f, prefix_to_prefix)
-
-
 def relocate_macho_binaries(
     path_names,
     old_layout_root,
@@ -800,120 +656,32 @@ def relocate_links(links, prefix_to_prefix):
         symlink(new_target, link)


-def utf8_path_to_binary_regex(prefix):
-    """Create a (binary) regex that matches the input path in utf8"""
-    prefix_bytes = re.escape(prefix).encode("utf-8")
-    return re.compile(b"(?<![\\w\\-_/])([\\w\\-_]*?)%s([\\w\\-_/]*)" % prefix_bytes)
-
-
-def byte_strings_to_single_binary_regex(prefixes):
-    all_prefixes = b"|".join(re.escape(p) for p in prefixes)
-    return re.compile(b"(?<![\\w\\-_/])([\\w\\-_]*?)(%s)([\\w\\-_/]*)" % all_prefixes)
-
-
-def utf8_paths_to_single_binary_regex(prefixes):
-    """Create a (binary) regex that matches any input path in utf8"""
-    return byte_strings_to_single_binary_regex(p.encode("utf-8") for p in prefixes)
-
-
-def _replace_prefix_text_file(file, regex, prefix_to_prefix):
-    """Given a text file opened in rb+, substitute all old with new prefixes and write
-    in-place (file size may grow or shrink)."""
-
-    def replacement(match):
-        return match.group(1) + prefix_to_prefix[match.group(2)] + match.group(3)
-
-    data = file.read()
-    file.seek(0)
-    file.write(re.sub(regex, replacement, data))
-    file.truncate()
-
-
-def _replace_prefix_text(filename, regex, prefix_to_prefix):
-    with open(filename, "rb+") as f:
-        _replace_prefix_text_file(f, regex, prefix_to_prefix)
-
-
-def unsafe_relocate_text(files, prefixes, concurrency=32):
+def relocate_text(files, prefixes):
     """Relocate text file from the original installation prefix to the
     new prefix.

     Relocation also affects the the path in Spack's sbang script.

     Note: unsafe when files contains duplicates, such as repeated paths,
     symlinks, hardlinks.

     Args:
         files (list): Text files to be relocated
         prefixes (OrderedDict): String prefixes which need to be changed
-        concurrency (int): Preferred degree of parallelism
     """
-
-    # This now needs to be handled by the caller in all cases
-    # orig_sbang = '#!/bin/bash {0}/bin/sbang'.format(orig_spack)
-    # new_sbang = '#!/bin/bash {0}/bin/sbang'.format(new_spack)
-
-    # Transform to binary string
-    prefix_to_prefix = OrderedDict(
-        (k.encode("utf-8"), v.encode("utf-8")) for (k, v) in prefixes.items()
-    )
-
-    # Create a regex of the form (pre check)(prefix 1|prefix 2|prefix 3)(post check).
-    regex = byte_strings_to_single_binary_regex(prefix_to_prefix.keys())
-
-    args = [(filename, regex, prefix_to_prefix) for filename in files]
-    tp = multiprocessing.pool.ThreadPool(processes=concurrency)
-    try:
-        tp.map(llnl.util.lang.star(_replace_prefix_text), args)
-    finally:
-        tp.terminate()
-        tp.join()
+    TextFilePrefixReplacer.from_strings_or_bytes(prefixes).apply(files)


-def unsafe_relocate_text_bin(binaries, prefixes, concurrency=32):
-    """Replace null terminated path strings hard coded into binaries.
+def relocate_text_bin(binaries, prefixes):
+    """Replace null terminated path strings hard-coded into binaries.

     The new install prefix must be shorter than the original one.

     Note: unsafe when files contains duplicates, such as repeated paths,
     symlinks, hardlinks.

     Args:
         binaries (list): binaries to be relocated
         prefixes (OrderedDict): String prefixes which need to be changed.
-        concurrency (int): Desired degree of parallelism.

     Raises:
-        BinaryTextReplaceError: when the new path is longer than the old path
+        spack.relocate_text.BinaryTextReplaceError: when the new path is longer than the old path
     """
-    byte_prefixes = collections.OrderedDict({})
-
-    for orig_prefix, new_prefix in prefixes.items():
-        if orig_prefix != new_prefix:
-            if isinstance(orig_prefix, bytes):
-                orig_bytes = orig_prefix
-            else:
-                orig_bytes = orig_prefix.encode("utf-8")
-            if isinstance(new_prefix, bytes):
-                new_bytes = new_prefix
-            else:
-                new_bytes = new_prefix.encode("utf-8")
-            byte_prefixes[orig_bytes] = new_bytes
-
-    # Do relocations on text in binaries that refers to the install tree
-    # multiprocesing.ThreadPool.map requires single argument
-    args = []
-
-    for binary in binaries:
-        args.append((binary, byte_prefixes))
-
-    tp = multiprocessing.pool.ThreadPool(processes=concurrency)
-
-    try:
-        tp.map(llnl.util.lang.star(_replace_prefix_bin), args)
-    finally:
-        tp.terminate()
-        tp.join()
+    BinaryFilePrefixReplacer.from_strings_or_bytes(prefixes).apply(binaries)


 def is_relocatable(spec):
```
288
lib/spack/spack/relocate_text.py
Normal file
@@ -0,0 +1,288 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""This module contains pure-Python classes and functions for replacing
paths inside text files and binaries."""

import re
from collections import OrderedDict
from typing import Dict, Union

import spack.error

Prefix = Union[str, bytes]


def encode_path(p: Prefix) -> bytes:
    return p if isinstance(p, bytes) else p.encode("utf-8")


def _prefix_to_prefix_as_bytes(prefix_to_prefix) -> Dict[bytes, bytes]:
    return OrderedDict((encode_path(k), encode_path(v)) for (k, v) in prefix_to_prefix.items())


def utf8_path_to_binary_regex(prefix: str):
    """Create a binary regex that matches the input path in utf8"""
    prefix_bytes = re.escape(prefix).encode("utf-8")
    return re.compile(b"(?<![\\w\\-_/])([\\w\\-_]*?)%s([\\w\\-_/]*)" % prefix_bytes)


def _byte_strings_to_single_binary_regex(prefixes):
    all_prefixes = b"|".join(re.escape(p) for p in prefixes)
    return re.compile(b"(?<![\\w\\-_/])([\\w\\-_]*?)(%s)([\\w\\-_/]*)" % all_prefixes)


def utf8_paths_to_single_binary_regex(prefixes):
    """Create a (binary) regex that matches any input path in utf8"""
    return _byte_strings_to_single_binary_regex(p.encode("utf-8") for p in prefixes)
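The negative lookbehind plus the capture groups confine matches to path-like runs, so a prefix embedded in a longer path does not match. A standalone check of that behavior, with the regex rebuilt inline so it runs without Spack on sys.path (sample byte strings invented):

import re

prefix = b"/opt/spack/store"
regex = re.compile(b"(?<![\\w\\-_/])([\\w\\-_]*?)%s([\\w\\-_/]*)" % re.escape(prefix))

assert regex.search(b"PATH=/opt/spack/store/gcc/bin")    # plain occurrence matches
assert regex.search(b"@@PAD@@/opt/spack/store/lib")      # non-path bytes before it: matches
assert not regex.search(b"/other/opt/spack/store/lib")   # tail of a longer path: no match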
def filter_identity_mappings(prefix_to_prefix):
    """Drop mappings that are not changed."""
    # NOTE: we don't guard against the following case:
    # [/abc/def -> /abc/def, /abc -> /x] *will* be simplified to
    # [/abc -> /x], meaning that after this simplification /abc/def will be
    # mapped to /x/def instead of /abc/def. This should not be a problem.
    return OrderedDict((k, v) for (k, v) in prefix_to_prefix.items() if k != v)
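The caveat in that NOTE, made concrete (a self-contained rerun of the same filter with invented paths):

from collections import OrderedDict

def filter_identity_mappings(prefix_to_prefix):
    return OrderedDict((k, v) for (k, v) in prefix_to_prefix.items() if k != v)

mapping = OrderedDict([("/abc/def", "/abc/def"), ("/abc", "/x")])
assert filter_identity_mappings(mapping) == OrderedDict([("/abc", "/x")])
# The identity pair is gone, so "/abc/def" now relocates to "/x/def".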
class PrefixReplacer:
    """Base class for applying a prefix to prefix map
    to a list of binaries or text files.
    Child classes implement _apply_to_file to do the
    actual work, which is different when it comes to
    binaries and text files."""

    def __init__(self, prefix_to_prefix: Dict[bytes, bytes]):
        """
        Arguments:

            prefix_to_prefix (OrderedDict):

                An ordered mapping from prefix to prefix. The order is
                relevant to support substring fallbacks, for example
                [("/first/sub", "/x"), ("/first", "/y")] will ensure
                /first/sub is matched and replaced before /first.
        """
        self.prefix_to_prefix = filter_identity_mappings(prefix_to_prefix)

    @property
    def is_noop(self) -> bool:
        """Returns true when the prefix to prefix map
        is mapping everything to the same location (identity)
        or there are no prefixes to replace."""
        return not bool(self.prefix_to_prefix)

    def apply(self, filenames: list):
        if self.is_noop:
            return
        for filename in filenames:
            self.apply_to_filename(filename)

    def apply_to_filename(self, filename):
        if self.is_noop:
            return
        with open(filename, "rb+") as f:
            self.apply_to_file(f)

    def apply_to_file(self, f):
        if self.is_noop:
            return
        self._apply_to_file(f)


class TextFilePrefixReplacer(PrefixReplacer):
    """This class applies prefix to prefix mappings for relocation
    on text files.

    Note that UTF-8 encoding is assumed."""

    def __init__(self, prefix_to_prefix: Dict[bytes, bytes]):
        """
        prefix_to_prefix (OrderedDict): OrderedDict where the keys are
            bytes representing the old prefixes and the values are the new.
        """
        super().__init__(prefix_to_prefix)
        # Single regex for all paths.
        self.regex = _byte_strings_to_single_binary_regex(self.prefix_to_prefix.keys())

    @classmethod
    def from_strings_or_bytes(
        cls, prefix_to_prefix: Dict[Prefix, Prefix]
    ) -> "TextFilePrefixReplacer":
        """Create a TextFilePrefixReplacer from an ordered prefix to prefix map."""
        return cls(_prefix_to_prefix_as_bytes(prefix_to_prefix))

    def _apply_to_file(self, f):
        """Text replacement implementation simply reads the entire file
        in memory and applies the combined regex."""
        replacement = lambda m: m.group(1) + self.prefix_to_prefix[m.group(2)] + m.group(3)
        data = f.read()
        new_data = re.sub(self.regex, replacement, data)
        if id(data) == id(new_data):
            return
        f.seek(0)
        f.write(new_data)
        f.truncate()
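A sketch of the ordering guarantee documented above, assuming Spack is importable (paths invented; we poke at the compiled regex directly just to show which alternative wins):

from collections import OrderedDict

from spack.relocate_text import TextFilePrefixReplacer

replacer = TextFilePrefixReplacer.from_strings_or_bytes(
    OrderedDict([("/first/sub", "/x"), ("/first", "/y")])
)
substituted = replacer.regex.sub(
    lambda m: m.group(1) + replacer.prefix_to_prefix[m.group(2)] + m.group(3),
    b"cfg=/first/sub/etc",
)
assert substituted == b"cfg=/x/etc"  # "/first/sub" is tried before "/first"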
class BinaryFilePrefixReplacer(PrefixReplacer):
    def __init__(self, prefix_to_prefix, suffix_safety_size=7):
        """
        prefix_to_prefix (OrderedDict): OrderedDict where the keys are
            bytes representing the old prefixes and the values are the new
        suffix_safety_size (int): in case of null terminated strings, what size
            of the suffix should remain to avoid aliasing issues?
        """
        assert suffix_safety_size >= 0
        super().__init__(prefix_to_prefix)
        self.suffix_safety_size = suffix_safety_size
        self.regex = self.binary_text_regex(self.prefix_to_prefix.keys(), suffix_safety_size)

    @classmethod
    def binary_text_regex(cls, binary_prefixes, suffix_safety_size=7):
        """
        Create a regex that looks for exact matches of prefixes, and also tries to
        match a C-string type null terminator in a small lookahead window.

        Arguments:
            binary_prefixes (list): List of byte strings of prefixes to match
            suffix_safety_size (int): Size of the lookahead for null-terminated strings.

        Returns: compiled regex
        """
        return re.compile(
            b"("
            + b"|".join(re.escape(p) for p in binary_prefixes)
            + b")([^\0]{0,%d}\0)?" % suffix_safety_size
        )
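A standalone illustration of that lookahead window (the regex is rebuilt inline so it runs without Spack; byte strings invented):

import re

def binary_text_regex(prefixes, suffix_safety_size=7):
    return re.compile(
        b"(" + b"|".join(re.escape(p) for p in prefixes) + b")([^\0]{0,%d}\0)?" % suffix_safety_size
    )

regex = binary_text_regex([b"/old"])
m = regex.search(b"xx/old/lib\0yy")
assert m.group(2) == b"/lib\0"            # terminator found within the window
m = regex.search(b"xx/old/very-long-suffix\0")
assert m.group(2) is None                 # terminator lies beyond the 7 + 1 byte window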
    @classmethod
    def from_strings_or_bytes(
        cls, prefix_to_prefix: Dict[Prefix, Prefix], suffix_safety_size: int = 7
    ) -> "BinaryFilePrefixReplacer":
        """Create a BinaryFilePrefixReplacer from an ordered prefix to prefix map.

        Arguments:
            prefix_to_prefix (OrderedDict): Ordered mapping of prefix to prefix.
            suffix_safety_size (int): Number of bytes to retain at the end of a C-string
                to avoid binary string-aliasing issues.
        """
        return cls(_prefix_to_prefix_as_bytes(prefix_to_prefix), suffix_safety_size)

    def _apply_to_file(self, f):
        """
        Given a file opened in rb+ mode, apply the string replacements as
        specified by an ordered dictionary of prefix to prefix mappings. This
        method takes special care of null-terminated C-strings. C-string constants
        are problematic because compilers and linkers optimize readonly strings for
        space by aliasing those that share a common suffix (only suffix since all
        of them are null terminated). See https://github.com/spack/spack/pull/31739
        and https://github.com/spack/spack/pull/32253 for details. Our logic matches
        the original prefix with a ``suffix_safety_size + 1`` lookahead for null bytes.
        If no null terminator is found, we simply pad with leading /, assuming that
        it's a long C-string; the full C-string after replacement has a large suffix
        in common with its original value.
        If there *is* a null terminator we can do the same as long as the replacement
        has a sufficiently long common suffix with the original prefix.
        As a last resort when the replacement does not have a long enough common suffix,
        we can try to shorten the string, but this only works if the new length is
        sufficiently short (typically the case when going from large padding -> normal path).
        If the replacement string is longer, or all of the above fails, we error out.

        Arguments:
            f: file opened in rb+ mode
        """
        assert f.tell() == 0

        # We *could* read binary data in chunks to avoid loading all in memory,
        # but it's nasty to deal with matches across boundaries, so let's stick to
        # something simple.

        for match in self.regex.finditer(f.read()):
            # The matching prefix (old) and its replacement (new)
            old = match.group(1)
            new = self.prefix_to_prefix[old]

            # Did we find a trailing null within a N + 1 bytes window after the prefix?
            null_terminated = match.end(0) > match.end(1)

            # Suffix string length, excluding the null byte
            # Only makes sense if null_terminated
            suffix_strlen = match.end(0) - match.end(1) - 1

            # How many bytes are we shrinking our string?
            bytes_shorter = len(old) - len(new)

            # We can't make strings larger.
            if bytes_shorter < 0:
                raise CannotGrowString(old, new)

            # If we don't know whether this is a null terminated C-string (we're looking
            # only N + 1 bytes ahead), or if it is and we have a common suffix, we can
            # simply pad with leading dir separators.
            elif (
                not null_terminated
                or suffix_strlen >= self.suffix_safety_size  # == is enough, but let's be defensive
                or old[-self.suffix_safety_size + suffix_strlen :]
                == new[-self.suffix_safety_size + suffix_strlen :]
            ):
                replacement = b"/" * bytes_shorter + new

            # If it *was* null terminated, all that matters is that we can leave N bytes
            # of old suffix in place. Note that > is required since we also insert an
            # additional null terminator.
            elif bytes_shorter > self.suffix_safety_size:
                replacement = new + match.group(2)  # includes the trailing null

            # Otherwise... we can't :(
            else:
                raise CannotShrinkCString(old, new, match.group()[:-1])

            f.seek(match.start())
            f.write(replacement)
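A toy rendition of the first branch's padding rule (not Spack's code; paths invented), showing why the file size never changes when the new prefix is shorter:

old, new = b"/spack/padded-long-prefix", b"/opt"
bytes_shorter = len(old) - len(new)       # 21 bytes shorter
replacement = b"/" * bytes_shorter + new  # pad with leading '/' to keep the length
assert len(replacement) == len(old)       # the in-place overwrite stays aligned
# Repeated leading separators resolve to the same path at runtime, so a
# library search string like "/////.../opt/lib" still loads from "/opt/lib".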
class BinaryStringReplacementError(spack.error.SpackError):
    def __init__(self, file_path, old_len, new_len):
        """The size of the file changed after binary path substitution

        Args:
            file_path (str): file with changing size
            old_len (str): original length of the file
            new_len (str): length of the file after substitution
        """
        super(BinaryStringReplacementError, self).__init__(
            "Doing a binary string replacement in %s failed.\n"
            "The size of the file changed from %s to %s\n"
            "when it should have remained the same." % (file_path, old_len, new_len)
        )


class BinaryTextReplaceError(spack.error.SpackError):
    def __init__(self, msg):
        msg += (
            " To fix this, compile with more padding "
            "(config:install_tree:padded_length), or install to a shorter prefix."
        )
        super(BinaryTextReplaceError, self).__init__(msg)


class CannotGrowString(BinaryTextReplaceError):
    def __init__(self, old, new):
        msg = "Cannot replace {!r} with {!r} because the new prefix is longer.".format(old, new)
        super(CannotGrowString, self).__init__(msg)


class CannotShrinkCString(BinaryTextReplaceError):
    def __init__(self, old, new, full_old_string):
        # Just interpolate binary string to not risk issues with invalid
        # unicode, which would be really bad user experience: error in error.
        # We have no clue if we actually deal with a real C-string nor what
        # encoding it has.
        msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format(
            old, new, full_old_string
        )
        super(CannotShrinkCString, self).__init__(msg)
@@ -70,7 +70,7 @@ def rewire_node(spec, explicit):
        for rel_path in manifest.get("text_to_relocate", [])
    ]
    if text_to_relocate:
        relocate.unsafe_relocate_text(files=text_to_relocate, prefixes=prefix_to_prefix)
        relocate.relocate_text(files=text_to_relocate, prefixes=prefix_to_prefix)

    bins_to_relocate = [
        os.path.join(tempdir, spec.dag_hash(), rel_path)
@@ -97,7 +97,7 @@ def rewire_node(spec, explicit):
        spec.build_spec.prefix,
        spec.prefix,
    )
    relocate.unsafe_relocate_text_bin(binaries=bins_to_relocate, prefixes=prefix_to_prefix)
    relocate.relocate_text_bin(binaries=bins_to_relocate, prefixes=prefix_to_prefix)
    # Copy package into place, except for spec.json (because spec.json
    # describes the old spec and not the new spliced spec).
    shutil.copytree(
@@ -2259,7 +2259,7 @@ def depends_on(self, pkg, dep, type):
        assert len(dependencies) < 2, msg

        if not dependencies:
            self._specs[pkg].add_dependency_edge(self._specs[dep], (type,))
            self._specs[pkg].add_dependency_edge(self._specs[dep], deptypes=(type,))
        else:
            # TODO: This assumes that each solve unifies dependencies
            dependencies[0].add_type(type)
@@ -168,7 +168,7 @@
)

#: specfile format version. Must increase monotonically
specfile_format_version = 3
SPECFILE_FORMAT_VERSION = 3


def colorize_spec(spec):
@@ -1529,14 +1529,14 @@ def _set_compiler(self, compiler):
        )
        self.compiler = compiler

    def _add_dependency(self, spec, deptypes):
    def _add_dependency(self, spec: "Spec", *, deptypes: dp.DependencyArgument):
        """Called by the parser to add another spec as a dependency."""
        if spec.name not in self._dependencies:
            self.add_dependency_edge(spec, deptypes)
            self.add_dependency_edge(spec, deptypes=deptypes)
            return

        # Keep the intersection of constraints when a dependency is added
        # multiple times. Currently we only allow identical edge types.
        # multiple times. Currently, we only allow identical edge types.
        orig = self._dependencies[spec.name]
        try:
            dspec = next(dspec for dspec in orig if deptypes == dspec.deptypes)
@@ -1550,34 +1550,39 @@ def _add_dependency(self, spec, deptypes):
                "Cannot depend on incompatible specs '%s' and '%s'" % (dspec.spec, spec)
            )

    def add_dependency_edge(self, dependency_spec, deptype):
    def add_dependency_edge(
        self,
        dependency_spec: "Spec",
        *,
        deptypes: dp.DependencyArgument,
    ):
        """Add a dependency edge to this spec.

        Args:
            dependency_spec (Spec): spec of the dependency
            deptype (str or tuple): dependency types
            dependency_spec: spec of the dependency
            deptypes: dependency types for this edge
        """
        deptype = dp.canonical_deptype(deptype)
        deptypes = dp.canonical_deptype(deptypes)

        # Check if we need to update edges that are already present
        selected = self._dependencies.select(child=dependency_spec.name)
        for edge in selected:
            if any(d in edge.deptypes for d in deptype):
            if any(d in edge.deptypes for d in deptypes):
                msg = (
                    'cannot add a dependency on "{0.spec}" of {1} type '
                    'when the "{0.parent}" has the edge {0!s} already'
                )
                raise spack.error.SpecError(msg.format(edge, deptype))
                raise spack.error.SpecError(msg.format(edge, deptypes))

        for edge in selected:
            if id(dependency_spec) == id(edge.spec):
                # If we are here, it means the edge object was previously added to
                # both the parent and the child. When we update this object they'll
                # both see the deptype modification.
                edge.add_type(deptype)
                edge.add_type(deptypes)
                return

        edge = DependencySpec(self, dependency_spec, deptypes=deptype)
        edge = DependencySpec(self, dependency_spec, deptypes=deptypes)
        self._dependencies.add(edge)
        dependency_spec._dependents.add(edge)
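A standalone miniature of the signature change above (not Spack's class), showing why the old positional call sites now raise:

class MiniSpec:
    def add_dependency_edge(self, dependency_spec, *, deptypes):
        return (dependency_spec, deptypes)

s = MiniSpec()
assert s.add_dependency_edge("zlib", deptypes=("build", "link"))
try:
    s.add_dependency_edge("zlib", ("build", "link"))  # positional deptypes
except TypeError:
    pass  # the bare '*' makes deptypes keyword-only, as in the diff above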
@@ -2027,7 +2032,7 @@ def to_dict(self, hash=ht.dag_hash):
            node_list.append(node)
            hash_set.add(node_hash)

        meta_dict = syaml.syaml_dict([("version", specfile_format_version)])
        meta_dict = syaml.syaml_dict([("version", SPECFILE_FORMAT_VERSION)])
        inner_dict = syaml.syaml_dict([("_meta", meta_dict), ("nodes", node_list)])
        spec_dict = syaml.syaml_dict([("spec", inner_dict)])
        return spec_dict
@@ -2063,137 +2068,13 @@ def to_json(self, stream=None, hash=ht.dag_hash):

    @staticmethod
    def from_specfile(path):
        """Construct a spec from aJSON or YAML spec file path"""
        """Construct a spec from a JSON or YAML spec file path"""
        with open(path, "r") as fd:
            file_content = fd.read()
            if path.endswith(".json"):
                return Spec.from_json(file_content)
            return Spec.from_yaml(file_content)

    @staticmethod
    def from_node_dict(node):
        spec = Spec()
        if "name" in node.keys():
            # New format
            name = node["name"]
        else:
            # Old format
            name = next(iter(node))
            node = node[name]
        for h in ht.hashes:
            setattr(spec, h.attr, node.get(h.name, None))

        spec.name = name
        spec.namespace = node.get("namespace", None)

        if "version" in node or "versions" in node:
            spec.versions = vn.VersionList.from_dict(node)

        if "arch" in node:
            spec.architecture = ArchSpec.from_dict(node)

        if "compiler" in node:
            spec.compiler = CompilerSpec.from_dict(node)
        else:
            spec.compiler = None

        if "parameters" in node:
            for name, values in node["parameters"].items():
                if name in _valid_compiler_flags:
                    spec.compiler_flags[name] = []
                    for val in values:
                        spec.compiler_flags.add_flag(name, val, False)
                else:
                    spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, values)
        elif "variants" in node:
            for name, value in node["variants"].items():
                spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, value)
            for name in FlagMap.valid_compiler_flags():
                spec.compiler_flags[name] = []

        spec.external_path = None
        spec.external_modules = None
        if "external" in node:
            # This conditional is needed because sometimes this function is
            # called with a node already constructed that contains a 'versions'
            # and 'external' field. Related to virtual packages provider
            # indexes.
            if node["external"]:
                spec.external_path = node["external"]["path"]
                spec.external_modules = node["external"]["module"]
                if spec.external_modules is False:
                    spec.external_modules = None
                spec.extra_attributes = node["external"].get(
                    "extra_attributes", syaml.syaml_dict()
                )

        # specs read in are concrete unless marked abstract
        spec._concrete = node.get("concrete", True)

        if "patches" in node:
            patches = node["patches"]
            if len(patches) > 0:
                mvar = spec.variants.setdefault("patches", vt.MultiValuedVariant("patches", ()))
                mvar.value = patches
                # FIXME: Monkey patches mvar to store patches order
                mvar._patches_in_order_of_appearance = patches

        # Don't read dependencies here; from_dict() is used by
        # from_yaml() and from_json() to read the root *and* each dependency
        # spec.

        return spec

    @staticmethod
    def build_spec_from_node_dict(node, hash_type=ht.dag_hash.name):
        build_spec_dict = node["build_spec"]
        return build_spec_dict["name"], build_spec_dict[hash_type], hash_type

    @staticmethod
    def dependencies_from_node_dict(node):
        if "name" in node.keys():
            # New format
            name = node["name"]
        else:
            name = next(iter(node))
            node = node[name]
        if "dependencies" not in node:
            return
        for t in Spec.read_yaml_dep_specs(node["dependencies"]):
            yield t

    @staticmethod
    def read_yaml_dep_specs(deps, hash_type=ht.dag_hash.name):
        """Read the DependencySpec portion of a YAML-formatted Spec.
        This needs to be backward-compatible with older spack spec
        formats so that reindex will work on old specs/databases.
        """
        dep_iter = deps.items() if isinstance(deps, dict) else deps
        for dep in dep_iter:
            if isinstance(dep, tuple):
                dep_name, elt = dep
            else:
                elt = dep
                dep_name = dep["name"]
            if isinstance(elt, str):
                # original format, elt is just the dependency hash.
                dep_hash, deptypes = elt, ["build", "link"]
            elif isinstance(elt, tuple):
                # original deptypes format: (used tuples, not future-proof)
                dep_hash, deptypes = elt
            elif isinstance(elt, dict):
                # new format: elements of dependency spec are keyed.
                for h in ht.hashes:
                    if h.name in elt:
                        dep_hash, deptypes = elt[h.name], elt["type"]
                        hash_type = h.name
                        break
                else:  # We never determined a hash type...
                    raise spack.error.SpecError("Couldn't parse dependency spec.")
            else:
                raise spack.error.SpecError("Couldn't parse dependency types in spec.")
            yield dep_name, dep_hash, list(deptypes), hash_type

    @staticmethod
    def override(init_spec, change_spec):
        # TODO: this doesn't account for the case where the changed spec
@@ -2367,7 +2248,7 @@ def spec_and_dependency_types(s):
            dag_node, dependency_types = spec_and_dependency_types(s)

            dependency_spec = spec_builder({dag_node: s_dependencies})
            spec._add_dependency(dependency_spec, dependency_types)
            spec._add_dependency(dependency_spec, deptypes=dependency_types)

        return spec

@@ -2380,54 +2261,14 @@ def from_dict(data):
        Args:
            data: a nested dict/list data structure read from YAML or JSON.
        """
        if isinstance(data["spec"], list):  # Legacy specfile format
            return _spec_from_old_dict(data)
        # Legacy specfile format
        if isinstance(data["spec"], list):
            return SpecfileV1.load(data)

        # Current specfile format
        nodes = data["spec"]["nodes"]
        hash_type = None
        any_deps = False

        # Pass 0: Determine hash type
        for node in nodes:
            if "dependencies" in node.keys():
                any_deps = True
                for _, _, _, dhash_type in Spec.dependencies_from_node_dict(node):
                    if dhash_type:
                        hash_type = dhash_type
                        break

        if not any_deps:  # If we never see a dependency...
            hash_type = ht.dag_hash.name
        elif not hash_type:  # Seen a dependency, still don't know hash_type
            raise spack.error.SpecError(
                "Spec dictionary contains malformed " "dependencies. Old format?"
            )

        hash_dict = {}
        root_spec_hash = None

        # Pass 1: Create a single lookup dictionary by hash
        for i, node in enumerate(nodes):
            node_hash = node[hash_type]
            node_spec = Spec.from_node_dict(node)
            hash_dict[node_hash] = node
            hash_dict[node_hash]["node_spec"] = node_spec
            if i == 0:
                root_spec_hash = node_hash
        if not root_spec_hash:
            raise spack.error.SpecError("Spec dictionary contains no nodes.")

        # Pass 2: Finish construction of all DAG edges (including build specs)
        for node_hash, node in hash_dict.items():
            node_spec = node["node_spec"]
            for _, dhash, dtypes, _ in Spec.dependencies_from_node_dict(node):
                node_spec._add_dependency(hash_dict[dhash]["node_spec"], dtypes)
            if "build_spec" in node.keys():
                _, bhash, _ = Spec.build_spec_from_node_dict(node, hash_type=hash_type)
                node_spec._build_spec = hash_dict[bhash]["node_spec"]

        return hash_dict[root_spec_hash]["node_spec"]
        specfile_version = int(data["spec"]["_meta"]["version"])
        if specfile_version == 2:
            return SpecfileV2.load(data)
        return SpecfileV3.load(data)
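A runnable miniature of that version dispatch (the payloads are invented skeletons, not real specfile contents):

def pick_reader(data):
    if isinstance(data["spec"], list):  # legacy list layout
        return "SpecfileV1"
    version = int(data["spec"]["_meta"]["version"])
    return "SpecfileV2" if version == 2 else "SpecfileV3"

assert pick_reader({"spec": [{"zlib": {}}]}) == "SpecfileV1"
assert pick_reader({"spec": {"_meta": {"version": 2}, "nodes": []}}) == "SpecfileV2"
assert pick_reader({"spec": {"_meta": {"version": 3}, "nodes": []}}) == "SpecfileV3"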
    @staticmethod
    def from_yaml(stream):
@@ -2583,7 +2424,7 @@ def _replace_with(self, concrete):

                # add the replacement, unless it is already a dep of dependent.
                if concrete.name not in dependent._dependencies:
                    dependent._add_dependency(concrete, deptypes)
                    dependent._add_dependency(concrete, deptypes=deptypes)

    def _expand_virtual_packages(self, concretizer):
        """Find virtual packages in this spec, replace them with providers,
@@ -3254,7 +3095,7 @@ def _merge_dependency(self, dependency, visited, spec_deps, provider_index, test
        # Add merged spec to my deps and recurse
        spec_dependency = spec_deps[dep.name]
        if dep.name not in self._dependencies:
            self._add_dependency(spec_dependency, dependency.type)
            self._add_dependency(spec_dependency, deptypes=dependency.type)

        changed |= spec_dependency._normalize_helper(visited, spec_deps, provider_index, tests)
        return changed
@@ -3585,7 +3426,7 @@ def _constrain_dependencies(self, other):
                dep_spec_copy = other._get_dependency(name)
                dep_copy = dep_spec_copy.spec
                deptypes = dep_spec_copy.deptypes
                self._add_dependency(dep_copy.copy(), deptypes)
                self._add_dependency(dep_copy.copy(), deptypes=deptypes)
                changed = True

        return changed
@@ -3898,7 +3739,7 @@ def spid(spec):
                    new_specs[spid(edge.spec)] = edge.spec.copy(deps=False)

                new_specs[spid(edge.parent)].add_dependency_edge(
                    new_specs[spid(edge.spec)], edge.deptypes
                    new_specs[spid(edge.spec)], deptypes=edge.deptypes
                )

    def copy(self, deps=True, **kwargs):
@@ -4813,12 +4654,12 @@ def from_self(name, transitive):
            if name in self_nodes:
                for edge in self[name].edges_to_dependencies():
                    dep_name = deps_to_replace.get(edge.spec, edge.spec).name
                    nodes[name].add_dependency_edge(nodes[dep_name], edge.deptypes)
                    nodes[name].add_dependency_edge(nodes[dep_name], deptypes=edge.deptypes)
                if any(dep not in self_nodes for dep in self[name]._dependencies):
                    nodes[name].build_spec = self[name].build_spec
            else:
                for edge in other[name].edges_to_dependencies():
                    nodes[name].add_dependency_edge(nodes[edge.spec.name], edge.deptypes)
                    nodes[name].add_dependency_edge(nodes[edge.spec.name], deptypes=edge.deptypes)
                if any(dep not in other_nodes for dep in other[name]._dependencies):
                    nodes[name].build_spec = other[name].build_spec

@@ -4891,40 +4732,252 @@ def merge_abstract_anonymous_specs(*abstract_specs: Spec):
        # Update with additional constraints from other spec
        for name in current_spec_constraint.direct_dep_difference(merged_spec):
            edge = next(iter(current_spec_constraint.edges_to_dependencies(name)))
            merged_spec._add_dependency(edge.spec.copy(), edge.deptypes)
            merged_spec._add_dependency(edge.spec.copy(), deptypes=edge.deptypes)

    return merged_spec


def _spec_from_old_dict(data):
    """Construct a spec from JSON/YAML using the format version 1.
    Note: Version 1 format has no notion of a build_spec, and names are
    guaranteed to be unique.
class SpecfileReaderBase:
    @classmethod
    def from_node_dict(cls, node):
        spec = Spec()

    Parameters:
    data -- a nested dict/list data structure read from YAML or JSON.
    """
    nodes = data["spec"]
        name, node = cls.name_and_data(node)
        for h in ht.hashes:
            setattr(spec, h.attr, node.get(h.name, None))

    # Read nodes out of list. Root spec is the first element;
    # dependencies are the following elements.
    dep_list = [Spec.from_node_dict(node) for node in nodes]
    if not dep_list:
        raise spack.error.SpecError("YAML spec contains no nodes.")
    deps = dict((spec.name, spec) for spec in dep_list)
    spec = dep_list[0]
        spec.name = name
        spec.namespace = node.get("namespace", None)

    for node in nodes:
        # get dependency dict from the node.
        if "version" in node or "versions" in node:
            spec.versions = vn.VersionList.from_dict(node)

        if "arch" in node:
            spec.architecture = ArchSpec.from_dict(node)

        if "compiler" in node:
            spec.compiler = CompilerSpec.from_dict(node)
        else:
            spec.compiler = None

        for name, values in node.get("parameters", {}).items():
            if name in _valid_compiler_flags:
                spec.compiler_flags[name] = []
                for val in values:
                    spec.compiler_flags.add_flag(name, val, False)
            else:
                spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, values)

        spec.external_path = None
        spec.external_modules = None
        if "external" in node:
            # This conditional is needed because sometimes this function is
            # called with a node already constructed that contains a 'versions'
            # and 'external' field. Related to virtual packages provider
            # indexes.
            if node["external"]:
                spec.external_path = node["external"]["path"]
                spec.external_modules = node["external"]["module"]
                if spec.external_modules is False:
                    spec.external_modules = None
                spec.extra_attributes = node["external"].get(
                    "extra_attributes", syaml.syaml_dict()
                )

        # specs read in are concrete unless marked abstract
        spec._concrete = node.get("concrete", True)

        if "patches" in node:
            patches = node["patches"]
            if len(patches) > 0:
                mvar = spec.variants.setdefault("patches", vt.MultiValuedVariant("patches", ()))
                mvar.value = patches
                # FIXME: Monkey patches mvar to store patches order
                mvar._patches_in_order_of_appearance = patches

        # Don't read dependencies here; from_dict() is used by
        # from_yaml() and from_json() to read the root *and* each dependency
        # spec.

        return spec

    @classmethod
    def _load(cls, data):
        """Construct a spec from JSON/YAML using the format version 2.

        This format is used in Spack v0.17, was introduced in
        https://github.com/spack/spack/pull/22845

        Args:
            data: a nested dict/list data structure read from YAML or JSON.
        """
        # Current specfile format
        nodes = data["spec"]["nodes"]
        hash_type = None
        any_deps = False

        # Pass 0: Determine hash type
        for node in nodes:
            for _, _, _, dhash_type in cls.dependencies_from_node_dict(node):
                any_deps = True
                if dhash_type:
                    hash_type = dhash_type
                    break

        if not any_deps:  # If we never see a dependency...
            hash_type = ht.dag_hash.name
        elif not hash_type:  # Seen a dependency, still don't know hash_type
            raise spack.error.SpecError(
                "Spec dictionary contains malformed dependencies. Old format?"
            )

        hash_dict = {}
        root_spec_hash = None

        # Pass 1: Create a single lookup dictionary by hash
        for i, node in enumerate(nodes):
            node_hash = node[hash_type]
            node_spec = cls.from_node_dict(node)
            hash_dict[node_hash] = node
            hash_dict[node_hash]["node_spec"] = node_spec
            if i == 0:
                root_spec_hash = node_hash

        if not root_spec_hash:
            raise spack.error.SpecError("Spec dictionary contains no nodes.")

        # Pass 2: Finish construction of all DAG edges (including build specs)
        for node_hash, node in hash_dict.items():
            node_spec = node["node_spec"]
            for _, dhash, dtypes, _ in cls.dependencies_from_node_dict(node):
                node_spec._add_dependency(hash_dict[dhash]["node_spec"], deptypes=dtypes)
            if "build_spec" in node.keys():
                _, bhash, _ = cls.build_spec_from_node_dict(node, hash_type=hash_type)
                node_spec._build_spec = hash_dict[bhash]["node_spec"]

        return hash_dict[root_spec_hash]["node_spec"]


class SpecfileV1(SpecfileReaderBase):
    @classmethod
    def load(cls, data):
        """Construct a spec from JSON/YAML using the format version 1.

        Note: Version 1 format has no notion of a build_spec, and names are
        guaranteed to be unique. This function is guaranteed to read specs as
        old as v0.10 - while it was not checked for older formats.

        Args:
            data: a nested dict/list data structure read from YAML or JSON.
        """
        nodes = data["spec"]

        # Read nodes out of list. Root spec is the first element;
        # dependencies are the following elements.
        dep_list = [cls.from_node_dict(node) for node in nodes]
        if not dep_list:
            raise spack.error.SpecError("specfile contains no nodes.")

        deps = {spec.name: spec for spec in dep_list}
        result = dep_list[0]

        for node in nodes:
            # get dependency dict from the node.
            name, data = cls.name_and_data(node)
            for dname, _, dtypes, _ in cls.dependencies_from_node_dict(data):
                deps[name]._add_dependency(deps[dname], deptypes=dtypes)

        return result

    @classmethod
    def name_and_data(cls, node):
        name = next(iter(node))
        node = node[name]
        return name, node

        if "dependencies" not in node[name]:
            continue
    @classmethod
    def dependencies_from_node_dict(cls, node):
        if "dependencies" not in node:
            return []

        for dname, _, dtypes, _ in Spec.dependencies_from_node_dict(node):
            deps[name]._add_dependency(deps[dname], dtypes)
        for t in cls.read_specfile_dep_specs(node["dependencies"]):
            yield t

    return spec
    @classmethod
    def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
        """Read the DependencySpec portion of a YAML-formatted Spec.
        This needs to be backward-compatible with older spack spec
        formats so that reindex will work on old specs/databases.
        """
        for dep_name, elt in deps.items():
            if isinstance(elt, dict):
                for h in ht.hashes:
                    if h.name in elt:
                        dep_hash, deptypes = elt[h.name], elt["type"]
                        hash_type = h.name
                        break
                else:  # We never determined a hash type...
                    raise spack.error.SpecError("Couldn't parse dependency spec.")
            else:
                raise spack.error.SpecError("Couldn't parse dependency types in spec.")
            yield dep_name, dep_hash, list(deptypes), hash_type


class SpecfileV2(SpecfileReaderBase):
    @classmethod
    def load(cls, data):
        result = cls._load(data)
        return result

    @classmethod
    def name_and_data(cls, node):
        return node["name"], node

    @classmethod
    def dependencies_from_node_dict(cls, node):
        return cls.read_specfile_dep_specs(node.get("dependencies", []))

    @classmethod
    def read_specfile_dep_specs(cls, deps, hash_type=ht.dag_hash.name):
        """Read the DependencySpec portion of a YAML-formatted Spec.
        This needs to be backward-compatible with older spack spec
        formats so that reindex will work on old specs/databases.
        """
        if not isinstance(deps, list):
            raise spack.error.SpecError("Spec dictionary contains malformed dependencies")

        result = []
        for dep in deps:
            elt = dep
            dep_name = dep["name"]
            if isinstance(elt, dict):
                # new format: elements of dependency spec are keyed.
                for h in ht.hashes:
                    if h.name in elt:
                        dep_hash, deptypes, hash_type, virtuals = cls.extract_info_from_dep(elt, h)
                        break
                else:  # We never determined a hash type...
                    raise spack.error.SpecError("Couldn't parse dependency spec.")
            else:
                raise spack.error.SpecError("Couldn't parse dependency types in spec.")
            result.append((dep_name, dep_hash, list(deptypes), hash_type))
        return result

    @classmethod
    def extract_info_from_dep(cls, elt, hash):
        dep_hash, deptypes = elt[hash.name], elt["type"]
        hash_type = hash.name
        virtuals = []
        return dep_hash, deptypes, hash_type, virtuals

    @classmethod
    def build_spec_from_node_dict(cls, node, hash_type=ht.dag_hash.name):
        build_spec_dict = node["build_spec"]
        return build_spec_dict["name"], build_spec_dict[hash_type], hash_type


class SpecfileV3(SpecfileV2):
    pass


class LazySpecCache(collections.defaultdict):
@@ -51,7 +51,9 @@ def specs_as_constraints(self):
            constraints = []
            for item in self.specs_as_yaml_list:
                if isinstance(item, dict):  # matrix of specs
                    constraints.extend(_expand_matrix_constraints(item))
                    expanded = _expand_matrix_constraints(item)
                    for e in expanded:
                        constraints.append([Spec(x) for x in e])
                else:  # individual spec
                    constraints.append([Spec(item)])
            self._constraints = constraints
@@ -62,13 +64,11 @@ def specs_as_constraints(self):
    def specs(self):
        if self._specs is None:
            specs = []
            # This could be slightly faster done directly from yaml_list,
            # but this way is easier to maintain.
            for constraint_list in self.specs_as_constraints:
                spec = constraint_list[0].copy()
                for const in constraint_list[1:]:
                    spec.constrain(const)
                specs.append(spec)
            for item in self.specs_as_yaml_list:
                if isinstance(item, dict):  # matrix of specs
                    specs.extend([Spec(" ".join(x)) for x in _expand_matrix_constraints(item)])
                else:  # individual spec
                    specs.append(Spec(item))
            self._specs = specs

        return self._specs
@@ -193,11 +193,7 @@ def _expand_matrix_constraints(matrix_config):
    for combo in itertools.product(*expanded_rows):
        # Construct a combined spec to test against excludes
        flat_combo = [constraint for constraint_list in combo for constraint in constraint_list]
        flat_combo = [Spec(x) for x in flat_combo]

        test_spec = flat_combo[0].copy()
        for constraint in flat_combo[1:]:
            test_spec.constrain(constraint)
        test_spec = Spec(" ".join(flat_combo))

        # Abstract variants don't have normal satisfaction semantics
        # Convert all variants to concrete types.
@@ -213,7 +209,7 @@ def _expand_matrix_constraints(matrix_config):
            continue

        if sigil:
            flat_combo[0] = Spec(sigil + str(flat_combo[0]))
            flat_combo[0] = sigil + flat_combo[0]

        # Add to list of constraints
        results.append(flat_combo)
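A standalone sketch of the shape this produces after the change, with invented package and compiler names and each matrix row collapsed to single-constraint cells (the real function also handles references, sigils, and excludes): _expand_matrix_constraints now yields lists of constraint strings, and callers either join them into one spec string or wrap them in Spec(...) themselves.

import itertools

rows = [["zlib", "libpng"], ["%gcc", "%clang"]]
flat_combos = [list(combo) for combo in itertools.product(*rows)]
# [['zlib', '%gcc'], ['zlib', '%clang'], ['libpng', '%gcc'], ['libpng', '%clang']]
test_specs = [" ".join(combo) for combo in flat_combos]  # e.g. "zlib %gcc"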
@@ -2,9 +2,9 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import itertools as it
import itertools
import os
import subprocess
import sys

import pytest
@@ -14,7 +14,7 @@
import spack.ci as ci
import spack.ci_needs_workaround as cinw
import spack.ci_optimization as ci_opt
import spack.config as cfg
import spack.config
import spack.environment as ev
import spack.error
import spack.paths as spack_paths
@@ -23,12 +23,16 @@
import spack.util.spack_yaml as syaml


@pytest.fixture
def repro_dir(tmp_path):
    result = tmp_path / "repro_dir"
    result.mkdir()
    with fs.working_dir(str(tmp_path)):
        yield result


def test_urlencode_string():
    s = "Spack Test Project"

    s_enc = ci._url_encode_string(s)

    assert s_enc == "Spack+Test+Project"
    assert ci._url_encode_string("Spack Test Project") == "Spack+Test+Project"


@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
@@ -54,16 +58,16 @@ def assert_present(config):
        "install_missing_compilers" in config and config["install_missing_compilers"] is True
    )

    original_config = cfg.get("config")
    original_config = spack.config.get("config")
    assert_missing(original_config)

    ci.configure_compilers("FIND_ANY", scope="site")

    second_config = cfg.get("config")
    second_config = spack.config.get("config")
    assert_missing(second_config)

    ci.configure_compilers("INSTALL_MISSING")
    last_config = cfg.get("config")
    last_config = spack.config.get("config")
    assert_present(last_config)


@@ -380,7 +384,7 @@ def make_yaml_obj(use_artifact_buildcache, optimize, use_dependencies):
        use_artifact_buildcache=use_ab, optimize=False, use_dependencies=False
    )

    for opt, deps in it.product(*(((False, True),) * 2)):
    for opt, deps in itertools.product(*(((False, True),) * 2)):
        # neither optimizing nor converting needs->dependencies
        if not (opt or deps):
            # therefore, nothing to test
@@ -453,33 +457,24 @@ def test_affected_specs_on_first_concretization(mutable_mock_env_path, mock_pack
@pytest.mark.skipif(
    sys.platform == "win32", reason="Reliance on bash script not supported on Windows"
)
def test_ci_process_command(tmpdir):
    repro_dir = tmpdir.join("repro_dir").strpath
    os.makedirs(repro_dir)
    result = ci.process_command("help", [], repro_dir)

    assert os.path.exists(fs.join_path(repro_dir, "help.sh"))
    assert not result
def test_ci_process_command(repro_dir):
    result = ci.process_command("help", commands=[], repro_dir=str(repro_dir))
    help_sh = repro_dir / "help.sh"
    assert help_sh.exists() and not result


@pytest.mark.skipif(
    sys.platform == "win32", reason="Reliance on bash script not supported on Windows"
)
def test_ci_process_command_fail(tmpdir, monkeypatch):
    import subprocess

    err = "subprocess wait exception"
def test_ci_process_command_fail(repro_dir, monkeypatch):
    msg = "subprocess wait exception"

    def _fail(self, args):
        raise RuntimeError(err)
        raise RuntimeError(msg)

    monkeypatch.setattr(subprocess.Popen, "__init__", _fail)

    repro_dir = tmpdir.join("repro_dir").strpath
    os.makedirs(repro_dir)

    with pytest.raises(RuntimeError, match=err):
        ci.process_command("help", [], repro_dir)
    with pytest.raises(RuntimeError, match=msg):
        ci.process_command("help", [], str(repro_dir))


def test_ci_create_buildcache(tmpdir, working_env, config, mock_packages, monkeypatch):
@@ -513,16 +508,15 @@ def test_ci_run_standalone_tests_missing_requirements(
    sys.platform == "win32", reason="Reliance on bash script not supported on Windows"
)
def test_ci_run_standalone_tests_not_installed_junit(
    tmpdir, working_env, default_mock_concretization, mock_test_stage, capfd
    tmp_path, repro_dir, working_env, default_mock_concretization, mock_test_stage, capfd
):
    log_file = tmpdir.join("junit.xml").strpath
    log_file = tmp_path / "junit.xml"
    args = {
        "log_file": log_file,
        "log_file": str(log_file),
        "job_spec": default_mock_concretization("printing-package"),
        "repro_dir": tmpdir.join("repro_dir").strpath,
        "repro_dir": str(repro_dir),
        "fail_fast": True,
    }
    os.makedirs(args["repro_dir"])

    ci.run_standalone_tests(**args)
    err = capfd.readouterr()[1]
@@ -534,16 +528,15 @@ def test_ci_run_standalone_tests_not_installed_junit(
    sys.platform == "win32", reason="Reliance on bash script not supported on Windows"
)
def test_ci_run_standalone_tests_not_installed_cdash(
    tmpdir, working_env, default_mock_concretization, mock_test_stage, capfd
    tmp_path, repro_dir, working_env, default_mock_concretization, mock_test_stage, capfd
):
    """Test run_standalone_tests with cdash and related options."""
    log_file = tmpdir.join("junit.xml").strpath
    log_file = tmp_path / "junit.xml"
    args = {
        "log_file": log_file,
        "log_file": str(log_file),
        "job_spec": default_mock_concretization("printing-package"),
        "repro_dir": tmpdir.join("repro_dir").strpath,
        "repro_dir": str(repro_dir),
    }
    os.makedirs(args["repro_dir"])

    # Cover when CDash handler provided (with the log file as well)
    ci_cdash = {
@@ -564,9 +557,9 @@ def test_ci_run_standalone_tests_not_installed_cdash(
    assert "0 passed of 0" in out

    # copy test results (though none)
    artifacts_dir = tmpdir.join("artifacts")
    fs.mkdirp(artifacts_dir.strpath)
    handler.copy_test_results(tmpdir.strpath, artifacts_dir.strpath)
    artifacts_dir = tmp_path / "artifacts"
    artifacts_dir.mkdir()
    handler.copy_test_results(str(tmp_path), str(artifacts_dir))
    err = capfd.readouterr()[1]
    assert "Unable to copy files" in err
    assert "No such file or directory" in err
@@ -1187,3 +1187,16 @@ def test_padded_install_runtests_root(install_mockery_mutable_config, mock_fetch
    spack.config.set("config:install_tree:padded_length", 255)
    output = install("--test=root", "--no-cache", "test-build-callbacks", fail_on_error=False)
    assert output.count("method not implemented") == 1


@pytest.mark.regression("35337")
def test_report_filename_for_cdash(install_mockery_mutable_config, mock_fetch):
    """Test that the temporary file used to write the XML for CDash is not the upload URL"""
    parser = argparse.ArgumentParser()
    spack.cmd.install.setup_parser(parser)
    args = parser.parse_args(
        ["--cdash-upload-url", "https://blahblah/submit.php?project=debugging", "a"]
    )
    _, specs = spack.cmd.install.specs_from_cli(args, {})
    filename = spack.cmd.install.report_filename(args, specs)
    assert filename != "https://blahblah/submit.php?project=debugging"
@@ -14,6 +14,8 @@

maintainers = spack.main.SpackCommand("maintainers")

MAINTAINED_PACKAGES = ["maintainers-1", "maintainers-2", "maintainers-3", "py-extension1"]


def split(output):
    """Split command line output into an array."""
@@ -23,14 +25,12 @@ def split(output):

def test_maintained(mock_packages):
    out = split(maintainers("--maintained"))
    assert out == ["maintainers-1", "maintainers-2"]
    assert out == MAINTAINED_PACKAGES


def test_unmaintained(mock_packages):
    out = split(maintainers("--unmaintained"))
    assert out == sorted(
        set(spack.repo.all_package_names()) - set(["maintainers-1", "maintainers-2"])
    )
    assert out == sorted(set(spack.repo.all_package_names()) - set(MAINTAINED_PACKAGES))


def test_all(mock_packages, capfd):
@@ -43,6 +43,16 @@ def test_all(mock_packages, capfd):
        "maintainers-2:",
        "user2,",
        "user3",
        "maintainers-3:",
        "user0,",
        "user1,",
        "user2,",
        "user3",
        "py-extension1:",
        "adamjstewart,",
        "pradyunsg,",
        "user1,",
        "user2",
    ]

    with capfd.disabled():
@@ -58,23 +68,38 @@ def test_all_by_user(mock_packages, capfd):
    with capfd.disabled():
        out = split(maintainers("--all", "--by-user"))
    assert out == [
        "adamjstewart:",
        "py-extension1",
        "pradyunsg:",
        "py-extension1",
        "user0:",
        "maintainers-3",
        "user1:",
        "maintainers-1",
        "maintainers-1,",
        "maintainers-3,",
        "py-extension1",
        "user2:",
        "maintainers-1,",
        "maintainers-2",
        "maintainers-2,",
        "maintainers-3,",
        "py-extension1",
        "user3:",
        "maintainers-2",
        "maintainers-2,",
        "maintainers-3",
    ]

    with capfd.disabled():
        out = split(maintainers("--all", "--by-user", "user1", "user2"))
    assert out == [
        "user1:",
        "maintainers-1",
        "maintainers-1,",
        "maintainers-3,",
        "py-extension1",
        "user2:",
        "maintainers-1,",
        "maintainers-2",
        "maintainers-2,",
        "maintainers-3,",
        "py-extension1",
    ]


@@ -116,16 +141,16 @@ def test_maintainers_list_fails(mock_packages, capfd):
def test_maintainers_list_by_user(mock_packages, capfd):
    with capfd.disabled():
        out = split(maintainers("--by-user", "user1"))
    assert out == ["maintainers-1"]
    assert out == ["maintainers-1", "maintainers-3", "py-extension1"]

    with capfd.disabled():
        out = split(maintainers("--by-user", "user1", "user2"))
    assert out == ["maintainers-1", "maintainers-2"]
    assert out == ["maintainers-1", "maintainers-2", "maintainers-3", "py-extension1"]

    with capfd.disabled():
        out = split(maintainers("--by-user", "user2"))
    assert out == ["maintainers-1", "maintainers-2"]
    assert out == ["maintainers-1", "maintainers-2", "maintainers-3", "py-extension1"]

    with capfd.disabled():
        out = split(maintainers("--by-user", "user3"))
    assert out == ["maintainers-2"]
    assert out == ["maintainers-2", "maintainers-3"]
@@ -258,7 +258,7 @@ def test_has_test_method_fails(capsys):
    assert "is not a class" in captured


def test_read_old_results(mock_test_stage):
def test_read_old_results(mock_packages, mock_test_stage):
    """Take test data generated before the switch to full hash everywhere
    and make sure we can still read it in"""
    # Test data was generated with:
@@ -319,3 +319,26 @@ def test_test_results_status(mock_packages, mock_test_stage, status, expected):
    else:
        assert status in results
        assert expected in results


@pytest.mark.regression("35337")
def test_report_filename_for_cdash(install_mockery_mutable_config, mock_fetch):
    """Test that the temporary file used to write Testing.xml for CDash is not the upload URL"""
    name = "trivial"
    spec = spack.spec.Spec("trivial-smoke-test").concretized()
    suite = spack.install_test.TestSuite([spec], name)
    suite.ensure_stage()

    parser = argparse.ArgumentParser()
    spack.cmd.test.setup_parser(parser)
    args = parser.parse_args(
        [
            "run",
            "--cdash-upload-url=https://blahblah/submit.php?project=debugging",
            "trivial-smoke-test",
        ]
    )

    spack.cmd.common.arguments.sanitize_reporter_options(args)
    filename = spack.cmd.test.report_filename(args, suite)
    assert filename != "https://blahblah/submit.php?project=debugging"
@@ -17,6 +17,7 @@
from llnl.util.filesystem import getuid, join_path, mkdirp, touch, touchp

import spack.config
import spack.directory_layout
import spack.environment as ev
import spack.main
import spack.package_base
@@ -28,6 +29,7 @@
import spack.schema.mirrors
import spack.schema.packages
import spack.schema.repos
import spack.store
import spack.util.path as spack_path
import spack.util.spack_yaml as syaml

@@ -451,9 +453,9 @@ def test_substitute_date(mock_low_high_config):
    assert date.today().strftime("%Y-%m-%d") in new_path


PAD_STRING = spack.util.path.SPACK_PATH_PADDING_CHARS
MAX_PATH_LEN = spack.util.path.get_system_path_max()
MAX_PADDED_LEN = MAX_PATH_LEN - spack.util.path.SPACK_MAX_INSTALL_PATH_LENGTH
PAD_STRING = spack_path.SPACK_PATH_PADDING_CHARS
MAX_PATH_LEN = spack_path.get_system_path_max()
MAX_PADDED_LEN = MAX_PATH_LEN - spack_path.SPACK_MAX_INSTALL_PATH_LENGTH
reps = [PAD_STRING for _ in range((MAX_PADDED_LEN // len(PAD_STRING) + 1) + 2)]
full_padded_string = os.path.join(os.sep + "path", os.sep.join(reps))[:MAX_PADDED_LEN]
BIN
lib/spack/spack/test/data/specfiles/hdf5.v013.json.gz
Normal file
Binary file not shown.
BIN
lib/spack/spack/test/data/specfiles/hdf5.v016.json.gz
Normal file
Binary file not shown.
BIN
lib/spack/spack/test/data/specfiles/hdf5.v017.json.gz
Normal file
Binary file not shown.
BIN
lib/spack/spack/test/data/specfiles/hdf5.v019.json.gz
Normal file
Binary file not shown.
@@ -68,3 +68,19 @@ def test_error_on_anonymous_dependency(config, mock_packages):
    pkg = spack.repo.path.get_pkg_class("a")
    with pytest.raises(spack.directives.DependencyError):
        spack.directives._depends_on(pkg, "@4.5")


@pytest.mark.regression("34879")
@pytest.mark.parametrize(
    "package_name,expected_maintainers",
    [
        ("maintainers-1", ["user1", "user2"]),
        # Reset from PythonPackage
        ("py-extension1", ["adamjstewart", "pradyunsg", "user1", "user2"]),
        # Extends maintainers-1
        ("maintainers-3", ["user0", "user1", "user2", "user3"]),
    ],
)
def test_maintainer_directive(config, mock_packages, package_name, expected_maintainers):
    pkg_cls = spack.repo.path.get_pkg_class(package_name)
    assert pkg_cls.maintainers == expected_maintainers
@@ -75,8 +75,8 @@ def test_env_change_spec(tmpdir, mock_packages, config):
      - desired_specs: ["mpileaks@2.1"]
    specs:
    - matrix:
      - [$compilers]
      - [$desired_specs]
      - [$compilers]
    """
@@ -5,7 +5,7 @@

import pytest

from spack.fetch_strategy import from_url_scheme
from spack import fetch_strategy


def test_fetchstrategy_bad_url_scheme():
@@ -13,4 +13,14 @@ def test_fetchstrategy_bad_url_scheme():
    unsupported scheme fails as expected."""

    with pytest.raises(ValueError):
        fetcher = from_url_scheme("bogus-scheme://example.com/a/b/c")  # noqa: F841
        fetcher = fetch_strategy.from_url_scheme("bogus-scheme://example.com/a/b/c")  # noqa: F841


def test_filesummary(tmpdir):
    p = str(tmpdir.join("xyz"))
    with open(p, "wb") as f:
        f.write(b"abcdefghijklmnopqrstuvwxyz")

    assert fetch_strategy._filesummary(p, print_bytes=8) == (26, b"abcdefgh...stuvwxyz")
    assert fetch_strategy._filesummary(p, print_bytes=13) == (26, b"abcdefghijklmnopqrstuvwxyz")
    assert fetch_strategy._filesummary(p, print_bytes=100) == (26, b"abcdefghijklmnopqrstuvwxyz")
@@ -36,7 +36,7 @@
    needs_binary_relocation,
    needs_text_relocation,
    relocate_links,
    unsafe_relocate_text,
    relocate_text,
)
from spack.spec import Spec

@@ -190,7 +190,7 @@ def test_buildcache(mock_archive, tmpdir):


@pytest.mark.usefixtures("install_mockery")
def test_unsafe_relocate_text(tmpdir):
def test_relocate_text(tmpdir):
    spec = Spec("trivial-install-test-package")
    spec.concretize()
    with tmpdir.as_cwd():
@@ -203,7 +203,7 @@ def test_unsafe_relocate_text(tmpdir):
        filenames = [filename]
        new_dir = "/opt/rh/devtoolset/"
        # With a single entry, the ordering of the dict does not matter
        unsafe_relocate_text(filenames, {old_dir: new_dir})
        relocate_text(filenames, {old_dir: new_dir})
        with open(filename, "r") as script:
            for line in script:
                assert new_dir in line
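A note on the rename above: unsafe_relocate_text is now exposed as relocate_text, keeping the (filenames, prefix-to-prefix mapping) signature the test exercises. A minimal sketch, assuming a Spack checkout on sys.path; the temporary script and the prefix strings are hypothetical, not taken from the test suite:

    # Sketch: relocate_text rewrites old prefixes to new ones inside text files.
    import os
    import tempfile

    from spack.relocate import relocate_text  # renamed from unsafe_relocate_text

    old_dir, new_dir = "/old/prefix", "/opt/rh/devtoolset/"  # hypothetical prefixes
    workdir = tempfile.mkdtemp()
    script = os.path.join(workdir, "script.sh")
    with open(script, "w") as f:
        f.write("echo %s\n" % old_dir)
    relocate_text([script], {old_dir: new_dir})  # mapping: old prefix -> new prefix
    with open(script) as f:
        assert new_dir in f.read()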
@@ -2,13 +2,11 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import io
import os
import os.path
import re
import shutil
import sys
from collections import OrderedDict

import pytest

@@ -18,11 +16,11 @@
import spack.paths
import spack.platforms
import spack.relocate
import spack.relocate_text as relocate_text
import spack.spec
import spack.store
import spack.tengine
import spack.util.executable
from spack.relocate import utf8_path_to_binary_regex, utf8_paths_to_single_binary_regex

pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="Tests fail on Windows")

@@ -269,7 +267,7 @@ def test_set_elf_rpaths_warning(mock_patchelf):

@pytest.mark.requires_executables("patchelf", "strings", "file", "gcc")
@skip_unless_linux
def test_replace_prefix_bin(binary_with_rpaths, prefix_like):
def test_relocate_text_bin(binary_with_rpaths, prefix_like):
    prefix = "/usr/" + prefix_like
    prefix_bytes = prefix.encode("utf-8")
    new_prefix = "/foo/" + prefix_like
@@ -278,7 +276,7 @@ def test_replace_prefix_bin(binary_with_rpaths, prefix_like):
    executable = binary_with_rpaths(rpaths=[prefix + "/lib", prefix + "/lib64"])

    # Relocate the RPATHs
    spack.relocate._replace_prefix_bin(str(executable), {prefix_bytes: new_prefix_bytes})
    spack.relocate.relocate_text_bin([str(executable)], {prefix_bytes: new_prefix_bytes})

    # Some compilers add rpaths, so ensure the changes are included in the final result
    assert "%s/lib:%s/lib64" % (new_prefix, new_prefix) in rpaths_for(executable)
@@ -349,7 +347,7 @@ def test_make_elf_binaries_relative(binary_with_rpaths, copy_binary, prefix_tmpd

@pytest.mark.requires_executables("patchelf", "strings", "file", "gcc")
@skip_unless_linux
def test_relocate_text_bin(binary_with_rpaths, copy_binary, prefix_tmpdir):
def test_relocate_text_bin_with_message(binary_with_rpaths, copy_binary, prefix_tmpdir):
    orig_binary = binary_with_rpaths(
        rpaths=[
            str(prefix_tmpdir.mkdir("lib")),
@@ -368,7 +366,7 @@ def test_relocate_text_bin(binary_with_rpaths, copy_binary, prefix_tmpdir):
    orig_path_bytes = str(orig_binary.dirpath()).encode("utf-8")
    new_path_bytes = str(new_binary.dirpath()).encode("utf-8")

    spack.relocate.unsafe_relocate_text_bin([str(new_binary)], {orig_path_bytes: new_path_bytes})
    spack.relocate.relocate_text_bin([str(new_binary)], {orig_path_bytes: new_path_bytes})

    # Check that the original directory is not there anymore and that it was
    # substituted with the new one
@@ -382,8 +380,8 @@ def test_relocate_text_bin_raise_if_new_prefix_is_longer(tmpdir):
    fpath = str(tmpdir.join("fakebin"))
    with open(fpath, "w") as f:
        f.write("/short")
    with pytest.raises(spack.relocate.BinaryTextReplaceError):
        spack.relocate.unsafe_relocate_text_bin([fpath], {short_prefix: long_prefix})
    with pytest.raises(relocate_text.BinaryTextReplaceError):
        spack.relocate.relocate_text_bin([fpath], {short_prefix: long_prefix})


@pytest.mark.requires_executables("install_name_tool", "file", "cc")
@@ -438,227 +436,3 @@ def test_fixup_macos_rpaths(make_dylib, make_object_file):
    # (this is a corner case for GCC installation)
    (root, filename) = make_object_file()
    assert not fixup_rpath(root, filename)
def test_text_relocation_regex_is_safe():
    # Test whether prefix regex is properly escaped
    string = b"This does not match /a/, but this does: /[a-z]/."
    assert utf8_path_to_binary_regex("/[a-z]/").search(string).group(0) == b"/[a-z]/"


def test_utf8_paths_to_single_binary_regex():
    regex = utf8_paths_to_single_binary_regex(["/first/path", "/second/path", "/safe/[a-z]"])
    # Match nothing
    assert not regex.search(b"text /neither/first/path text /the/second/path text")

    # Match first
    string = b"contains both /first/path/subdir and /second/path/sub"
    assert regex.search(string).group(0) == b"/first/path/subdir"

    # Match second
    string = b"contains both /not/first/path/subdir but /second/path/subdir"
    assert regex.search(string).group(0) == b"/second/path/subdir"

    # Match "unsafe" dir name
    string = b"don't match /safe/a/path but do match /safe/[a-z]/file"
    assert regex.search(string).group(0) == b"/safe/[a-z]/file"


def test_ordered_replacement():
    # This tests whether binary text replacement respects order, so that
    # a long package prefix is replaced before a shorter sub-prefix like
    # the root of the spack store (as a fallback).
    def replace_and_expect(prefix_map, before, after=None, suffix_safety_size=7):
        f = io.BytesIO(before)
        spack.relocate.apply_binary_replacements(f, OrderedDict(prefix_map), suffix_safety_size)
        f.seek(0)
        assert f.read() == after

    # The case of having a non-null terminated common suffix.
    replace_and_expect(
        [
            (b"/old-spack/opt/specific-package", b"/first/specific-package"),
            (b"/old-spack/opt", b"/sec/spack/opt"),
        ],
        b"Binary with /old-spack/opt/specific-package and /old-spack/opt",
        b"Binary with /////////first/specific-package and /sec/spack/opt",
        suffix_safety_size=7,
    )

    # The case of having a direct null terminated common suffix.
    replace_and_expect(
        [
            (b"/old-spack/opt/specific-package", b"/first/specific-package"),
            (b"/old-spack/opt", b"/sec/spack/opt"),
        ],
        b"Binary with /old-spack/opt/specific-package\0 and /old-spack/opt\0",
        b"Binary with /////////first/specific-package\0 and /sec/spack/opt\0",
        suffix_safety_size=7,
    )

    # Testing the order of operations (not null terminated, long enough common suffix)
    replace_and_expect(
        [
            (b"/old-spack/opt", b"/s/spack/opt"),
            (b"/old-spack/opt/specific-package", b"/first/specific-package"),
        ],
        b"Binary with /old-spack/opt/specific-package and /old-spack/opt",
        b"Binary with ///s/spack/opt/specific-package and ///s/spack/opt",
        suffix_safety_size=7,
    )

    # Testing the order of operations (null terminated, long enough common suffix)
    replace_and_expect(
        [
            (b"/old-spack/opt", b"/s/spack/opt"),
            (b"/old-spack/opt/specific-package", b"/first/specific-package"),
        ],
        b"Binary with /old-spack/opt/specific-package\0 and /old-spack/opt\0",
        b"Binary with ///s/spack/opt/specific-package\0 and ///s/spack/opt\0",
        suffix_safety_size=7,
    )

    # Null terminated within the lookahead window, common suffix long enough
    replace_and_expect(
        [(b"/old-spack/opt/specific-package", b"/opt/specific-XXXXage")],
        b"Binary with /old-spack/opt/specific-package/sub\0 data",
        b"Binary with ///////////opt/specific-XXXXage/sub\0 data",
        suffix_safety_size=7,
    )

    # Null terminated within the lookahead window, common suffix too short, but
    # shortening is enough to spare more than 7 bytes of old suffix.
    replace_and_expect(
        [(b"/old-spack/opt/specific-package", b"/opt/specific-XXXXXge")],
        b"Binary with /old-spack/opt/specific-package/sub\0 data",
        b"Binary with /opt/specific-XXXXXge/sub\0ckage/sub\0 data",  # ckage/sub = 9 bytes
        suffix_safety_size=7,
    )

    # Null terminated within the lookahead window, common suffix too short,
    # shortening leaves exactly 7 suffix bytes untouched, amazing!
    replace_and_expect(
        [(b"/old-spack/opt/specific-package", b"/spack/specific-XXXXXge")],
        b"Binary with /old-spack/opt/specific-package/sub\0 data",
        b"Binary with /spack/specific-XXXXXge/sub\0age/sub\0 data",  # age/sub = 7 bytes
        suffix_safety_size=7,
    )

    # Null terminated within the lookahead window, common suffix too short,
    # shortening doesn't leave space for 7 bytes, sad!
    error_msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format(
        b"/old-spack/opt/specific-package",
        b"/snacks/specific-XXXXXge",
        b"/old-spack/opt/specific-package/sub",
    )
    with pytest.raises(spack.relocate.CannotShrinkCString, match=error_msg):
        replace_and_expect(
            [(b"/old-spack/opt/specific-package", b"/snacks/specific-XXXXXge")],
            b"Binary with /old-spack/opt/specific-package/sub\0 data",
            # expect failure!
            suffix_safety_size=7,
        )

    # Check that it works when changing suffix_safety_size.
    replace_and_expect(
        [(b"/old-spack/opt/specific-package", b"/snacks/specific-XXXXXXe")],
        b"Binary with /old-spack/opt/specific-package/sub\0 data",
        b"Binary with /snacks/specific-XXXXXXe/sub\0ge/sub\0 data",
        suffix_safety_size=6,
    )

    # Finally check the case of no shortening but a long enough common suffix.
    replace_and_expect(
        [(b"pkg-gwixwaalgczp6", b"pkg-zkesfralgczp6")],
        b"Binary with pkg-gwixwaalgczp6/config\0 data",
        b"Binary with pkg-zkesfralgczp6/config\0 data",
        suffix_safety_size=7,
    )

    # Too short matching suffix, identical string length
    error_msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format(
        b"pkg-gwixwaxlgczp6",
        b"pkg-zkesfrzlgczp6",
        b"pkg-gwixwaxlgczp6",
    )
    with pytest.raises(spack.relocate.CannotShrinkCString, match=error_msg):
        replace_and_expect(
            [(b"pkg-gwixwaxlgczp6", b"pkg-zkesfrzlgczp6")],
            b"Binary with pkg-gwixwaxlgczp6\0 data",
            # expect failure
            suffix_safety_size=7,
        )

    # Finally, make sure that the regex is not greedily finding the LAST null byte;
    # it should find the first null byte in the window. In this test we put one null
    # at a distance where we can't keep a long enough suffix, and one where we can,
    # so we should expect failure when the first null is used.
    error_msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format(
        b"pkg-abcdef",
        b"pkg-xyzabc",
        b"pkg-abcdef",
    )
    with pytest.raises(spack.relocate.CannotShrinkCString, match=error_msg):
        replace_and_expect(
            [(b"pkg-abcdef", b"pkg-xyzabc")],
            b"Binary with pkg-abcdef\0/xx\0",  # def\0/xx is 7 bytes.
            # expect failure
            suffix_safety_size=7,
        )


def test_inplace_text_replacement():
    def replace_and_expect(prefix_to_prefix, before: bytes, after: bytes):
        f = io.BytesIO(before)
        prefix_to_prefix = OrderedDict(prefix_to_prefix)
        regex = spack.relocate.byte_strings_to_single_binary_regex(prefix_to_prefix.keys())
        spack.relocate._replace_prefix_text_file(f, regex, prefix_to_prefix)
        f.seek(0)
        assert f.read() == after

    replace_and_expect(
        [
            (b"/first/prefix", b"/first-replacement/prefix"),
            (b"/second/prefix", b"/second-replacement/prefix"),
        ],
        b"Example: /first/prefix/subdir and /second/prefix/subdir",
        b"Example: /first-replacement/prefix/subdir and /second-replacement/prefix/subdir",
    )

    replace_and_expect(
        [
            (b"/replace/in/order", b"/first"),
            (b"/replace/in", b"/second"),
            (b"/replace", b"/third"),
        ],
        b"/replace/in/order/x /replace/in/y /replace/z",
        b"/first/x /second/y /third/z",
    )

    replace_and_expect(
        [
            (b"/replace", b"/third"),
            (b"/replace/in", b"/second"),
            (b"/replace/in/order", b"/first"),
        ],
        b"/replace/in/order/x /replace/in/y /replace/z",
        b"/third/in/order/x /third/in/y /third/z",
    )

    replace_and_expect(
        [(b"/my/prefix", b"/replacement")],
        b"/dont/replace/my/prefix #!/dont/replace/my/prefix",
        b"/dont/replace/my/prefix #!/dont/replace/my/prefix",
    )

    replace_and_expect(
        [(b"/my/prefix", b"/replacement")],
        b"Install path: /my/prefix.",
        b"Install path: /replacement.",
    )

    replace_and_expect(
        [(b"/my/prefix", b"/replacement")],
        b"#!/my/prefix",
        b"#!/replacement",
    )
lib/spack/spack/test/relocate_text.py (new file, 247 lines)
@@ -0,0 +1,247 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import io
from collections import OrderedDict

import pytest

import spack.relocate_text as relocate_text


def test_text_relocation_regex_is_safe():
    # Test whether prefix regex is properly escaped
    string = b"This does not match /a/, but this does: /[a-z]/."
    assert relocate_text.utf8_path_to_binary_regex("/[a-z]/").search(string).group(0) == b"/[a-z]/"


def test_utf8_paths_to_single_binary_regex():
    regex = relocate_text.utf8_paths_to_single_binary_regex(
        ["/first/path", "/second/path", "/safe/[a-z]"]
    )
    # Match nothing
    assert not regex.search(b"text /neither/first/path text /the/second/path text")

    # Match first
    string = b"contains both /first/path/subdir and /second/path/sub"
    assert regex.search(string).group(0) == b"/first/path/subdir"

    # Match second
    string = b"contains both /not/first/path/subdir but /second/path/subdir"
    assert regex.search(string).group(0) == b"/second/path/subdir"

    # Match "unsafe" dir name
    string = b"don't match /safe/a/path but do match /safe/[a-z]/file"
    assert regex.search(string).group(0) == b"/safe/[a-z]/file"


def test_ordered_replacement():
    # This tests whether binary text replacement respects order, so that
    # a long package prefix is replaced before a shorter sub-prefix like
    # the root of the spack store (as a fallback).
    def replace_and_expect(prefix_map, before, after=None, suffix_safety_size=7):
        f = io.BytesIO(before)
        relocater = relocate_text.BinaryFilePrefixReplacer(
            OrderedDict(prefix_map), suffix_safety_size
        )
        relocater.apply_to_file(f)
        f.seek(0)
        assert f.read() == after

    # The case of having a non-null terminated common suffix.
    replace_and_expect(
        [
            (b"/old-spack/opt/specific-package", b"/first/specific-package"),
            (b"/old-spack/opt", b"/sec/spack/opt"),
        ],
        b"Binary with /old-spack/opt/specific-package and /old-spack/opt",
        b"Binary with /////////first/specific-package and /sec/spack/opt",
        suffix_safety_size=7,
    )

    # The case of having a direct null terminated common suffix.
    replace_and_expect(
        [
            (b"/old-spack/opt/specific-package", b"/first/specific-package"),
            (b"/old-spack/opt", b"/sec/spack/opt"),
        ],
        b"Binary with /old-spack/opt/specific-package\0 and /old-spack/opt\0",
        b"Binary with /////////first/specific-package\0 and /sec/spack/opt\0",
        suffix_safety_size=7,
    )

    # Testing the order of operations (not null terminated, long enough common suffix)
    replace_and_expect(
        [
            (b"/old-spack/opt", b"/s/spack/opt"),
            (b"/old-spack/opt/specific-package", b"/first/specific-package"),
        ],
        b"Binary with /old-spack/opt/specific-package and /old-spack/opt",
        b"Binary with ///s/spack/opt/specific-package and ///s/spack/opt",
        suffix_safety_size=7,
    )

    # Testing the order of operations (null terminated, long enough common suffix)
    replace_and_expect(
        [
            (b"/old-spack/opt", b"/s/spack/opt"),
            (b"/old-spack/opt/specific-package", b"/first/specific-package"),
        ],
        b"Binary with /old-spack/opt/specific-package\0 and /old-spack/opt\0",
        b"Binary with ///s/spack/opt/specific-package\0 and ///s/spack/opt\0",
        suffix_safety_size=7,
    )

    # Null terminated within the lookahead window, common suffix long enough
    replace_and_expect(
        [(b"/old-spack/opt/specific-package", b"/opt/specific-XXXXage")],
        b"Binary with /old-spack/opt/specific-package/sub\0 data",
        b"Binary with ///////////opt/specific-XXXXage/sub\0 data",
        suffix_safety_size=7,
    )

    # Null terminated within the lookahead window, common suffix too short, but
    # shortening is enough to spare more than 7 bytes of old suffix.
    replace_and_expect(
        [(b"/old-spack/opt/specific-package", b"/opt/specific-XXXXXge")],
        b"Binary with /old-spack/opt/specific-package/sub\0 data",
        b"Binary with /opt/specific-XXXXXge/sub\0ckage/sub\0 data",  # ckage/sub = 9 bytes
        suffix_safety_size=7,
    )

    # Null terminated within the lookahead window, common suffix too short,
    # shortening leaves exactly 7 suffix bytes untouched, amazing!
    replace_and_expect(
        [(b"/old-spack/opt/specific-package", b"/spack/specific-XXXXXge")],
        b"Binary with /old-spack/opt/specific-package/sub\0 data",
        b"Binary with /spack/specific-XXXXXge/sub\0age/sub\0 data",  # age/sub = 7 bytes
        suffix_safety_size=7,
    )

    # Null terminated within the lookahead window, common suffix too short,
    # shortening doesn't leave space for 7 bytes, sad!
    error_msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format(
        b"/old-spack/opt/specific-package",
        b"/snacks/specific-XXXXXge",
        b"/old-spack/opt/specific-package/sub",
    )
    with pytest.raises(relocate_text.CannotShrinkCString, match=error_msg):
        replace_and_expect(
            [(b"/old-spack/opt/specific-package", b"/snacks/specific-XXXXXge")],
            b"Binary with /old-spack/opt/specific-package/sub\0 data",
            # expect failure!
            suffix_safety_size=7,
        )

    # Check that it works when changing suffix_safety_size.
    replace_and_expect(
        [(b"/old-spack/opt/specific-package", b"/snacks/specific-XXXXXXe")],
        b"Binary with /old-spack/opt/specific-package/sub\0 data",
        b"Binary with /snacks/specific-XXXXXXe/sub\0ge/sub\0 data",
        suffix_safety_size=6,
    )

    # Finally check the case of no shortening but a long enough common suffix.
    replace_and_expect(
        [(b"pkg-gwixwaalgczp6", b"pkg-zkesfralgczp6")],
        b"Binary with pkg-gwixwaalgczp6/config\0 data",
        b"Binary with pkg-zkesfralgczp6/config\0 data",
        suffix_safety_size=7,
    )

    # Too short matching suffix, identical string length
    error_msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format(
        b"pkg-gwixwaxlgczp6",
        b"pkg-zkesfrzlgczp6",
        b"pkg-gwixwaxlgczp6",
    )
    with pytest.raises(relocate_text.CannotShrinkCString, match=error_msg):
        replace_and_expect(
            [(b"pkg-gwixwaxlgczp6", b"pkg-zkesfrzlgczp6")],
            b"Binary with pkg-gwixwaxlgczp6\0 data",
            # expect failure
            suffix_safety_size=7,
        )

    # Finally, make sure that the regex is not greedily finding the LAST null byte;
    # it should find the first null byte in the window. In this test we put one null
    # at a distance where we can't keep a long enough suffix, and one where we can,
    # so we should expect failure when the first null is used.
    error_msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format(
        b"pkg-abcdef",
        b"pkg-xyzabc",
        b"pkg-abcdef",
    )
    with pytest.raises(relocate_text.CannotShrinkCString, match=error_msg):
        replace_and_expect(
            [(b"pkg-abcdef", b"pkg-xyzabc")],
            b"Binary with pkg-abcdef\0/xx\0",  # def\0/xx is 7 bytes.
            # expect failure
            suffix_safety_size=7,
        )


def test_inplace_text_replacement():
    def replace_and_expect(prefix_to_prefix, before: bytes, after: bytes):
        f = io.BytesIO(before)
        replacer = relocate_text.TextFilePrefixReplacer(OrderedDict(prefix_to_prefix))
        replacer.apply_to_file(f)
        f.seek(0)
        assert f.read() == after

    replace_and_expect(
        [
            (b"/first/prefix", b"/first-replacement/prefix"),
            (b"/second/prefix", b"/second-replacement/prefix"),
        ],
        b"Example: /first/prefix/subdir and /second/prefix/subdir",
        b"Example: /first-replacement/prefix/subdir and /second-replacement/prefix/subdir",
    )

    replace_and_expect(
        [
            (b"/replace/in/order", b"/first"),
            (b"/replace/in", b"/second"),
            (b"/replace", b"/third"),
        ],
        b"/replace/in/order/x /replace/in/y /replace/z",
        b"/first/x /second/y /third/z",
    )

    replace_and_expect(
        [
            (b"/replace", b"/third"),
            (b"/replace/in", b"/second"),
            (b"/replace/in/order", b"/first"),
        ],
        b"/replace/in/order/x /replace/in/y /replace/z",
        b"/third/in/order/x /third/in/y /third/z",
    )

    replace_and_expect(
        [(b"/my/prefix", b"/replacement")],
        b"/dont/replace/my/prefix #!/dont/replace/my/prefix",
        b"/dont/replace/my/prefix #!/dont/replace/my/prefix",
    )

    replace_and_expect(
        [(b"/my/prefix", b"/replacement")],
        b"Install path: /my/prefix.",
        b"Install path: /replacement.",
    )

    replace_and_expect(
        [(b"/my/prefix", b"/replacement")],
        b"#!/my/prefix",
        b"#!/replacement",
    )


def test_relocate_text_filters_redundant_entries():
    # Test that we're filtering identical old / new paths, since that's a waste.
    mapping = OrderedDict([("/hello", "/hello"), ("/world", "/world")])
    replacer_1 = relocate_text.BinaryFilePrefixReplacer.from_strings_or_bytes(mapping)
    replacer_2 = relocate_text.TextFilePrefixReplacer.from_strings_or_bytes(mapping)
    assert not replacer_1.prefix_to_prefix
    assert not replacer_2.prefix_to_prefix
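For orientation, a minimal sketch of the two replacer classes this new file exercises, assuming spack.relocate_text is importable; the byte strings are lifted from the tests above rather than invented:

    import io
    from collections import OrderedDict

    import spack.relocate_text as relocate_text

    # Binary replacement: equal-length prefixes with a long common suffix are
    # substituted cleanly (the pkg-gwixwaalgczp6 case above).
    f = io.BytesIO(b"Binary with pkg-gwixwaalgczp6/config\0 data")
    replacer = relocate_text.BinaryFilePrefixReplacer(
        OrderedDict([(b"pkg-gwixwaalgczp6", b"pkg-zkesfralgczp6")]), 7  # suffix safety size
    )
    replacer.apply_to_file(f)
    f.seek(0)
    assert f.read() == b"Binary with pkg-zkesfralgczp6/config\0 data"

    # Text replacement has no C-string length constraint.
    g = io.BytesIO(b"Install path: /my/prefix.")
    relocate_text.TextFilePrefixReplacer(
        OrderedDict([(b"/my/prefix", b"/replacement")])
    ).apply_to_file(g)
    g.seek(0)
    assert g.read() == b"Install path: /replacement."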
@@ -125,7 +125,7 @@ def _mock_installed(self):
    # use the installed C. It should *not* force A to use the installed D
    # *if* we're doing a fresh installation.
    a_spec = Spec(a)
    a_spec._add_dependency(c_spec, ("build", "link"))
    a_spec._add_dependency(c_spec, deptypes=("build", "link"))
    a_spec.concretize()
    assert spack.version.Version("2") == a_spec[c][d].version
    assert spack.version.Version("2") == a_spec[e].version
@@ -148,7 +148,7 @@ def test_specify_preinstalled_dep(tmpdir, monkeypatch):
    monkeypatch.setattr(Spec, "installed", property(lambda x: x.name != "a"))

    a_spec = Spec("a")
    a_spec._add_dependency(b_spec, ("build", "link"))
    a_spec._add_dependency(b_spec, deptypes=("build", "link"))
    a_spec.concretize()

    assert set(x.name for x in a_spec.traverse()) == set(["a", "b", "c"])
@@ -992,9 +992,9 @@ def test_synthetic_construction_of_split_dependencies_from_same_package(mock_pac
    link_run_spec = Spec("c@1.0").concretized()
    build_spec = Spec("c@2.0").concretized()

    root.add_dependency_edge(link_run_spec, deptype="link")
    root.add_dependency_edge(link_run_spec, deptype="run")
    root.add_dependency_edge(build_spec, deptype="build")
    root.add_dependency_edge(link_run_spec, deptypes="link")
    root.add_dependency_edge(link_run_spec, deptypes="run")
    root.add_dependency_edge(build_spec, deptypes="build")

    # Check dependencies from the perspective of root
    assert len(root.dependencies()) == 2
@@ -1020,7 +1020,7 @@ def test_synthetic_construction_bootstrapping(mock_packages, config):
    root = Spec("b@2.0").concretized()
    bootstrap = Spec("b@1.0").concretized()

    root.add_dependency_edge(bootstrap, deptype="build")
    root.add_dependency_edge(bootstrap, deptypes="build")

    assert len(root.dependencies()) == 1
    assert root.dependencies()[0].name == "b"
@@ -1039,7 +1039,7 @@ def test_addition_of_different_deptypes_in_multiple_calls(mock_packages, config)
    bootstrap = Spec("b@1.0").concretized()

    for current_deptype in ("build", "link", "run"):
        root.add_dependency_edge(bootstrap, deptype=current_deptype)
        root.add_dependency_edge(bootstrap, deptypes=current_deptype)

    # Check edges in dependencies
    assert len(root.edges_to_dependencies()) == 1
@@ -1066,9 +1066,9 @@ def test_adding_same_deptype_with_the_same_name_raises(
    c1 = Spec("b@1.0").concretized()
    c2 = Spec("b@2.0").concretized()

    p.add_dependency_edge(c1, deptype=c1_deptypes)
    p.add_dependency_edge(c1, deptypes=c1_deptypes)
    with pytest.raises(spack.error.SpackError):
        p.add_dependency_edge(c2, deptype=c2_deptypes)
        p.add_dependency_edge(c2, deptypes=c2_deptypes)


@pytest.mark.regression("33499")
@@ -1087,16 +1087,16 @@ def test_indexing_prefers_direct_or_transitive_link_deps():
    z3_flavor_1 = Spec("z3 +through_a1")
    z3_flavor_2 = Spec("z3 +through_z1")

    root.add_dependency_edge(a1, deptype=("build", "run", "test"))
    root.add_dependency_edge(a1, deptypes=("build", "run", "test"))

    # unique package as a dep of a build/run/test type dep.
    a1.add_dependency_edge(a2, deptype="all")
    a1.add_dependency_edge(z3_flavor_1, deptype="all")
    a1.add_dependency_edge(a2, deptypes="all")
    a1.add_dependency_edge(z3_flavor_1, deptypes="all")

    # chain of link type deps root -> z1 -> z2 -> z3
    root.add_dependency_edge(z1, deptype="link")
    z1.add_dependency_edge(z2, deptype="link")
    z2.add_dependency_edge(z3_flavor_2, deptype="link")
    root.add_dependency_edge(z1, deptypes="link")
    z1.add_dependency_edge(z2, deptypes="link")
    z2.add_dependency_edge(z3_flavor_2, deptypes="link")

    # Indexing should prefer the link-type dep.
    assert "through_z1" in root["z3"].variants
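The rename threaded through these hunks in one place: dependency edges now take an explicit deptypes= keyword instead of deptype= (or a bare positional tuple for _add_dependency). A condensed sketch using the same mock packages the tests rely on:

    from spack.spec import Spec

    root = Spec("b@2.0").concretized()       # mock packages from the tests above
    bootstrap = Spec("b@1.0").concretized()

    # old (removed): root.add_dependency_edge(bootstrap, deptype="build")
    root.add_dependency_edge(bootstrap, deptypes="build")  # new keyword form
    assert root.dependencies()[0].name == "b"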
@@ -61,25 +61,25 @@ def test_spec_list_expansions(self):
    @pytest.mark.parametrize(
        "specs,expected",
        [
            # Constraints are ordered randomly
            # Constraints are ordered carefully to apply to appropriate node
            (
                [
                    {
                        "matrix": [
                            ["^zmpi"],
                            ["%gcc@4.5.0"],
                            ["hypre", "libelf"],
                            ["~shared"],
                            ["cflags=-O3", 'cflags="-g -O0"'],
                            ["^foo"],
                            ["^zmpi"],
                            ["%gcc@4.5.0"],
                            ["cflags=-O3", 'cflags="-g -O0"'],
                        ]
                    }
                ],
                [
                    "hypre cflags=-O3 ~shared %gcc@4.5.0 ^foo ^zmpi",
                    'hypre cflags="-g -O0" ~shared %gcc@4.5.0 ^foo ^zmpi',
                    "libelf cflags=-O3 ~shared %gcc@4.5.0 ^foo ^zmpi",
                    'libelf cflags="-g -O0" ~shared %gcc@4.5.0 ^foo ^zmpi',
                    "hypre ~shared ^foo ^zmpi cflags=-O3 %gcc@4.5.0",
                    'hypre ~shared ^foo ^zmpi cflags="-g -O0" %gcc@4.5.0',
                    "libelf ~shared ^foo ^zmpi cflags=-O3 %gcc@4.5.0",
                    'libelf ~shared ^foo ^zmpi cflags="-g -O0" %gcc@4.5.0',
                ],
            ),
            # A constraint affects both the root and a dependency
@@ -1108,7 +1108,7 @@ def test_error_message_unknown_variant(self):
    def test_satisfies_dependencies_ordered(self):
        d = Spec("zmpi ^fake")
        s = Spec("mpileaks")
        s._add_dependency(d, ())
        s._add_dependency(d, deptypes=())
        assert s.satisfies("mpileaks ^zmpi ^fake", strict=True)

    @pytest.mark.parametrize("transitive", [True, False])
@@ -1156,7 +1156,9 @@ def test_is_extension_after_round_trip_to_dict(config, mock_packages, spec_str):

def test_malformed_spec_dict():
    with pytest.raises(SpecError, match="malformed"):
        Spec.from_dict({"spec": {"nodes": [{"dependencies": {"name": "foo"}}]}})
        Spec.from_dict(
            {"spec": {"_meta": {"version": 2}, "nodes": [{"dependencies": {"name": "foo"}}]}}
        )


def test_spec_dict_hashless_dep():
@@ -1164,9 +1166,10 @@ def test_spec_dict_hashless_dep():
    Spec.from_dict(
        {
            "spec": {
                "_meta": {"version": 2},
                "nodes": [
                    {"name": "foo", "hash": "thehash", "dependencies": [{"name": "bar"}]}
                ]
                ],
            }
        }
    )
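Both hunks above thread a "_meta" block into the test payloads: spec dictionaries now carry an explicit format version. A sketch of the version-2 envelope shape they construct; the node values are illustrative, not a guaranteed-valid spec:

    minimal_v2_payload = {
        "spec": {
            "_meta": {"version": 2},  # specfile format version consumed by the reader
            "nodes": [{"name": "foo", "hash": "thehash", "dependencies": [{"name": "bar"}]}],
        }
    }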
@@ -1252,7 +1255,7 @@ def test_concretize_partial_old_dag_hash_spec(mock_packages, config):

    # add it to an abstract spec as a dependency
    top = Spec("dt-diamond")
    top.add_dependency_edge(bottom, ())
    top.add_dependency_edge(bottom, deptypes=())

    # concretize with the already-concrete dependency
    top.concretize()
@@ -13,7 +13,9 @@
import ast
import collections
import collections.abc
import gzip
import inspect
import json
import os

import pytest

@@ -507,3 +509,33 @@ def test_legacy_yaml(tmpdir, install_mockery, mock_packages):
            ("version", "1.2.11"),
        ]
    )


@pytest.mark.parametrize(
    "specfile,expected_hash,reader_cls",
    [
        # First version supporting JSON format for specs
        ("specfiles/hdf5.v013.json.gz", "vglgw4reavn65vx5d4dlqn6rjywnq76d", spack.spec.SpecfileV1),
        # Introduces full hash in the format, still has 3 hashes
        ("specfiles/hdf5.v016.json.gz", "stp45yvzte43xdauknaj3auxlxb4xvzs", spack.spec.SpecfileV1),
        # Introduces "build_specs", see https://github.com/spack/spack/pull/22845
        ("specfiles/hdf5.v017.json.gz", "xqh5iyjjtrp2jw632cchacn3l7vqzf3m", spack.spec.SpecfileV2),
        # Use "full hash" everywhere, see https://github.com/spack/spack/pull/28504
        ("specfiles/hdf5.v019.json.gz", "iulacrbz7o5v5sbj7njbkyank3juh6d3", spack.spec.SpecfileV3),
    ],
)
def test_load_json_specfiles(specfile, expected_hash, reader_cls):
    fullpath = os.path.join(spack.paths.test_path, "data", specfile)
    with gzip.open(fullpath, "rt", encoding="utf-8") as f:
        data = json.load(f)

    s1 = Spec.from_dict(data)
    s2 = reader_cls.load(data)

    assert s2.dag_hash() == expected_hash
    assert s1.dag_hash() == s2.dag_hash()
    assert s1 == s2
    assert Spec.from_json(s2.to_json()).dag_hash() == s2.dag_hash()

    openmpi_edges = s2.edges_to_dependencies(name="openmpi")
    assert len(openmpi_edges) == 1
@@ -18,8 +18,8 @@ def create_dag(nodes, edges):
        dict: mapping from package name to abstract Spec with proper deps.
    """
    specs = {name: Spec(name) for name in nodes}
    for parent, child, deptype in edges:
        specs[parent].add_dependency_edge(specs[child], deptype)
    for parent, child, deptypes in edges:
        specs[parent].add_dependency_edge(specs[child], deptypes=deptypes)
    return specs
@@ -238,7 +238,7 @@ class HasManyMetadataAttributes:
    url = "https://example.com/foo.tar.gz"
    git = "https://example.com/foo/bar.git"

    maintainers = ["alice", "bob"]
    maintainers("alice", "bob")
    tags = ["foo", "bar", "baz"]

    depends_on("foo")
@@ -912,7 +912,7 @@ def inspect_path(root, inspections, exclude=None):
    env = EnvironmentModifications()
    # Inspect the prefix to check for the existence of common directories
    for relative_path, variables in inspections.items():
        expected = os.path.join(root, relative_path)
        expected = os.path.join(root, os.path.normpath(relative_path))

        if os.path.isdir(expected) and not exclude(expected):
            for variable in variables:
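Why the added normpath call matters, in a short sketch (paths illustrative): without it, an inspected relative path with redundant separators or dot segments is joined verbatim; on Windows, normpath additionally rewrites forward slashes to the native separator before the isdir check:

    import os

    root = "/opt/pkg"
    print(os.path.join(root, "lib/./pkgconfig"))                    # /opt/pkg/lib/./pkgconfig
    print(os.path.join(root, os.path.normpath("lib/./pkgconfig")))  # /opt/pkg/lib/pkgconfig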
@@ -7,10 +7,22 @@ default:
# Job templates
########################################

.pr:
.aws-pr-creds:
  variables:
    AWS_ACCESS_KEY_ID: ${PR_MIRRORS_AWS_ACCESS_KEY_ID}
    AWS_SECRET_ACCESS_KEY: ${PR_MIRRORS_AWS_SECRET_ACCESS_KEY}

.aws-protected-creds:
  variables:
    AWS_ACCESS_KEY_ID: ${PROTECTED_MIRRORS_AWS_ACCESS_KEY_ID}
    AWS_SECRET_ACCESS_KEY: ${PROTECTED_MIRRORS_AWS_SECRET_ACCESS_KEY}

.pr-refs:
  only:
    - /^pr[\d]+_.*$/
    - /^github\/pr[\d]+_.*$/

.pr:
  extends: [ ".pr-refs" ]
  variables:
    SPACK_BUILDCACHE_DESTINATION: "s3://spack-binaries-prs/${CI_COMMIT_REF_NAME}/${SPACK_CI_STACK_NAME}"
    SPACK_PIPELINE_TYPE: "spack_pull_request"
@@ -21,7 +33,6 @@ default:
    - /^develop$/
    - /^releases\/v.*/
    - /^v.*/
    - /^github\/develop$/

.protected:
  extends: [ ".protected-refs" ]
@@ -30,7 +41,7 @@ default:
    SPACK_COPY_BUILDCACHE: "s3://spack-binaries/${CI_COMMIT_REF_NAME}"
    SPACK_PIPELINE_TYPE: "spack_protected_branch"

.generate:
.generate-base:
  stage: generate
  script:
    - uname -a || true
@@ -49,7 +60,6 @@ default:
  artifacts:
    paths:
      - "${CI_PROJECT_DIR}/jobs_scratch_dir"
  tags: ["spack", "aws", "public", "medium", "x86_64"]
  variables:
    KUBERNETES_CPU_REQUEST: 4000m
    KUBERNETES_MEMORY_REQUEST: 16G
@@ -58,12 +68,15 @@ default:
  retry:
    max: 2
    when:
      - runner_system_failure
      - stuck_or_timeout_failure
      - always

.generate:
  extends: [ ".generate-base" ]
  tags: ["spack", "public", "medium", "x86_64"]

.generate-aarch64:
  extends: [ ".generate" ]
  tags: ["spack", "aws", "public", "medium", "aarch64"]
  tags: ["spack", "public", "medium", "aarch64"]

.pr-generate:
  extends: [ ".pr", ".generate" ]
@@ -81,20 +94,14 @@ default:
  stage: build

.pr-build:
  extends: [ ".pr", ".build" ]
  variables:
    AWS_ACCESS_KEY_ID: ${PR_MIRRORS_AWS_ACCESS_KEY_ID}
    AWS_SECRET_ACCESS_KEY: ${PR_MIRRORS_AWS_SECRET_ACCESS_KEY}
  extends: [ ".pr", ".build", ".aws-pr-creds" ]

.protected-build:
  extends: [ ".protected", ".build" ]
  variables:
    AWS_ACCESS_KEY_ID: ${PROTECTED_MIRRORS_AWS_ACCESS_KEY_ID}
    AWS_SECRET_ACCESS_KEY: ${PROTECTED_MIRRORS_AWS_SECRET_ACCESS_KEY}
  extends: [ ".protected", ".build", ".aws-protected-creds" ]

protected-publish:
  stage: publish
  extends: [ ".protected" ]
  extends: [ ".protected", ".aws-protected-creds" ]
  image: "ghcr.io/spack/python-aws-bash:0.0.1"
  tags: ["spack", "public", "medium", "aws", "x86_64"]
  retry:
@@ -199,7 +206,6 @@ protected-publish:
#   variables:
#     AWS_ACCESS_KEY_ID: ${PR_MIRRORS_AWS_ACCESS_KEY_ID}
#     AWS_SECRET_ACCESS_KEY: ${PR_MIRRORS_AWS_SECRET_ACCESS_KEY}

# .mac-protected-build:
#   extends: [ ".mac-protected", ".build" ]
#   variables:
@@ -317,6 +323,43 @@ e4s-protected-build:
    - artifacts: True
      job: e4s-protected-generate

########################################
# GPU Testing Pipeline
########################################
.gpu-tests:
  variables:
    SPACK_CI_STACK_NAME: gpu-tests

gpu-tests-pr-generate:
  extends: [ ".gpu-tests", ".pr-generate"]
  image: ecpe4s/ubuntu20.04-runner-x86_64:2023-01-01

gpu-tests-protected-generate:
  extends: [ ".gpu-tests", ".protected-generate"]
  image: ecpe4s/ubuntu20.04-runner-x86_64:2023-01-01

gpu-tests-pr-build:
  extends: [ ".gpu-tests", ".pr-build" ]
  trigger:
    include:
      - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
        job: gpu-tests-pr-generate
    strategy: depend
  needs:
    - artifacts: True
      job: gpu-tests-pr-generate

gpu-tests-protected-build:
  extends: [ ".gpu-tests", ".protected-build" ]
  trigger:
    include:
      - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
        job: gpu-tests-protected-generate
    strategy: depend
  needs:
    - artifacts: True
      job: gpu-tests-protected-generate

########################################
# E4S OneAPI Pipeline
########################################
@@ -326,11 +369,11 @@ e4s-protected-build:

e4s-oneapi-pr-generate:
  extends: [ ".e4s-oneapi", ".pr-generate"]
  image: ecpe4s/ubuntu20.04-runner-x86_64-oneapi:2022-07-01
  image: ecpe4s/ubuntu20.04-runner-x86_64-oneapi:2023-01-01

e4s-oneapi-protected-generate:
  extends: [ ".e4s-oneapi", ".protected-generate"]
  image: ecpe4s/ubuntu20.04-runner-x86_64-oneapi:2022-07-01
  image: ecpe4s/ubuntu20.04-runner-x86_64-oneapi:2023-01-01

e4s-oneapi-pr-build:
  extends: [ ".e4s-oneapi", ".pr-build" ]
@@ -357,41 +400,41 @@ e4s-oneapi-protected-build:
########################################
# E4S on Power
########################################
# .power-e4s-generate-tags-and-image:
#   image: { "name": "ghcr.io/scottwittenburg/ecpe4s-ubuntu20.04-runner-ppc64le:2021-07-01", "entrypoint": [""] }
#   tags: ["spack", "public", "medium", "ppc64le"]
.e4s-power-generate-tags-and-image:
  image: { "name": "ecpe4s/ubuntu20.04-runner-ppc64le:2023-01-01", "entrypoint": [""] }
  tags: ["spack", "public", "large", "ppc64le"]

# .e4s-on-power:
#   variables:
#     SPACK_CI_STACK_NAME: e4s-on-power
.e4s-power:
  variables:
    SPACK_CI_STACK_NAME: e4s-power

# e4s-on-power-pr-generate:
#   extends: [ ".e4s-on-power", ".pr-generate", ".power-e4s-generate-tags-and-image"]
e4s-power-pr-generate:
  extends: [ ".e4s-power", ".pr-generate", ".e4s-power-generate-tags-and-image"]

# e4s-on-power-protected-generate:
#   extends: [ ".e4s-on-power", ".protected-generate", ".power-e4s-generate-tags-and-image"]
e4s-power-protected-generate:
  extends: [ ".e4s-power", ".protected-generate", ".e4s-power-generate-tags-and-image"]

# e4s-on-power-pr-build:
#   extends: [ ".e4s-on-power", ".pr-build" ]
#   trigger:
#     include:
#       - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
#         job: e4s-on-power-pr-generate
#     strategy: depend
#   needs:
#     - artifacts: True
#       job: e4s-on-power-pr-generate
e4s-power-pr-build:
  extends: [ ".e4s-power", ".pr-build" ]
  trigger:
    include:
      - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
        job: e4s-power-pr-generate
    strategy: depend
  needs:
    - artifacts: True
      job: e4s-power-pr-generate

# e4s-on-power-protected-build:
#   extends: [ ".e4s-on-power", ".protected-build" ]
#   trigger:
#     include:
#       - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
#         job: e4s-on-power-protected-generate
#     strategy: depend
#   needs:
#     - artifacts: True
#       job: e4s-on-power-protected-generate
e4s-power-protected-build:
  extends: [ ".e4s-power", ".protected-build" ]
  trigger:
    include:
      - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
        job: e4s-power-protected-generate
    strategy: depend
  needs:
    - artifacts: True
      job: e4s-power-protected-generate

#########################################
# Build tests for different build-systems
@@ -1,269 +0,0 @@
spack:
  view: false

  concretizer:
    reuse: false
    unify: false

  config:
    concretizer: clingo
    install_tree:
      root: /home/software/spack
      padded_length: 512
      projections:
        all: '{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'

  packages:
    all:
      compiler:
        - gcc@9.3.0
      providers:
        blas:
          - openblas
        mpi:
          - mpich
      target:
        - ppc64le
      variants: +mpi
    binutils:
      variants: +ld +gold +headers +libiberty ~nls +plugins
      version:
        - 2.36.1
    doxygen:
      version:
        - 1.8.20
    elfutils:
      variants: +bzip2 ~nls +xz
    hdf5:
      variants: +fortran +hl +shared api=v18
      version:
        - 1.12.0
    libfabric:
      variants: fabrics=sockets,tcp,udp,rxm
    libunwind:
      variants: +pic +xz
    mesa:
      variants: ~llvm
    mesa18:
      variants: ~llvm
    mpich:
      variants: ~wrapperrpath
    ncurses:
      variants: +termlib
    openblas:
      variants: threads=openmp
    trilinos:
      variants: +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
    xz:
      variants: +pic

  definitions:

    - cuda_specs:
      - amrex +cuda cuda_arch=70
      - caliper +cuda cuda_arch=70
      - chai ~benchmarks ~tests +cuda cuda_arch=70 ^umpire ~shared
      - ginkgo +cuda cuda_arch=70
      - heffte +cuda cuda_arch=70
      - hpx +cuda cuda_arch=70
      - hypre +cuda cuda_arch=70
      - kokkos +wrapper +cuda cuda_arch=70
      - kokkos-kernels +cuda cuda_arch=70 ^kokkos +wrapper +cuda cuda_arch=70
      - magma +cuda cuda_arch=70
      - mfem +cuda cuda_arch=70
      - parsec +cuda cuda_arch=70
      - petsc +cuda cuda_arch=70
      - raja +cuda cuda_arch=70
      - slate +cuda cuda_arch=70
      - slepc +cuda cuda_arch=70
      - strumpack ~slate +cuda cuda_arch=70
      - sundials +cuda cuda_arch=70
      - superlu-dist +cuda cuda_arch=70
      - tasmanian +cuda cuda_arch=70
      - trilinos@13.2.0 +cuda cuda_arch=70
      - umpire ~shared +cuda cuda_arch=70
      - vtk-m +cuda cuda_arch=70
      - zfp +cuda cuda_arch=70
      #- ascent ~shared +cuda cuda_arch=70
      #- axom +cuda cuda_arch=70 ^umpire ~shared
      #- dealii +cuda cuda_arch=70 # gmsh
      #- flecsi +cuda cuda_arch=70
      #- paraview +cuda cuda_arch=70
    - default_specs:
      - adios
      - adios2
      - aml
      - amrex
      - arborx
      - archer
      - argobots
      - ascent
      - axom ^umpire@4.1.2
      - bolt
      - cabana
      - caliper
      - chai ~benchmarks ~tests ^umpire@4.1.2
      - charliecloud
      - conduit
      - darshan-runtime
      - darshan-util
      - datatransferkit
      - dyninst
      - faodel ~tcmalloc
      - flecsi
      - flit
      - flux-core
      - fortrilinos
      - gasnet
      - ginkgo
      - globalarrays
      - gmp
      - gotcha
      - gptune
      - hdf5
      - heffte +fftw
      - hpctoolkit
      - hpx
      - hypre
      - kokkos +openmp
      - kokkos-kernels +openmp
      - legion
      - libnrm
      - libquo
      - libunwind
      - llvm targets=amdgpu,nvptx +clang +compiler-rt +libcxx +lld +lldb +llvm_dylib +flang ~cuda
      - loki
      - mercury
      - metall
      - mfem
      - mpark-variant
      - mpifileutils ~xattr
      - netlib-scalapack
      - ninja
      - nrm
      - nvhpc
      - omega-h
      - openmpi
      - openpmd-api ^hdf5@1.12.0 +fortran +shared +hl api=default
      - papi
      - papyrus@1.0.1
      - parallel-netcdf
      - paraview
      - parsec ~cuda
      - pdt
      - petsc
      - plasma
      - precice
      - pumi
      - py-jupyterhub
      - py-libensemble
      - py-petsc4py
      - py-warpx ^warpx dims=2 ^hdf5@1.12.0 +fortran +shared +hl api=default
      - py-warpx ^warpx dims=3 ^hdf5@1.12.0 +fortran +shared +hl api=default
      - py-warpx ^warpx dims=rz ^hdf5@1.12.0 +fortran +shared +hl api=default
      - qthreads scheduler=distrib
      - raja
      - rempi
      - scr
      - slate ~cuda
      - slepc
      - stc
      - strumpack ~slate
      - sundials
      - superlu
      - superlu-dist
      - swig
      - swig@4.0.2-fortran
      - sz
      - tasmanian
      - tau +mpi +python
      - trilinos +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
      - turbine
      - umap
      - unifyfs@0.9.1
      - upcxx
      - variorum
      - veloc
      - vtk-m
      - zfp
      #- dealii
      #- geopm
      #- phist
      #- qt
      #- qwt
      #- stat
      #- umpire

    - arch:
      - '%gcc target=ppc64le'
  specs:

    - matrix:
        - - $default_specs
        - - $arch

    - matrix:
        - - $cuda_specs
        - - $arch

  mirrors: { "mirror": "s3://spack-binaries/e4s" }

  gitlab-ci:

    script:
      - uname -a || true
      - grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true
      - nproc
      - curl -Lfs 'https://github.com/JuliaBinaryWrappers/GNUMake_jll.jl/releases/download/GNUMake-v4.3.0+1/GNUMake.v4.3.0.powerpc64le-linux-gnu.tar.gz' -o gmake.tar.gz
      - printf '8096d202fe0a0c400b8c0573c4b9e009f2f10d2fa850a3f495340f16e9c42454 gmake.tar.gz' | sha256sum --check --strict --quiet
      - tar -xzf gmake.tar.gz -C /usr bin/make 2> /dev/null
      - . "./share/spack/setup-env.sh"
      - spack --version
      - spack arch
      - cd ${SPACK_CONCRETE_ENV_DIR}
      - spack env activate --without-view .
      - spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
      - spack --color=always --backtrace ci rebuild
    after_script:
      - cat /proc/loadavg || true

    match_behavior: first
    mappings:
      - match:
          - cuda
          - dyninst
          - hpx
          - llvm
          - llvm-amdgpu
          - precice
          - rocblas
          - rocsolver
          - strumpack
          - sundials
          - trilinos
          - vtk-h
          - vtk-m
          - warpx
        runner-attributes:
          image: { "name": "ghcr.io/spack/e4s-ubuntu-20.04:v2021-10-18", "entrypoint": [""] }
          tags: ["spack", "public", "xlarge", "ppc64le"]
      - match: ['os=ubuntu20.04']
        runner-attributes:
          image: { "name": "ghcr.io/spack/e4s-ubuntu-20.04:v2021-10-18", "entrypoint": [""] }
          tags: ["spack", "public", "large", "ppc64le"]
    broken-specs-url: "s3://spack-binaries-develop/broken-specs"
    service-job-attributes:
      before_script:
        - . "./share/spack/setup-env.sh"
        - spack --version
      image: { "name": "ghcr.io/spack/e4s-ubuntu-20.04:v2021-10-18", "entrypoint": [""] }
      tags: ["spack", "public", "medium", "ppc64le"]

  cdash:
    build-group: New PR testing workflow
    url: https://cdash.spack.io
    project: Spack Testing
    site: Cloud Gitlab Infrastructure
@@ -3,7 +3,7 @@ spack:

  concretizer:
    reuse: false
    unify: when_possible
    unify: false

  config:
    build_jobs: 32
@@ -16,37 +16,37 @@ spack:

  compilers:
  - compiler:
      spec: dpcpp@2022.1.0
      spec: dpcpp@2023.0.0
      paths:
        cc: /opt/intel/oneapi/compiler/2022.1.0/linux/bin/icx
        cxx: /opt/intel/oneapi/compiler/2022.1.0/linux/bin/dpcpp
        f77: /opt/intel/oneapi/compiler/2022.1.0/linux/bin/ifx
        fc: /opt/intel/oneapi/compiler/2022.1.0/linux/bin/ifx
        cc: /opt/intel/oneapi/compiler/2023.0.0/linux/bin/icx
        cxx: /opt/intel/oneapi/compiler/2023.0.0/linux/bin/dpcpp
        f77: /opt/intel/oneapi/compiler/2023.0.0/linux/bin/ifx
        fc: /opt/intel/oneapi/compiler/2023.0.0/linux/bin/ifx
      flags: {}
      operating_system: ubuntu20.04
      target: x86_64
      modules: [compiler]
      environment:
        prepend_path:
          LD_LIBRARY_PATH: /opt/intel/oneapi/compiler/2022.1.0/linux/compiler/lib/intel64_lin
          LD_LIBRARY_PATH: /opt/intel/oneapi/compiler/2023.0.0/linux/compiler/lib/intel64_lin
      extra_rpaths: []
  - compiler:
      spec: oneapi@2022.1.0
      spec: oneapi@2023.0.0
      paths:
        cc: /opt/intel/oneapi/compiler/2022.1.0/linux/bin/icx
        cxx: /opt/intel/oneapi/compiler/2022.1.0/linux/bin/icpx
        f77: /opt/intel/oneapi/compiler/2022.1.0/linux/bin/ifx
        fc: /opt/intel/oneapi/compiler/2022.1.0/linux/bin/ifx
        cc: /opt/intel/oneapi/compiler/2023.0.0/linux/bin/icx
        cxx: /opt/intel/oneapi/compiler/2023.0.0/linux/bin/icpx
        f77: /opt/intel/oneapi/compiler/2023.0.0/linux/bin/ifx
        fc: /opt/intel/oneapi/compiler/2023.0.0/linux/bin/ifx
      flags: {}
      operating_system: ubuntu20.04
      target: x86_64
      modules: [compiler]
      environment:
        prepend_path:
          LD_LIBRARY_PATH: /opt/intel/oneapi/compiler/2022.1.0/linux/compiler/lib/intel64_lin
          LD_LIBRARY_PATH: /opt/intel/oneapi/compiler/2023.0.0/linux/compiler/lib/intel64_lin
      extra_rpaths: []
  - compiler:
      spec: gcc@9.4.0
      spec: gcc@11.1.0
      paths:
        cc: /usr/bin/gcc
        cxx: /usr/bin/g++
@@ -60,20 +60,13 @@ spack:
      extra_rpaths: []
  packages:
    adios2:
      require: "%gcc"
    all:
      require: "%oneapi"
      require: '%oneapi'
      providers:
        blas: [openblas]
        mpi: [mpich]
      target: [x86_64]
      variants: +mpi
    binutils:
      require: "%gcc"
      variants: +ld +gold +headers +libiberty ~nls
    cuda:
      version: [11.4.2]
    elfutils:
      variants: +bzip2 ~nls +xz
    hdf5:
@@ -82,8 +75,6 @@ spack:
      variants: fabrics=sockets,tcp,udp,rxm
    libunwind:
      variants: +pic +xz
    llvm:
      require: "%gcc" # undefined reference to `_intel_fast_memset' because of -nodefaultlibs
    mpich:
      variants: ~wrapperrpath
    ncurses:
@@ -92,35 +83,55 @@ spack:
      variants: threads=openmp
    python:
      version: [3.8.13]
    ruby:
      require: "%gcc" # https://github.com/spack/spack/issues/31954
    rust:
      require: "%gcc" # undefined reference because of -nodefaultlibs
    trilinos:
      variants: +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext
        +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu
        +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos
        +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
    vtk-m:
      require: "~openmp"
      require: ~openmp
    xz:
      variants: +pic
    mesa:
      version: [21.3.8]

    binutils:
      require: '%gcc'
      variants: +ld +gold +headers +libiberty ~nls
    bison:
      require: '%gcc'
    krb5:
      require: '%gcc'
    llvm:
      require: '%gcc'
    m4:
      require: '%gcc'
    openssh:
      require: '%gcc'
    papi:
      require: '%gcc'
    py-scipy:
      require: '%gcc'
    ruby:
      require: '%gcc'
    rust:
      require: '%gcc'
    unzip:
      require: '%gcc'
  specs:
  # CPU
  - adios
  - adios2
  - alquimia
  - aml
  - amrex
  - arborx
  - archer
  - argobots
  - ascent
  - amrex
  - axom
  - bolt
  - bricks
  - butterflypack
  - cabana
  - caliper
@@ -129,20 +140,15 @@ spack:
  - darshan-runtime
  - darshan-util
  - datatransferkit
  - exaworks
  - faodel
  - flit
  - flux-core
  - fortrilinos
  - gasnet
  - ginkgo
  - globalarrays
  - gmp
  - gotcha
  - gptune
  - hdf5 +fortran +hl +shared
  - heffte +fftw
  - hpx max_cpu_count=512 networking=mpi
  - hypre
  - kokkos-kernels +openmp
  - kokkos +openmp
@@ -160,7 +166,6 @@ spack:
  - nccmp
  - nco
  - netlib-scalapack
  - nrm
  - omega-h
  - openmpi
  - openpmd-api
@@ -174,17 +179,11 @@ spack:
  - plumed
  - precice
  - pumi
  - py-cinemasci
  - py-jupyterhub
  - py-libensemble
  - py-petsc4py
  - py-warpx ^warpx dims=2
  - py-warpx ^warpx dims=3
  - py-warpx ^warpx dims=rz
  - qthreads scheduler=distrib
  - raja
  - rempi
  - scr
  - slate ~cuda
  - slepc
  - stc
@@ -193,14 +192,12 @@ spack:
  - superlu-dist
  - superlu
  - swig
  - swig@4.0.2-fortran
  - sz
  - tasmanian
  - tau +mpi +python
  - trilinos@13.0.1 +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext
    +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu
    +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos
    +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
    +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu
    +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos
    +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
  - turbine
  - umap
  - umpire
@@ -213,54 +210,72 @@ spack:
|
||||
|
||||
# GPU
|
||||
- aml +ze
|
||||
- amrex +sycl
|
||||
- arborx +sycl ^kokkos +sycl +openmp std=17 +tests +examples
|
||||
- cabana +sycl ^kokkos+sycl +openmp std=17 +tests +examples
|
||||
- cabana +sycl ^kokkos +sycl +openmp std=17 +tests +examples
|
||||
- kokkos +sycl +openmp std=17 +tests +examples %oneapi
|
||||
- kokkos-kernels build_type=Release %oneapi ^kokkos +sycl +openmp std=17 +tests +examples %oneapi
|
||||
- kokkos-kernels build_type=Release ^kokkos +sycl +openmp std=17 +tests +examples %oneapi

# CPU BUILD FAILURES
#- adios2@2.8.0 # adios2
#- charliecloud@0.26 # charliecloud
#- dyninst@12.1.0 # old intel-tbb
#- geopm@1.1.0 # geopm
#- h5bench@1.2 # h5bench
#- hpctoolkit # dyninst
#- phist@1.9.5 # phist
#- paraview +qt # qt
#- pruners-ninja@1.0.1 # pruners-ninja
#- variorum@0.4.1 # variorum

# CPU BUILD FAILURES - NOTES
# adios2: /usr/bin/ld: ../../lib/libadios2_fortran.so.2.8.2: version node not found for symbol adios2_adios_init_mod@adios2_adios_init_serial_smod._; /usr/bin/ld: failed to set dynamic section sizes: bad value
# CPU FAILURES
# - bricks # bricks
# - charliecloud # charliecloud
# - dyninst # old intel-tbb
# - exaworks # py-setuptools-scm
# - flux-core # py-setuptools-scm
# - geopm # geopm
# - ginkgo # ginkgo
# - gptune # py-scipy@1.3.3
# - h5bench # h5bench
# - hpctoolkit # dyninst
# - hpx max_cpu_count=512 networking=mpi # boost cxxstd=17
# - nrm # py-scipy
# - paraview +qt # qt
# - phist # phist
# - pruners-ninja # pruners-ninja
# - py-cinemasci # py-scipy@1.3.3, py-setuptools-scm
# - py-jupyterhub # py-setuptools-scm
# - py-warpx ^warpx dims=2 # py-scipy@1.5.4
# - py-warpx ^warpx dims=3 # py-scipy@1.5.4
# - py-warpx ^warpx dims=rz # py-scipy@1.5.4
# - scr # libyogrt
# - swig@4.0.2-fortran # swig
# - tau +mpi +python # tau
# - variorum # variorum
# --
# amrex: /opt/intel/oneapi/compiler/2023.0.0/linux/bin-llvm/../include/sycl/detail/defines_elementary.hpp:52:40: note: expanded from macro '__SYCL2020_DEPRECATED'
# binutils: gold/powerpc.cc:3590: undefined reference to `gold::Sized_symbol<64>::Value_type gold::Symbol_table::compute_final_value<64>(gold::Sized_symbol<64> const*, gold::Symbol_table::Compute_final_value_status*) const'
# boost cxxstd=17: ./boost/mpl/aux_/integral_wrapper.hpp:73:31: error: integer value -1 is outside the valid range of values [0, 3] for this enumeration type [-Wenum-constexpr-conversion]
# bricks: cc1plus: error: bad value ('OFF') for '-mtune=' switch
# charliecloud: autoreconf phase: RuntimeError: configure script not found in ...
# flux-sched: include/yaml-cpp/emitter.h:164:9: error: comparison with NaN always evaluates to false in fast floating point modes [-Werror,-Wtautological-constant-compare]
# flux-sched: include/yaml-cpp/emitter.h:171:24: error: comparison with infinity always evaluates to false in fast floating point modes [-Werror,-Wtautological-constant-compare]
# ginkgo: icpx: error: clang frontend command failed with exit code 139
# h5bench: commons/h5bench_util.h:196: multiple definition of `has_vol_async';
# intel-tbb: clang++clang++clang++clang++clang++clang++clang++: : : : : : : clang++error: : unknown argument: '-flifetime-dse=1'
# libyogrt: configure: error: slurm is not in specified location!
# phist: fortran_bindings/test/kernels.F90(63): error #8284: If the actual argument is scalar, the dummy argument shall be scalar unless the actual argument is of type character or is an element of an array that is not assumed shape, pointer, or polymorphic. [ARGV]
# pruners-ninja: test/ninja_test_util.c:34: multiple definition of `a';
# py-cryptography: ??
# py-scipy@1.3.3: gcc: error: unrecognized command-line option '-fp-model=strict'
# py-scipy@1.5.4: gcc: error: unrecognized command-line option '-fp-model=strict'
# py-setuptools-scm: ??
# ruby: limits.c:415:34: error: invalid suffix 'D' on floating constant
# rust: /usr/bin/ld: /opt/intel/oneapi/compiler/2022.1.0/linux/bin-llvm/../compiler/lib/intel64_lin/libimf.a(libm_feature_flag.o): in function `__libm_feature_flag_init': libm_feature_flag.c:(.text+0x25): undefined reference to `__intel_cpu_feature_indicator_x'
# swig@4.0.2-fortran: /spack/opt/spack/linux-ubuntu20.04-x86_64/gcc-11.1.0/m4-1.4.19-p3otmjixpi6zibdsyoqib5dpzfshq3nj/bin/m4:/spack/opt/spack/linux-ubuntu20.04-x86_64/oneapi-2023.0.0/bison-3.8.2-xca2sot4jhd72hvj2m2b3ajchagczvau/share/bison/skeletons/yacc.c:420: undefined macro `b4_symbol(103, tag)'
# tau: Error: Unable to identify ifort lib directory
# variorum: ld: Intel/CMakeFiles/variorum_intel.dir/msr_core.c.o:(.bss+0x0): multiple definition of `g_platform'; CMakeFiles/variorum.dir/config_architecture.c.o:(.bss+0x0): first defined here
# vtk-m +openmp: clang++: error: clang frontend command failed with exit code 139 (use -v to see invocation)

# GPU BUILD FAILURES
#- ginkgo@1.4.0 +oneapi %dpcpp ^cmake%oneapi # ginkgo
#- hpctoolkit@2022.04.15 +level_zero # dyninst
#- sundials@6.2.0 +sycl cxxstd=17 # sundials
#- tau@2.31.1 +mpi +opencl +level_zero ~pdt %oneapi ^binutils%gcc@9.4.0 ^papi%gcc@9.4.0 # tau

# GPU BUILD FAILURES - NOTES
# berkeley-db %dpcpp: dpcpp: dpcpperror: : no such file or directory: '/tmp/conftest-9d8d34.o'
# ginkgo %dpcpp: CMakeTestCXXCompiler.cmake:62: /usr/bin/ld: warning: libsvml.so, needed by /opt/intel/oneapi/compiler/2022.1.0/linux/bin-llvm/../lib/libsycl.so, not found (try using -rpath or -rpath-link) ...
# ncurses %dpcpp: If you have ncurses 4.2 applications, you should read the INSTALL document, and install the terminfo without the -x optiontic: error while loading shared libraries: libsvml.so: cannot open shared object file: No such file or directory
# sundials: include/sunmemory/sunmemory_sycl.h:20:10: fatal error: 'CL/sycl.hpp' file not found
# tau: requires libdrm-dev
# GPU FAILURES
# - amrex +sycl # amrex
# - ginkgo +oneapi # ginkgo
# - hpctoolkit +level_zero # dyninst
# - sundials +sycl cxxstd=17 # sundials
# - tau +mpi +opencl +level_zero ~pdt # tau
# --

# SKIPPED
#- flecsi@1.4.2 # dependency pfunit marks oneapi as an unsupported compiler
# - flecsi # dependency pfunit marks oneapi as an unsupported compiler

mirrors: { "mirror": "s3://spack-binaries/develop/e4s-oneapi" }

@@ -286,14 +301,14 @@ spack:
- if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi
- if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi
- export PATH=/bootstrap/runner/view/bin:${PATH}
- . /bootstrap/runner/install/linux-ubuntu20.04-x86_64/gcc-9.4.0/lmod-8.7.2-ri26z7qy6ixtgpsqinswx3w6tuggluv5/lmod/8.7.2/init/bash
- . /bootstrap/runner/install/linux-ubuntu20.04-x86_64/gcc-11.1.0/lmod-8.7.2-b7eq7rjeckn3m4o4lglsakilibkznjom/lmod/8.7.2/init/bash
- module use /opt/intel/oneapi/modulefiles
- module load compiler
- spack --color=always --backtrace ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)
after_script:
- cat /proc/loadavg || true

image: ecpe4s/ubuntu20.04-runner-x86_64-oneapi:2022-07-01
image: ecpe4s/ubuntu20.04-runner-x86_64-oneapi:2023-01-01

match_behavior: first
mappings:
@@ -478,7 +493,7 @@ spack:
before_script:
- . "./share/spack/setup-env.sh"
- spack --version
image: ecpe4s/ubuntu20.04-runner-x86_64-oneapi:2022-07-01
image: ecpe4s/ubuntu20.04-runner-x86_64-oneapi:2023-01-01
tags: ["spack", "public", "x86_64"]

signing-job-attributes:
423 share/spack/gitlab/cloud_pipelines/stacks/e4s-power/spack.yaml Normal file
@@ -0,0 +1,423 @@
spack:
view: false

concretizer:
reuse: false
unify: false

config:
concretizer: clingo
install_tree:
root: /home/software/spack
padded_length: 512
projections:
all: '{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'
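Two of these settings exist specifically for binary-cache pipelines: reuse: false and unify: false make every root spec concretize fresh and independently of the others, and padded_length: 512 pads each install prefix so binaries built here can later be relocated into any shorter prefix by consumers of the cache. Condensed, with the intent spelled out:

    concretizer:
      reuse: false         # do not bias solves toward already-installed specs
      unify: false         # each root spec gets its own independent solve
    config:
      install_tree:
        padded_length: 512   # long padded prefixes -> relocatable cached binaries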

packages:
all:
compiler: [gcc@11.1.0]
providers:
blas: [openblas]
mpi: [mpich]
target: [ppc64le]
variants: +mpi cuda_arch=70
tbb:
require: intel-tbb
binutils:
variants: +ld +gold +headers +libiberty ~nls
cuda:
version: [11.7.0]
elfutils:
variants: +bzip2 ~nls +xz
hdf5:
variants: +fortran +hl +shared
libfabric:
variants: fabrics=sockets,tcp,udp,rxm
libunwind:
variants: +pic +xz
mpich:
variants: ~wrapperrpath
ncurses:
variants: +termlib
openblas:
variants: threads=openmp
paraview:
require: '@5.11 ~qt+osmesa'
python:
version: [3.7.15]
trilinos:
require: +amesos +amesos2 +anasazi +aztec +boost +epetra +epetraext +ifpack
+intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro
+phalanx +rol +rythmos +sacado +stk +shards +shylu +stratimikos +teko +tempus
+tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
xz:
variants: +pic
mesa:
version: [21.3.8]
faodel:
require: ~tcmalloc # needed for ppc64le
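Everything under packages:all: is a stack-wide default: the compiler, the providers that satisfy virtual dependencies such as blas and mpi, the target, and default variants; per-package entries like the faodel requirement above override those defaults for one package. Trimmed to the essentials:

    packages:
      all:
        compiler: [gcc@11.1.0]       # default compiler for every spec
        providers:
          blas: [openblas]           # any 'blas' dependency becomes openblas
          mpi: [mpich]               # any 'mpi' dependency becomes mpich
        variants: +mpi cuda_arch=70  # defaults, applied where a package defines them
      faodel:
        require: ~tcmalloc           # per-package override (needed for ppc64le)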

specs:
# CPU
- adios
- alquimia
- aml
- amrex
- arborx
- argobots
- axom
- bolt
- butterflypack
- cabana
- caliper
- chai ~benchmarks ~tests
- charliecloud
- conduit
- datatransferkit
- dyninst
- ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 ~paraview +pnetcdf +sz +unifyfs +veloc +visit +vtkm +zfp # +paraview fails: FAILED: VTK/Filters/Statistics/CMakeFiles/FiltersStatistics-objects.dir/vtkPCAStatistics.cxx.o: /tmp/ccgvkIk5.s: Assembler messages: /tmp/ccgvkIk5.s:260012: Error: invalid machine `power10'
- exaworks
- flecsi
- flit
- flux-core
- fortrilinos
- gasnet
- ginkgo
- globalarrays
- gmp
- gotcha
- gptune
- h5bench
- hdf5-vol-async
- heffte +fftw
- hpctoolkit
- hpx max_cpu_count=512 networking=mpi
- hypre
- kokkos +openmp
- kokkos-kernels +openmp
- lammps
- legion
- libnrm
- libquo
- libunwind
- mercury
- metall
- mfem
- mpark-variant
- mpifileutils ~xattr
- nccmp
- nco
- netlib-scalapack
- nrm
- nvhpc
- omega-h
- openmpi
- openpmd-api
- papi
- papyrus
- parsec ~cuda
- pdt
- petsc
- phist
- plasma
- plumed
- pumi
- py-h5py
- py-jupyterhub
- py-libensemble +mpi +nlopt
- py-petsc4py
- py-warpx ^warpx dims=2
- py-warpx ^warpx dims=3
- py-warpx ^warpx dims=rz
- qthreads scheduler=distrib
- quantum-espresso
- raja
- rempi
- scr
- slate ~cuda
- slepc
- stc
- strumpack ~slate
- sundials
- superlu
- superlu-dist
- swig
- swig@4.0.2-fortran
- tasmanian
- tau +mpi +python
- trilinos@13.0.1 +belos +ifpack2 +stokhos
- turbine
- umap
- umpire
- upcxx
- wannier90

# CUDA
- amrex +cuda
- arborx +cuda ^kokkos +wrapper
- cabana +cuda ^kokkos +wrapper +cuda_lambda +cuda
- caliper +cuda
- chai ~benchmarks ~tests +cuda ^umpire ~shared
- ecp-data-vis-sdk +cuda cuda_arch=70 +adios2 +hdf5 ~paraview +vtkm +zfp # +paraview fails: FAILED: VTK/Filters/Statistics/CMakeFiles/FiltersStatistics-objects.dir/vtkPCAStatistics.cxx.o; /tmp/ccjmJhb6.s: Assembler messages: /tmp/ccjmJhb6.s:260012: Error: invalid machine `power10'
- flecsi +cuda
- flux-core +cuda
- ginkgo +cuda
- heffte +cuda
- hpctoolkit +cuda
- hpx max_cpu_count=512 +cuda
- hypre +cuda
- kokkos +wrapper +cuda
- kokkos-kernels +cuda ^kokkos +wrapper +cuda
- magma +cuda
- mfem +cuda
- omega-h +cuda
- papi +cuda
- petsc +cuda
- py-torch +cuda
- raja +cuda
- slate +cuda
- slepc +cuda
- strumpack ~slate +cuda
- sundials +cuda
- superlu-dist +cuda
- tasmanian +cuda
- tau +mpi +cuda
- trilinos@13.4.0 +belos +ifpack2 +stokhos +cuda
- umpire ~shared +cuda
- parsec +cuda
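Note that none of the +cuda specs above spell out a cuda_arch: they inherit cuda_arch=70 from packages:all:variants, so the whole CUDA sublist targets one GPU generation, and ^kokkos +wrapper additionally routes Kokkos-based builds through Kokkos' nvcc wrapper. Roughly what the concretizer ends up with for two of the entries (expansion shown for illustration only):

    - magma +cuda cuda_arch=70                    # arch inherited from the defaults
    - arborx +cuda ^kokkos +wrapper cuda_arch=70  # dependency built via nvcc_wrapper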

# CPU FAILURES
# - archer # llvm@8
# - bricks # bricks
# - geopm # geopm
# - loki # loki
# - precice # precice
# - pruners-ninja # pruners-ninja
# - variorum # Intel/variorum_cpuid.c:11:5: error: impossible constraint in 'asm'
# --
# bricks: VSBrick-7pt.py-Scalar-8x8x8-1:30:3: error: 'vfloat512' was not declared in this scope
# fltk: /usr/bin/ld: ../lib/libfltk_png.a(pngrutil.o): in function `png_read_filter_row': pngrutil.c:(.text.png_read_filter_row+0x90): undefined reference to `png_init_filter_functions_vsx'
# geopm: libtool.m4: error: problem compiling CXX test program
# llvm@8: clang/lib/Lex/Lexer.cpp:2547:34: error: ISO C++ forbids declaration of 'type name' with no type [-fpermissive]
# loki: include/loki/SmallObj.h:462:57: error: ISO C++17 does not allow dynamic exception specifications
# precice: /tmp/ccYNMwgE.s: Assembler messages: /tmp/ccYNMwgE.s:278115: Error: invalid machine `power10'
# pruners-ninja: test/ninja_test_util.c:34: multiple definition of `a';

# CUDA FAILURES
# - bricks +cuda # bricks
# - dealii +cuda # fltk
# --
# bricks: VSBrick-7pt.py-Scalar-8x8x8-1:30:3: error: 'vfloat512' was not declared in this scope

mirrors: { "mirror": "s3://spack-binaries/develop/e4s-power" }

gitlab-ci:

script:
- uname -a || true
- grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true
- nproc
- . "./share/spack/setup-env.sh"
- spack --version
- spack arch
- cd ${SPACK_CONCRETE_ENV_DIR}
- spack env activate --without-view .
- spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
- mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data
- if [[ -r /mnt/key/e4s.gpg ]]; then spack gpg trust /mnt/key/e4s.gpg; fi
- if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi
- spack --color=always --backtrace ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)
after_script:
- cat /proc/loadavg || true
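This script is the body of every generated build job: print host information, activate the concrete environment produced by the pipeline-generation job, add extra prefix padding for the one package being built, trust whatever signing/verification keys the runner mounts, and hand off to spack ci rebuild, which builds the spec and pushes it to the mirror while teeing its output into the job artifacts. The life cycle, condensed:

    script:
    - . "./share/spack/setup-env.sh"      # make the checked-out Spack usable
    - cd ${SPACK_CONCRETE_ENV_DIR}        # environment emitted by the generate job
    - spack env activate --without-view .
    - if [[ -r /mnt/key/e4s.gpg ]]; then spack gpg trust /mnt/key/e4s.gpg; fi
    - spack ci rebuild                    # build this job's spec, push to the mirror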

image: ecpe4s/ubuntu20.04-runner-ppc64le:2023-01-01

match_behavior: first
mappings:
- match:
- hipblas
- llvm
- llvm-amdgpu
- rocblas
runner-attributes:
tags: [ "spack", "huge", "ppc64le" ]
variables:
CI_JOB_SIZE: huge

- match:
- cuda
- dyninst
- ginkgo
- hpx
- kokkos-kernels
- kokkos-nvcc-wrapper
- magma
- mfem
- mpich
- openturns
- precice
- raja
- rust
- slate
- trilinos
- vtk-m
- warpx
runner-attributes:
tags: [ "spack", "large", "ppc64le" ]
variables:
CI_JOB_SIZE: large

- match:
- adios2
- amrex
- archer
- ascent
- axom
- binutils
- blaspp
- boost
- butterflypack
- cabana
- caliper
- camp
- chai
- conduit
- datatransferkit
- faodel
- ffmpeg
- fftw
- fortrilinos
- gperftools
- gptune
- hdf5
- heffte
- hpctoolkit
- hwloc
- hypre
- kokkos
- lammps
- lapackpp
- legion
- libzmq
- llvm-openmp-ompt
- mbedtls
- netlib-scalapack
- omega-h
- openmpi
- openpmd-api
- pagmo2
- papyrus
- parsec
- pdt
- petsc
- pumi
- py-ipython-genutils
- py-petsc4py
- py-scipy
- py-statsmodels
- py-warlock
- py-warpx
- pygmo
- slepc
- slurm
- strumpack
- sundials
- superlu-dist
- tasmanian
- tau
- upcxx
- vtk-h
- zfp
runner-attributes:
tags: [ "spack", "medium", "ppc64le" ]
variables:
CI_JOB_SIZE: "medium"

- match:
- alsa-lib
- ant
- antlr
- argobots
- automake
- berkeley-db
- bison
- blt
- cmake
- curl
- darshan-util
- diffutils
- exmcutils
- expat
- flit
- freetype
- gdbm
- gotcha
- hpcviewer
- jansson
- json-c
- libbsd
- libevent
- libjpeg-turbo
- libnrm
- libpng
- libunistring
- lua-luaposix
- m4
- mpfr
- ncurses
- openblas
- openjdk
- papi
- parallel-netcdf
- pcre2
- perl-data-dumper
- pkgconf
- py-alembic
- py-idna
- py-testpath
- qhull
- snappy
- swig
- tar
- tcl
- texinfo
- unzip
- util-linux-uuid
- util-macros
- yaml-cpp
- zlib
- zstd
runner-attributes:
tags: [ "spack", "small", "ppc64le" ]
variables:
CI_JOB_SIZE: "small"

- match: ['os=ubuntu20.04']
runner-attributes:
tags: ["spack", "ppc64le"]
variables:
CI_JOB_SIZE: "default"

broken-specs-url: "s3://spack-binaries/broken-specs"

service-job-attributes:
before_script:
- . "./share/spack/setup-env.sh"
- spack --version
image: ecpe4s/ubuntu20.04-runner-ppc64le:2023-01-01
tags: ["spack", "public", "ppc64le"]

signing-job-attributes:
image: { "name": "ghcr.io/spack/notary:latest", "entrypoint": [""] }
tags: ["spack", "aws"]
script:
- aws s3 sync --exclude "*" --include "*spec.json*" ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache /tmp
- /sign.sh
- aws s3 sync --exclude "*" --include "*spec.json.sig*" /tmp ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache

cdash:
build-group: E4S Power
url: https://cdash.spack.io
project: Spack Testing
site: Cloud Gitlab Infrastructure
@@ -26,6 +26,8 @@ spack:
require: "intel-tbb"
binutils:
variants: +ld +gold +headers +libiberty ~nls
boost:
variants: +python +filesystem +iostreams +system
cuda:
version: [11.7.0]
elfutils:
@@ -70,6 +72,7 @@ spack:
- bolt
- bricks
- butterflypack
- boost +python +filesystem +iostreams +system
- cabana
- caliper
- chai ~benchmarks ~tests
@@ -267,6 +270,7 @@ spack:

match_behavior: first
mappings:

- match:
- hipblas
- llvm
325 share/spack/gitlab/cloud_pipelines/stacks/gpu-tests/spack.yaml Normal file
@@ -0,0 +1,325 @@
spack:
view: false

concretizer:
reuse: false
unify: false

config:
build_jobs: 32
concretizer: clingo
install_tree:
root: /home/software/spack
padded_length: 512
projections:
all: '{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'

packages:
all:
compiler: [gcc@11.1.0]
providers:
blas: [openblas]
mpi: [mpich]
target: [x86_64]
variants: +mpi amdgpu_target=gfx90a cuda_arch=80
tbb:
require: "intel-tbb"
binutils:
variants: +ld +gold +headers +libiberty ~nls
boost:
variants: +python +filesystem +iostreams +system
elfutils:
variants: +bzip2 ~nls +xz
hdf5:
variants: +fortran +hl +shared
libfabric:
variants: fabrics=sockets,tcp,udp,rxm
libunwind:
variants: +pic +xz
mpich:
variants: ~wrapperrpath
ncurses:
variants: +termlib
openblas:
variants: threads=openmp
paraview:
# Don't build GUI support or GLX rendering for HPC/container deployments
require: "@5.11 ~qt+osmesa"
python:
version: [3.8.13]
trilinos:
require: +amesos +amesos2 +anasazi +aztec +boost +epetra +epetraext
+ifpack +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu
+nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stratimikos
+teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
xz:
variants: +pic
mesa:
version: [21.3.8]

specs:
- kokkos +rocm amdgpu_target=gfx90a
- raja +cuda cuda_arch=80 ^cuda@12.0.0

# FAILURES
# - kokkos +wrapper +cuda cuda_arch=80 ^cuda@12.0.0 # https://github.com/spack/spack/issues/35378

mirrors: { "mirror": "s3://spack-binaries/develop/gpu-tests" }

gitlab-ci:

script:
- uname -a || true
- grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true
- nproc
- nvidia-smi || true
- curl -Lfs 'https://github.com/JuliaBinaryWrappers/GNUMake_jll.jl/releases/download/GNUMake-v4.3.0+1/GNUMake.v4.3.0.x86_64-linux-gnu.tar.gz' -o gmake.tar.gz
- printf 'fef1f59e56d2d11e6d700ba22d3444b6e583c663d6883fd0a4f63ab8bd280f0f gmake.tar.gz' | sha256sum --check --strict --quiet
- tar -xzf gmake.tar.gz -C /usr bin/make 2> /dev/null
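The three lines above bootstrap a pinned GNU Make into the container and refuse to continue unless the download's SHA-256 matches the recorded value, so a corrupted or substituted tarball fails the job instead of silently being used. The same pattern in isolation (GMAKE_URL and GMAKE_SHA256 are stand-ins for the literal URL and hash above):

    script:
    - curl -Lfs "$GMAKE_URL" -o gmake.tar.gz     # exact pinned release; fail on HTTP error
    - printf "$GMAKE_SHA256  gmake.tar.gz" | sha256sum --check --strict --quiet
    - tar -xzf gmake.tar.gz -C /usr bin/make     # unpack only the one file needed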
- . "./share/spack/setup-env.sh"
- spack --version
- spack arch
- cd ${SPACK_CONCRETE_ENV_DIR}
- spack env activate --without-view .
- spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
- mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data
# AWS runners mount E4S public key (verification), UO runners mount public/private (signing/verification)
- if [[ -r /mnt/key/e4s.gpg ]]; then spack gpg trust /mnt/key/e4s.gpg; fi
# UO runners mount intermediate ci public key (verification), AWS runners mount public/private (signing/verification)
- if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi
- if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi
- spack --color=always --backtrace ci rebuild --tests > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)

after_script:
- cat /proc/loadavg || true

image: ecpe4s/ubuntu20.04-runner-x86_64:2023-01-01

broken-tests-packages:
- gptune
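Because this stack rebuilds with --tests, each package's stand-alone test suite runs right after its build; broken-tests-packages lists packages whose tests are known to fail so their binaries can still be built and cached without failing the pipeline. The two pieces together:

    script:
    - spack --color=always --backtrace ci rebuild --tests   # build, then run stand-alone tests
    broken-tests-packages:
    - gptune    # known-broken test suite: build and cache it, but skip its tests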

match_behavior: first
mappings:
- match:
- kokkos +rocm amdgpu_target=gfx90a
runner-attributes:
tags: [ "rocm-5.4.0", "mi210" ]
variables:
CI_JOB_SIZE: large

- match:
- kokkos +cuda cuda_arch=80 ^cuda@12.0.0
- raja +cuda cuda_arch=80 ^cuda@12.0.0
runner-attributes:
tags: [ "nvidia-525.85.12", "cuda-12.0", "a100" ]
variables:
CI_JOB_SIZE: large
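These first two match blocks are what make this the GPU-testing stack: the ROCm spec is routed to a runner tagged with a physical MI210 and a matching ROCm release, and the CUDA specs to an A100 runner with the matching driver and CUDA 12.0, so the --tests phase executes on real devices rather than on generic build hosts. Condensed:

    - match: [ "kokkos +rocm amdgpu_target=gfx90a" ]
      runner-attributes:
        tags: [ "rocm-5.4.0", "mi210" ]                    # runner exposes an AMD MI210
    - match: [ "raja +cuda cuda_arch=80 ^cuda@12.0.0" ]
      runner-attributes:
        tags: [ "nvidia-525.85.12", "cuda-12.0", "a100" ]  # runner exposes an NVIDIA A100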

- match:
- hipblas
- llvm
- llvm-amdgpu
- rocblas
- paraview
- py-torch
runner-attributes:
tags: [ "spack", "huge", "x86_64" ]
variables:
CI_JOB_SIZE: huge
KUBERNETES_CPU_REQUEST: 11000m
KUBERNETES_MEMORY_REQUEST: 42G

- match:
- cuda
- dealii
- dray
- dyninst
- ginkgo
- hpx
- kokkos-kernels
- kokkos-nvcc-wrapper
- magma
- mfem
- mpich
- nvhpc
- oce
- openturns
- plumed
- precice
- py-tensorflow
- qt
- raja
- rocfft
- rocsolver
- rocsparse
- rust
- slate
- trilinos
- visit
- vtk
- vtk-m
- warpx
runner-attributes:
tags: [ "spack", "large", "x86_64" ]
variables:
CI_JOB_SIZE: large
KUBERNETES_CPU_REQUEST: 8000m
KUBERNETES_MEMORY_REQUEST: 12G

- match:
- adios2
- amrex
- archer
- ascent
- axom
- binutils
- blaspp
- boost
- butterflypack
- cabana
- caliper
- camp
- chai
- conduit
- datatransferkit
- faodel
- ffmpeg
- fftw
- fortrilinos
- gperftools
- gptune
- hdf5
- heffte
- hpctoolkit
- hwloc
- hypre
- kokkos
- lammps
- lapackpp
- legion
- libzmq
- llvm-openmp-ompt
- mbedtls
- netlib-scalapack
- omega-h
- openmpi
- openpmd-api
- pagmo2
- papyrus
- parsec
- pdt
- petsc
- pumi
- py-ipython-genutils
- py-petsc4py
- py-scipy
- py-statsmodels
- py-warlock
- py-warpx
- pygmo
- slepc
- slurm
- strumpack
- sundials
- superlu-dist
- tasmanian
- tau
- upcxx
- vtk-h
- zfp
runner-attributes:
tags: [ "spack", "medium", "x86_64" ]
variables:
CI_JOB_SIZE: "medium"
KUBERNETES_CPU_REQUEST: "2000m"
KUBERNETES_MEMORY_REQUEST: "4G"

- match:
- alsa-lib
- ant
- antlr
- argobots
- automake
- berkeley-db
- bison
- blt
- cmake
- curl
- darshan-util
- diffutils
- exmcutils
- expat
- flit
- freetype
- gdbm
- gotcha
- hpcviewer
- jansson
- json-c
- libbsd
- libevent
- libjpeg-turbo
- libnrm
- libpng
- libunistring
- lua-luaposix
- m4
- mpfr
- ncurses
- openblas
- openjdk
- papi
- parallel-netcdf
- pcre2
- perl-data-dumper
- pkgconf
- py-alembic
- py-idna
- py-testpath
- qhull
- snappy
- swig
- tar
- tcl
- texinfo
- unzip
- util-linux-uuid
- util-macros
- yaml-cpp
- zlib
- zstd
runner-attributes:
tags: [ "spack", "small", "x86_64" ]
variables:
CI_JOB_SIZE: "small"
KUBERNETES_CPU_REQUEST: "500m"
KUBERNETES_MEMORY_REQUEST: "500M"

- match: ['os=ubuntu20.04']
runner-attributes:
tags: ["spack", "x86_64"]
variables:
CI_JOB_SIZE: "default"

broken-specs-url: "s3://spack-binaries/broken-specs"

service-job-attributes:
before_script:
- . "./share/spack/setup-env.sh"
- spack --version
image: ecpe4s/ubuntu20.04-runner-x86_64:2023-01-01
tags: ["spack", "public", "x86_64"]

signing-job-attributes:
image: { "name": "ghcr.io/spack/notary:latest", "entrypoint": [""] }
tags: ["spack", "aws"]
script:
- aws s3 sync --exclude "*" --include "*spec.json*" ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache /tmp
- /sign.sh
- aws s3 sync --exclude "*" --include "*spec.json.sig*" /tmp ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache

cdash:
build-group: GPU Testing
url: https://cdash.spack.io
project: Spack Testing
site: Cloud Gitlab Infrastructure
@@ -12,6 +12,6 @@ class Maintainers1(Package):
homepage = "http://www.example.com"
url = "http://www.example.com/maintainers-1.0.tar.gz"

maintainers = ["user1", "user2"]
maintainers("user1", "user2")

version("1.0", "0123456789abcdef0123456789abcdef")
@@ -12,6 +12,6 @@ class Maintainers2(Package):
homepage = "http://www.example.com"
url = "http://www.example.com/maintainers2-1.0.tar.gz"

maintainers = ["user2", "user3"]
maintainers("user2", "user3")

version("1.0", "0123456789abcdef0123456789abcdef")
@@ -0,0 +1,17 @@
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
from spack.pkg.builtin.mock.maintainers_1 import Maintainers1


class Maintainers3(Maintainers1):
"""A second package with a maintainers field."""

homepage = "http://www.example.com"
url = "http://www.example.com/maintainers2-1.0.tar.gz"

maintainers("user0", "user3")

version("1.0", "0123456789abcdef0123456789abcdef")
@@ -13,8 +13,7 @@ class PyExtension1(PythonPackage):
homepage = "http://www.example.com"
url = "http://www.example.com/extension1-1.0.tar.gz"

# Override settings in base class
maintainers = []
maintainers("user1", "user2")

version("1.0", "00000000000000000000000000000110")
version("2.0", "00000000000000000000000000000120")
@@ -15,7 +15,7 @@ class Abacus(MakefilePackage):
for large-scale electronic-structure simulations
from first principles"""

maintainers = ["bitllion"]
maintainers("bitllion")

homepage = "http://abacus.ustc.edu.cn/"
git = "https://github.com/abacusmodeling/abacus-develop.git"
@@ -12,7 +12,7 @@ class AbseilCpp(CMakePackage):
homepage = "https://abseil.io/"
url = "https://github.com/abseil/abseil-cpp/archive/refs/tags/20211102.0.tar.gz"

maintainers = ["jcftang"]
maintainers("jcftang")
tags = ["windows"]

version(
@@ -126,7 +126,7 @@ class Acfl(Package):
homepage = "https://developer.arm.com/tools-and-software/server-and-hpc/arm-allinea-studio"
url = "https://developer.arm.com/-/media/Files/downloads/hpc/arm-compiler-for-linux/22-1/arm-compiler-for-linux_22.1_Ubuntu-20.04_aarch64.tar"

maintainers = ["annop-w"]
maintainers("annop-w")

# Build Versions: establish OS for URL
acfl_os = get_os()
@@ -12,7 +12,7 @@ class ActsDd4hep(CMakePackage):
homepage = "https://github.com/acts-project/acts-dd4hep"
url = "https://github.com/acts-project/acts-dd4hep/archive/refs/tags/v1.0.0.tar.gz"

maintainers = ["HadrienG2", "wdconinc"]
maintainers("HadrienG2", "wdconinc")

version("1.0.1", sha256="e40f34ebc30b3c33a6802c9d94136e65072d8dcee0b7db57a645f08a64ea5334")
version("1.0.0", sha256="991f996944c88efa837880f919239e50d12c5c9361e220bc9422438dd608308c")
@@ -32,14 +32,17 @@ class Acts(CMakePackage, CudaPackage):
homepage = "https://acts.web.cern.ch/ACTS/"
git = "https://github.com/acts-project/acts.git"
list_url = "https://github.com/acts-project/acts/releases/"
maintainers = ["HadrienG2"]
maintainers("HadrienG2")

tags = ["hep"]

# Supported Acts versions
version("main", branch="main")
version("master", branch="main", deprecated=True) # For compatibility
version("23.0.0", commit="5af1b1b5feb8ca8f4c2c69106a1b9ef612c70d9c", submodules=True)
version("22.0.1", commit="a4ac99dd72828c5eb3fac06e146f3391958fca8c", submodules=True)
version("22.0.0", commit="0fb6f8d2ace65338915451201e9ceb6cee11fb5e", submodules=True)
version("21.1.1", commit="8ae825de246e8e574d05d9eaf05ba4a937c69aa9", submodules=True)
version("21.1.0", commit="3b4b5c741c8541491d496a36b917b00b344d52d1", submodules=True)
version("21.0.0", commit="d8cb0fac3a44e1d44595a481f977df9bd70195fb", submodules=True)
version("20.3.0", commit="b1859b322744cb033328fd57d9e74fb5326aa56b", submodules=True)
@@ -278,7 +281,7 @@ class Acts(CMakePackage, CudaPackage):
depends_on("python", when="+python")
depends_on("python@3.8:", when="+python @19.11:19")
depends_on("python@3.8:", when="+python @21:")
depends_on("py-onnx-runtime", when="+onnx")
depends_on("py-onnxruntime", when="+onnx")
depends_on("py-pybind11 @2.6.2:", when="+python @18:")
depends_on("py-pytest", when="+python +unit_tests")
depends_on("root @6.10: cxxstd=14", when="+tgeo @:0.8.0")
@@ -336,6 +339,7 @@ def plugin_cmake_variant(plugin_name, spack_variant):
cmake_variant("FATRAS", "fatras"),
cmake_variant("FATRAS_GEANT4", "fatras_geant4"),
example_cmake_variant("GEANT4", "geant4"),
plugin_cmake_variant("GEANT4", "geant4"),
example_cmake_variant("HEPMC3", "hepmc3"),
plugin_cmake_variant("IDENTIFICATION", "identification"),
cmake_variant(integration_tests_label, "integration_tests"),
@@ -16,7 +16,7 @@ class Actsvg(CMakePackage):
list_url = "https://github.com/acts-project/actsvg/releases"
git = "https://github.com/acts-project/actsvg.git"

maintainers = ["HadrienG2", "wdconinc"]
maintainers("HadrienG2", "wdconinc")

version("0.4.26", sha256="a1dfad15b616cac8191a355c1a87544571c36349400e3de56b9e5be6fa73714c")

@@ -14,7 +14,7 @@ class Adiak(CMakePackage):
url = "https://github.com/LLNL/Adiak/releases/download/v0.1/adiak-v0.1.1.tar.gz"
git = "https://github.com/LLNL/Adiak"

maintainers = ["daboehme", "mplegendre"]
maintainers("daboehme", "mplegendre")

variant("mpi", default=True, description="Build with MPI support")
variant("shared", default=True, description="Build dynamic libraries")
@@ -17,7 +17,7 @@ class Adios(AutotoolsPackage):
url = "https://github.com/ornladios/ADIOS/archive/v1.12.0.tar.gz"
git = "https://github.com/ornladios/ADIOS.git"

maintainers = ["ax3l"]
maintainers("ax3l")

version("develop", branch="master")
version("1.13.1", sha256="b1c6949918f5e69f701cabfe5987c0b286793f1057d4690f04747852544e157b")
@@ -16,7 +16,7 @@ class Adios2(CMakePackage, CudaPackage):
url = "https://github.com/ornladios/ADIOS2/archive/v2.8.0.tar.gz"
git = "https://github.com/ornladios/ADIOS2.git"

maintainers = ["ax3l", "chuckatkins", "vicentebolea", "williamfgc"]
maintainers("ax3l", "chuckatkins", "vicentebolea", "williamfgc")

tags = ["e4s"]

@@ -54,7 +54,6 @@ class Adios2(CMakePackage, CudaPackage):
# change how we're supporting different library types in the package at any time if
# spack decides on a standardized way of doing it across packages
variant("shared", default=True, when="+pic", description="Build shared libraries")
variant("pic", default=True, description="Build pic-enabled static libraries")

# Features
variant("mpi", default=True, description="Enable MPI")
@@ -95,12 +94,24 @@ class Adios2(CMakePackage, CudaPackage):
conflicts("%oneapi@:2022.1.0", when="+fortran")

depends_on("cmake@3.12.0:", type="build")
depends_on("pkgconfig", type="build")

depends_on("libffi", when="+sst") # optional in DILL
depends_on("libfabric@1.6.0:", when="+sst") # optional in EVPath and SST
# depends_on('bison', when='+sst') # optional in FFS, broken package
# depends_on('flex', when='+sst') # optional in FFS, depends on BISON
for _platform in ["linux", "darwin", "cray"]:
depends_on("pkgconfig", type="build", when="platform=%s" % _platform)
variant(
"pic",
default=False,
description="Build pic-enabled static libraries",
when="platform=%s" % _platform,
)
# libffi and libfabric are not currently supported on Windows
# see Paraview's superbuild handling of libfabric at
# https://gitlab.kitware.com/paraview/paraview-superbuild/-/blob/master/projects/adios2.cmake#L3
depends_on("libffi", when="+sst platform=%s" % _platform) # optional in DILL
depends_on(
"libfabric@1.6.0:", when="+sst platform=%s" % _platform
) # optional in EVPath and SST
# depends_on('bison', when='+sst') # optional in FFS, broken package
# depends_on('flex', when='+sst') # optional in FFS, depends on BISON

depends_on("mpi", when="+mpi")
depends_on("libzmq", when="+dataman")
@@ -16,7 +16,7 @@ class Adms(AutotoolsPackage):
url = "https://github.com/Qucs/ADMS/releases/download/release-2.3.7/adms-2.3.7.tar.gz"
git = "https://github.com/Qucs/ADMS.git"

maintainers = ["cessenat"]
maintainers("cessenat")

version("master", branch="master")
version("2.3.7", sha256="3a78e1283ecdc3f356410474b3ff44c4dcc82cb89772087fd3bbde8a1038ce08")
@@ -14,7 +14,7 @@ class AdolC(AutotoolsPackage):
homepage = "https://github.com/coin-or/ADOL-C"
url = "https://github.com/coin-or/ADOL-C/archive/releases/2.7.2.tar.gz"
git = "https://github.com/coin-or/ADOL-C.git"
maintainers = ["jppelteret"]
maintainers("jppelteret")

version("master", branch="master")
version("2.7.2", sha256="701e0856baae91b98397960d5e0a87a549988de9d4002d0e9a56fa08f5455f6e")
@@ -15,7 +15,7 @@ class Agile(AutotoolsPackage):

tags = ["hep"]

maintainers = ["vvolkl"]
maintainers("vvolkl")

version("1.5.1", sha256="e38536300060e4b845ccaaed824c7495944f9117a0d7e4ee74a18bf278e2012f")

@@ -19,7 +19,7 @@ class Akantu(CMakePackage):
url = "https://gitlab.com/akantu/akantu/-/archive/v3.0.0/akantu-v3.0.0.tar.gz"
git = "https://gitlab.com/akantu/akantu.git"

maintainers = ["nrichart"]
maintainers("nrichart")

version("master", branch="master")
version("3.0.0", sha256="7e8f64e25956eba44def1b2d891f6db8ba824e4a82ff0d51d6b585b60ab465db")
@@ -17,7 +17,7 @@ class Albany(CMakePackage):
homepage = "http://gahansen.github.io/Albany"
git = "https://github.com/gahansen/Albany.git"

maintainers = ["gahansen"]
maintainers("gahansen")

version("develop", branch="master")

@@ -14,7 +14,7 @@ class Alpaka(CMakePackage, CudaPackage):
url = "https://github.com/alpaka-group/alpaka/archive/refs/tags/0.6.0.tar.gz"
git = "https://github.com/alpaka-group/alpaka.git"

maintainers = ["vvolkl"]
maintainers("vvolkl")

version("develop", branch="develop")
version("0.8.0", sha256="e01bc377a7657d9a3e0c5f8d3f83dffbd7d0b830283c59efcbc1fb98cf88de43")
@@ -16,7 +16,7 @@ class Alpgen(CMakePackage, MakefilePackage):
homepage = "http://mlm.home.cern.ch/mlm/alpgen/"
url = "http://mlm.home.cern.ch/mlm/alpgen/V2.1/v214.tgz"

maintainers = ["iarspider"]
maintainers("iarspider")
tags = ["hep"]

version("2.1.4", sha256="2f43f7f526793fe5f81a3a3e1adeffe21b653a7f5851efc599ed69ea13985c5e")
@@ -13,7 +13,7 @@ class Alquimia(CMakePackage):
homepage = "https://github.com/LBL-EESA/alquimia-dev"
git = "https://github.com/LBL-EESA/alquimia-dev.git"

maintainers = ["smolins", "balay"]
maintainers("smolins", "balay")

version("develop")
version("1.0.10", commit="b2c11b6cde321f4a495ef9fcf267cb4c7a9858a0") # tag v.1.0.10
@@ -22,7 +22,7 @@ class Aluminum(CMakePackage, CudaPackage, ROCmPackage):
git = "https://github.com/LLNL/Aluminum.git"
tags = ["ecp", "radiuss"]

maintainers = ["bvanessen"]
maintainers("bvanessen")

version("master", branch="master")
version("1.0.0-lbann", tag="v1.0.0-lbann")
@@ -26,7 +26,7 @@ class Amber(Package, CudaPackage):
url = "file://{0}/Amber18.tar.bz2".format(os.getcwd())
manual_download = True

maintainers = ["hseara"]
maintainers("hseara")

version("20", sha256="a4c53639441c8cc85adee397933d07856cc4a723c82c6bea585cd76c197ead75")
version("18", sha256="2060897c0b11576082d523fb63a51ba701bc7519ff7be3d299d5ec56e8e6e277")
@@ -22,7 +22,7 @@ class AmdAocl(BundlePackage):

homepage = "https://developer.amd.com/amd-aocl/"

maintainers = ["amd-toolchain-support"]
maintainers("amd-toolchain-support")

version("4.0")
version("3.2")
@@ -30,7 +30,7 @@ class Amdblis(BlisBase):
url = "https://github.com/amd/blis/archive/3.0.tar.gz"
git = "https://github.com/amd/blis.git"

maintainers = ["amd-toolchain-support"]
maintainers("amd-toolchain-support")

version("4.0", sha256="cddd31176834a932753ac0fc4c76332868feab3e9ac607fa197d8b44c1e74a41")
version("3.2", sha256="5a400ee4fc324e224e12f73cc37b915a00f92b400443b15ce3350278ad46fff6")
@@ -35,7 +35,7 @@ class Amdfftw(FftwBase):
url = "https://github.com/amd/amd-fftw/archive/3.0.tar.gz"
git = "https://github.com/amd/amd-fftw.git"

maintainers = ["amd-toolchain-support"]
maintainers("amd-toolchain-support")

version("4.0", sha256="5f02cb05f224bd86bd88ec6272b294c26dba3b1d22c7fb298745fd7b9d2271c0")
version("3.2", sha256="31cab17a93e03b5b606e88dd6116a1055b8f49542d7d0890dbfcca057087b8d0")
@@ -42,7 +42,7 @@ class Amdlibflame(LibflameBase):
url = "https://github.com/amd/libflame/archive/3.0.tar.gz"
git = "https://github.com/amd/libflame.git"

maintainers = ["amd-toolchain-support"]
maintainers("amd-toolchain-support")

version("4.0", sha256="bcb05763aa1df1e88f0da5e43ff86d956826cbea1d9c5ff591d78a3e091c66a4")
version("3.2", sha256="6b5337fb668b82d0ed0a4ab4b5af4e2f72e4cedbeeb4a8b6eb9a3ef057fb749a")
@@ -63,6 +63,7 @@ class Amdlibflame(LibflameBase):
provides("flame@5.2", when="@2:")

depends_on("python+pythoncmd", type="build")
depends_on("gmake@4:", when="@3.0.1,3.1:", type="build")

@property
def lapack_libs(self):
@@ -27,7 +27,7 @@ class Amdlibm(SConsPackage):
homepage = "https://developer.amd.com/amd-aocl/amd-math-library-libm/"
git = "https://github.com/amd/aocl-libm-ose.git"
url = "https://github.com/amd/aocl-libm-ose/archive/refs/tags/3.0.tar.gz"
maintainers = ["amd-toolchain-support"]
maintainers("amd-toolchain-support")

version("4.0", sha256="038c1eab544be77598eccda791b26553d3b9e2ee4ab3f5ad85fdd2a77d015a7d")
version("3.2", sha256="c75b287c38a3ce997066af1f5c8d2b19fc460d5e56678ea81f3ac33eb79ec890")
@@ -28,7 +28,7 @@ class Amdscalapack(ScalapackBase):
homepage = "https://developer.amd.com/amd-aocl/scalapack/"
git = "https://github.com/amd/scalapack.git"

maintainers = ["amd-toolchain-support"]
maintainers("amd-toolchain-support")

version("4.0", sha256="f02913b5984597b22cdb9a36198ed61039a1bf130308e778dc31b2a7eb88b33b")
version("3.2", sha256="9e00979bb1be39d627bdacb01774bc043029840d542fafc934d16fec3e3b0892")
@@ -19,7 +19,7 @@ class Amgx(CMakePackage, CudaPackage):
homepage = "https://developer.nvidia.com/amgx"
url = "https://github.com/nvidia/amgx/archive/v2.1.0.tar.gz"

maintainers = ["js947"]
maintainers("js947")

version("2.1.0", sha256="6245112b768a1dc3486b2b3c049342e232eb6281a6021fffa8b20c11631f63cc")
version("2.0.1", sha256="6f9991f1836fbf4ba2114ce9f49febd0edc069a24f533bd94fd9aa9be72435a7")
@@ -16,7 +16,7 @@ class Aml(AutotoolsPackage):

homepage = "https://argo-aml.readthedocs.io/"

maintainers = ["perarnau"]
maintainers("perarnau")

test_requires_compiler = True

@@ -17,7 +17,7 @@ class Ampl(Package):
homepage = "https://ampl.com/"
manual_download = True

maintainers = ["robgics"]
maintainers("robgics")

# Use the version as you would expect the user to know it, not necessarily the
# version as it appears in the file name. To get the checksum, use sha256sum.
@@ -14,7 +14,7 @@ class Ampt(MakefilePackage):
homepage = "http://myweb.ecu.edu/linz/ampt/"
url = "http://myweb.ecu.edu/linz/ampt/ampt-v1.26t9b-v2.26t9b.zip"

maintainers = ["vvolkl"]
maintainers("vvolkl")

tags = ["hep"]

39 var/spack/repos/builtin/packages/amqp-cpp/package.py Normal file
@@ -0,0 +1,39 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class AmqpCpp(CMakePackage):
"""AMQP-CPP is a C++ library for communicating with a RabbitMQ message
broker. The library can be used to parse incoming data from, and generate
frames to, a RabbitMQ server."""

homepage = "https://github.com/CopernicaMarketingSoftware/AMQP-CPP"
git = "https://github.com/CopernicaMarketingSoftware/AMQP-CPP.git"
url = "https://github.com/CopernicaMarketingSoftware/AMQP-CPP/archive/refs/tags/v4.3.19.tar.gz"

maintainers("lpottier")

version("4.3.19", sha256="ca29bb349c498948576a4604bed5fd3c27d87240b271a4441ccf04ba3797b31d")

variant(
"tcp",
default=False,
description="Build TCP module. TCP module is supported for Linux only.",
)
variant("shared", default=True, description="Build as a shared library (instead of a static one)")

conflicts("tcp", when="platform=darwin", msg="TCP module requires Linux")

depends_on("cmake@3.5:", type="build")
depends_on("openssl@1.1.1:", when="+tcp", type=("build", "link", "run"))

def cmake_args(self):
args = [
self.define_from_variant("AMQP-CPP_LINUX_TCP", "tcp"),
self.define_from_variant("AMQP-CPP_BUILD_SHARED", "shared"),
]
return args
Some files were not shown because too many files have changed in this diff