Compare commits: hs/fix/qua ... features/r (121 commits)
The 121 commit SHA1s in this range (author and date columns were empty in this view):

2cc0e533c0, 7a8eaa1657, d8baa193b3, 2c3f2c5733, 302d74394b, cf94dc7823,
4411ee3382, f790ce0f72, 64d53037db, 4aef50739b, a6e966f6f2, 1f428c4188,
731e48b1bd, 74ff9ad821, 16a4eff689, d0b0d8db50, 54f591cce5, 8677bb4d43,
b66b80a96a, 10e21f399c, 56892f6140, 7eddc4b1f8, 3c7392bbcc, bb0517f4d9,
c8994ee50f, 4b2f5638f2, 31312a379f, b0d5f272b0, 1c93fef160, 8bb5f4faf4,
f76ab5f72f, 49c831edc3, c943c8c1d2, e0e6f29584, 72bc3bb803, dba8fe2b96,
4487598d60, 495537cf56, 22c3b4099f, 13978d11a0, a22114b20b, c10624390f,
fb3d9de80b, fbb688af07, d34b709425, cb0b188cf6, 9a2b0aca66, 89a8ab3233,
5d87166c07, 15c989b3fe, b7f556e4b4, 36f32ceda3, 01d77ed915, 0049f8332d,
39c10c3116, 71d1901831, 41e0863b86, a75d83f65c, f2f13964fb, 9b032018d6,
7d470c05be, 664fe9e9e6, 2745a519e2, 4348ee1c75, 8e39fb1e54, 09458312a3,
5fd0693df4, f58684429d, 409611a479, dd98cfb839, 5c91667dab, 9efd6f3f11,
a8f5289801, ac635aa777, 45dcddf9c3, f1660722e7, 04b44d841c, 7f30502297,
61b1586c51, 8579efcadf, 1c3e2b5425, 011ef0aaaf, 9642f3f49a, a6c9b55fad,
608ed967e1, 742eaa32b7, 763b35a2e0, 12280f864c, 253ba05732, 195b869e1c,
393961ffd6, 392a58e9be, 0e8e97a811, 43a0cbe7a2, bb35a98079, fa7e0e8230,
2c128751f5, fb0493a366, 6d1b6e7087, 759518182c, 7ebabfcf0e, 6203ae31d2,
6b13017ded, 2c51b5853f, d0cbd056a8, e1b579a8b4, b02dcf697d, 6e046b04c7,
d196795437, 0d444fb4e7, 467e631260, f21de698f7, 59532986be, 36fd547b40,
b5f9dea6d0, 5904834295, 2da8a1d1e3, d50eba40d9, 8d3a733b77, dfa86dce08,
3d82e5c573
.github/workflows/build-containers.yml (vendored, 18 changed lines). The build matrix drops `linux/ppc64le` from every base image that still built it:

```diff
@@ -40,17 +40,17 @@ jobs:
           # 1: Platforms to build for
           # 2: Base image (e.g. ubuntu:22.04)
           dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
-                       [centos-stream9, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream9'],
-                       [leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
-                       [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
-                       [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
-                       [ubuntu-noble, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:24.04'],
-                       [almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
-                       [almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
+                       [centos-stream9, 'linux/amd64,linux/arm64', 'centos:stream9'],
+                       [leap15, 'linux/amd64,linux/arm64', 'opensuse/leap:15'],
+                       [ubuntu-focal, 'linux/amd64,linux/arm64', 'ubuntu:20.04'],
+                       [ubuntu-jammy, 'linux/amd64,linux/arm64', 'ubuntu:22.04'],
+                       [ubuntu-noble, 'linux/amd64,linux/arm64', 'ubuntu:24.04'],
+                       [almalinux8, 'linux/amd64,linux/arm64', 'almalinux:8'],
+                       [almalinux9, 'linux/amd64,linux/arm64', 'almalinux:9'],
                        [rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
                        [rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
-                       [fedora39, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:39'],
-                       [fedora40, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:40']]
+                       [fedora39, 'linux/amd64,linux/arm64', 'fedora:39'],
+                       [fedora40, 'linux/amd64,linux/arm64', 'fedora:40']]
     name: Build ${{ matrix.dockerfile[0] }}
     if: github.repository == 'spack/spack'
     steps:
```
.github/workflows/ci.yaml (vendored, 4 changed lines). The new reusable import-check workflow is wired into CI:

```diff
@@ -81,6 +81,10 @@ jobs:
     with:
       with_coverage: ${{ needs.changes.outputs.core }}

+  import-check:
+    needs: [ changes ]
+    uses: ./.github/workflows/import-check.yaml
+
   all-prechecks:
     needs: [ prechecks ]
     if: ${{ always() }}
```
.github/workflows/coverage.yml (vendored, 1 changed line). Codecov uploads now pass an explicit token:

```diff
@@ -33,3 +33,4 @@ jobs:
         with:
           verbose: true
           fail_ci_if_error: false
+          token: ${{ secrets.CODECOV_TOKEN }}
```
.github/workflows/import-check.yaml (vendored, new file, 49 lines). The circular-import check moves into its own reusable workflow:

```yaml
name: import-check

on:
  workflow_call:

jobs:
  # Check we don't make the situation with circular imports worse
  import-check:
    runs-on: ubuntu-latest
    steps:
    - uses: julia-actions/setup-julia@v2
      with:
        version: '1.10'
    - uses: julia-actions/cache@v2

    # PR: use the base of the PR as the old commit
    - name: Checkout PR base commit
      if: github.event_name == 'pull_request'
      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      with:
        ref: ${{ github.event.pull_request.base.sha }}
        path: old
    # not a PR: use the previous commit as the old commit
    - name: Checkout previous commit
      if: github.event_name != 'pull_request'
      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      with:
        fetch-depth: 2
        path: old
    - name: Checkout previous commit
      if: github.event_name != 'pull_request'
      run: git -C old reset --hard HEAD^

    - name: Checkout new commit
      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      with:
        path: new
    - name: Install circular import checker
      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
      with:
        repository: haampie/circular-import-fighter
        ref: e38bcd0aa46368e30648b61b7f0d8c1ca68aadff
        path: circular-import-fighter
    - name: Install dependencies
      working-directory: circular-import-fighter
      run: make -j dependencies
    - name: Circular import check
      working-directory: circular-import-fighter
      run: make -j compare "SPACK_ROOT=../old ../new"
```
.github/workflows/valid-style.yml (vendored, 60 changed lines). The inline import-check job is removed in favor of the standalone workflow above; note the new workflow also pins the checker to a newer ref (e38bcd0a... instead of b5d6ce9b...) and replaces the before/after steps with a single `make -j compare`:

```diff
@@ -86,66 +86,6 @@ jobs:
           spack -d bootstrap now --dev
           spack -d style -t black
           spack unit-test -V
-  # Check we don't make the situation with circular imports worse
-  import-check:
-    runs-on: ubuntu-latest
-    steps:
-    - uses: julia-actions/setup-julia@v2
-      with:
-        version: '1.10'
-    - uses: julia-actions/cache@v2
-
-    # PR: use the base of the PR as the old commit
-    - name: Checkout PR base commit
-      if: github.event_name == 'pull_request'
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        ref: ${{ github.event.pull_request.base.sha }}
-        path: old
-    # not a PR: use the previous commit as the old commit
-    - name: Checkout previous commit
-      if: github.event_name != 'pull_request'
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        fetch-depth: 2
-        path: old
-    - name: Checkout previous commit
-      if: github.event_name != 'pull_request'
-      run: git -C old reset --hard HEAD^
-
-    - name: Checkout new commit
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        path: new
-    - name: Install circular import checker
-      uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-      with:
-        repository: haampie/circular-import-fighter
-        ref: b5d6ce9be35f602cca7d5a6aa0259fca10639cca
-        path: circular-import-fighter
-    - name: Install dependencies
-      working-directory: circular-import-fighter
-      run: make -j dependencies
-    - name: Problematic imports before
-      working-directory: circular-import-fighter
-      run: make SPACK_ROOT=../old SUFFIX=.old
-    - name: Problematic imports after
-      working-directory: circular-import-fighter
-      run: make SPACK_ROOT=../new SUFFIX=.new
-    - name: Compare import cycles
-      working-directory: circular-import-fighter
-      run: |
-        edges_before="$(head -n1 solution.old)"
-        edges_after="$(head -n1 solution.new)"
-        if [ "$edges_after" -gt "$edges_before" ]; then
-          printf '\033[1;31mImport check failed: %s imports need to be deleted, ' "$edges_after"
-          printf 'previously this was %s\033[0m\n' "$edges_before"
-          printf 'Compare \033[1;97m"Problematic imports before"\033[0m and '
-          printf '\033[1;97m"Problematic imports after"\033[0m.\n'
-          exit 1
-        else
-          printf '\033[1;32mImport check passed: %s <= %s\033[0m\n' "$edges_after" "$edges_before"
-        fi
-
   # Further style checks from pylint
   pylint:
```
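The removed bash step compared the first line of each solver output, which holds the number of problematic import edges; the consolidated workflow delegates this to the checker's `make -j compare` target. In Python terms the removed comparison did roughly this (a sketch, not project code):

```python
# Sketch (Python) of the removed bash comparison step: the first line of
# each "solution" file holds the count of problematic import edges, and
# the check fails only if the new commit makes that count grow.
from pathlib import Path


def import_check(old_solution: str, new_solution: str) -> bool:
    edges_before = int(Path(old_solution).read_text().splitlines()[0])
    edges_after = int(Path(new_solution).read_text().splitlines()[0])
    if edges_after > edges_before:
        print(f"Import check failed: {edges_after} imports need to be "
              f"deleted, previously this was {edges_before}")
        return False
    print(f"Import check passed: {edges_after} <= {edges_before}")
    return True
```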
Stage configuration defaults:

```diff
@@ -1,5 +1,5 @@
 config:
   locks: false
   build_stage::
-    - '$spack/.staging'
+    - '$user_cache_path/stage'
   stage_name: '{name}-{version}-{hash:7}'
```
Documentation, Autotools packaging guide (heading typo fixed, and the `enable_or_disable` examples reworked to build up an `args` list):

```diff
@@ -272,9 +272,9 @@ often lists dependencies and the flags needed to locate them. The
 "environment variables" section lists environment variables that the
 build system uses to pass flags to the compiler and linker.

-^^^^^^^^^^^^^^^^^^^^^^^^^^
-Addings flags to configure
-^^^^^^^^^^^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Adding flags to configure
+^^^^^^^^^^^^^^^^^^^^^^^^^

 For most of the flags you encounter, you will want a variant to
 optionally enable/disable them. You can then optionally pass these
@@ -285,7 +285,7 @@ function like so:

    def configure_args(self):
        args = []

        ...
        if self.spec.satisfies("+mpi"):
            args.append("--enable-mpi")
        else:
@@ -299,7 +299,10 @@ Alternatively, you can use the :ref:`enable_or_disable <autotools_enable_or_dis
 .. code-block:: python

    def configure_args(self):
-       return [self.enable_or_disable("mpi")]
+       args = []
+       ...
+       args.extend(self.enable_or_disable("mpi"))
+       return args


 Note that we are explicitly disabling MPI support if it is not
@@ -344,7 +347,14 @@ typically used to enable or disable some feature within the package.
        default=False,
        description="Memchecker support for debugging [degrades performance]"
    )
-   config_args.extend(self.enable_or_disable("memchecker"))
    ...
+
+   def configure_args(self):
+       args = []
+       ...
+       args.extend(self.enable_or_disable("memchecker"))
+
+       return args

 In this example, specifying the variant ``+memchecker`` will generate
 the following configuration options:
```
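Putting the documented pieces together, a complete package along these lines might look as follows. This is a hedged sketch: the package name, URL, and checksum are illustrative; only the `variant`/`enable_or_disable` usage comes from the guide above.

```python
# Illustrative sketch of an AutotoolsPackage using enable_or_disable.
# The package name, URL, versions, and checksum are hypothetical.
from spack.package import *


class Foo(AutotoolsPackage):
    """Hypothetical autotools-based library."""

    homepage = "https://example.com/foo"
    url = "https://example.com/foo-1.0.tar.gz"

    version("1.0", sha256="0" * 64)  # placeholder checksum

    variant("mpi", default=False, description="Enable MPI support")
    variant(
        "memchecker",
        default=False,
        description="Memchecker support for debugging [degrades performance]",
    )

    depends_on("mpi", when="+mpi")

    def configure_args(self):
        args = []
        # enable_or_disable() turns each boolean variant into
        # --enable-<name> or --disable-<name> automatically.
        args.extend(self.enable_or_disable("mpi"))
        args.extend(self.enable_or_disable("memchecker"))
        return args
```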
Documentation, CI pipelines: the `.linux_power` runner tag is removed from the list:

```diff
@@ -361,7 +361,6 @@ and the tags associated with the class of runners to build on.
 * ``.linux_neoverse_n1``
 * ``.linux_neoverse_v1``
 * ``.linux_neoverse_v2``
-* ``.linux_power``
 * ``.linux_skylake``
 * ``.linux_x86_64``
 * ``.linux_x86_64_v4``
```
Documentation, environments (new section on copied relative includes; the `spack develop` hunks below showed old and new text that differ only in reflow, so each passage appears once as context):

```diff
@@ -112,6 +112,19 @@ the original but may concretize differently in the presence of different
 explicit or default configuration settings (e.g., a different version of
 Spack or for a different user account).

+Environments created from a manifest will copy any included configs
+from relative paths inside the environment. Relative paths from
+outside the environment will cause errors, and absolute paths will be
+kept absolute. For example, if ``spack.yaml`` includes:
+
+.. code-block:: yaml
+
+   spack:
+     include: [./config.yaml]
+
+then the created environment will have its own copy of the file
+``config.yaml`` copied from the location in the original environment.
+
 Create an environment from a ``spack.lock`` file using:

 .. code-block:: console
@@ -160,7 +173,7 @@ accepts. If an environment already exists then spack will simply activate it
 and ignore the create-specific flags.

 .. code-block:: console

    $ spack env activate --create -p myenv
    # ...
    # [creates if myenv does not exist yet]
@@ -424,8 +437,8 @@ Developing Packages in a Spack Environment

 The ``spack develop`` command allows one to develop Spack packages in
 an environment. It requires a spec containing a concrete version, and
 will configure Spack to install the package from local source.
 If a version is not provided from the command line interface then spack
 will automatically pick the highest version the package has defined.
 This means any infinity versions (``develop``, ``main``, ``stable``) will be
 preferred in this selection process.
@@ -435,9 +448,9 @@ set, and Spack will ensure the package and its dependents are rebuilt
 any time the environment is installed if the package's local source
 code has been modified. Spack's native implementation to check for modifications
 is to check if ``mtime`` is newer than the installation.
 A custom check can be created by overriding the ``detect_dev_src_change`` method
 in your package class. This is particularly useful for projects using custom spack repo's
 to drive development and want to optimize performance.

 Spack ensures that all instances of a
 developed package in the environment are concretized to match the
@@ -453,7 +466,7 @@ Further development on ``foo`` can be tested by re-installing the environment,
 and eventually committed and pushed to the upstream git repo.

 If the package being developed supports out-of-source builds then users can use the
 ``--build_directory`` flag to control the location and name of the build directory.
 This is a shortcut to set the ``package_attributes:build_directory`` in the
 ``packages`` configuration (see :ref:`assigning-package-attributes`).
 The supplied location will become the build-directory for that package in all future builds.
```
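The guide mentions overriding `detect_dev_src_change`; a minimal sketch of such an override follows. Everything beyond the method name is an assumption: the content-hash strategy, the helper, and the attributes are illustrative, not Spack API.

```python
# Hedged sketch of a custom detect_dev_src_change override (the method
# name comes from the docs above; the content-hash strategy and the
# source_path/recorded_digest attributes are illustrative assumptions).
import hashlib
import pathlib


def tree_digest(root: str) -> str:
    """Deterministic content hash of every file under root."""
    h = hashlib.sha256()
    for path in sorted(pathlib.Path(root).rglob("*")):
        if path.is_file():
            h.update(path.name.encode())
            h.update(path.read_bytes())
    return h.hexdigest()


class DevPackage:
    """Stand-in for a Spack package class in a custom repo."""

    def __init__(self, source_path: str, recorded_digest: str):
        self.source_path = source_path
        self.recorded_digest = recorded_digest  # stored at install time

    def detect_dev_src_change(self) -> bool:
        # Rebuild only when file *contents* changed, not whenever any
        # mtime is newer than the installation (the default check).
        return tree_digest(self.source_path) != self.recorded_digest
```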
`llnl.util.filesystem`: `copy`, `install`, `copy_tree`, and `install_tree` now document and raise `OSError` instead of `IOError`:

```diff
@@ -668,7 +668,7 @@ def copy(src, dest, _permissions=False):
         _permissions (bool): for internal use only

     Raises:
-        IOError: if *src* does not match any files or directories
+        OSError: if *src* does not match any files or directories
         ValueError: if *src* matches multiple files but *dest* is
             not a directory
     """
@@ -679,7 +679,7 @@ def copy(src, dest, _permissions=False):

     files = glob.glob(src)
     if not files:
-        raise IOError("No such file or directory: '{0}'".format(src))
+        raise OSError("No such file or directory: '{0}'".format(src))
     if len(files) > 1 and not os.path.isdir(dest):
         raise ValueError(
             "'{0}' matches multiple files but '{1}' is not a directory".format(src, dest)
@@ -710,7 +710,7 @@ def install(src, dest):
         dest (str): the destination file or directory

     Raises:
-        IOError: if *src* does not match any files or directories
+        OSError: if *src* does not match any files or directories
         ValueError: if *src* matches multiple files but *dest* is
             not a directory
     """
@@ -748,7 +748,7 @@ def copy_tree(
         _permissions (bool): for internal use only

     Raises:
-        IOError: if *src* does not match any files or directories
+        OSError: if *src* does not match any files or directories
         ValueError: if *src* is a parent directory of *dest*
     """
     if _permissions:
@@ -762,7 +762,7 @@ def copy_tree(

     files = glob.glob(src)
     if not files:
-        raise IOError("No such file or directory: '{0}'".format(src))
+        raise OSError("No such file or directory: '{0}'".format(src))

     # For Windows hard-links and junctions, the source path must exist to make a symlink. Add
     # all symlinks to this list while traversing the tree, then when finished, make all
@@ -843,7 +843,7 @@ def install_tree(src, dest, symlinks=True, ignore=None):
         ignore (typing.Callable): function indicating which files to ignore

     Raises:
-        IOError: if *src* does not match any files or directories
+        OSError: if *src* does not match any files or directories
         ValueError: if *src* is a parent directory of *dest*
     """
     copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)
```
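The `IOError` to `OSError` sweep in this and the following hunks is behavior-preserving on Python 3, where `IOError` is just an alias of `OSError`; a quick check:

```python
# On Python 3, IOError (and EnvironmentError) are deprecated aliases of
# OSError, so catching or raising OSError is strictly equivalent and clearer.
assert IOError is OSError
assert EnvironmentError is OSError

try:
    open("/nonexistent/path")
except OSError as e:  # also catches what used to be raised as IOError
    print(type(e).__name__, e.errno)
```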
`LinkTree`:

```diff
@@ -308,7 +308,7 @@ class LinkTree:

     def __init__(self, source_root):
         if not os.path.exists(source_root):
-            raise IOError("No such file or directory: '%s'", source_root)
+            raise OSError("No such file or directory: '%s'", source_root)

         self._root = source_root
```
Lock polling:

```diff
@@ -391,7 +391,7 @@ def _poll_lock(self, op: int) -> bool:

             return True

-        except IOError as e:
+        except OSError as e:
             # EAGAIN and EACCES == locked by another process (so try again)
             if e.errno not in (errno.EAGAIN, errno.EACCES):
                 raise
```
Log writer daemon:

```diff
@@ -918,7 +918,7 @@ def _writer_daemon(
                 try:
                     if stdin_file.read(1) == "v":
                         echo = not echo
-                except IOError as e:
+                except OSError as e:
                     # If SIGTTIN is ignored, the system gives EIO
                     # to let the caller know the read failed b/c it
                     # was in the bg. Ignore that too.
```
Retry-on-`EINTR` wrapper:

```diff
@@ -1013,7 +1013,7 @@ def wrapped(*args, **kwargs):
         while True:
             try:
                 return function(*args, **kwargs)
-            except IOError as e:
+            except OSError as e:
                 if e.errno == errno.EINTR:
                     continue
                 raise
```
Spack version string:

```diff
@@ -10,7 +10,7 @@
 import spack.util.git

 #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
-__version__ = "0.24.0.dev0"
+__version__ = "1.0.0.dev0"
 spack_version = __version__
```
CMake build system, incremental develop builds: instead of keying on `CMakeCache.txt`, the skip check now looks for the generator-specific configure artifact and logs how to force reconfiguration:

```diff
@@ -11,6 +11,7 @@
 from typing import Any, List, Optional, Tuple

 import llnl.util.filesystem as fs
+from llnl.util import tty
 from llnl.util.lang import stable_partition

 import spack.builder
@@ -458,11 +459,23 @@ def cmake(
     ) -> None:
         """Runs ``cmake`` in the build directory"""

-        # skip cmake phase if it is an incremental develop build
-        if spec.is_develop and os.path.isfile(
-            os.path.join(self.build_directory, "CMakeCache.txt")
-        ):
-            return
+        if spec.is_develop:
+            # skip cmake phase if it is an incremental develop build
+
+            # Determine the files that will re-run CMake that are generated from a successful
+            # configure step based on state
+            primary_generator = _extract_primary_generator(self.generator)
+            configure_artifact = "Makefile"
+            if primary_generator == "Ninja":
+                configure_artifact = "ninja.build"
+
+            if os.path.isfile(os.path.join(self.build_directory, configure_artifact)):
+                tty.msg(
+                    "Incremental build criteria satisfied."
+                    "Skipping CMake configure step. To force configuration run"
+                    f" `spack clean {pkg.name}`"
+                )
+                return

         options = self.std_cmake_args
         options += self.cmake_args()
```
`CudaPackage`: Blackwell compute capabilities (10.0/10.1/12.0 and their `a` variants) are added, the version table gains per-generation comments, and conflicts are extended for gcc 15 and clang 20:

```diff
@@ -15,7 +15,7 @@ class CudaPackage(PackageBase):
     """Auxiliary class which contains CUDA variant, dependencies and conflicts
     and is meant to unify and facilitate its usage.

-    Maintainers: ax3l, Rombur, davidbeckingsale
+    Maintainers: ax3l, Rombur, davidbeckingsale, pauleonix
     """

     # https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#gpu-feature-list
@@ -47,6 +47,12 @@ class CudaPackage(PackageBase):
         "89",
         "90",
         "90a",
+        "100",
+        "100a",
+        "101",
+        "101a",
+        "120",
+        "120a",
     )

     # FIXME: keep cuda and cuda_arch separate to make usage easier until
@@ -99,39 +105,56 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
     # CUDA version vs Architecture
     # https://en.wikipedia.org/wiki/CUDA#GPUs_supported
     # https://docs.nvidia.com/cuda/cuda-toolkit-release-notes/index.html#deprecated-features
+    # Tesla support:
     depends_on("cuda@:6.0", when="cuda_arch=10")
     depends_on("cuda@:6.5", when="cuda_arch=11")
     depends_on("cuda@2.1:6.5", when="cuda_arch=12")
     depends_on("cuda@2.1:6.5", when="cuda_arch=13")

+    # Fermi support:
     depends_on("cuda@3.0:8.0", when="cuda_arch=20")
     depends_on("cuda@3.2:8.0", when="cuda_arch=21")

+    # Kepler support:
     depends_on("cuda@5.0:10.2", when="cuda_arch=30")
     depends_on("cuda@5.0:10.2", when="cuda_arch=32")
     depends_on("cuda@5.0:11.8", when="cuda_arch=35")
     depends_on("cuda@6.5:11.8", when="cuda_arch=37")

+    # Maxwell support:
     depends_on("cuda@6.0:", when="cuda_arch=50")
     depends_on("cuda@6.5:", when="cuda_arch=52")
     depends_on("cuda@6.5:", when="cuda_arch=53")

+    # Pascal support:
     depends_on("cuda@8.0:", when="cuda_arch=60")
     depends_on("cuda@8.0:", when="cuda_arch=61")
     depends_on("cuda@8.0:", when="cuda_arch=62")

+    # Volta support:
     depends_on("cuda@9.0:", when="cuda_arch=70")
+    # Turing support:
     depends_on("cuda@9.0:", when="cuda_arch=72")
     depends_on("cuda@10.0:", when="cuda_arch=75")

+    # Ampere support:
     depends_on("cuda@11.0:", when="cuda_arch=80")
     depends_on("cuda@11.1:", when="cuda_arch=86")
     depends_on("cuda@11.4:", when="cuda_arch=87")
+    # Ada support:
     depends_on("cuda@11.8:", when="cuda_arch=89")

+    # Hopper support:
     depends_on("cuda@12.0:", when="cuda_arch=90")
     depends_on("cuda@12.0:", when="cuda_arch=90a")

+    # Blackwell support:
+    depends_on("cuda@12.8:", when="cuda_arch=100")
+    depends_on("cuda@12.8:", when="cuda_arch=100a")
+    depends_on("cuda@12.8:", when="cuda_arch=101")
+    depends_on("cuda@12.8:", when="cuda_arch=101a")
+    depends_on("cuda@12.8:", when="cuda_arch=120")
+    depends_on("cuda@12.8:", when="cuda_arch=120a")
     # From the NVIDIA install guide we know of conflicts for particular
     # platforms (linux, darwin), architectures (x86, powerpc) and compilers
     # (gcc, clang). We don't restrict %gcc and %clang conflicts to
@@ -163,6 +186,7 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
     conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
     conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
     conflicts("%gcc@14:", when="+cuda ^cuda@:12.6")
+    conflicts("%gcc@15:", when="+cuda ^cuda@:12.8")
     conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
     conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
     conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
@@ -171,6 +195,7 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
     conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
     conflicts("%clang@18:", when="+cuda ^cuda@:12.5")
     conflicts("%clang@19:", when="+cuda ^cuda@:12.6")
+    conflicts("%clang@20:", when="+cuda ^cuda@:12.8")

     # https://gist.github.com/ax3l/9489132#gistcomment-3860114
     conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")
```
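For context, a downstream package opts into these rules by mixing in `CudaPackage`. A minimal sketch (the package and the exact CMake flag wiring are illustrative, not from this diff):

```python
# Hedged sketch of a package consuming the CudaPackage mixin shown above.
# The class name and the CMake flag it sets are illustrative.
from spack.package import *


class MySolver(CMakePackage, CudaPackage):
    """Hypothetical CUDA-accelerated solver."""

    def cmake_args(self):
        args = []
        if self.spec.satisfies("+cuda"):
            # cuda_arch is the multi-valued variant defined by CudaPackage;
            # the depends_on/conflicts rules in the diff above have already
            # filtered out invalid cuda/cuda_arch/compiler combinations.
            archs = self.spec.variants["cuda_arch"].value
            args.append(f"-DCMAKE_CUDA_ARCHITECTURES={';'.join(archs)}")
        return args
```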
CI artifact download: the hand-rolled opener is replaced with a mockable `urlopen` alias, error handling moves to a `try`/`except OSError`, and the zip file is managed with context managers:

```diff
@@ -14,7 +14,7 @@
 import zipfile
 from collections import namedtuple
 from typing import Callable, Dict, List, Set
-from urllib.request import HTTPHandler, Request, build_opener
+from urllib.request import Request

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
@@ -62,6 +62,8 @@

 PushResult = namedtuple("PushResult", "success url")

+urlopen = web_util.urlopen  # alias for mocking in tests
+

 def get_change_revisions():
     """If this is a git repo get the revisions to use when checking
@@ -627,29 +629,19 @@ def download_and_extract_artifacts(url, work_dir):
     if token:
         headers["PRIVATE-TOKEN"] = token

-    opener = build_opener(HTTPHandler)
-
-    request = Request(url, headers=headers)
-    request.get_method = lambda: "GET"
-
-    response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
-    response_code = response.getcode()
-
-    if response_code != 200:
-        msg = f"Error response code ({response_code}) in reproduce_ci_job"
-        raise SpackError(msg)
-
+    request = Request(url, headers=headers, method="GET")
     artifacts_zip_path = os.path.join(work_dir, "artifacts.zip")
+    os.makedirs(work_dir, exist_ok=True)

-    if not os.path.exists(work_dir):
-        os.makedirs(work_dir)
+    try:
+        response = urlopen(request, timeout=SPACK_CDASH_TIMEOUT)
+        with open(artifacts_zip_path, "wb") as out_file:
+            shutil.copyfileobj(response, out_file)
+    except OSError as e:
+        raise SpackError(f"Error fetching artifacts: {e}")

-    with open(artifacts_zip_path, "wb") as out_file:
-        shutil.copyfileobj(response, out_file)
-
-    zip_file = zipfile.ZipFile(artifacts_zip_path)
-    zip_file.extractall(work_dir)
-    zip_file.close()
+    with zipfile.ZipFile(artifacts_zip_path) as zip_file:
+        zip_file.extractall(work_dir)

     os.remove(artifacts_zip_path)
```
`spack external find`:

```diff
@@ -110,10 +110,7 @@ def external_find(args):
     # Note that KeyboardInterrupt does not subclass Exception
     # (so CTRL-C will terminate the program as expected).
     skip_msg = "Skipping manifest and continuing with other external checks"
-    if (isinstance(e, IOError) or isinstance(e, OSError)) and e.errno in [
-        errno.EPERM,
-        errno.EACCES,
-    ]:
+    if isinstance(e, OSError) and e.errno in (errno.EPERM, errno.EACCES):
         # The manifest file does not have sufficient permissions enabled:
         # print a warning and keep going
         tty.warn("Unable to read manifest due to insufficient permissions.", skip_msg)
```
`spack test run`: a stray triple-quoted string (which was not a docstring, just a no-op expression) becomes real comments:

```diff
@@ -177,16 +177,15 @@ def test_run(args):
     matching = spack.store.STORE.db.query_local(spec, hashes=hashes, explicit=explicit)
     if spec and not matching:
         tty.warn("No {0}installed packages match spec {1}".format(explicit_str, spec))
-        """
-        TODO: Need to write out a log message and/or CDASH Testing
-        output that package not installed IF continue to process
-        these issues here.
-
-        if args.log_format:
-            # Proceed with the spec assuming the test process
-            # to ensure report package as skipped (e.g., for CI)
-            specs_to_test.append(spec)
-        """
+        # TODO: Need to write out a log message and/or CDASH Testing
+        # output that package not installed IF continue to process
+        # these issues here.
+
+        # if args.log_format:
+        #     # Proceed with the spec assuming the test process
+        #     # to ensure report package as skipped (e.g., for CI)
+        #     specs_to_test.append(spec)

     specs_to_test.extend(matching)
```
Container recipe validation:

```diff
@@ -57,7 +57,7 @@ def validate(configuration_file):
     # Set the default value of the concretization strategy to unify and
     # warn if the user explicitly set another value
     env_dict.setdefault("concretizer", {"unify": True})
-    if not env_dict["concretizer"]["unify"] is True:
+    if env_dict["concretizer"]["unify"] is not True:
         warnings.warn(
             '"concretizer:unify" is not set to "true", which means the '
             "generated image may contain different variants of the same "
```
Environments implementation (`OSError` cleanup, plus the new copying of relative includes that matches the docs change above):

```diff
@@ -581,7 +581,7 @@ def _error_on_nonempty_view_dir(new_root):
     # Check if the target path lexists
     try:
         st = os.lstat(new_root)
-    except (IOError, OSError):
+    except OSError:
         return

     # Empty directories are fine
@@ -861,7 +861,7 @@ def regenerate(self, concrete_roots: List[Spec]) -> None:
     ):
         try:
             shutil.rmtree(old_root)
-        except (IOError, OSError) as e:
+        except OSError as e:
             msg = "Failed to remove old view at %s\n" % old_root
             msg += str(e)
             tty.warn(msg)
@@ -2554,7 +2554,7 @@ def is_latest_format(manifest):
     try:
         with open(manifest, encoding="utf-8") as f:
             data = syaml.load(f)
-    except (OSError, IOError):
+    except OSError:
         return True
     top_level_key = _top_level_key(data)
     changed = spack.schema.env.update(data[top_level_key])
@@ -2634,6 +2634,32 @@ def _ensure_env_dir():

     shutil.copy(envfile, target_manifest)

+    # Copy relative path includes that live inside the environment dir
+    try:
+        manifest = EnvironmentManifestFile(environment_dir)
+    except Exception:
+        # error handling for bad manifests is handled on other code paths
+        return
+
+    includes = manifest[TOP_LEVEL_KEY].get("include", [])
+    for include in includes:
+        if os.path.isabs(include):
+            continue
+
+        abspath = pathlib.Path(os.path.normpath(environment_dir / include))
+        common_path = pathlib.Path(os.path.commonpath([environment_dir, abspath]))
+        if common_path != environment_dir:
+            tty.debug(f"Will not copy relative include from outside environment: {include}")
+            continue
+
+        orig_abspath = os.path.normpath(envfile.parent / include)
+        if not os.path.exists(orig_abspath):
+            tty.warn(f"Included file does not exist; will not copy: '{include}'")
+            continue
+
+        fs.touchp(abspath)
+        shutil.copy(orig_abspath, abspath)
+

 class EnvironmentManifestFile(collections.abc.Mapping):
     """Manages the in-memory representation of a manifest file, and its synchronization
```
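The containment test above (normalize the joined path, then require `commonpath` to collapse back to the environment root) is the standard way to reject `../`-style escapes. A standalone sketch with example paths:

```python
# Standalone sketch of the path-containment check used above to decide
# whether a relative include may be copied. The paths are examples.
import os
import pathlib


def is_inside(root: str, relative_include: str) -> bool:
    root_path = pathlib.Path(root)
    abspath = pathlib.Path(os.path.normpath(root_path / relative_include))
    # commonpath collapses to the root only if abspath never escapes it
    return pathlib.Path(os.path.commonpath([root_path, abspath])) == root_path


print(is_inside("/envs/myenv", "./config.yaml"))         # True
print(is_inside("/envs/myenv", "../other/config.yaml"))  # False
```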
Extensions:

```diff
@@ -187,7 +187,7 @@ def path_for_extension(target_name: str, *, paths: List[str]) -> str:
         if name == target_name:
             return path
     else:
-        raise IOError('extension "{0}" not found'.format(target_name))
+        raise OSError('extension "{0}" not found'.format(target_name))


 def get_module(cmd_name):
```
Filesystem view:

```diff
@@ -427,7 +427,7 @@ def needs_file(spec, file):
     try:
         with open(manifest_file, "r", encoding="utf-8") as f:
             manifest = s_json.load(f)
-    except (OSError, IOError):
+    except OSError:
         # if we can't load it, assume it doesn't know about the file.
         manifest = {}
     return test_path in manifest
@@ -831,7 +831,7 @@ def get_spec_from_file(filename):
     try:
         with open(filename, "r", encoding="utf-8") as f:
             return spack.spec.Spec.from_yaml(f)
-    except IOError:
+    except OSError:
         return None
```
ELF shared-library detection:

```diff
@@ -26,7 +26,7 @@ def is_shared_library_elf(filepath):
         with open(filepath, "rb") as f:
             elf = parse_elf(f, interpreter=True, dynamic_section=True)
             return elf.has_pt_dynamic and (elf.has_soname or not elf.has_pt_interp)
-    except (IOError, OSError, ElfParsingError):
+    except (OSError, ElfParsingError):
         return False
```
Shebang filtering:

```diff
@@ -166,7 +166,7 @@ def filter_shebangs_in_directory(directory, filenames=None):
         # Only look at executable, non-symlink files.
         try:
             st = os.lstat(path)
-        except (IOError, OSError):
+        except OSError:
             continue

         if stat.S_ISLNK(st.st_mode) or stat.S_ISDIR(st.st_mode) or not st.st_mode & is_exe:
```
Installer dependency flags:

```diff
@@ -814,7 +814,7 @@ def get_depflags(self, pkg: "spack.package_base.PackageBase") -> int:
         # Include build dependencies if pkg is going to be built from sources, or
         # if build deps are explicitly requested.
         if include_build_deps or not (
-            cache_only or pkg.spec.installed and not pkg.spec.dag_hash() in self.overwrite
+            cache_only or pkg.spec.installed and pkg.spec.dag_hash() not in self.overwrite
        ):
             depflag |= dt.BUILD
         if self.run_tests(pkg):
```
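`x not in y` is the idiomatic spelling of `not x in y` and parses identically; the surrounding condition also leans on `and` binding tighter than `or`. A quick illustration:

```python
# `not x in y` and `x not in y` are the same operation; the second reads
# better. Also note `and` binds tighter than `or`, so the condition above
# parses as: cache_only or (installed and (hash not in overwrite)).
overwrite = {"abc123"}
assert ("abc123" not in overwrite) == (not "abc123" in overwrite)

cache_only, installed, h = False, True, "def456"
cond = cache_only or installed and h not in overwrite
assert cond == (cache_only or (installed and (h not in overwrite)))
```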
Command-line help formatting (the string literal after the first statement was never a docstring, just a no-op expression):

```diff
@@ -163,7 +163,7 @@ def format_help_sections(self, level):
     # lazily add all commands to the parser when needed.
     add_all_commands(self)

-    """Print help on subcommands in neatly formatted sections."""
+    # Print help on subcommands in neatly formatted sections.
     formatter = self._get_formatter()

     # Create a list of subcommand actions. Argparse internals are nasty!
```
Mirrors:

```diff
@@ -64,7 +64,7 @@ def from_local_path(path: str):
     @staticmethod
     def from_url(url: str):
         """Create an anonymous mirror by URL. This method validates the URL."""
-        if not urllib.parse.urlparse(url).scheme in supported_url_schemes:
+        if urllib.parse.urlparse(url).scheme not in supported_url_schemes:
             raise ValueError(
                 f'"{url}" is not a valid mirror URL. '
                 f"Scheme must be one of {supported_url_schemes}."
```
OCI opener:

```diff
@@ -383,6 +383,7 @@ def create_opener():
     """Create an opener that can handle OCI authentication."""
     opener = urllib.request.OpenerDirector()
     for handler in [
         urllib.request.ProxyHandler(),
+        urllib.request.UnknownHandler(),
         urllib.request.HTTPSHandler(context=spack.util.web.ssl_create_default_context()),
         spack.util.web.SpackHTTPDefaultErrorHandler(),
```
`spack.package` public API (markers on the import reshuffle are best-effort; hunk line counts constrain but do not fully determine them):

```diff
@@ -2,7 +2,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-# flake8: noqa: F401
+# flake8: noqa: F401, E402
 """spack.package defines the public API for Spack packages, by re-exporting useful symbols from
 other modules. Packages should import this module, instead of importing from spack.* directly
 to ensure forward compatibility with future versions of Spack."""
@@ -13,6 +13,17 @@
 # import most common types used in packages
 from typing import Dict, List, Optional

+
+class tty:
+    import llnl.util.tty as _tty
+
+    debug = _tty.debug
+    error = _tty.error
+    info = _tty.info
+    msg = _tty.msg
+    warn = _tty.warn
+
+
 from llnl.util.filesystem import (
     FileFilter,
     FileList,
@@ -49,7 +60,6 @@
 )
 from llnl.util.symlink import symlink

-# These props will be overridden when the build env is set up.
 from spack.build_environment import MakeExecutable
 from spack.build_systems.aspell_dict import AspellDictPackage
 from spack.build_systems.autotools import AutotoolsPackage
@@ -136,8 +146,10 @@
 )
 from spack.phase_callbacks import run_after, run_before
 from spack.spec import Spec
+from spack.util.environment import EnvironmentModifications
 from spack.util.executable import Executable, ProcessError, which, which_string
+from spack.util.filesystem import fix_darwin_install_name
 from spack.util.prefix import Prefix
 from spack.variant import any_combination_of, auto_or_any_combination_of, disjoint_sets
 from spack.version import Version, ver
```
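The `class tty` block above is a namespacing trick: importing the module inside a class body and re-binding a whitelist of functions keeps the familiar `tty.msg`-style calls working in packages without re-exporting the whole module. A minimal standalone illustration (using `logging` as a stand-in module):

```python
# Minimal illustration of the class-as-namespace re-export pattern used
# above: expose a curated subset of a module under a familiar dotted name.
class tty:
    import logging as _log  # any module; bound as a class attribute

    debug = _log.debug
    error = _log.error
    info = _log.info


# Calls go through the class, so only the explicitly assigned names form
# the public surface; a star-import of this file leaks no module object.
tty.error("only the whitelisted callables are reachable via tty.*")
```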
`PackageBase`: the unused patch index and URL-scheme list go away, and packages gain an optional `required_spack_version` gate enforced at instantiation:

```diff
@@ -31,6 +31,7 @@
 import llnl.util.tty as tty
 from llnl.util.lang import classproperty, memoized

+import spack
 import spack.compilers
 import spack.config
 import spack.dependency
@@ -60,16 +61,13 @@
 from spack.util.package_hash import package_hash
 from spack.util.typing import SupportsRichComparison
 from spack.version import GitVersion, StandardVersion
+from spack.version import ver as version_from_str

 FLAG_HANDLER_RETURN_TYPE = Tuple[
     Optional[Iterable[str]], Optional[Iterable[str]], Optional[Iterable[str]]
 ]
 FLAG_HANDLER_TYPE = Callable[[str, Iterable[str]], FLAG_HANDLER_RETURN_TYPE]

-"""Allowed URL schemes for spack packages."""
-_ALLOWED_URL_SCHEMES = ["http", "https", "ftp", "file", "git"]
-

 #: Filename for the Spack build/install log.
 _spack_build_logfile = "spack-build-out.txt"
@@ -701,9 +699,6 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
     #: Verbosity level, preserved across installs.
     _verbose = None

-    #: index of patches by sha256 sum, built lazily
-    _patches_by_hash = None
-
     #: Package homepage where users can find more information about the package
     homepage: Optional[str] = None
@@ -738,10 +733,22 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
     #: TestSuite instance used to manage stand-alone tests for 1+ specs.
     test_suite: Optional[Any] = None

+    #: compatibility requirements with Spack
+    #: if value is ``None``, requirements from repo are still applied
+    required_spack_version = None
+
     def __init__(self, spec):
         # this determines how the package should be built.
         self.spec: spack.spec.Spec = spec

+        # is this package more restrictive in compatibility than the repo is
+        if self.required_spack_version:
+            spack_version = version_from_str(spack.spack_version)
+            required_version = version_from_str(self.required_spack_version)
+            if not spack_version.satisfies(required_version):
+                msg = f"Package {self.name} requires Spack version {self.required_spack_version}."
+                raise PackageError(msg)
+
         # Allow custom staging paths for packages
         self.path = None
```
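Based on the `__init__` check above, a package could pin supported Spack versions like this (a sketch; the class and the range string are examples):

```python
# Sketch: a package opting into the required_spack_version gate shown
# above. The range string is an example; any Spack version-range syntax
# should work, since the check uses spack.version semantics.
from spack.package import *


class MyLib(Package):
    required_spack_version = "0.23:"  # refuse to build on older Spack

    # On instantiation, PackageBase.__init__ compares
    # ver(spack.spack_version).satisfies(ver("0.23:")) and raises
    # PackageError when the running Spack falls outside the range.
```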
Patch cache: the batch `update_packages(Set[str])` API is replaced by a single-package `update_package(str)`:

```diff
@@ -6,7 +6,7 @@
 import os
 import pathlib
 import sys
-from typing import Any, Dict, Optional, Set, Tuple, Type, Union
+from typing import Any, Dict, Optional, Tuple, Type, Union

 import llnl.util.filesystem
 from llnl.url import allowed_archive
@@ -503,38 +503,36 @@ def patch_for_package(self, sha256: str, pkg: "spack.package_base.PackageBase")
         patch_dict["sha256"] = sha256
         return from_dict(patch_dict, repository=self.repository)

-    def update_packages(self, pkgs_fullname: Set[str]) -> None:
+    def update_package(self, pkg_fullname: str) -> None:
         """Update the patch cache.

         Args:
             pkg_fullname: package to update.
         """
         # remove this package from any patch entries that reference it.
-        if self.index:
-            empty = []
-            for sha256, package_to_patch in self.index.items():
-                remove = []
-                for fullname, patch_dict in package_to_patch.items():
-                    if patch_dict["owner"] in pkgs_fullname:
-                        remove.append(fullname)
+        empty = []
+        for sha256, package_to_patch in self.index.items():
+            remove = []
+            for fullname, patch_dict in package_to_patch.items():
+                if patch_dict["owner"] == pkg_fullname:
+                    remove.append(fullname)

-                for fullname in remove:
-                    package_to_patch.pop(fullname)
+            for fullname in remove:
+                package_to_patch.pop(fullname)

-                if not package_to_patch:
-                    empty.append(sha256)
+            if not package_to_patch:
+                empty.append(sha256)

-            # remove any entries that are now empty
-            for sha256 in empty:
-                del self.index[sha256]
+        # remove any entries that are now empty
+        for sha256 in empty:
+            del self.index[sha256]

         # update the index with per-package patch indexes
-        for pkg_fullname in pkgs_fullname:
-            pkg_cls = self.repository.get_pkg_class(pkg_fullname)
-            partial_index = self._index_patches(pkg_cls, self.repository)
-            for sha256, package_to_patch in partial_index.items():
-                p2p = self.index.setdefault(sha256, {})
-                p2p.update(package_to_patch)
+        pkg_cls = self.repository.get_pkg_class(pkg_fullname)
+        partial_index = self._index_patches(pkg_cls, self.repository)
+        for sha256, package_to_patch in partial_index.items():
+            p2p = self.index.setdefault(sha256, {})
+            p2p.update(package_to_patch)

     def update(self, other: "PatchCache") -> None:
         """Update this cache with the contents of another.
```
Provider index: the batch `update_packages` entry point is dropped and the single-spec `update` is restored, with an assertion that virtual specs never reach it:

```diff
@@ -2,7 +2,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """Classes and functions to manage providers of virtual dependencies"""
-from typing import Dict, Iterable, List, Optional, Set, Union
+from typing import Dict, List, Optional, Set

 import spack.error
 import spack.spec
@@ -26,7 +26,7 @@ class _IndexBase:
     #: Calling providers_for(spec) will find specs that provide a
     #: matching implementation of MPI. Derived class need to construct
     #: this attribute according to the semantics above.
-    providers: Dict[str, Dict["spack.spec.Spec", Set["spack.spec.Spec"]]]
+    providers: Dict[str, Dict[str, Set[str]]]

     def providers_for(self, virtual_spec):
         """Return a list of specs of all packages that provide virtual
@@ -99,56 +99,66 @@ def __init__(
         self.repository = repository
         self.restrict = restrict
         self.providers = {}
         if specs:
-            self.update_packages(specs)
-
-    def update_packages(self, specs: Iterable[Union[str, "spack.spec.Spec"]]):
-        specs = specs or []
-        for spec in specs:
-            if not isinstance(spec, spack.spec.Spec):
-                spec = spack.spec.Spec(spec)
-
-            if self.repository.is_virtual_safe(spec.name):
-                continue
-
-            self.update(spec)
+            for spec in specs:
+                if not isinstance(spec, spack.spec.Spec):
+                    spec = spack.spec.Spec(spec)
+
+                if self.repository.is_virtual_safe(spec.name):
+                    continue
+
+                self.update(spec)

     def update(self, spec):
         """Update the provider index with additional virtual specs.

         Args:
             spec: spec potentially providing additional virtual specs
         """
         if not isinstance(spec, spack.spec.Spec):
             spec = spack.spec.Spec(spec)

-        if not spec.name or self.repository.is_virtual_safe(spec.name):
-            # Only non-virtual packages with name can provide virtual specs.
-            return
+        if not spec.name:
+            # Empty specs do not have a package
+            return
+
+        msg = "cannot update an index passing the virtual spec '{}'".format(spec.name)
+        assert not self.repository.is_virtual_safe(spec.name), msg

         pkg_provided = self.repository.get_pkg_class(spec.name).provided
         for provider_spec_readonly, provided_specs in pkg_provided.items():
             for provided_spec in provided_specs:
                 # TODO: fix this comment.
                 # We want satisfaction other than flags
                 provider_spec = provider_spec_readonly.copy()
                 provider_spec.compiler_flags = spec.compiler_flags.copy()

                 if spec.intersects(provider_spec, deps=False):
                     provided_name = provided_spec.name

                     provider_map = self.providers.setdefault(provided_name, {})
                     if provided_spec not in provider_map:
                         provider_map[provided_spec] = set()

                     if self.restrict:
                         provider_set = provider_map[provided_spec]

                         # If this package existed in the index before,
                         # need to take the old versions out, as they're
                         # now more constrained.
-                        old = {s for s in provider_set if s.name == spec.name}
+                        old = set([s for s in provider_set if s.name == spec.name])
                         provider_set.difference_update(old)

                         # Now add the new version.
                         provider_set.add(spec)

                     else:
                         # Before putting the spec in the map, constrain
                         # it so that it provides what was asked for.
                         constrained = spec.copy()
                         constrained.constrain(provider_spec)
                         provider_map[provided_spec].add(constrained)

     def to_json(self, stream=None):
         """Dump a JSON representation of this object.
```
```diff
@@ -183,13 +193,14 @@ def merge(self, other):

         spdict[provided_spec] = spdict[provided_spec].union(opdict[provided_spec])

-    def remove_providers(self, pkgs_fullname: Set[str]):
+    def remove_provider(self, pkg_name):
         """Remove a provider from the ProviderIndex."""
         empty_pkg_dict = []
         for pkg, pkg_dict in self.providers.items():
             empty_pset = []
             for provided, pset in pkg_dict.items():
-                pset.difference_update(pkgs_fullname)
+                same_name = set(p for p in pset if p.fullname == pkg_name)
+                pset.difference_update(same_name)

                 if not pset:
                     empty_pset.append(provided)
```
Relocation of symlinks: the warning helper is dropped, and relative link targets are now skipped silently:

```diff
@@ -236,22 +236,15 @@ def relocate_elf_binaries(binaries: Iterable[str], prefix_to_prefix: Dict[str, str]):
         _set_elf_rpaths_and_interpreter(path, rpaths=rpaths, interpreter=interpreter)


-def _warn_if_link_cant_be_relocated(link: str, target: str):
-    if not os.path.isabs(target):
-        return
-    tty.warn(f'Symbolic link at "{link}" to "{target}" cannot be relocated')
-
-
 def relocate_links(links: Iterable[str], prefix_to_prefix: Dict[str, str]) -> None:
     """Relocate links to a new install prefix."""
     regex = re.compile("|".join(re.escape(p) for p in prefix_to_prefix.keys()))
     for link in links:
         old_target = readlink(link)
+        if not os.path.isabs(old_target):
+            continue
         match = regex.match(old_target)

-        # No match.
         if match is None:
-            _warn_if_link_cant_be_relocated(link, old_target)
             continue

         new_target = prefix_to_prefix[match.group()] + old_target[match.end() :]
```
Package repositories (`spack.repo`): indexers return to single-package `update`, and repos gain two compatibility gates, `required_spack_version` (repo requires a minimum Spack) and a repo interface `version` that Spack itself checks:

```diff
@@ -32,6 +32,7 @@
 import llnl.util.tty as tty
 from llnl.util.filesystem import working_dir

+import spack
 import spack.caches
 import spack.config
 import spack.error
@@ -40,6 +41,7 @@
 import spack.spec
 import spack.tag
 import spack.tengine
+import spack.version
 import spack.util.file_cache
 import spack.util.git
 import spack.util.naming as nm
@@ -49,6 +51,11 @@
 #: Package modules are imported as spack.pkg.<repo-namespace>.<pkg-name>
 ROOT_PYTHON_NAMESPACE = "spack.pkg"

+_required_repo_version = "0:"
+
+#: Version of the repo interface that this version of Spack is compatible with
+required_repo_version = spack.version.ver(_required_repo_version)
+

 def python_package_for_repo(namespace):
     """Returns the full namespace of a repository, given its relative one
@@ -465,7 +472,7 @@ def read(self, stream):
         """Read this index from a provided file object."""

     @abc.abstractmethod
-    def update(self, pkgs_fullname: Set[str]):
+    def update(self, pkg_fullname):
         """Update the index in memory with information about a package."""

     @abc.abstractmethod
@@ -482,8 +489,8 @@ def _create(self):
     def read(self, stream):
         self.index = spack.tag.TagIndex.from_json(stream, self.repository)

-    def update(self, pkgs_fullname: Set[str]):
-        self.index.update_packages({p.split(".")[-1] for p in pkgs_fullname})
+    def update(self, pkg_fullname):
+        self.index.update_package(pkg_fullname.split(".")[-1])

     def write(self, stream):
         self.index.to_json(stream)
@@ -498,14 +505,15 @@ def _create(self):
     def read(self, stream):
         self.index = spack.provider_index.ProviderIndex.from_json(stream, self.repository)

-    def update(self, pkgs_fullname: Set[str]):
+    def update(self, pkg_fullname):
+        name = pkg_fullname.split(".")[-1]
         is_virtual = (
-            lambda name: not self.repository.exists(name)
-            or self.repository.get_pkg_class(name).virtual
+            not self.repository.exists(name) or self.repository.get_pkg_class(name).virtual
         )
-        non_virtual_pkgs_fullname = {p for p in pkgs_fullname if not is_virtual(p.split(".")[-1])}
-        self.index.remove_providers(non_virtual_pkgs_fullname)
-        self.index.update_packages(non_virtual_pkgs_fullname)
+        if is_virtual:
+            return
+        self.index.remove_provider(pkg_fullname)
+        self.index.update(pkg_fullname)

     def write(self, stream):
         self.index.to_json(stream)
@@ -530,8 +538,8 @@ def read(self, stream):
     def write(self, stream):
         self.index.to_json(stream)

-    def update(self, pkgs_fullname: Set[str]):
-        self.index.update_packages(pkgs_fullname)
+    def update(self, pkg_fullname):
+        self.index.update_package(pkg_fullname)


 class RepoIndex:
@@ -621,7 +629,9 @@ def _build_index(self, name: str, indexer: Indexer):
         if new_index_mtime != index_mtime:
             needs_update = self.checker.modified_since(new_index_mtime)

-        indexer.update({f"{self.namespace}.{pkg_name}" for pkg_name in needs_update})
+        for pkg_name in needs_update:
+            indexer.update(f"{self.namespace}.{pkg_name}")

         indexer.write(new)

         return indexer.index
@@ -948,7 +958,7 @@ def check(condition, msg):
         self.config_file = os.path.join(self.root, repo_config_name)
         check(os.path.isfile(self.config_file), f"No {repo_config_name} found in '{root}'")

-        # Read configuration and validate namespace
+        # Read configuration and validate
         config = self._read_config()
         check(
             "namespace" in config,
@@ -962,6 +972,19 @@ def check(condition, msg):
             "Namespaces must be valid python identifiers separated by '.'",
         )

+        required_version = spack.version.ver(config.get("required_spack_version", ":"))
+        spack_version = spack.version.ver(spack.spack_version)
+        check(
+            spack_version.satisfies(required_version),
+            f"Repo {self.namespace} requires Spack version {required_version}",
+        )
+
+        repo_version = spack.version.ver(config.get("version", "0"))
+        check(
+            repo_version.satisfies(required_repo_version),
+            f"Spack requires repo version {required_repo_version}",
+        )
+
         # Set up 'full_namespace' to include the super-namespace
         self.full_namespace = python_package_for_repo(self.namespace)

@@ -1038,7 +1061,7 @@ def _read_config(self) -> Dict[str, str]:

             return yaml_data["repo"]

-        except IOError:
+        except OSError:
             tty.die(f"Error reading {self.config_file} when opening {self.root}")
@@ -1366,7 +1389,7 @@ def create_repo(root, namespace=None, subdir=packages_dir_name):
         if subdir != packages_dir_name:
             config.write(f"  subdirectory: '{subdir}'\n")

-    except (IOError, OSError) as e:
+    except OSError as e:
         # try to clean up.
         if existed:
             shutil.rmtree(config_path, ignore_errors=True)
```
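Both new repo checks reduce to Spack version-range containment via `spack.version.ver(...).satisfies(...)`, the same API used in the hunk above. A small sketch of the semantics (assumes it runs inside Spack's interpreter, e.g. `spack python`):

```python
# Sketch of the range logic behind the new repo compatibility checks,
# using the same spack.version API as the diff above.
import spack.version as vn

spack_version = vn.ver("1.0.0.dev0")
print(spack_version.satisfies(vn.ver("0:")))     # True: open range accepts all
print(spack_version.satisfies(vn.ver("0.23:")))  # True: 1.0.0.dev0 is in 0.23:
print(vn.ver("0").satisfies(vn.ver("0:")))       # repo version "0" passes "0:"
```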
CDash reporter: uploads go through `web_util.urlopen` with `Request(..., method="PUT")` instead of a hand-built opener and a `get_method` monkey-patch:

```diff
@@ -1,6 +1,7 @@
 # Copyright Spack Project Developers. See COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import codecs
 import collections
 import hashlib
 import os
@@ -13,7 +14,7 @@
 import xml.sax.saxutils
 from typing import Dict, Optional
 from urllib.parse import urlencode
-from urllib.request import HTTPSHandler, Request, build_opener
+from urllib.request import Request

 import llnl.util.tty as tty
 from llnl.util.filesystem import working_dir
@@ -24,10 +25,10 @@
 import spack.spec
 import spack.tengine
 import spack.util.git
+import spack.util.web as web_util
 from spack.error import SpackError
 from spack.util.crypto import checksum
 from spack.util.log_parse import parse_log_events
-from spack.util.web import ssl_create_default_context

 from .base import Reporter
 from .extract import extract_test_parts
@@ -433,7 +434,6 @@ def upload(self, filename):
         # Compute md5 checksum for the contents of this file.
         md5sum = checksum(hashlib.md5, filename, block_size=8192)

-        opener = build_opener(HTTPSHandler(context=ssl_create_default_context()))
         with open(filename, "rb") as f:
             params_dict = {
                 "build": self.buildname,
@@ -443,26 +443,21 @@ def upload(self, filename):
             }
             encoded_params = urlencode(params_dict)
             url = "{0}&{1}".format(self.cdash_upload_url, encoded_params)
-            request = Request(url, data=f)
+            request = Request(url, data=f, method="PUT")
             request.add_header("Content-Type", "text/xml")
             request.add_header("Content-Length", os.path.getsize(filename))
             if self.authtoken:
                 request.add_header("Authorization", "Bearer {0}".format(self.authtoken))
             try:
-                # By default, urllib2 only support GET and POST.
-                # CDash expects this file to be uploaded via PUT.
-                request.get_method = lambda: "PUT"
-                response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
+                response = web_util.urlopen(request, timeout=SPACK_CDASH_TIMEOUT)
                 if self.current_package_name not in self.buildIds:
-                    resp_value = response.read()
-                    if isinstance(resp_value, bytes):
-                        resp_value = resp_value.decode("utf-8")
+                    resp_value = codecs.getreader("utf-8")(response).read()
                     match = self.buildid_regexp.search(resp_value)
                     if match:
                         buildid = match.group(1)
                         self.buildIds[self.current_package_name] = buildid
             except Exception as e:
-                print("Upload to CDash failed: {0}".format(e))
+                print(f"Upload to CDash failed: {e}")

     def finalize_report(self):
         if self.buildIds:
```
@@ -1527,9 +1527,8 @@ def __init__(self, spec_like=None, *, external_path=None, external_modules=None)
self._external_path = external_path
self.external_modules = Spec._format_module_list(external_modules)

# This attribute is used to store custom information for
# external specs. None signal that it was not set yet.
self.extra_attributes = None
# This attribute is used to store custom information for external specs.
self.extra_attributes: dict = {}

# This attribute holds the original build copy of the spec if it is
# deployed differently than it was built. None signals that the spec
@@ -2351,15 +2350,10 @@ def to_node_dict(self, hash=ht.dag_hash):
)

if self.external:
if self.extra_attributes:
extra_attributes = syaml.sorted_dict(self.extra_attributes)
else:
extra_attributes = None

d["external"] = {
"path": self.external_path,
"module": self.external_modules,
"extra_attributes": extra_attributes,
"module": self.external_modules or None,
"extra_attributes": syaml.sorted_dict(self.extra_attributes),
}

if not self._concrete:
@@ -3861,6 +3855,13 @@ def _cmp_iter(self):
for item in self._cmp_node():
yield item

# If there is ever a breaking change to hash computation, whether accidental or purposeful,
# two specs can be identical modulo DAG hash, depending on what time they were concretized
# From the perspective of many operation in Spack (database, build cache, etc) a different
# DAG hash means a different spec. Here we ensure that two otherwise identical specs, one
# serialized before the hash change and one after, are considered different.
yield self.dag_hash() if self.concrete else None

# This needs to be in _cmp_iter so that no specs with different process hashes
# are considered the same by `__hash__` or `__eq__`.
#
@@ -4726,7 +4727,10 @@ def __str__(self):
bool_keys = []
kv_keys = []
for key in sorted_keys:
bool_keys.append(key) if isinstance(self[key].value, bool) else kv_keys.append(key)
if isinstance(self[key].value, bool):
bool_keys.append(key)
else:
kv_keys.append(key)

# add spaces before and after key/value variants.
string = io.StringIO()
@@ -4905,7 +4909,7 @@ def from_node_dict(cls, node):
spec.external_modules = node["external"]["module"]
if spec.external_modules is False:
spec.external_modules = None
spec.extra_attributes = node["external"].get("extra_attributes", {})
spec.extra_attributes = node["external"].get("extra_attributes") or {}

# specs read in are concrete unless marked abstract
if node.get("concrete", True):
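# Aside (not part of the changeset): why the hunk above prefers
# `get(...) or {}` over `get(..., {})` -- a node that stores an explicit
# null yields None even though the key is present, so dict.get()'s default
# never kicks in. A tiny self-contained illustration:
node = {"external": {"extra_attributes": None}}
assert node["external"].get("extra_attributes", {}) is None  # default unused
assert (node["external"].get("extra_attributes") or {}) == {}  # normalized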
@@ -5,7 +5,6 @@
import collections
import copy
from collections.abc import Mapping
from typing import Set

import spack.error
import spack.repo
@@ -111,20 +110,23 @@ def merge(self, other):
spkgs, opkgs = self.tags[tag], other.tags[tag]
self.tags[tag] = sorted(list(set(spkgs + opkgs)))

def update_packages(self, pkg_names: Set[str]):
"""Updates a package in the tag index."""
def update_package(self, pkg_name):
"""Updates a package in the tag index.

Args:
pkg_name (str): name of the package to be removed from the index
"""
pkg_cls = self.repository.get_pkg_class(pkg_name)

# Remove the package from the list of packages, if present
for pkg_list in self._tag_dict.values():
if pkg_names.isdisjoint(pkg_list):
continue
pkg_list[:] = [pkg for pkg in pkg_list if pkg not in pkg_names]
if pkg_name in pkg_list:
pkg_list.remove(pkg_name)

# Add it again under the appropriate tags
for pkg_name in pkg_names:
pkg_cls = self.repository.get_pkg_class(pkg_name)
for tag in getattr(pkg_cls, "tags", []):
tag = tag.lower()
self._tag_dict[tag].append(pkg_cls.name)
for tag in getattr(pkg_cls, "tags", []):
tag = tag.lower()
self._tag_dict[tag].append(pkg_cls.name)


class TagIndexError(spack.error.SpackError):

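# Aside (not part of the changeset): the batch-update pattern above in
# miniature -- a set-disjointness fast path plus slice assignment, which
# filters in place and keeps list identity. Names are illustrative only.
tags = {"hpc": ["mpich", "openmpi"], "bio": ["samtools"]}
updated = {"mpich"}
for pkg_list in tags.values():
    if updated.isdisjoint(pkg_list):
        continue  # nothing to remove under this tag
    pkg_list[:] = [pkg for pkg in pkg_list if pkg not in updated]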
@@ -1,8 +1,10 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import io
import os
import subprocess
from urllib.error import HTTPError

import pytest

@@ -15,6 +17,7 @@
import spack.paths as spack_paths
import spack.repo as repo
import spack.util.git
from spack.test.conftest import MockHTTPResponse

pytestmark = [pytest.mark.usefixtures("mock_packages")]

@@ -162,38 +165,8 @@ def test_import_signing_key(mock_gnupghome):
ci.import_signing_key(signing_key)


class FakeWebResponder:
def __init__(self, response_code=200, content_to_read=[]):
self._resp_code = response_code
self._content = content_to_read
self._read = [False for c in content_to_read]

def open(self, request, data=None, timeout=object()):
return self

def getcode(self):
return self._resp_code

def read(self, length=None):
if len(self._content) <= 0:
return None

if not self._read[-1]:
return_content = self._content[-1]
if length:
self._read[-1] = True
else:
self._read.pop()
self._content.pop()
return return_content

self._read.pop()
self._content.pop()
return None


def test_download_and_extract_artifacts(tmpdir, monkeypatch, working_env):
os.environ.update({"GITLAB_PRIVATE_TOKEN": "faketoken"})
def test_download_and_extract_artifacts(tmpdir, monkeypatch):
monkeypatch.setenv("GITLAB_PRIVATE_TOKEN", "faketoken")

url = "https://www.nosuchurlexists.itsfake/artifacts.zip"
working_dir = os.path.join(tmpdir.strpath, "repro")
@@ -201,10 +174,13 @@ def test_download_and_extract_artifacts(tmpdir, monkeypatch, working_env):
spack_paths.test_path, "data", "ci", "gitlab", "artifacts.zip"
)

with open(test_artifacts_path, "rb") as fd:
fake_responder = FakeWebResponder(content_to_read=[fd.read()])
def _urlopen_OK(*args, **kwargs):
with open(test_artifacts_path, "rb") as f:
return MockHTTPResponse(
"200", "OK", {"Content-Type": "application/zip"}, io.BytesIO(f.read())
)

monkeypatch.setattr(ci, "build_opener", lambda handler: fake_responder)
monkeypatch.setattr(ci, "urlopen", _urlopen_OK)

ci.download_and_extract_artifacts(url, working_dir)

@@ -214,7 +190,11 @@ def test_download_and_extract_artifacts(tmpdir, monkeypatch, working_env):
found_install = fs.find(working_dir, "install.sh")
assert len(found_install) == 1

fake_responder._resp_code = 400
def _urlopen_500(*args, **kwargs):
raise HTTPError(url, 500, "Internal Server Error", {}, None)

monkeypatch.setattr(ci, "urlopen", _urlopen_500)

with pytest.raises(spack.error.SpackError):
ci.download_and_extract_artifacts(url, working_dir)

@@ -328,16 +308,14 @@ def test_get_spec_filter_list(mutable_mock_env_path, mutable_mock_repo):
e1.add("hypre")
e1.concretize()

"""
Concretizing the above environment results in the following graphs:
# Concretizing the above environment results in the following graphs:

mpileaks -> mpich (provides mpi virtual dep of mpileaks)
-> callpath -> dyninst -> libelf
-> libdwarf -> libelf
-> mpich (provides mpi dep of callpath)
# mpileaks -> mpich (provides mpi virtual dep of mpileaks)
# -> callpath -> dyninst -> libelf
# -> libdwarf -> libelf
# -> mpich (provides mpi dep of callpath)

hypre -> openblas-with-lapack (provides lapack and blas virtual deps of hypre)
"""
# hypre -> openblas-with-lapack (provides lapack and blas virtual deps of hypre)

touched = ["libdwarf"]

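# Aside (not part of the changeset): the essence of the monkeypatch-based
# stubbing the new tests use, sketched against a hypothetical module `mymod`
# that calls `urlopen` -- success is faked with an in-memory body, failure by
# raising HTTPError directly.
import io
from urllib.error import HTTPError

def _urlopen_ok(*args, **kwargs):
    return io.BytesIO(b"payload")  # stands in for a response object

def _urlopen_fail(*args, **kwargs):
    raise HTTPError("https://example.invalid", 500, "Internal Server Error", {}, None)

# In a test body: monkeypatch.setattr(mymod, "urlopen", _urlopen_ok)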
@@ -1038,6 +1038,58 @@ def test_init_from_yaml(environment_from_manifest):
assert not e2.specs_by_hash


def test_init_from_yaml_relative_includes(tmp_path):
files = [
"relative_copied/packages.yaml",
"./relative_copied/compilers.yaml",
"repos.yaml",
"./config.yaml",
]

manifest = f"""
spack:
specs: []
include: {files}
"""

e1_path = tmp_path / "e1"
e1_manifest = e1_path / "spack.yaml"
fs.mkdirp(e1_path)
with open(e1_manifest, "w", encoding="utf-8") as f:
f.write(manifest)

for f in files:
fs.touchp(e1_path / f)

e2 = _env_create("test2", init_file=e1_manifest)

for f in files:
assert os.path.exists(os.path.join(e2.path, f))


def test_init_from_yaml_relative_includes_outside_env(tmp_path):
files = ["../outside_env_not_copied/repos.yaml"]

manifest = f"""
spack:
specs: []
include: {files}
"""

# subdir to ensure parent of environment dir is not shared
e1_path = tmp_path / "e1_subdir" / "e1"
e1_manifest = e1_path / "spack.yaml"
fs.mkdirp(e1_path)
with open(e1_manifest, "w", encoding="utf-8") as f:
f.write(manifest)

for f in files:
fs.touchp(e1_path / f)

with pytest.raises(spack.config.ConfigFileError, match="Detected 1 missing include"):
_ = _env_create("test2", init_file=e1_manifest)


def test_env_view_external_prefix(tmp_path, mutable_database, mock_packages):
fake_prefix = tmp_path / "a-prefix"
fake_bin = fake_prefix / "bin"

@@ -139,7 +139,7 @@ def test_gc_except_specific_environments(mutable_database, mutable_mock_env_path
def test_gc_except_nonexisting_dir_env(mutable_database, mutable_mock_env_path, tmpdir):
output = gc("-ye", tmpdir.strpath, fail_on_error=False)
assert "No such environment" in output
gc.returncode == 1
assert gc.returncode == 1


@pytest.mark.db

@@ -26,9 +26,9 @@ def test_manpath_trailing_colon(
else ("--sh", "export %s=%s", ";")
)

"""Test that the commands generated by load add the MANPATH prefix
inspections. Also test that Spack correctly preserves the default/existing
manpath search path via a trailing colon"""
# Test that the commands generated by load add the MANPATH prefix
# inspections. Also test that Spack correctly preserves the default/existing
# manpath search path via a trailing colon
install("mpileaks")

sh_out = load(shell, "mpileaks")
@@ -81,7 +81,9 @@ def extract_value(output, variable):

# Finally, do we list them in topo order?
for i, pkg in enumerate(pkgs):
set(s.name for s in mpileaks_spec[pkg].traverse(direction="parents")) in set(pkgs[:i])
assert {s.name for s in mpileaks_spec[pkg].traverse(direction="parents")}.issubset(
pkgs[: i + 1]
)

# Lastly, do we keep track that mpileaks was loaded?
assert (
@@ -1,17 +1,6 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import pathlib

import pytest

import spack.concretize
import spack.config
import spack.environment as ev
import spack.paths
import spack.repo
import spack.spec
import spack.util.spack_yaml as syaml

"""
These tests include the following package DAGs:
@@ -42,6 +31,18 @@
y
"""

import pathlib

import pytest

import spack.concretize
import spack.config
import spack.environment as ev
import spack.paths
import spack.repo
import spack.spec
import spack.util.spack_yaml as syaml


@pytest.fixture
def test_repo(mutable_config, monkeypatch, mock_stage):

@@ -182,7 +182,7 @@ def test_requirement_adds_version_satisfies(

# Sanity check: early version of T does not include U
s0 = spack.concretize.concretize_one("t@2.0")
assert not ("u" in s0)
assert "u" not in s0

conf_str = """\
packages:

@@ -458,7 +458,7 @@ def test_log_install_without_build_files(install_mockery):
spec = spack.concretize.concretize_one("trivial-install-test-package")

# Attempt installing log without the build log file
with pytest.raises(IOError, match="No such file or directory"):
with pytest.raises(OSError, match="No such file or directory"):
spack.installer.log(spec.package)



@@ -470,7 +470,7 @@ def _repoerr(repo, name):

# The call to install_tree will raise the exception since not mocking
# creation of dependency package files within *install* directories.
with pytest.raises(IOError, match=path if sys.platform != "win32" else ""):
with pytest.raises(OSError, match=path if sys.platform != "win32" else ""):
inst.dump_packages(spec, path)

# Now try the error path, which requires the mock directory structure

@@ -82,7 +82,7 @@ def test_non_existing_src(self, stage):
"""Test using a non-existing source."""

with fs.working_dir(str(stage)):
with pytest.raises(IOError, match="No such file or directory"):
with pytest.raises(OSError, match="No such file or directory"):
fs.copy("source/none", "dest")

def test_multiple_src_file_dest(self, stage):
@@ -139,7 +139,7 @@ def test_non_existing_src(self, stage):
"""Test using a non-existing source."""

with fs.working_dir(str(stage)):
with pytest.raises(IOError, match="No such file or directory"):
with pytest.raises(OSError, match="No such file or directory"):
fs.install("source/none", "dest")

def test_multiple_src_file_dest(self, stage):
@@ -220,7 +220,7 @@ def test_non_existing_src(self, stage):
"""Test using a non-existing source."""

with fs.working_dir(str(stage)):
with pytest.raises(IOError, match="No such file or directory"):
with pytest.raises(OSError, match="No such file or directory"):
fs.copy_tree("source/none", "dest")

def test_parent_dir(self, stage):
@@ -301,7 +301,7 @@ def test_non_existing_src(self, stage):
"""Test using a non-existing source."""

with fs.working_dir(str(stage)):
with pytest.raises(IOError, match="No such file or directory"):
with pytest.raises(OSError, match="No such file or directory"):
fs.install_tree("source/none", "dest")

def test_parent_dir(self, stage):

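# Aside (not part of the changeset): the IOError -> OSError substitutions in
# these hunks are behavior-preserving because the names have referred to the
# same class since Python 3.3 (PEP 3151); EnvironmentError is an alias too.
assert IOError is OSError
assert EnvironmentError is OSError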
@@ -93,28 +93,26 @@
pass


"""This is a list of filesystem locations to test locks in. Paths are
expanded so that %u is replaced with the current username. '~' is also
legal and will be expanded to the user's home directory.

Tests are skipped for directories that don't exist, so you'll need to
update this with the locations of NFS, Lustre, and other mounts on your
system.
"""
#: This is a list of filesystem locations to test locks in. Paths are
#: expanded so that %u is replaced with the current username. '~' is also
#: legal and will be expanded to the user's home directory.
#:
#: Tests are skipped for directories that don't exist, so you'll need to
#: update this with the locations of NFS, Lustre, and other mounts on your
#: system.
locations = [
tempfile.gettempdir(),
os.path.join("/nfs/tmp2/", getpass.getuser()),
os.path.join("/p/lscratch*/", getpass.getuser()),
]

"""This is the longest a failed multiproc test will take.
Barriers will time out and raise an exception after this interval.
In MPI mode, barriers don't time out (they hang). See mpi_multiproc_test.
"""
#: This is the longest a failed multiproc test will take.
#: Barriers will time out and raise an exception after this interval.
#: In MPI mode, barriers don't time out (they hang). See mpi_multiproc_test.
barrier_timeout = 5

"""This is the lock timeout for expected failures.
This may need to be higher for some filesystems."""
#: This is the lock timeout for expected failures.
#: This may need to be higher for some filesystems.
lock_fail_timeout = 0.1


@@ -286,9 +284,8 @@ def wait(self):
comm.Barrier() # barrier after each MPI test.


"""``multiproc_test()`` should be called by tests below.
``multiproc_test()`` will work for either MPI runs or for local runs.
"""
#: ``multiproc_test()`` should be called by tests below.
#: ``multiproc_test()`` will work for either MPI runs or for local runs.
multiproc_test = mpi_multiproc_test if mpi else local_multiproc_test


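# Aside (not part of the changeset): why this hunk converts bare string
# literals to `#:` comments -- a string above a module-level assignment is
# just an expression statement, not a docstring, whereas Sphinx autodoc does
# pick up `#:` comments attached to the assignment. Illustrative name only:
#: Timeout, in seconds, for the hypothetical attribute documented this way.
example_timeout = 5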
@@ -1339,7 +1336,7 @@ def test_poll_lock_exception(tmpdir, monkeypatch, err_num, err_msg):
"""Test poll lock exception handling."""

def _lockf(fd, cmd, len, start, whence):
raise IOError(err_num, err_msg)
raise OSError(err_num, err_msg)

with tmpdir.as_cwd():
lockfile = "lockfile"
@@ -1351,7 +1348,7 @@ def _lockf(fd, cmd, len, start, whence):
if err_num in [errno.EAGAIN, errno.EACCES]:
assert not lock._poll_lock(fcntl.LOCK_EX)
else:
with pytest.raises(IOError, match=err_msg):
with pytest.raises(OSError, match=err_msg):
lock._poll_lock(fcntl.LOCK_EX)

monkeypatch.undo()

@@ -238,10 +238,7 @@ def test_exclude(self, modulefile_content, module_configuration, host_architectu

assert len([x for x in content if "module load " in x]) == 1

# Catch "Exception" to avoid using FileNotFoundError on Python 3
# and IOError on Python 2 or common bases like EnvironmentError
# which are not officially documented
with pytest.raises(Exception):
with pytest.raises(FileNotFoundError):
modulefile_content(f"callpath target={host_architecture_str}")

content = modulefile_content(f"zmpi target={host_architecture_str}")

@@ -38,6 +38,27 @@ def extra_repo(tmp_path_factory, request):
return spack.repo.Repo(str(repo_dir), cache=repo_cache), request.param


@pytest.fixture(scope="function")
def versioned_repo(tmp_path_factory, request):
def _execute(spack_version, repo_version):
repo_namespace = "extra_test_repo"
repo_dir = tmp_path_factory.mktemp(repo_namespace)
cache_dir = tmp_path_factory.mktemp("cache")
(repo_dir / "packages").mkdir(parents=True, exist_ok=True)
(repo_dir / "repo.yaml").write_text(
f"""
repo:
namespace: extra_test_repo
required_spack_version: '{spack_version}'
version: '{repo_version}'
"""
)
repo_cache = spack.util.file_cache.FileCache(str(cache_dir))
return spack.repo.Repo(str(repo_dir), cache=repo_cache)

return _execute


def test_repo_getpkg(mutable_mock_repo):
mutable_mock_repo.get_pkg_class("pkg-a")
mutable_mock_repo.get_pkg_class("builtin.mock.pkg-a")
@@ -303,3 +324,24 @@ def test_get_repo(self, mock_test_cache):
# foo is not there, raise
with pytest.raises(spack.repo.UnknownNamespaceError):
repo.get_repo("foo")


def test_incompatible_repo(versioned_repo):
with pytest.raises(spack.repo.BadRepoError, match="requires Spack version"):
# test added after Spack passed version 0.22
versioned_repo(":0.22", ":")

with pytest.raises(spack.repo.BadRepoError, match="requires repo version"):
# ":a" < "0", and all Spack versions require at least "0:"
versioned_repo(":", ":a")


def test_incompatible_package_version(mock_packages, monkeypatch):
spec = spack.concretize.concretize_one("pkg-a")
package = spack.repo.PATH.get(spec)

pkg_class = spec.package_class
monkeypatch.setattr(pkg_class, "required_spack_version", ":0.22")

with pytest.raises(spack.error.PackageError, match="requires Spack version"):
_ = spack.repo.PATH.get(spec)

@@ -132,7 +132,8 @@ def test_reporters_extract_skipped(state):
parts = spack.reporters.extract.extract_test_parts("fake", outputs)

assert len(parts) == 1
parts[0]["completed"] == expected

assert parts[0]["completed"] == spack.reporters.extract.completed["skipped"]


def test_reporters_skip_new():

@@ -198,7 +198,7 @@ def script_dir(sbang_line):
],
)
def test_shebang_interpreter_regex(shebang, interpreter):
sbang.get_interpreter(shebang) == interpreter
assert sbang.get_interpreter(shebang) == interpreter


def test_shebang_handling(script_dir, sbang_line):

@@ -428,31 +428,29 @@ def test_copy_through_spec_build_interface(self):
c2 = s["mpileaks"]["mpileaks"].copy()
assert c0 == c1 == c2 == s

"""
Here is the graph with deptypes labeled (assume all packages have a 'dt'
prefix). Arrows are marked with the deptypes ('b' for 'build', 'l' for
'link', 'r' for 'run').
# Here is the graph with deptypes labeled (assume all packages have a 'dt'
# prefix). Arrows are marked with the deptypes ('b' for 'build', 'l' for
# 'link', 'r' for 'run').

use -bl-> top
# use -bl-> top

top -b-> build1
top -bl-> link1
top -r-> run1
# top -b-> build1
# top -bl-> link1
# top -r-> run1

build1 -b-> build2
build1 -bl-> link2
build1 -r-> run2
# build1 -b-> build2
# build1 -bl-> link2
# build1 -r-> run2

link1 -bl-> link3
# link1 -bl-> link3

run1 -bl-> link5
run1 -r-> run3
# run1 -bl-> link5
# run1 -r-> run3

link3 -b-> build2
link3 -bl-> link4
# link3 -b-> build2
# link3 -bl-> link4

run3 -b-> build3
"""
# run3 -b-> build3

@pytest.mark.parametrize(
"spec_str,deptypes,expected",

@@ -1989,3 +1989,26 @@ def test_equality_discriminate_on_propagation(lhs, rhs):

def test_comparison_multivalued_variants():
assert Spec("x=a") < Spec("x=a,b") < Spec("x==a,b") < Spec("x==a,b,c")


def test_comparison_after_breaking_hash_change():
# We simulate a breaking change in DAG hash computation in Spack. We have two specs that are
# entirely equal modulo DAG hash. When deserializing these specs, we don't want them to compare
# as equal, because DAG hash is used throughout in Spack to distinguish between specs
# (e.g. database, build caches, install dir).
s = Spec("example@=1.0")
s._mark_concrete(True)

# compute the dag hash and a change to it
dag_hash = s.dag_hash()
new_dag_hash = f"{'b' if dag_hash[0] == 'a' else 'a'}{dag_hash[1:]}"

before_breakage = s.to_dict()
after_breakage = s.to_dict()
after_breakage["spec"]["nodes"][0]["hash"] = new_dag_hash
assert before_breakage != after_breakage

x = Spec.from_dict(before_breakage)
y = Spec.from_dict(after_breakage)
assert x != y
assert len({x, y}) == 2

@@ -125,7 +125,7 @@ def check_expand_archive(stage, stage_name, expected_file_list):

assert os.path.isfile(fn)
with open(fn, encoding="utf-8") as _file:
_file.read() == contents
assert _file.read() == contents


def check_fetch(stage, stage_name):

@@ -154,6 +154,7 @@ def test_tag_no_tags(mock_packages):
def test_tag_update_package(mock_packages):
mock_index = mock_packages.tag_index
index = spack.tag.TagIndex(repository=mock_packages)
index.update_packages(set(spack.repo.all_package_names()))
for name in spack.repo.all_package_names():
index.update_package(name)

ensure_tags_results_equal(mock_index.tags, index.tags)

@@ -20,12 +20,7 @@

datadir = os.path.join(spack_root, "lib", "spack", "spack", "test", "data", "compression")

ext_archive = {}
[
ext_archive.update({ext: ".".join(["Foo", ext])})
for ext in llnl.url.ALLOWED_ARCHIVE_TYPES
if "TAR" not in ext
]
ext_archive = {ext: f"Foo.{ext}" for ext in llnl.url.ALLOWED_ARCHIVE_TYPES if "TAR" not in ext}
# Spack does not use Python native handling for tarballs or zip
# Don't test tarballs or zip in native test
native_archive_list = [

@@ -204,13 +204,13 @@ def test_no_editor():
def assert_exec(exe, args):
assert False

with pytest.raises(EnvironmentError, match=r"No text editor found.*"):
with pytest.raises(OSError, match=r"No text editor found.*"):
ed.editor("/path/to/file", exec_fn=assert_exec)

def assert_exec(exe, args):
return False

with pytest.raises(EnvironmentError, match=r"No text editor found.*"):
with pytest.raises(OSError, match=r"No text editor found.*"):
ed.editor("/path/to/file", exec_fn=assert_exec)


@@ -220,5 +220,5 @@ def test_exec_fn_executable(editor_var, good_exe, bad_exe):
assert ed.editor(exec_fn=ed.executable)

os.environ[editor_var] = bad_exe
with pytest.raises(EnvironmentError, match=r"No text editor found.*"):
with pytest.raises(OSError, match=r"No text editor found.*"):
ed.editor(exec_fn=ed.executable)

@@ -9,7 +9,7 @@
defined by the EDITOR environment variable if VISUAL is not set or the
specified editor fails (e.g. no DISPLAY for a graphical editor). If
neither variable is set, we fall back to one of several common editors,
raising an EnvironmentError if we are unable to find one.
raising an OSError if we are unable to find one.
"""
import os
import shlex
@@ -141,7 +141,7 @@ def try_env_var(var):
return True

# Fail if nothing could be found
raise EnvironmentError(
raise OSError(
"No text editor found! Please set the VISUAL and/or EDITOR "
"environment variable(s) to your preferred text editor."
)

@@ -46,7 +46,7 @@ def _process_ld_so_conf_queue(queue):
try:
with open(p, "rb") as f:
lines = f.readlines()
except (IOError, OSError):
except OSError:
continue

for line in lines:
@@ -132,7 +132,7 @@ def host_dynamic_linker_search_paths():

if os.path.exists(possible_conf):
conf_file = possible_conf
except (IOError, OSError, elf_utils.ElfParsingError):
except (OSError, elf_utils.ElfParsingError):
pass

# Note: ld_so_conf doesn't error if the file does not exist.

@@ -436,8 +436,8 @@ def _dump_annotated(handler, data, stream=None):
width = max(clen(a) for a in _ANNOTATIONS)
formats = ["%%-%ds %%s\n" % (width + cextra(a)) for a in _ANNOTATIONS]

for f, a, l in zip(formats, _ANNOTATIONS, lines):
stream.write(f % (a, l))
for fmt, annotation, line in zip(formats, _ANNOTATIONS, lines):
stream.write(fmt % (annotation, line))

if getvalue:
return getvalue()

@@ -1,6 +1,7 @@
[project]
name="spack"
description="The spack package manager"
requires-python=">=3.6"
dependencies=[
"clingo",
"setuptools",
@@ -67,9 +68,42 @@ features = [
"ci",
]

[tool.ruff]
line-length = 99
extend-include = ["bin/spack"]
extend-exclude = ["lib/spack/external", "*.pyi"]

[tool.ruff.format]
skip-magic-trailing-comma = true

[tool.ruff.lint]
extend-select = ["I"]
ignore = ["E731", "E203"]

[tool.ruff.lint.isort]
split-on-trailing-comma = false
section-order = [
"future",
"standard-library",
"third-party",
"archspec",
"llnl",
"spack",
"first-party",
"local-folder",
]

[tool.ruff.lint.isort.sections]
spack = ["spack"]
archspec = ["archspec"]
llnl = ["llnl"]

[tool.ruff.lint.per-file-ignores]
"var/spack/repos/*/package.py" = ["F403", "F405", "F811", "F821"]
"*-ci-package.py" = ["F403", "F405", "F821"]

[tool.black]
line-length = 99
target-version = ['py36', 'py37', 'py38', 'py39', 'py310']
include = '(lib/spack|var/spack/repos)/.*\.pyi?$|bin/spack$'
extend-exclude = 'lib/spack/external'
skip_magic_trailing_comma = true

@@ -59,11 +59,6 @@ default:
SPACK_TARGET_PLATFORM: "linux"
SPACK_TARGET_ARCH: "aarch64"

.linux_power:
variables:
SPACK_TARGET_PLATFORM: "linux"
SPACK_TARGET_ARCH: "ppc64le"

.win64-msvc2019:
variables:
SPACK_TARGET_PLATFORM: "win64"
@@ -352,35 +347,6 @@ e4s-oneapi-build:
- artifacts: True
job: e4s-oneapi-generate

########################################
# E4S on Power
########################################
.e4s-power-generate-tags-and-image:
image: { "name": "ghcr.io/spack/ubuntu20.04-runner-ppc64-gcc-11.4:2023.08.01", "entrypoint": [""] }
tags: ["spack", "public", "large", "ppc64le"]

.e4s-power:
extends: [".linux_power"]
variables:
SPACK_CI_STACK_NAME: e4s-power

e4s-power-generate:
extends: [ ".e4s-power", ".generate-x86_64", ".e4s-power-generate-tags-and-image"]
variables:
# Override concretization pool for metal runners
SPACK_CONCRETIZE_JOBS: 16

e4s-power-build:
extends: [ ".e4s-power", ".build" ]
trigger:
include:
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
job: e4s-power-generate
strategy: depend
needs:
- artifacts: True
job: e4s-power-generate

#########################################
# Build tests for different build-systems
#########################################
@@ -573,57 +539,6 @@ data-vis-sdk-build:
- artifacts: True
job: data-vis-sdk-generate

########################################
# AWS ISC Applications (x86_64)
########################################

# Call this AFTER .*-generate
.aws-isc-overrides:
# This controls image for generate step; build step is controlled by spack.yaml
# Note that generator emits OS info for build so these should be the same.
image: { "name": "ghcr.io/spack/e4s-amazonlinux-2:v2023-03-09", "entrypoint": [""] }

.aws-isc:
extends: [ ".linux_x86_64_v3" ]
variables:
SPACK_CI_STACK_NAME: aws-isc

aws-isc-generate:
extends: [ ".aws-isc", ".generate-x86_64", ".aws-isc-overrides", ".tags-x86_64_v4" ]

aws-isc-build:
extends: [ ".aws-isc", ".build" ]
trigger:
include:
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
job: aws-isc-generate
strategy: depend
needs:
- artifacts: True
job: aws-isc-generate

# Parallel Pipeline for aarch64 (reuses override image, but generates and builds on aarch64)

.aws-isc-aarch64:
extends: [ ".linux_aarch64" ]
variables:
SPACK_CI_STACK_NAME: aws-isc-aarch64

aws-isc-aarch64-generate:
extends: [ ".aws-isc-aarch64", ".generate-aarch64", ".aws-isc-overrides" ]

aws-isc-aarch64-build:
extends: [ ".aws-isc-aarch64", ".build" ]
trigger:
include:
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
job: aws-isc-aarch64-generate
strategy: depend
needs:
- artifacts: True
job: aws-isc-aarch64-generate


########################################
# Spack Tutorial
########################################

@@ -1,22 +0,0 @@
ci:
pipeline-gen:
- build-job:
tags: ["ppc64le"]
# Power runners overrides the default script
# - don't download make
# - no intermediate keys
script::
- uname -a || true
- grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true
- nproc
- . "./share/spack/setup-env.sh"
- spack --version
- spack arch
- cd ${SPACK_CONCRETE_ENV_DIR}
- spack env activate --without-view .
- if [ -n "$SPACK_BUILD_JOBS" ]; then spack config add "config:build_jobs:$SPACK_BUILD_JOBS"; fi
- spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
- mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data
- if [[ -r /mnt/key/e4s.gpg ]]; then spack gpg trust /mnt/key/e4s.gpg; fi
- if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi
- spack --color=always --backtrace ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)
@@ -1,3 +0,0 @@
concretizer:
targets:
granularity: generic
@@ -1,141 +0,0 @@
spack:
view: false
packages:
all:
providers:
blas:
- openblas
mkl:
- intel-oneapi-mkl
mpi:
- openmpi
- mpich
variants: +mpi
tbb:
require: intel-tbb
binutils:
variants: +ld +gold +headers +libiberty ~nls
version:
- 2.36.1
doxygen:
version:
- 1.8.20
elfutils:
variants: ~nls
hdf5:
variants: +fortran +hl +shared
libfabric:
variants: fabrics=efa,tcp,udp,sockets,verbs,shm,mrail,rxd,rxm
libunwind:
variants: +pic +xz
mesa:
variants: ~llvm
mpich:
variants: ~wrapperrpath netmod=ofi device=ch4
ncurses:
variants: +termlib
openblas:
variants: threads=openmp
openmpi:
variants: fabrics=ofi +legacylaunchers
openturns:
version:
- '1.18'
relion:
variants: ~mklfft
# texlive:
# version: [20210325]
trilinos:
variants: +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext
+ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu
+nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos
+teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
xz:
variants: +pic

definitions:

- compiler_specs:
- gcc@11.2
# Licensing OK?
# - intel-oneapi-compilers@2022.1
# - nvhpc

- app_specs:
- bwa
# Depends on simde which requires newer compiler?
#- bowtie2
# Requires x86_64 specific ASM
#- cistem
- cromwell
- fastqc
- flux-sched
- flux-core
- flux-pmix
- gatk
- gromacs
- lammps
- wrf build_type=dm+sm
- mfem
- mpas-model ^parallelio+pnetcdf
- nextflow
- octave
- openfoam
- osu-micro-benchmarks
- parallel
# - paraview
- picard
- quantum-espresso
- raja
# Depends on bowtie2 -> simde which requires newer compiler?
#- rsem
# Errors on texlive
#- rstudio
- salmon
- samtools
- seqtk
- snakemake
- star
# Requires gcc@9:
#- ufs-weather-model
# requires LLVM which fails without constraint
#- visit

- lib_specs:
- openmpi fabrics=ofi
- openmpi fabrics=ofi +legacylaunchers
- openmpi fabrics=auto
- mpich
- libfabric

- compiler:
- '%gcc@7.3.1'

- target:
- target=aarch64


specs:

- matrix:
- - $app_specs
- - $compiler
- - $target

- matrix:
- - $lib_specs
- - $compiler
- - $target

- matrix:
- - $compiler_specs
- - $compiler
- - $target

ci:
pipeline-gen:
- build-job:
image: {name: ghcr.io/spack/e4s-amazonlinux-2:v2023-03-09, entrypoint: ['']}

cdash:
build-group: AWS Packages
@@ -1,153 +0,0 @@
spack:
view: false
packages:
all:
providers:
blas:
- openblas
mkl:
- intel-oneapi-mkl
mpi:
- openmpi
- mpich
variants: +mpi
tbb:
require: intel-tbb
binutils:
variants: +ld +gold +headers +libiberty ~nls
version:
- 2.36.1
doxygen:
version:
- 1.8.20
elfutils:
variants: ~nls
hdf5:
variants: +fortran +hl +shared
libfabric:
variants: fabrics=efa,tcp,udp,sockets,verbs,shm,mrail,rxd,rxm
libunwind:
variants: +pic +xz
mesa:
variants: ~llvm
mpich:
variants: ~wrapperrpath netmod=ofi device=ch4
ncurses:
variants: +termlib
openblas:
variants: threads=openmp
openmpi:
variants: fabrics=ofi +legacylaunchers
openturns:
version:
- '1.18'
relion:
variants: ~mklfft
# texlive:
# version: [20210325]
trilinos:
variants: +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext
+ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu
+nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos
+teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
xz:
variants: +pic

definitions:

- compiler_specs:
- gcc@11.2
# Licensing OK?
# - intel-oneapi-compilers@2022.1
# - nvhpc

- cuda_specs:
# Disabled for consistency with aarch64
#- relion +cuda cuda_arch=70
- raja +cuda cuda_arch=70
- mfem +cuda cuda_arch=70

- app_specs:
- bwa
# Disabled for consistency with aarch64
#- bowtie2
# Disabled for consistency with aarch64
#- cistem
- cromwell
- fastqc
- flux-sched
- flux-core
- flux-pmix
- gatk
- gromacs
- lammps
- wrf build_type=dm+sm
- mfem
- mpas-model ^parallelio+pnetcdf
- nextflow
- octave
- openfoam
- osu-micro-benchmarks
- parallel
# - paraview
- picard
- quantum-espresso
# Build broken for gcc@7.3.1 x86_64_v4 (error: '_mm512_loadu_epi32' was not declared in this scope)
#- raja
# Disabled for consistency with aarch64
#- rsem
# Errors on texlive
#- rstudio
- salmon
- samtools
- seqtk
- snakemake
- star
# Requires gcc@9:
#- ufs-weather-model
# Disabled for consistency with aarch64
#- visit

- lib_specs:
- openmpi fabrics=ofi
- openmpi fabrics=ofi +legacylaunchers
- openmpi fabrics=auto
- mpich
- libfabric

- compiler:
- '%gcc@7.3.1'

- target:
- target=x86_64_v3


specs:

- matrix:
- - $cuda_specs
- - $compiler
- - $target

- matrix:
- - $app_specs
- - $compiler
- - $target

- matrix:
- - $lib_specs
- - $compiler
- - $target

- matrix:
- - $compiler_specs
- - $compiler
- - $target

ci:
pipeline-gen:
- build-job:
image: {name: ghcr.io/spack/e4s-amazonlinux-2:v2023-03-09, entrypoint: ['']}

cdash:
build-group: AWS Packages
@@ -1,271 +0,0 @@
spack:
view: false

concretizer:
reuse: false
unify: false

packages:
all:
require: "%gcc@9.4.0 target=ppc64le"
compiler: [gcc@9.4.0]
providers:
blas: [openblas]
mpi: [mpich]
variants: +mpi cuda_arch=70
binutils:
variants: +ld +gold +headers +libiberty ~nls
hdf5:
variants: +fortran +hl +shared
libfabric:
variants: fabrics=sockets,tcp,udp,rxm
openblas:
variants: threads=openmp
trilinos:
variants: +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext
+ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu
+nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos
+teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
xz:
variants: +pic
mpi:
require: mpich
mpich:
require: '~wrapperrpath ~hwloc %gcc@9.4.0 target=ppc64le'
ncurses:
require: '@6.3 +termlib %gcc@9.4.0 target=ppc64le'
faodel:
require: "~tcmalloc %gcc@9.4.0 target=ppc64le"
tbb:
require: intel-tbb
vtk-m:
require: "+examples %gcc@9.4.0 target=ppc64le"
cuda:
require: "@11.4.4 %gcc@9.4.0 target=ppc64le"
paraview:
require: "+examples %gcc@9.4.0 target=ppc64le"


specs:
# CPU
- adios
- alquimia
- aml
- amrex
- arborx
- argobots
- axom
- bolt
- boost
- bricks
- butterflypack
- cabana
- caliper
- chai
- chapel ~rocm ~cuda
- charliecloud
- conduit
- cp2k +mpi
- datatransferkit
- drishti
- dxt-explorer
- dyninst
- exaworks
- fftx
- flecsi
- flit
- flux-core
- fortrilinos
- gasnet
- ginkgo
- globalarrays
- gmp
- gotcha
- gptune
- gromacs +cp2k ^cp2k +mpi build_system=cmake
- h5bench
- hdf5-vol-async
- hdf5-vol-cache
- hdf5-vol-log
- heffte +fftw
- hpctoolkit
- hpx networking=mpi
- hypre
- kokkos +openmp
- kokkos-kernels +openmp
- laghos
- lammps
- lbann
- legion
- libnrm
- libquo
- libunwind
- loki
- mercury
- metall
- mfem
- mgard +serial +openmp +timing +unstructured ~cuda
- mpark-variant
- mpifileutils ~xattr
- nccmp
- nco
- netlib-scalapack
- nrm
- nvhpc
- nwchem
- omega-h
- openfoam
- openmpi
- openpmd-api
- papi
- papyrus
- paraview ~cuda ~rocm
- parsec ~cuda
- pdt
- petsc
- plasma
- plumed
- precice
- pruners-ninja
- pumi
- py-amrex
- py-h5py
- py-jupyterhub
- py-libensemble
- py-petsc4py
- qthreads scheduler=distrib
- quantum-espresso
- raja
- rempi
- scr
- slate ~cuda
- slepc
- stc
- strumpack ~slate
- sundials
- superlu
- superlu-dist
- swig@4.0.2-fortran
- sz3
- tasmanian
- tau +mpi +python # +syscall fails: https://github.com/spack/spack/pull/40830#issuecomment-1790799772; tau: has issue with `spack env depfile` build
- trilinos +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
- turbine
- umap
- umpire
- upcxx
- wannier90
- warpx +python
- wps
- wrf
- xyce +mpi +shared +pymi +pymi_static_tpls
# INCLUDED IN ECP DAV CPU
- adios2
- ascent
- darshan-runtime
- darshan-util
- faodel
- hdf5
- libcatalyst
- parallel-netcdf
- py-cinemasci
- sz
- unifyfs
- veloc
# - visit # libext, libxkbfile, libxrender, libxt, silo (https://github.com/spack/spack/issues/39538), cairo
- vtk-m
- zfp
# - ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 ~paraview +pnetcdf +sz +unifyfs +veloc ~visit +vtkm +zfp # +visit: libext, libxkbfile, libxrender, libxt, silo (https://github.com/spack/spack/issues/39538), cairo
# --
# - dealii # fltk: https://github.com/spack/spack/issues/38791
# - geopm-runtime # cairo: *** No autoreconf found, please install it ***
# - glvis # glvis: https://github.com/spack/spack/issues/42839
# - libpressio +bitgrooming +bzip2 ~cuda ~cusz +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp # py-numcodecs: gcc: error: unrecognized command line option '-mno-sse2'; did you mean '-mno-isel'? gcc: error: unrecognized command line option '-mno-avx2'
# - phist +mpi # ghost@develop: gcc-9: error: unrecognized command line option '-march=native'; did you mean '-mcpu=native'?
# - variorum # variorum: https://github.com/spack/spack/issues/38786

# PYTHON PACKAGES
- opencv +python3
- py-jax
- py-jupyterlab
- py-matplotlib
- py-mpi4py
- py-notebook
- py-numba
- py-numpy
- py-openai
- py-pandas
- py-plotly
- py-pooch
- py-pytest
- py-scikit-learn
- py-scipy
- py-seaborn
# - py-horovod # py-torch, py-tensorflow
# - py-tensorflow # error
# - py-torch # error

# CUDA NOARCH
- bricks +cuda
- cabana +cuda ^kokkos +wrapper +cuda_lambda +cuda cuda_arch=70
- flux-core +cuda
- hpctoolkit +cuda
- papi +cuda
- tau +mpi +cuda
# --
# - legion +cuda # legion: needs NVIDIA driver

# CUDA 70
- amrex +cuda cuda_arch=70
- arborx +cuda cuda_arch=70 ^kokkos +wrapper
- caliper +cuda cuda_arch=70
- chai +cuda cuda_arch=70 ^umpire ~shared
- ecp-data-vis-sdk ~rocm +adios2 ~ascent +hdf5 +vtkm +zfp ~paraview +cuda cuda_arch=70
- exago +mpi +python +raja +hiop ~rocm +cuda cuda_arch=70 ~ipopt ^hiop@1.0.0 ~sparse +mpi +raja ~rocm +cuda cuda_arch=70 #^raja@0.14.0
- flecsi +cuda cuda_arch=70
- ginkgo +cuda cuda_arch=70
- gromacs +cuda cuda_arch=70
- heffte +cuda cuda_arch=70
- hpx +cuda cuda_arch=70
- hypre +cuda cuda_arch=70
- kokkos +wrapper +cuda cuda_arch=70
- kokkos-kernels +cuda cuda_arch=70 ^kokkos +wrapper +cuda cuda_arch=70
- magma +cuda cuda_arch=70
- mfem +cuda cuda_arch=70
- mgard +serial +openmp +timing +unstructured +cuda cuda_arch=70
- omega-h +cuda cuda_arch=70
- parsec +cuda cuda_arch=70
- petsc +cuda cuda_arch=70
- raja +cuda cuda_arch=70
- slate +cuda cuda_arch=70
- slepc +cuda cuda_arch=70
- strumpack ~slate +cuda cuda_arch=70
- sundials +cuda cuda_arch=70
- superlu-dist +cuda cuda_arch=70
- tasmanian +cuda cuda_arch=70
- umpire ~shared +cuda cuda_arch=70
# INCLUDED IN ECP DAV CUDA
- adios2 +cuda cuda_arch=70
# - ascent +cuda cuda_arch=70 # ascent: https://github.com/spack/spack/issues/38045
- paraview +cuda cuda_arch=70
- vtk-m +cuda cuda_arch=70
- zfp +cuda cuda_arch=70
# --
# - axom +cuda cuda_arch=70 # axom: https://github.com/spack/spack/issues/29520
# - cp2k +mpi +cuda cuda_arch=70 # dbcsr
# - cusz +cuda cuda_arch=70 # cusz: https://github.com/spack/spack/issues/38787
# - dealii +cuda cuda_arch=70 # fltk: https://github.com/spack/spack/issues/38791
# - lammps +cuda cuda_arch=70 # lammps: needs NVIDIA driver
# - lbann +cuda cuda_arch=70 # lbann: https://github.com/spack/spack/issues/38788
# - libpressio +bitgrooming +bzip2 +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf +cusz +mgard +cuda cuda_arch=70 ^cusz +cuda cuda_arch=70 # depends_on("cuda@11.7.1:", when="+cuda")
# - py-torch +cuda cuda_arch=70 # skipped
# - trilinos +cuda cuda_arch=70 # trilinos: https://github.com/trilinos/Trilinos/issues/11630
# - upcxx +cuda cuda_arch=70 # upcxx: needs NVIDIA driver

ci:
pipeline-gen:
- build-job:
image: ghcr.io/spack/ubuntu20.04-runner-ppc64-gcc-11.4:2023.08.01

cdash:
build-group: E4S Power
@@ -13,29 +13,41 @@ spack:
mpi: [mpich]
tbb: [intel-tbb]
variants: +mpi
acts:
require: +analysis +dd4hep +edm4hep +examples +fatras +geant4 +hepmc3 +podio +pythia8 +python ~svg +tgeo cxxstd=20
celeritas:
require: +geant4 +hepmc3 +root +shared cxxstd=20
hip:
require: '@5.7.1 +rocm'
root:
require: +davix +dcache +examples +fftw +fits +fortran +gdml +graphviz +gsl +http +math +minuit +mlp +mysql +opengl +postgres +pythia8 +python +r +roofit +root7 +rpath ~shadow +spectrum +sqlite +ssl +tbb +threads +tmva +tmva-cpu +unuran +vc +vdt +veccore +webgui +x +xml +xrootd # cxxstd=20
# note: root cxxstd=20 not concretizable within sherpa
vecgeom:
require: +gdml +geant4 +root +shared cxxstd=20

# Mark geant4 data as external to prevent wasting bandwidth on GB-scale files
geant4-data:
buildable: false
externals:
- spec: geant4-data@11.3.0
prefix: /usr
- spec: geant4-data@11.2.2
prefix: /usr
- spec: geant4-data@11.2.0
prefix: /usr
- spec: geant4-data@11.1.0
prefix: /usr
- spec: geant4-data@11.0.0
prefix: /usr
- spec: geant4-data@11.3.0
prefix: /usr
- spec: geant4-data@11.2.2
prefix: /usr
- spec: geant4-data@11.2.0
prefix: /usr
- spec: geant4-data@11.1.0
prefix: /usr
- spec: geant4-data@11.0.0
prefix: /usr

specs:
# CPU
- acts +analysis +dd4hep +edm4hep +examples +fatras +geant4 +hepmc3 +podio +pythia8 +python +tgeo cxxstd=20
- acts ~cuda
#- agile # fails on c++>11 compiler
- alpgen
- ampt
- apfel +lhapdf +python
- celeritas +geant4 +hepmc3 +openmp +root +shared +vecgeom cxxstd=20
- celeritas ~cuda +openmp ~rocm +vecgeom
- cepgen
- cernlib +shared
- collier
@@ -82,19 +94,28 @@ spack:
- py-vector
- pythia8 +evtgen +fastjet +hdf5 +hepmc +hepmc3 +lhapdf ~madgraph5amc +python +rivet ~root # pythia8 and root circularly depend
- rivet hepmc=3
- root +davix +dcache +examples +fftw +fits +fortran +gdml +graphviz +gsl +http +math +minuit +mlp +mysql +opengl +postgres +pythia8 +python +r +roofit +root7 +rpath ~shadow +spectrum +sqlite +ssl +tbb +threads +tmva +unuran +vc +vdt +veccore +webgui +x +xml +xrootd
- sherpa +analysis ~blackhat +gzip +hepmc3 +hepmc3root +lhapdf +lhole +openloops +pythia ~python ~recola ~rivet +root +ufo
- root ~cuda
- sherpa +analysis ~blackhat +gzip +hepmc3 +hepmc3root +lhapdf +lhole +openloops +pythia ~python ~recola ~rivet +root +ufo cxxstd=20
- tauola +hepmc3 +lhapdf cxxstd=20
- thepeg hepmc=3 ~rivet
- vecgeom +gdml +geant4 +root
- vecgeom ~cuda
- whizard +fastjet +gosam hepmc=3 +lcio +lhapdf +openloops +openmp +pythia8
- xrootd +davix +http +krb5 +python +readline +scitokens-cpp
- yoda +root

# CUDA
#- acts +cuda +traccc cuda_arch=80
#- celeritas +cuda ~openmp +vecgeom cuda_arch=80
- root +cuda +cudnn +tmva-gpu
- vecgeom +cuda cuda_arch=80

# ROCm
- celeritas +rocm amdgpu_target=gfx90a ~openmp ~vecgeom # only available with ORANGE

ci:
pipeline-gen:
- build-job:
image: "ghcr.io/spack/spack/ubuntu22.04-runner-amd64-gcc-11.4:2024.03.01"
image: ghcr.io/spack/spack/ubuntu22.04-runner-amd64-gcc-11.4:2024.03.01

cdash:
build-group: HEP

@@ -18,7 +18,7 @@ spack:
- hdf5+hl+mpi ^mpich
- trilinos
- trilinos +hdf5 ^hdf5+hl+mpi ^mpich
- gcc@12.3.0
- gcc@12
- mpileaks
- lmod@8.7.18
- environment-modules

@@ -1,2 +1,4 @@
repo:
namespace: builtin.mock
version: 0
required_spack_version: '0.23:1.0.0.dev0'

@@ -100,10 +100,8 @@ def install(self, spec, prefix):
for ext in exts:
glob_str = os.path.join(pth, ext)
files = glob.glob(glob_str)
[
for x in files:
shutil.copy(
os.path.join(self._7z_src_dir, x),
os.path.join(prefix, os.path.basename(x)),
)
for x in files
]

@@ -179,7 +179,7 @@ def configure_args(self):
if spec.satisfies("@:8"):
oapp("--with-dft-flavor=atompaw+libxc")
else:
"--without-wannier90",
oapp("--without-wannier90")

if spec.satisfies("+mpi"):
oapp(f"CC={spec['mpi'].mpicc}")

@@ -32,7 +32,7 @@ def edit(self, spec, prefix):
# Dictionary mapping: compiler-name : ACE config-label
supported = {"intel": "_icc", "gcc": ""}

if not (self.compiler.name in supported):
if self.compiler.name not in supported:
raise Exception(
"compiler " + self.compiler.name + " not supported in ace spack-package"
)

@@ -40,6 +40,7 @@ class Acts(CMakePackage, CudaPackage):
# Supported Acts versions
version("main", branch="main")
version("master", branch="main", deprecated=True) # For compatibility
version("39.0.0", commit="b055202e2fbdd509bc186eb4782714bc46f38f3f", submodules=True)
version("38.2.0", commit="9cb8f4494656553fd9b85955938b79b2fac4c9b0", submodules=True)
version("38.1.0", commit="8a20c88808f10bf4fcdfd7c6e077f23614c3ab90", submodules=True)
version("38.0.0", commit="0a6b5155e29e3b755bf351b8a76067fff9b4214b", submodules=True)

@@ -5,11 +5,8 @@
|
||||
import inspect
|
||||
import os
|
||||
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.pkg.builtin.openfoam as openfoam
|
||||
from spack.package import *
|
||||
from spack.version import Version
|
||||
|
||||
|
||||
class Additivefoam(Package):
|
||||
|
@@ -4,7 +4,6 @@

import os

from spack.build_environment import optimization_flags
from spack.package import *
from spack.pkg.builtin.fftw import FftwBase

@@ -95,14 +94,14 @@ class Amdfftw(FftwBase):
    )
    variant(
        "amd-dynamic-dispatcher",
        default=True,
        default=False,
        when="@4.1: %aocc@4.1.0:",
        description="Single portable optimized library"
        " to execute on different x86 CPU architectures",
    )
    variant(
        "amd-dynamic-dispatcher",
        default=True,
        default=False,
        when="@3.2: %gcc",
        description="Single portable optimized library"
        " to execute on different x86 CPU architectures",
@@ -203,11 +202,10 @@ def configure(self, spec, prefix):
        if not self.compiler.f77 or not self.compiler.fc:
            options.append("--disable-fortran")

        # Cross compilation is supported in amd-fftw by making use of the target
        # variable to set the AMD_ARCH configure option.
        # A Spack user cannot directly use AMD_ARCH for this purpose but should
        # use the target variable to set the appropriate -march option in AMD_ARCH.
        options.append(f"AMD_ARCH={optimization_flags(self.compiler, spec.target)}")
        if "avx512" in spec.target:
            options.append("CFLAGS=-mprefer-vector-width=512")
        else:
            options.append("CFLAGS=-mprefer-vector-width=256")

        # Specific SIMD support.
        # float and double precisions are supported
@@ -202,8 +202,8 @@ def configure_args(self):
        args.append("--enable-void-return-complex")

        if spec.satisfies("@3.0:3.1 %aocc"):
            """To enabled Fortran to C calling convention for
            complex types when compiling with aocc flang"""
            # To enable Fortran to C calling convention for complex types when compiling with
            # aocc flang
            args.append("--enable-f2c-dotc")

        if spec.satisfies("@3.0.1: +ilp64"):
@@ -25,6 +25,7 @@ class Amrex(CMakePackage, CudaPackage, ROCmPackage):
    license("BSD-3-Clause")

    version("develop", branch="development")
    version("25.02", sha256="2680a5a9afba04e211cd48d27799c5a25abbb36c6c3d2b6c13cd4757c7176b23")
    version("25.01", sha256="29eb35cf67d66b0fd0654282454c210abfadf27fcff8478b256e3196f237c74f")
    version("24.12", sha256="ca4b41ac73fabb9cf3600b530c9823eb3625f337d9b7b9699c1089e81c67fc67")
    version("24.11", sha256="31cc37b39f15e02252875815f6066046fc56a479bf459362b9889b0d6a202df6")
@@ -360,7 +361,7 @@ def cmake_args(self):
            args.append("-DAMReX_GPU_BACKEND=SYCL")
            # SYCL GPU backend only supported with Intel's oneAPI or DPC++ compilers
            sycl_compatible_compilers = ["icpx"]
            if not (os.path.basename(self.compiler.cxx) in sycl_compatible_compilers):
            if os.path.basename(self.compiler.cxx) not in sycl_compatible_compilers:
                raise InstallError(
                    "AMReX's SYCL GPU Backend requires the oneAPI CXX (icpx) compiler."
                )
@@ -6,7 +6,6 @@
from os.path import split

from spack.package import *
from spack.util.environment import EnvironmentModifications


class Anaconda3(Package):
@@ -2,7 +2,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from llnl.util import tty

from spack.package import *
from spack.pkg.builtin.llvm import LlvmDetection
@@ -5,7 +5,6 @@
import os

from spack.package import *
from spack.util.environment import EnvironmentModifications


class AoclDa(CMakePackage):
@@ -6,8 +6,6 @@
import os
import socket

import llnl.util.tty as tty

from spack.build_systems.cmake import CMakeBuilder
from spack.package import *
@@ -3,8 +3,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)


import llnl.util.tty as tty

from spack.package import *
from spack.util.environment import set_env
@@ -9,8 +9,6 @@
import sys
from os import environ as env

import llnl.util.tty as tty

from spack.package import *
@@ -23,6 +23,10 @@ class Bash(AutotoolsPackage, GNUMirrorPackage):

    depends_on("c", type="build")  # generated

    depends_on("autoconf", type="build")
    depends_on("automake", type="build")
    depends_on("libtool", type="build")

    depends_on("ncurses")
    depends_on("readline@8.2:", when="@5.2:")
    depends_on("readline@5.0:")
@@ -5,8 +5,6 @@
import glob
import os

import llnl.util.tty as tty

from spack.package import *
from spack.pkg.builtin.boost import Boost
@@ -118,7 +118,7 @@ class Berkeleygw(MakefilePackage):
    def edit(self, spec, prefix):
        # archive is a tar file, despite the .gz extension
        tar = which("tar")
        tar("-x", "-f", self.stage.archive_file, "--strip-components=1")
        tar("-x", "-o", "-f", self.stage.archive_file, "--strip-components=1")

        # get generic arch.mk template
        if spec.satisfies("+mpi"):
@@ -160,8 +160,11 @@ class Binutils(AutotoolsPackage, GNUMirrorPackage):

    with when("platform=darwin"):
        conflicts("+gold", msg="Binutils cannot build linkers on macOS")
        # 2.41 doesn't seem to have any problems.
        conflicts(
            "libs=shared", when="@2.37:2.40", msg="https://github.com/spack/spack/issues/35817"
            "libs=shared",
            when="@2.37:2.40,2.42:",
            msg="https://github.com/spack/spack/issues/35817",
        )

    conflicts(
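The widened constraint above relies on Spack's comma-separated version ranges: "@2.37:2.40,2.42:" matches 2.37 through 2.40 inclusive, or anything from 2.42 on, deliberately leaving 2.41 out. A minimal sketch of how such a constraint reads (assumes a Spack checkout on PYTHONPATH; Spec is Spack's spec class):

    from spack.spec import Spec

    # 2.39 falls inside the first range, so the conflict applies.
    assert Spec("binutils@2.39").satisfies("@2.37:2.40,2.42:")
    # 2.41 is in neither range, so shared libs remain allowed there.
    assert not Spec("binutils@2.41").satisfies("@2.37:2.40,2.42:")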
@@ -4,8 +4,6 @@

import os

import llnl.util.tty as tty

from spack.package import *
from spack.package_test import compare_output
from spack.pkg.builtin.boost import Boost
@@ -19,6 +19,11 @@ class Cdo(AutotoolsPackage):

    maintainers("skosukhin", "Try2Code")

    version(
        "2.5.0",
        sha256="e865c05c1b52fd76b80e33421554db81b38b75210820bdc40e8690f4552f68e2",
        url="https://code.mpimet.mpg.de/attachments/download/29786/cdo-2.5.0.tar.gz",
    )
    version(
        "2.4.4",
        sha256="49f50bd18dacd585e9518cfd4f55548f692426edfb3b27ddcd1c653eab53d063",
@@ -2,6 +2,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.build_systems.cmake import CMakeBuilder
from spack.package import *


@@ -81,13 +82,22 @@ class Celeritas(CMakePackage, CudaPackage, ROCmPackage):
    depends_on("py-breathe", type="build", when="+doc")
    depends_on("py-sphinx", type="build", when="+doc")

    with when("+cuda"):
        depends_on("thrust")
    with when("+rocm"):
        depends_on("hiprand")
        depends_on("rocprim")
        depends_on("rocrand")
        depends_on("rocthrust")

    for _std in _cxxstd_values:
        depends_on("geant4 cxxstd=" + _std, when="+geant4 cxxstd=" + _std)
        depends_on("root cxxstd=" + _std, when="+root cxxstd=" + _std)
        depends_on("vecgeom cxxstd=" + _std, when="+vecgeom cxxstd=" + _std)

    depends_on("vecgeom +cuda cuda_arch=none", when="+vecgeom +cuda cuda_arch=none")
    for _arch in CudaPackage.cuda_arch_values:
        depends_on("vecgeom+cuda cuda_arch=" + _arch, when="+vecgeom +cuda cuda_arch=" + _arch)
        depends_on(f"vecgeom +cuda cuda_arch={_arch}", when=f"+vecgeom +cuda cuda_arch={_arch}")

    conflicts("+rocm", when="+cuda", msg="AMD and NVIDIA accelerators are incompatible")
    conflicts("+rocm", when="+vecgeom", msg="HIP support is only available with ORANGE")
@@ -111,7 +121,7 @@ def cmake_args(self):
            from_variant("CELERITAS_BUILD_DOCS", "doc"),
            define("CELERITAS_BUILD_DEMOS", False),
            define("CELERITAS_BUILD_TESTS", False),
            from_variant("Celeritas_USE_HIP", "rocm"),
            from_variant("CELERITAS_USE_HIP", "rocm"),
            define("CELERITAS_USE_MPI", False),
            define("CELERITAS_USE_Python", True),
        ]
@@ -119,6 +129,22 @@ def cmake_args(self):
        for pkg in ["CUDA", "Geant4", "HepMC3", "OpenMP", "ROOT", "SWIG", "VecGeom"]:
            args.append(from_variant("CELERITAS_USE_" + pkg, pkg.lower()))

        if self.spec.satisfies("+cuda"):
            args.append(CMakeBuilder.define_cuda_architectures(self))
        if self.spec.satisfies("+rocm"):
            args.append(CMakeBuilder.define_hip_architectures(self))
            args.append(
                define(
                    "CMAKE_HIP_FLAGS",
                    " ".join(
                        [
                            f"-I{self.spec[p].prefix.include}"
                            for p in ["hiprand", "rocprim", "rocrand", "rocthrust"]
                        ]
                    ),
                )
            )

        if self.version < Version("0.5"):
            # JSON is required for 0.5 and later
            args.append(define("CELERITAS_USE_JSON", True))
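The CMAKE_HIP_FLAGS logic added above simply joins one -I flag per ROCm dependency into a single space-separated CMake cache value. A pure-Python sketch of the string it builds (the prefix paths are hypothetical stand-ins for the real install prefixes):

    prefixes = {
        "hiprand": "/opt/rocm/hiprand",
        "rocprim": "/opt/rocm/rocprim",
        "rocrand": "/opt/rocm/rocrand",
        "rocthrust": "/opt/rocm/rocthrust",
    }
    # One include flag per dependency, joined with spaces.
    hip_flags = " ".join(f"-I{p}/include" for p in prefixes.values())
    print(hip_flags)  # -I/opt/rocm/hiprand/include -I/opt/rocm/rocprim/include ...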
@@ -276,7 +276,7 @@ def charmarch(self):
    #   build-target=LIBS backend={0}'.format(b))

    def install(self, spec, prefix):
        if not ("backend=mpi" in self.spec) or not ("backend=netlrts" in self.spec):
        if "backend=mpi" not in self.spec or "backend=netlrts" not in self.spec:
            if self.spec.satisfies("+pthreads"):
                raise InstallError(
                    "The pthreads option is only available on the Netlrts and MPI network layers."
@@ -396,7 +396,7 @@ def install(self, spec, prefix):
                    copy(filepath, tmppath)
                    os.remove(filepath)
                    os.rename(tmppath, filepath)
                except (IOError, OSError):
                except OSError:
                    pass

        tmp_path = join_path(builddir, "tmp")
@@ -8,7 +8,6 @@
import spack.user_environment
from spack.package import *
from spack.pkg.builtin.clingo import Clingo
from spack.util.environment import EnvironmentModifications


class ClingoBootstrap(Clingo):
@@ -30,11 +30,13 @@ class Cmake(Package):
    license("BSD-3-Clause")

    version("master", branch="master")
    version("3.31.5", sha256="66fb53a145648be56b46fa9e8ccade3a4d0dfc92e401e52ce76bdad1fea43d27")
    version("3.31.4", sha256="a6130bfe75f5ba5c73e672e34359f7c0a1931521957e8393a5c2922c8b0f7f25")
    version("3.31.3", sha256="fac45bc6d410b49b3113ab866074888d6c9e9dc81a141874446eb239ac38cb87")
    version("3.31.2", sha256="42abb3f48f37dbd739cdfeb19d3712db0c5935ed5c2aef6c340f9ae9114238a2")
    version("3.31.1", sha256="c4fc2a9bd0cd5f899ccb2fb81ec422e175090bc0de5d90e906dd453b53065719")
    version("3.31.0", sha256="300b71db6d69dcc1ab7c5aae61cbc1aa2778a3e00cbd918bc720203e311468c3")
    version("3.30.7", sha256="470e44d9c7caa3bd869ef953071b84f565b5d378d0a9eccbbbcd72031f21b9de")
    version("3.30.6", sha256="a7aa25cdd8545156fe0fec95ebbd53cb2b5173a8717e227f6e8a755185c168cf")
    version("3.30.5", sha256="9f55e1a40508f2f29b7e065fa08c29f82c402fa0402da839fffe64a25755a86d")
    version("3.30.4", sha256="c759c97274f1e7aaaafcb1f0d261f9de9bf3a5d6ecb7e2df616324a46fe704b2")
@@ -12,58 +12,11 @@ class CodeServer(Package):
    homepage = "https://coder.com/docs/code-server/latest"
    url = "https://github.com/coder/code-server/releases/download/v4.4.0/code-server-4.4.0-linux-amd64.tar.gz"

    version("4.12.0", sha256="d50ee947c4144a6ff2656e664ecbb3f70b75168b8a6e8c3eef47787f3c240c26")
    version("4.11.0", sha256="4eb233054941ec298caec6fc84dfba0a72c1bc5fadc0fe4896b10f3f4a291d51")
    version("4.10.1", sha256="f34ce611a9c058982a5e9d200fdf009788e3a564e970b053f4145574bce21b09")
    version("4.4.0", sha256="e3dd265acb18c2230c72d19bbce619ac5c1bd800ebb26e5e169c4d613069500d")
    version("4.3.0", sha256="42c71e98de85270b164b023ef8eb0692cf7700c03081ba5a44eaca014a92eb57")
    version("4.2.0", sha256="98be5bc43ac604c49ae11da259e318b581757a59a25edeee5cf55317ca589ec6")
    version("4.1.0", sha256="f720b20d1f615b78f3a1be9b1614f3d99ed722b8da3047a4143dbe5835e52ce3")
    version("4.0.2", sha256="68c11afa3288707a6880920013d8bac7404cd590eb4f63cac92979d0b0bf4fd1")
    version("4.0.1", sha256="5fe6d26e9d19e685946f0f392d9c822e5303a800cac3ac54a6a2c26104d239fd")
    version("3.12.0", sha256="d3ca41a55e36d73d80300702af2687e25d440cff6b613bb58a2c88d9b8a0a38f")
    version("3.11.1", sha256="d34b0b79582196d59d44ac971aabb7f15cb05d837318b94f62470dc8475665e9")
    version("3.11.0", sha256="dddb97f044ed615a4b8a526328fca6ad703b9c671a28a6090d84668a18755589")
    version("3.10.2", sha256="47154a6b9e61a0313ba499dd5d948613a17841c2f580612f9721c31964622bf5")
    version("3.10.1", sha256="18175624df78976488dbcc2a26f2582a71cef5ca0a419e691b1b70da0b27c7ef")
    version("3.10.0", sha256="5dfce848747f3dd5074cba435cca6730ac99d6d3aa3f50e0a9bf222ad12d3e97")
    version("3.9.3", sha256="eba42eaf868c2144795b1ac54929e3b252ae35403bf8553b3412a5ac4f365a41")
    version("3.9.2", sha256="5dbda5ac598223006f72bcb700b133a752aabe4468ed8105806d1d69b5364408")
    version("3.9.1", sha256="f2648a4387c5a5be8666fb82a7b8a58274c45b91942251ab337e202e078ae8a5")
    version("3.9.0", sha256="229b0fb95d78a7f7ff0dd55bc151a7521fcd699af50151faf67f6c7ce51110f4")
    version("3.8.1", sha256="130cf94e3921d0e2adfa33e875bf1aa81fd28548aac94fd31fbc589baa68d45f")
    version("3.8.0", sha256="70b069f26b30c38cca5fa07b5f25db4d15976de80af3a644b9105d1b5e23e7d5")
    version("3.7.4", sha256="01ca0e48df44df70cdf702644b013102024a5b30edf6c1fbb2e10b0310056382")
    version("3.7.3", sha256="7a90f3171c9bc6f65266066e35cc34d48a032910c136ea21116d28f3d7214547")
    version("3.7.2", sha256="c3054f214392b1b2eb4c77c57cb950ac5d733d349a426975e8bf32028e65a226")
    version("3.7.1", sha256="bebd9e0c46e0fd4b4f295fd91fc2db135a694614db972095e9842bf7969f4cee")
    version("3.7.0", sha256="5f8df8ed3924e8e594674d73fb50b00a06efa529f96a0495a5ee8c39c68f3ce1")
    version("3.6.2", sha256="fd4ac7d61f3e1b2a5034f1706e409c77fad299adef0ede204828d8ecfe317e45")
    version("3.6.1", sha256="bbe4ef9585e093b3521deb34a0820d2136172271862d6396df21c2e9a26c6374")
    version("3.6.0", sha256="d1ae4f7263741e0551358d3ed77dad587b33b352d827623d4df25e98f9e21019")
    version("3.5.0", sha256="90c19c84611becac4af1fb0bd5324ab30f9200769fa7914cd10ccb6b88c657bb")
    version("3.4.1", sha256="afdb89f4dc7201c03cb35d4f8dc1ccb6060bd0da324a6789089de264d3406817")
    version("3.4.0", sha256="918c28696b73b96dc9361977f93e788d5c8884b5d4a088d206f05d5b8bccb738")
    version("3.3.1", sha256="57b9855b20f511e22776ee8a53d1ff30f864498814c4c0b0af3510f71d7a2969")
    version("3.3.0", sha256="6ca5148a447b41753d5151c5a49a8af24122c7b0808609782aec454e66be4f2c")
    version("3.2.0", sha256="a8157e8766d6a0e255c72db25e8677a57adb8d889d653e78750b4d26a6ff7400")
    version("3.1.1", sha256="5dd922d28b2e351c146081849d987fb1e439ee7d53b941434b2eecb2a194da71")
    version("3.1.0", sha256="5ef85c8f280ce781a176a8b77386b333efe892755a5c325a1782e4eac6016e59")
    version("3.0.2", sha256="04367cfeb23991f3dc3f1ef8e3dfe5e9d683bb50c9e1fa69e3c21757facfd7ee")
    version("4.96.4", sha256="b3f9025d00f2cdf61caf83945ef7225d4a3eb576c4c007e45868f45713e39c8e")

    depends_on("c", type="build")  # generated
    depends_on("cxx", type="build")  # generated

    def url_for_version(self, version):
        if version <= Version("3.2.0"):
            return "https://github.com/coder/code-server/releases/download/{0}/code-server-{0}-linux-x86_64.tar.gz".format(
                version
            )
        else:
            return "https://github.com/coder/code-server/releases/download/v{0}/code-server-{0}-linux-amd64.tar.gz".format(
                version
            )

    def install(self, spec, prefix):
        install_tree(".", prefix)
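url_for_version above encodes a release-layout change: tags up to 3.2.0 have no "v" prefix and ship "linux-x86_64" tarballs, while later releases are "v"-prefixed and named "linux-amd64". A stand-alone sketch of the same scheme (naive tuple comparison in place of Spack's Version type):

    def url_for(version: str) -> str:
        base = "https://github.com/coder/code-server/releases/download"
        # Old releases (<= 3.2.0): bare tag, x86_64 naming.
        if tuple(int(x) for x in version.split(".")) <= (3, 2, 0):
            return f"{base}/{version}/code-server-{version}-linux-x86_64.tar.gz"
        # Newer releases: v-prefixed tag, amd64 naming.
        return f"{base}/v{version}/code-server-{version}-linux-amd64.tar.gz"

    print(url_for("4.4.0"))  # matches the package's url attribute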
@@ -3,7 +3,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *
from spack.util.environment import EnvironmentModifications


class Conda4aarch64(Package):
@@ -8,8 +8,6 @@
import socket
from os import environ as env

import llnl.util.tty as tty

from spack.package import *
@@ -38,10 +38,11 @@ class Cp2k(MakefilePackage, CMakePackage, CudaPackage, ROCmPackage):
    git = "https://github.com/cp2k/cp2k.git"
    list_url = "https://github.com/cp2k/cp2k/releases"

    maintainers("dev-zero", "mtaillefumier")
    maintainers("dev-zero", "mtaillefumier", "RMeli", "abussy")

    license("GPL-2.0-or-later")

    version("2025.1", sha256="65c8ad5488897b0f995919b9fa77f2aba4b61677ba1e3c19bb093d5c08a8ce1d")
    version("2024.3", sha256="a6eeee773b6b1fb417def576e4049a89a08a0ed5feffcd7f0b33c7d7b48f19ba")
    version("2024.2", sha256="cc3e56c971dee9e89b705a1103765aba57bf41ad39a11c89d3de04c8b8cdf473")
    version("2024.1", sha256="a7abf149a278dfd5283dc592a2c4ae803b37d040df25d62a5e35af5c4557668f")
@@ -115,7 +116,9 @@ class Cp2k(MakefilePackage, CMakePackage, CudaPackage, ROCmPackage):
    )
    variant("pytorch", default=False, description="Enable libtorch support")
    variant("quip", default=False, description="Enable quip support")
    variant("dftd4", when="@2024.2:", default=False, description="Enable DFT-D4 support")
    variant("mpi_f08", default=False, description="Use MPI F08 module")
    variant("smeagol", default=False, description="Enable libsmeagol support", when="@2025.2:")

    variant(
        "enable_regtests",
@@ -156,6 +159,7 @@ class Cp2k(MakefilePackage, CMakePackage, CudaPackage, ROCmPackage):
    )

    depends_on("python@3", type="build")
    depends_on("pkgconfig", type="build", when="build_system=cmake")

    depends_on("blas")
    depends_on("lapack")
@@ -193,13 +197,14 @@ class Cp2k(MakefilePackage, CMakePackage, CudaPackage, ROCmPackage):
    )

    with when("+libxc"):
        depends_on("pkgconfig", type="build", when="@7.0:")
        depends_on("pkgconfig", type="build", when="@7.0: ^libxc@:6")
        depends_on("libxc@4.0.3:4", when="@7.0:8.1")
        depends_on("libxc@5.1.3:5.1", when="@8.2:8")
        depends_on("libxc@5.1.7:5.1", when="@9:2022.2")
        depends_on("libxc@6.1:", when="@2023.1:")
        depends_on("libxc@6.2:", when="@2023.2:")
        depends_on("libxc@:6", when="@:2024.3")
        depends_on("libxc@7 build_system=cmake", when="@2025.2:")

    with when("+spla"):
        depends_on("spla+cuda+fortran", when="+cuda")
@@ -260,6 +265,8 @@ class Cp2k(MakefilePackage, CMakePackage, CudaPackage, ROCmPackage):
    depends_on("plumed+mpi", when="+mpi")
    depends_on("plumed~mpi", when="~mpi")

    depends_on("libsmeagol", when="+smeagol")

    # while we link statically against PEXSI, its own deps may be linked in
    # dynamically, therefore we can't set this as a pure build-type dependency.
    depends_on("pexsi+fortran@0.10.0:", when="+pexsi")
@@ -290,6 +297,8 @@ class Cp2k(MakefilePackage, CMakePackage, CudaPackage, ROCmPackage):

    depends_on("spglib", when="+spglib")

    depends_on("dftd4@3.6.0: build_system=cmake", when="+dftd4")

    with when("build_system=cmake"):
        depends_on("cmake@3.22:", type="build")

@@ -621,6 +630,18 @@ def edit(self, pkg, spec, prefix):
            ldflags += [spglib.search_flags]
            libs.append(spglib.ld_flags)

        if spec.satisfies("+dftd4"):
            cppflags += ["-D__DFTD4"]
            dftd4 = spec["dftd4"].libs
            ldflags += [dftd4.search_flags]
            libs.append(dftd4.ld_flags)

        if spec.satisfies("+smeagol"):
            cppflags += ["-D__SMEAGOL"]
            smeagol = spec["libsmeagol"].libs
            ldflags += [smeagol.search_flags]
            libs.append(smeagol.ld_flags)

        cc = spack_cc if "~mpi" in spec else spec["mpi"].mpicc
        cxx = spack_cxx if "~mpi" in spec else spec["mpi"].mpicxx
        fc = spack_fc if "~mpi" in spec else spec["mpi"].mpifc
@@ -763,8 +784,8 @@ def edit(self, pkg, spec, prefix):
                    "Point environment variable LIBSMM_PATH to "
                    "the absolute path of the libsmm.a file"
                )
            except IOError:
                raise IOError(
            except OSError:
                raise OSError(
                    "The file LIBSMM_PATH pointed to does not "
                    "exist. Note that it must be an absolute path."
                )
@@ -994,7 +1015,9 @@ def cmake_args(self):
            self.define_from_variant("CP2K_USE_VORI", "libvori"),
            self.define_from_variant("CP2K_USE_SPLA", "spla"),
            self.define_from_variant("CP2K_USE_QUIP", "quip"),
            self.define_from_variant("CP2K_USE_DFTD4", "dftd4"),
            self.define_from_variant("CP2K_USE_MPI_F08", "mpi_f08"),
            self.define_from_variant("CP2K_USE_LIBSMEAGOL", "smeagol"),
        ]

        # we force the use of elpa's openmp threading support; might need to be revisited
@@ -15,6 +15,7 @@ class Cppgsl(CMakePackage):
    license("MIT")

    version("main", branch="main")
    version("4.1.0", sha256="0a227fc9c8e0bf25115f401b9a46c2a68cd28f299d24ab195284eb3f1d7794bd")
    version("4.0.0", sha256="f0e32cb10654fea91ad56bde89170d78cfbf4363ee0b01d8f097de2ba49f6ce9")
    version("3.1.0", sha256="d3234d7f94cea4389e3ca70619b82e8fb4c2f33bb3a070799f1e18eef500a083")
    version("2.1.0", sha256="ef73814657b073e1be86c8f7353718771bf4149b482b6cb54f99e79b23ff899d")
@@ -32,6 +33,7 @@ class Cppgsl(CMakePackage):
    )

    depends_on("cmake@3.1.3:", type="build")
    depends_on("cmake@3.14:", type="build", when="@4.1:")

    def cmake_args(self):
        return [
Some files were not shown because too many files have changed in this diff.