Compare commits


2 Commits

Author           SHA1        Message                                                                      Date
Harmen Stoppels  10acffc92e  fix incorrect type annotation of spack.provider_index._IndexBase.providers  2025-01-29 17:38:13 +01:00
Harmen Stoppels  f95e246355  index: avoid quadratic complexity through bulk update                        2025-01-29 17:14:34 +01:00
237 changed files with 1571 additions and 1867 deletions

View File

@@ -40,17 +40,17 @@ jobs:
# 1: Platforms to build for
# 2: Base image (e.g. ubuntu:22.04)
dockerfile: [[amazon-linux, 'linux/amd64,linux/arm64', 'amazonlinux:2'],
[centos-stream9, 'linux/amd64,linux/arm64', 'centos:stream9'],
[leap15, 'linux/amd64,linux/arm64', 'opensuse/leap:15'],
[ubuntu-focal, 'linux/amd64,linux/arm64', 'ubuntu:20.04'],
[ubuntu-jammy, 'linux/amd64,linux/arm64', 'ubuntu:22.04'],
[ubuntu-noble, 'linux/amd64,linux/arm64', 'ubuntu:24.04'],
[almalinux8, 'linux/amd64,linux/arm64', 'almalinux:8'],
[almalinux9, 'linux/amd64,linux/arm64', 'almalinux:9'],
[centos-stream9, 'linux/amd64,linux/arm64,linux/ppc64le', 'centos:stream9'],
[leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
[ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
[ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
[ubuntu-noble, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:24.04'],
[almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
[almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
[rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
[rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
[fedora39, 'linux/amd64,linux/arm64', 'fedora:39'],
[fedora40, 'linux/amd64,linux/arm64', 'fedora:40']]
[fedora39, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:39'],
[fedora40, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:40']]
name: Build ${{ matrix.dockerfile[0] }}
if: github.repository == 'spack/spack'
steps:
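
For readers skimming the matrix: each ``dockerfile`` entry is a three-element list whose fields are named in the comments above, and this change toggles ``linux/ppc64le`` in the platform field of several entries. A tiny illustrative decomposition (the entry shown is hypothetical, following the matrix's own format):

    # Illustrative only: how one dockerfile matrix entry decomposes.
    # Field meaning per the workflow comments: name, platforms, base image.
    entry = ["ubuntu-jammy", "linux/amd64,linux/arm64,linux/ppc64le", "ubuntu:22.04"]
    name, platforms, base_image = entry
    print(name)                   # ubuntu-jammy
    print(platforms.split(","))   # ['linux/amd64', 'linux/arm64', 'linux/ppc64le']
    print(base_image)             # ubuntu:22.04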

View File

@@ -81,10 +81,6 @@ jobs:
with:
with_coverage: ${{ needs.changes.outputs.core }}
import-check:
needs: [ changes ]
uses: ./.github/workflows/import-check.yaml
all-prechecks:
needs: [ prechecks ]
if: ${{ always() }}

View File

@@ -33,4 +33,3 @@ jobs:
with:
verbose: true
fail_ci_if_error: false
token: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -1,49 +0,0 @@
name: import-check
on:
workflow_call:
jobs:
# Check we don't make the situation with circular imports worse
import-check:
runs-on: ubuntu-latest
steps:
- uses: julia-actions/setup-julia@v2
with:
version: '1.10'
- uses: julia-actions/cache@v2
# PR: use the base of the PR as the old commit
- name: Checkout PR base commit
if: github.event_name == 'pull_request'
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
ref: ${{ github.event.pull_request.base.sha }}
path: old
# not a PR: use the previous commit as the old commit
- name: Checkout previous commit
if: github.event_name != 'pull_request'
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
fetch-depth: 2
path: old
- name: Checkout previous commit
if: github.event_name != 'pull_request'
run: git -C old reset --hard HEAD^
- name: Checkout new commit
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
path: new
- name: Install circular import checker
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
repository: haampie/circular-import-fighter
ref: e38bcd0aa46368e30648b61b7f0d8c1ca68aadff
path: circular-import-fighter
- name: Install dependencies
working-directory: circular-import-fighter
run: make -j dependencies
- name: Circular import check
working-directory: circular-import-fighter
run: make -j compare "SPACK_ROOT=../old ../new"

View File

@@ -86,6 +86,66 @@ jobs:
spack -d bootstrap now --dev
spack -d style -t black
spack unit-test -V
# Check we don't make the situation with circular imports worse
import-check:
runs-on: ubuntu-latest
steps:
- uses: julia-actions/setup-julia@v2
with:
version: '1.10'
- uses: julia-actions/cache@v2
# PR: use the base of the PR as the old commit
- name: Checkout PR base commit
if: github.event_name == 'pull_request'
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
ref: ${{ github.event.pull_request.base.sha }}
path: old
# not a PR: use the previous commit as the old commit
- name: Checkout previous commit
if: github.event_name != 'pull_request'
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
fetch-depth: 2
path: old
- name: Checkout previous commit
if: github.event_name != 'pull_request'
run: git -C old reset --hard HEAD^
- name: Checkout new commit
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
path: new
- name: Install circular import checker
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
repository: haampie/circular-import-fighter
ref: b5d6ce9be35f602cca7d5a6aa0259fca10639cca
path: circular-import-fighter
- name: Install dependencies
working-directory: circular-import-fighter
run: make -j dependencies
- name: Problematic imports before
working-directory: circular-import-fighter
run: make SPACK_ROOT=../old SUFFIX=.old
- name: Problematic imports after
working-directory: circular-import-fighter
run: make SPACK_ROOT=../new SUFFIX=.new
- name: Compare import cycles
working-directory: circular-import-fighter
run: |
edges_before="$(head -n1 solution.old)"
edges_after="$(head -n1 solution.new)"
if [ "$edges_after" -gt "$edges_before" ]; then
printf '\033[1;31mImport check failed: %s imports need to be deleted, ' "$edges_after"
printf 'previously this was %s\033[0m\n' "$edges_before"
printf 'Compare \033[1;97m"Problematic imports before"\033[0m and '
printf '\033[1;97m"Problematic imports after"\033[0m.\n'
exit 1
else
printf '\033[1;32mImport check passed: %s <= %s\033[0m\n' "$edges_after" "$edges_before"
fi
# Further style checks from pylint
pylint:
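
The shell step above fails the job when the new solution has more problematic import edges than the old one. A minimal Python sketch of the same comparison, assuming (as the script implies) that the first line of each solution file holds the integer edge count:

    # Sketch of the "Compare import cycles" step above.
    import sys

    def read_edge_count(path: str) -> int:
        # First line of a solution file is the number of imports to delete.
        with open(path, encoding="utf-8") as f:
            return int(f.readline().strip())

    edges_before = read_edge_count("solution.old")
    edges_after = read_edge_count("solution.new")
    if edges_after > edges_before:
        print(f"Import check failed: {edges_after} imports need to be deleted, "
              f"previously this was {edges_before}")
        sys.exit(1)
    print(f"Import check passed: {edges_after} <= {edges_before}")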

View File

@@ -272,9 +272,9 @@ often lists dependencies and the flags needed to locate them. The
"environment variables" section lists environment variables that the
build system uses to pass flags to the compiler and linker.
^^^^^^^^^^^^^^^^^^^^^^^^^
Adding flags to configure
^^^^^^^^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^^^^^^^^^^^^^^^
Addings flags to configure
^^^^^^^^^^^^^^^^^^^^^^^^^^
For most of the flags you encounter, you will want a variant to
optionally enable/disable them. You can then optionally pass these
@@ -285,7 +285,7 @@ function like so:
def configure_args(self):
args = []
...
if self.spec.satisfies("+mpi"):
args.append("--enable-mpi")
else:
@@ -299,10 +299,7 @@ Alternatively, you can use the :ref:`enable_or_disable <autotools_enable_or_dis
.. code-block:: python
def configure_args(self):
args = []
...
args.extend(self.enable_or_disable("mpi"))
return args
return [self.enable_or_disable("mpi")]
Note that we are explicitly disabling MPI support if it is not
@@ -347,14 +344,7 @@ typically used to enable or disable some feature within the package.
default=False,
description="Memchecker support for debugging [degrades performance]"
)
...
def configure_args(self):
args = []
...
args.extend(self.enable_or_disable("memchecker"))
return args
config_args.extend(self.enable_or_disable("memchecker"))
In this example, specifying the variant ``+memchecker`` will generate
the following configuration options:
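
The hunk cuts off before the generated options are listed. As context, and not part of the diff: a toy reimplementation of how an ``enable_or_disable``-style helper expands a boolean variant into configure flags (the real helper is provided by Spack's autotools build system; this standalone version is only illustrative):

    # Context sketch (not part of the diff): expansion of a boolean variant.
    def enable_or_disable(name: str, active: bool) -> list:
        return [f"--enable-{name}" if active else f"--disable-{name}"]

    print(enable_or_disable("memchecker", True))   # ['--enable-memchecker']
    print(enable_or_disable("memchecker", False))  # ['--disable-memchecker']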

View File

@@ -361,6 +361,7 @@ and the tags associated with the class of runners to build on.
* ``.linux_neoverse_n1``
* ``.linux_neoverse_v1``
* ``.linux_neoverse_v2``
* ``.linux_power``
* ``.linux_skylake``
* ``.linux_x86_64``
* ``.linux_x86_64_v4``

View File

@@ -112,19 +112,6 @@ the original but may concretize differently in the presence of different
explicit or default configuration settings (e.g., a different version of
Spack or for a different user account).
Environments created from a manifest will copy any included configs
from relative paths inside the environment. Relative paths from
outside the environment will cause errors, and absolute paths will be
kept absolute. For example, if ``spack.yaml`` includes:
.. code-block:: yaml
spack:
include: [./config.yaml]
then the created environment will have its own copy of the file
``config.yaml`` copied from the location in the original environment.
Create an environment from a ``spack.lock`` file using:
.. code-block:: console
@@ -173,7 +160,7 @@ accepts. If an environment already exists then spack will simply activate it
and ignore the create-specific flags.
.. code-block:: console
$ spack env activate --create -p myenv
# ...
# [creates if myenv does not exist yet]
@@ -437,8 +424,8 @@ Developing Packages in a Spack Environment
The ``spack develop`` command allows one to develop Spack packages in
an environment. It requires a spec containing a concrete version, and
will configure Spack to install the package from local source.
If a version is not provided from the command line interface then spack
will configure Spack to install the package from local source.
If a version is not provided from the command line interface then spack
will automatically pick the highest version the package has defined.
This means any infinity versions (``develop``, ``main``, ``stable``) will be
preferred in this selection process.
@@ -448,9 +435,9 @@ set, and Spack will ensure the package and its dependents are rebuilt
any time the environment is installed if the package's local source
code has been modified. Spack's native implementation to check for modifications
is to check if ``mtime`` is newer than the installation.
A custom check can be created by overriding the ``detect_dev_src_change`` method
in your package class. This is particularly useful for projects using custom spack repo's
to drive development and want to optimize performance.
A custom check can be created by overriding the ``detect_dev_src_change`` method
in your package class. This is particularly useful for projects using custom spack repo's
to drive development and want to optimize performance.
Spack ensures that all instances of a
developed package in the environment are concretized to match the
@@ -466,7 +453,7 @@ Further development on ``foo`` can be tested by re-installing the environment,
and eventually committed and pushed to the upstream git repo.
If the package being developed supports out-of-source builds then users can use the
``--build_directory`` flag to control the location and name of the build directory.
``--build_directory`` flag to control the location and name of the build directory.
This is a shortcut to set the ``package_attributes:build_directory`` in the
``packages`` configuration (see :ref:`assigning-package-attributes`).
The supplied location will become the build-directory for that package in all future builds.
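
A sketch of the override mentioned above. The method name ``detect_dev_src_change`` comes from the docs; the body and the source-path attribute are illustrative assumptions, not Spack's implementation (the docs say the native check compares ``mtime`` against the installation):

    # Illustrative override; attribute names below are assumptions.
    import pathlib

    class MyPackage:  # in a real package: a spack.package.Package subclass
        dev_source_path = "."  # assumption: wherever the local sources live

        def detect_dev_src_change(self) -> bool:
            """Return True if any local source file changed since last install."""
            last_install_time = getattr(self, "last_install_time", 0.0)
            return any(
                p.stat().st_mtime > last_install_time
                for p in pathlib.Path(self.dev_source_path).rglob("*")
                if p.is_file()
            )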

View File

@@ -668,7 +668,7 @@ def copy(src, dest, _permissions=False):
_permissions (bool): for internal use only
Raises:
OSError: if *src* does not match any files or directories
IOError: if *src* does not match any files or directories
ValueError: if *src* matches multiple files but *dest* is
not a directory
"""
@@ -679,7 +679,7 @@ def copy(src, dest, _permissions=False):
files = glob.glob(src)
if not files:
raise OSError("No such file or directory: '{0}'".format(src))
raise IOError("No such file or directory: '{0}'".format(src))
if len(files) > 1 and not os.path.isdir(dest):
raise ValueError(
"'{0}' matches multiple files but '{1}' is not a directory".format(src, dest)
@@ -710,7 +710,7 @@ def install(src, dest):
dest (str): the destination file or directory
Raises:
OSError: if *src* does not match any files or directories
IOError: if *src* does not match any files or directories
ValueError: if *src* matches multiple files but *dest* is
not a directory
"""
@@ -748,7 +748,7 @@ def copy_tree(
_permissions (bool): for internal use only
Raises:
OSError: if *src* does not match any files or directories
IOError: if *src* does not match any files or directories
ValueError: if *src* is a parent directory of *dest*
"""
if _permissions:
@@ -762,7 +762,7 @@ def copy_tree(
files = glob.glob(src)
if not files:
raise OSError("No such file or directory: '{0}'".format(src))
raise IOError("No such file or directory: '{0}'".format(src))
# For Windows hard-links and junctions, the source path must exist to make a symlink. Add
# all symlinks to this list while traversing the tree, then when finished, make all
@@ -843,7 +843,7 @@ def install_tree(src, dest, symlinks=True, ignore=None):
ignore (typing.Callable): function indicating which files to ignore
Raises:
OSError: if *src* does not match any files or directories
IOError: if *src* does not match any files or directories
ValueError: if *src* is a parent directory of *dest*
"""
copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)
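
A note on the OSError/IOError churn in this file (a fact of Python 3, not an assumption): since Python 3.3, ``IOError`` is an alias of ``OSError``, so swapping the names in raise statements, except clauses, and docstrings does not change runtime behavior.

    # Since Python 3.3, IOError is the very same class as OSError.
    assert IOError is OSError
    try:
        open("/no/such/file")
    except IOError as e:  # equally catchable as OSError
        assert isinstance(e, OSError)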

View File

@@ -308,7 +308,7 @@ class LinkTree:
def __init__(self, source_root):
if not os.path.exists(source_root):
raise OSError("No such file or directory: '%s'", source_root)
raise IOError("No such file or directory: '%s'", source_root)
self._root = source_root

View File

@@ -391,7 +391,7 @@ def _poll_lock(self, op: int) -> bool:
return True
except OSError as e:
except IOError as e:
# EAGAIN and EACCES == locked by another process (so try again)
if e.errno not in (errno.EAGAIN, errno.EACCES):
raise

View File

@@ -918,7 +918,7 @@ def _writer_daemon(
try:
if stdin_file.read(1) == "v":
echo = not echo
except OSError as e:
except IOError as e:
# If SIGTTIN is ignored, the system gives EIO
# to let the caller know the read failed b/c it
# was in the bg. Ignore that too.
@@ -1013,7 +1013,7 @@ def wrapped(*args, **kwargs):
while True:
try:
return function(*args, **kwargs)
except OSError as e:
except IOError as e:
if e.errno == errno.EINTR:
continue
raise
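
The second hunk wraps a callable so interrupted system calls are retried. A self-contained sketch of that wrapper; note that since Python 3.5 (PEP 475) the interpreter itself retries most EINTR-interrupted syscalls, so this mainly matters on older interpreters or custom file objects:

    # Minimal standalone version of the retry-on-EINTR wrapper shown above.
    import errno
    import functools

    def retry_on_eintr(function):
        @functools.wraps(function)
        def wrapped(*args, **kwargs):
            while True:
                try:
                    return function(*args, **kwargs)
                except OSError as e:
                    if e.errno == errno.EINTR:
                        continue  # interrupted by a signal: try again
                    raise
        return wrapped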

View File

@@ -10,7 +10,7 @@
import spack.util.git
#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
__version__ = "1.0.0.dev0"
__version__ = "0.24.0.dev0"
spack_version = __version__

View File

@@ -458,18 +458,11 @@ def cmake(
) -> None:
"""Runs ``cmake`` in the build directory"""
if spec.is_develop:
# skip cmake phase if it is an incremental develop build
# Determine the files that will re-run CMake that are generated from a successful
# configure step based on state
primary_generator = _extract_primary_generator(self.generator)
configure_artifact = "Makefile"
if primary_generator == "Ninja":
configure_artifact = "ninja.build"
if os.path.isfile(os.path.join(self.build_directory, configure_artifact)):
return
# skip cmake phase if it is an incremental develop build
if spec.is_develop and os.path.isfile(
os.path.join(self.build_directory, "CMakeCache.txt")
):
return
options = self.std_cmake_args
options += self.cmake_args()

View File

@@ -15,7 +15,7 @@ class CudaPackage(PackageBase):
"""Auxiliary class which contains CUDA variant, dependencies and conflicts
and is meant to unify and facilitate its usage.
Maintainers: ax3l, Rombur, davidbeckingsale, pauleonix
Maintainers: ax3l, Rombur, davidbeckingsale
"""
# https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#gpu-feature-list
@@ -47,12 +47,6 @@ class CudaPackage(PackageBase):
"89",
"90",
"90a",
"100",
"100a",
"101",
"101a",
"120",
"120a",
)
# FIXME: keep cuda and cuda_arch separate to make usage easier until
@@ -105,56 +99,39 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
# CUDA version vs Architecture
# https://en.wikipedia.org/wiki/CUDA#GPUs_supported
# https://docs.nvidia.com/cuda/cuda-toolkit-release-notes/index.html#deprecated-features
# Tesla support:
depends_on("cuda@:6.0", when="cuda_arch=10")
depends_on("cuda@:6.5", when="cuda_arch=11")
depends_on("cuda@2.1:6.5", when="cuda_arch=12")
depends_on("cuda@2.1:6.5", when="cuda_arch=13")
# Fermi support:
depends_on("cuda@3.0:8.0", when="cuda_arch=20")
depends_on("cuda@3.2:8.0", when="cuda_arch=21")
# Kepler support:
depends_on("cuda@5.0:10.2", when="cuda_arch=30")
depends_on("cuda@5.0:10.2", when="cuda_arch=32")
depends_on("cuda@5.0:11.8", when="cuda_arch=35")
depends_on("cuda@6.5:11.8", when="cuda_arch=37")
# Maxwell support:
depends_on("cuda@6.0:", when="cuda_arch=50")
depends_on("cuda@6.5:", when="cuda_arch=52")
depends_on("cuda@6.5:", when="cuda_arch=53")
# Pascal support:
depends_on("cuda@8.0:", when="cuda_arch=60")
depends_on("cuda@8.0:", when="cuda_arch=61")
depends_on("cuda@8.0:", when="cuda_arch=62")
# Volta support:
depends_on("cuda@9.0:", when="cuda_arch=70")
# Turing support:
depends_on("cuda@9.0:", when="cuda_arch=72")
depends_on("cuda@10.0:", when="cuda_arch=75")
# Ampere support:
depends_on("cuda@11.0:", when="cuda_arch=80")
depends_on("cuda@11.1:", when="cuda_arch=86")
depends_on("cuda@11.4:", when="cuda_arch=87")
# Ada support:
depends_on("cuda@11.8:", when="cuda_arch=89")
# Hopper support:
depends_on("cuda@12.0:", when="cuda_arch=90")
depends_on("cuda@12.0:", when="cuda_arch=90a")
# Blackwell support:
depends_on("cuda@12.8:", when="cuda_arch=100")
depends_on("cuda@12.8:", when="cuda_arch=100a")
depends_on("cuda@12.8:", when="cuda_arch=101")
depends_on("cuda@12.8:", when="cuda_arch=101a")
depends_on("cuda@12.8:", when="cuda_arch=120")
depends_on("cuda@12.8:", when="cuda_arch=120a")
# From the NVIDIA install guide we know of conflicts for particular
# platforms (linux, darwin), architectures (x86, powerpc) and compilers
# (gcc, clang). We don't restrict %gcc and %clang conflicts to
@@ -186,7 +163,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
conflicts("%gcc@14:", when="+cuda ^cuda@:12.6")
conflicts("%gcc@15:", when="+cuda ^cuda@:12.8")
conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
@@ -195,7 +171,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
conflicts("%clang@18:", when="+cuda ^cuda@:12.5")
conflicts("%clang@19:", when="+cuda ^cuda@:12.6")
conflicts("%clang@20:", when="+cuda ^cuda@:12.8")
# https://gist.github.com/ax3l/9489132#gistcomment-3860114
conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")

View File

@@ -14,7 +14,7 @@
import zipfile
from collections import namedtuple
from typing import Callable, Dict, List, Set
from urllib.request import Request
from urllib.request import HTTPHandler, Request, build_opener
import llnl.util.filesystem as fs
import llnl.util.tty as tty
@@ -62,8 +62,6 @@
PushResult = namedtuple("PushResult", "success url")
urlopen = web_util.urlopen # alias for mocking in tests
def get_change_revisions():
"""If this is a git repo get the revisions to use when checking
@@ -629,19 +627,29 @@ def download_and_extract_artifacts(url, work_dir):
if token:
headers["PRIVATE-TOKEN"] = token
request = Request(url, headers=headers, method="GET")
opener = build_opener(HTTPHandler)
request = Request(url, headers=headers)
request.get_method = lambda: "GET"
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
response_code = response.getcode()
if response_code != 200:
msg = f"Error response code ({response_code}) in reproduce_ci_job"
raise SpackError(msg)
artifacts_zip_path = os.path.join(work_dir, "artifacts.zip")
os.makedirs(work_dir, exist_ok=True)
try:
response = urlopen(request, timeout=SPACK_CDASH_TIMEOUT)
with open(artifacts_zip_path, "wb") as out_file:
shutil.copyfileobj(response, out_file)
except OSError as e:
raise SpackError(f"Error fetching artifacts: {e}")
if not os.path.exists(work_dir):
os.makedirs(work_dir)
with zipfile.ZipFile(artifacts_zip_path) as zip_file:
zip_file.extractall(work_dir)
with open(artifacts_zip_path, "wb") as out_file:
shutil.copyfileobj(response, out_file)
zip_file = zipfile.ZipFile(artifacts_zip_path)
zip_file.extractall(work_dir)
zip_file.close()
os.remove(artifacts_zip_path)
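
Putting the interleaved old/new lines back together, the download flow is: send a GET with the private token, stream the zip to disk, extract it, and clean up. A minimal sketch with a placeholder URL and paths:

    # Condensed sketch of the artifact download/extract flow above.
    import os
    import shutil
    import zipfile
    from urllib.request import Request, urlopen

    url = "https://gitlab.example.com/api/v4/jobs/123/artifacts"  # placeholder
    work_dir = "repro"
    headers = {"PRIVATE-TOKEN": os.environ.get("GITLAB_PRIVATE_TOKEN", "")}

    os.makedirs(work_dir, exist_ok=True)
    artifacts_zip_path = os.path.join(work_dir, "artifacts.zip")
    request = Request(url, headers=headers, method="GET")
    with urlopen(request, timeout=60) as response, \
            open(artifacts_zip_path, "wb") as out:
        shutil.copyfileobj(response, out)  # stream the zip to disk
    with zipfile.ZipFile(artifacts_zip_path) as zf:
        zf.extractall(work_dir)            # unpack the CI artifacts
    os.remove(artifacts_zip_path)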

View File

@@ -110,7 +110,10 @@ def external_find(args):
# Note that KeyboardInterrupt does not subclass Exception
# (so CTRL-C will terminate the program as expected).
skip_msg = "Skipping manifest and continuing with other external checks"
if isinstance(e, OSError) and e.errno in (errno.EPERM, errno.EACCES):
if (isinstance(e, IOError) or isinstance(e, OSError)) and e.errno in [
errno.EPERM,
errno.EACCES,
]:
# The manifest file does not have sufficient permissions enabled:
# print a warning and keep going
tty.warn("Unable to read manifest due to insufficient permissions.", skip_msg)

View File

@@ -177,15 +177,16 @@ def test_run(args):
matching = spack.store.STORE.db.query_local(spec, hashes=hashes, explicit=explicit)
if spec and not matching:
tty.warn("No {0}installed packages match spec {1}".format(explicit_str, spec))
"""
TODO: Need to write out a log message and/or CDASH Testing
output that package not installed IF continue to process
these issues here.
# TODO: Need to write out a log message and/or CDASH Testing
# output that package not installed IF continue to process
# these issues here.
# if args.log_format:
# # Proceed with the spec assuming the test process
# # to ensure report package as skipped (e.g., for CI)
# specs_to_test.append(spec)
if args.log_format:
# Proceed with the spec assuming the test process
# to ensure report package as skipped (e.g., for CI)
specs_to_test.append(spec)
"""
specs_to_test.extend(matching)

View File

@@ -57,7 +57,7 @@ def validate(configuration_file):
# Set the default value of the concretization strategy to unify and
# warn if the user explicitly set another value
env_dict.setdefault("concretizer", {"unify": True})
if env_dict["concretizer"]["unify"] is not True:
if not env_dict["concretizer"]["unify"] is True:
warnings.warn(
'"concretizer:unify" is not set to "true", which means the '
"generated image may contain different variants of the same "

View File

@@ -263,6 +263,18 @@ def from_dict(cls, spec, dictionary):
return InstallRecord(spec, **d)
class ForbiddenLockError(SpackError):
"""Raised when an upstream DB attempts to acquire a lock"""
class ForbiddenLock:
def __getattr__(self, name):
raise ForbiddenLockError(f"Cannot access attribute '{name}' of lock")
def __reduce__(self):
return ForbiddenLock, tuple()
class LockConfiguration(NamedTuple):
"""Data class to configure locks in Database objects
@@ -605,12 +617,16 @@ def __init__(
self.db_lock_timeout = lock_cfg.database_timeout
tty.debug(f"DATABASE LOCK TIMEOUT: {str(self.db_lock_timeout)}s")
self.lock = lk.Lock(
str(self._lock_path),
default_timeout=self.db_lock_timeout,
desc="database",
enable=not self.is_upstream and lock_cfg.enable,
)
self.lock: Union[ForbiddenLock, lk.Lock]
if self.is_upstream:
self.lock = ForbiddenLock()
else:
self.lock = lk.Lock(
str(self._lock_path),
default_timeout=self.db_lock_timeout,
desc="database",
enable=lock_cfg.enable,
)
self._data: Dict[str, InstallRecord] = {}
# For every installed spec we keep track of its install prefix, so that
@@ -1034,9 +1050,6 @@ def _write(self, type=None, value=None, traceback=None):
This routine does no locking.
"""
if self.is_upstream:
raise UpstreamDatabaseLockingError("Cannot write to an upstream database")
self._ensure_parent_directories()
# Do not write if exceptions were raised
@@ -1657,24 +1670,38 @@ def query(
"""
valid_trees = ["all", "upstream", "local", self.root] + [u.root for u in self.upstream_dbs]
if install_tree not in valid_trees:
raise ValueError(
f"Invalid install_tree argument to Database.query(). Try one of {valid_trees}"
msg = "Invalid install_tree argument to Database.query()\n"
msg += f"Try one of {', '.join(valid_trees)}"
tty.error(msg)
return []
upstream_results = []
upstreams = self.upstream_dbs
if install_tree not in ("all", "upstream"):
upstreams = [u for u in self.upstream_dbs if u.root == install_tree]
for upstream_db in upstreams:
# queries for upstream DBs need to *not* lock - we may not
# have permissions to do this and the upstream DBs won't know about
# us anyway (so e.g. they should never uninstall specs)
upstream_results.extend(
upstream_db._query(
query_spec,
predicate_fn=predicate_fn,
installed=installed,
explicit=explicit,
start_date=start_date,
end_date=end_date,
hashes=hashes,
in_buildcache=in_buildcache,
origin=origin,
)
or []
)
if install_tree == "all":
databases = [self, *self.upstream_dbs]
elif install_tree == "upstream":
databases = self.upstream_dbs
elif install_tree == "local" or self.root == install_tree:
databases = [self]
else:
databases = [u for u in self.upstream_dbs if u.root == install_tree]
results: List[spack.spec.Spec] = []
for db in databases:
results.extend(
db.query_local(
local_results: Set["spack.spec.Spec"] = set()
if install_tree in ("all", "local") or self.root == install_tree:
local_results = set(
self.query_local(
query_spec,
predicate_fn=predicate_fn,
installed=installed,
@@ -1687,13 +1714,8 @@ def query(
)
)
# Stable deduplication on dag hash picks local specs over upstreams.
if len(databases) > 1:
results = list(llnl.util.lang.dedupe(results, key=lambda x: x.dag_hash()))
# reduce number of comparisons with slow default __lt__
results.sort(key=lambda s: s.name)
results.sort()
results = list(local_results) + list(x for x in upstream_results if x not in local_results)
results.sort() # type: ignore[call-overload]
return results
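
Both variants above rely on the same invariant: when a local and an upstream record share a key (the DAG hash), the local one, listed first, wins, and input order is otherwise preserved. A small sketch of that stable-deduplication property (tuples stand in for specs):

    # Stable dedup on a key: earlier (local) entries win over later (upstream).
    def dedupe(items, key):
        seen = set()
        for item in items:
            k = key(item)
            if k not in seen:
                seen.add(k)
                yield item

    local = [("abc123", "local"), ("def456", "local")]
    upstream = [("abc123", "upstream"), ("789fff", "upstream")]
    merged = list(dedupe(local + upstream, key=lambda rec: rec[0]))
    # -> [('abc123', 'local'), ('def456', 'local'), ('789fff', 'upstream')]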
def query_one(

View File

@@ -581,7 +581,7 @@ def _error_on_nonempty_view_dir(new_root):
# Check if the target path lexists
try:
st = os.lstat(new_root)
except OSError:
except (IOError, OSError):
return
# Empty directories are fine
@@ -861,7 +861,7 @@ def regenerate(self, concrete_roots: List[Spec]) -> None:
):
try:
shutil.rmtree(old_root)
except OSError as e:
except (IOError, OSError) as e:
msg = "Failed to remove old view at %s\n" % old_root
msg += str(e)
tty.warn(msg)
@@ -2554,7 +2554,7 @@ def is_latest_format(manifest):
try:
with open(manifest, encoding="utf-8") as f:
data = syaml.load(f)
except OSError:
except (OSError, IOError):
return True
top_level_key = _top_level_key(data)
changed = spack.schema.env.update(data[top_level_key])
@@ -2634,32 +2634,6 @@ def _ensure_env_dir():
shutil.copy(envfile, target_manifest)
# Copy relative path includes that live inside the environment dir
try:
manifest = EnvironmentManifestFile(environment_dir)
except Exception:
# error handling for bad manifests is handled on other code paths
return
includes = manifest[TOP_LEVEL_KEY].get("include", [])
for include in includes:
if os.path.isabs(include):
continue
abspath = pathlib.Path(os.path.normpath(environment_dir / include))
common_path = pathlib.Path(os.path.commonpath([environment_dir, abspath]))
if common_path != environment_dir:
tty.debug(f"Will not copy relative include from outside environment: {include}")
continue
orig_abspath = os.path.normpath(envfile.parent / include)
if not os.path.exists(orig_abspath):
tty.warn(f"Included file does not exist; will not copy: '{include}'")
continue
fs.touchp(abspath)
shutil.copy(orig_abspath, abspath)
class EnvironmentManifestFile(collections.abc.Mapping):
"""Manages the in-memory representation of a manifest file, and its synchronization

View File

@@ -187,7 +187,7 @@ def path_for_extension(target_name: str, *, paths: List[str]) -> str:
if name == target_name:
return path
else:
raise OSError('extension "{0}" not found'.format(target_name))
raise IOError('extension "{0}" not found'.format(target_name))
def get_module(cmd_name):

View File

@@ -427,7 +427,7 @@ def needs_file(spec, file):
try:
with open(manifest_file, "r", encoding="utf-8") as f:
manifest = s_json.load(f)
except OSError:
except (OSError, IOError):
# if we can't load it, assume it doesn't know about the file.
manifest = {}
return test_path in manifest
@@ -831,7 +831,7 @@ def get_spec_from_file(filename):
try:
with open(filename, "r", encoding="utf-8") as f:
return spack.spec.Spec.from_yaml(f)
except OSError:
except IOError:
return None

View File

@@ -26,7 +26,7 @@ def is_shared_library_elf(filepath):
with open(filepath, "rb") as f:
elf = parse_elf(f, interpreter=True, dynamic_section=True)
return elf.has_pt_dynamic and (elf.has_soname or not elf.has_pt_interp)
except (OSError, ElfParsingError):
except (IOError, OSError, ElfParsingError):
return False

View File

@@ -166,7 +166,7 @@ def filter_shebangs_in_directory(directory, filenames=None):
# Only look at executable, non-symlink files.
try:
st = os.lstat(path)
except OSError:
except (IOError, OSError):
continue
if stat.S_ISLNK(st.st_mode) or stat.S_ISDIR(st.st_mode) or not st.st_mode & is_exe:

View File

@@ -814,7 +814,7 @@ def get_depflags(self, pkg: "spack.package_base.PackageBase") -> int:
# Include build dependencies if pkg is going to be built from sources, or
# if build deps are explicitly requested.
if include_build_deps or not (
cache_only or pkg.spec.installed and pkg.spec.dag_hash() not in self.overwrite
cache_only or pkg.spec.installed and not pkg.spec.dag_hash() in self.overwrite
):
depflag |= dt.BUILD
if self.run_tests(pkg):

View File

@@ -163,7 +163,7 @@ def format_help_sections(self, level):
# lazily add all commands to the parser when needed.
add_all_commands(self)
# Print help on subcommands in neatly formatted sections.
"""Print help on subcommands in neatly formatted sections."""
formatter = self._get_formatter()
# Create a list of subcommand actions. Argparse internals are nasty!

View File

@@ -64,7 +64,7 @@ def from_local_path(path: str):
@staticmethod
def from_url(url: str):
"""Create an anonymous mirror by URL. This method validates the URL."""
if urllib.parse.urlparse(url).scheme not in supported_url_schemes:
if not urllib.parse.urlparse(url).scheme in supported_url_schemes:
raise ValueError(
f'"{url}" is not a valid mirror URL. '
f"Scheme must be one of {supported_url_schemes}."

View File

@@ -383,7 +383,6 @@ def create_opener():
"""Create an opener that can handle OCI authentication."""
opener = urllib.request.OpenerDirector()
for handler in [
urllib.request.ProxyHandler(),
urllib.request.UnknownHandler(),
urllib.request.HTTPSHandler(context=spack.util.web.ssl_create_default_context()),
spack.util.web.SpackHTTPDefaultErrorHandler(),

View File

@@ -2,7 +2,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
# flake8: noqa: F401, E402
# flake8: noqa: F401
"""spack.package defines the public API for Spack packages, by re-exporting useful symbols from
other modules. Packages should import this module, instead of importing from spack.* directly
to ensure forward compatibility with future versions of Spack."""
@@ -13,17 +13,6 @@
# import most common types used in packages
from typing import Dict, List, Optional
class tty:
import llnl.util.tty as _tty
debug = _tty.debug
error = _tty.error
info = _tty.info
msg = _tty.msg
warn = _tty.warn
from llnl.util.filesystem import (
FileFilter,
FileList,
@@ -60,6 +49,7 @@ class tty:
)
from llnl.util.symlink import symlink
# These props will be overridden when the build env is set up.
from spack.build_environment import MakeExecutable
from spack.build_systems.aspell_dict import AspellDictPackage
from spack.build_systems.autotools import AutotoolsPackage
@@ -146,10 +136,8 @@ class tty:
)
from spack.phase_callbacks import run_after, run_before
from spack.spec import Spec
from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable, ProcessError, which, which_string
from spack.util.filesystem import fix_darwin_install_name
from spack.util.prefix import Prefix
from spack.variant import any_combination_of, auto_or_any_combination_of, disjoint_sets
from spack.version import Version, ver

View File

@@ -66,6 +66,10 @@
]
FLAG_HANDLER_TYPE = Callable[[str, Iterable[str]], FLAG_HANDLER_RETURN_TYPE]
"""Allowed URL schemes for spack packages."""
_ALLOWED_URL_SCHEMES = ["http", "https", "ftp", "file", "git"]
#: Filename for the Spack build/install log.
_spack_build_logfile = "spack-build-out.txt"
@@ -697,6 +701,9 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
#: Verbosity level, preserved across installs.
_verbose = None
#: index of patches by sha256 sum, built lazily
_patches_by_hash = None
#: Package homepage where users can find more information about the package
homepage: Optional[str] = None

View File

@@ -6,7 +6,7 @@
import os
import pathlib
import sys
from typing import Any, Dict, Optional, Tuple, Type, Union
from typing import Any, Dict, Optional, Set, Tuple, Type, Union
import llnl.util.filesystem
from llnl.url import allowed_archive
@@ -503,36 +503,38 @@ def patch_for_package(self, sha256: str, pkg: "spack.package_base.PackageBase")
patch_dict["sha256"] = sha256
return from_dict(patch_dict, repository=self.repository)
def update_package(self, pkg_fullname: str) -> None:
def update_packages(self, pkgs_fullname: Set[str]) -> None:
"""Update the patch cache.
Args:
pkg_fullname: package to update.
"""
# remove this package from any patch entries that reference it.
empty = []
for sha256, package_to_patch in self.index.items():
remove = []
for fullname, patch_dict in package_to_patch.items():
if patch_dict["owner"] == pkg_fullname:
remove.append(fullname)
if self.index:
empty = []
for sha256, package_to_patch in self.index.items():
remove = []
for fullname, patch_dict in package_to_patch.items():
if patch_dict["owner"] in pkgs_fullname:
remove.append(fullname)
for fullname in remove:
package_to_patch.pop(fullname)
for fullname in remove:
package_to_patch.pop(fullname)
if not package_to_patch:
empty.append(sha256)
if not package_to_patch:
empty.append(sha256)
# remove any entries that are now empty
for sha256 in empty:
del self.index[sha256]
# remove any entries that are now empty
for sha256 in empty:
del self.index[sha256]
# update the index with per-package patch indexes
pkg_cls = self.repository.get_pkg_class(pkg_fullname)
partial_index = self._index_patches(pkg_cls, self.repository)
for sha256, package_to_patch in partial_index.items():
p2p = self.index.setdefault(sha256, {})
p2p.update(package_to_patch)
for pkg_fullname in pkgs_fullname:
pkg_cls = self.repository.get_pkg_class(pkg_fullname)
partial_index = self._index_patches(pkg_cls, self.repository)
for sha256, package_to_patch in partial_index.items():
p2p = self.index.setdefault(sha256, {})
p2p.update(package_to_patch)
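
Same pattern as the provider-index change in this commit: one pass over the patch index with O(1) set membership replaces one full pass per updated package. A reduced sketch with a toy index (names hypothetical):

    # One scan, set membership per entry, then drop now-empty entries.
    index = {
        "sha-1": {"repo.pkg-a": {"owner": "repo.pkg-a"}},
        "sha-2": {"repo.pkg-b": {"owner": "repo.pkg-b"}},
    }
    pkgs_fullname = {"repo.pkg-a"}  # packages being re-indexed

    for sha256, package_to_patch in list(index.items()):
        for fullname in [f for f, d in package_to_patch.items()
                         if d["owner"] in pkgs_fullname]:
            package_to_patch.pop(fullname)
        if not package_to_patch:
            del index[sha256]  # entry is now empty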
def update(self, other: "PatchCache") -> None:
"""Update this cache with the contents of another.

View File

@@ -2,7 +2,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Classes and functions to manage providers of virtual dependencies"""
from typing import Dict, List, Optional, Set
from typing import Dict, Iterable, List, Optional, Set, Union
import spack.error
import spack.spec
@@ -26,7 +26,7 @@ class _IndexBase:
#: Calling providers_for(spec) will find specs that provide a
#: matching implementation of MPI. Derived class need to construct
#: this attribute according to the semantics above.
providers: Dict[str, Dict[str, Set[str]]]
providers: Dict[str, Dict["spack.spec.Spec", Set["spack.spec.Spec"]]]
def providers_for(self, virtual_spec):
"""Return a list of specs of all packages that provide virtual
@@ -99,66 +99,56 @@ def __init__(
self.repository = repository
self.restrict = restrict
self.providers = {}
if specs:
self.update_packages(specs)
specs = specs or []
for spec in specs:
if not isinstance(spec, spack.spec.Spec):
spec = spack.spec.Spec(spec)
if self.repository.is_virtual_safe(spec.name):
continue
self.update(spec)
def update(self, spec):
def update_packages(self, specs: Iterable[Union[str, "spack.spec.Spec"]]):
"""Update the provider index with additional virtual specs.
Args:
spec: spec potentially providing additional virtual specs
"""
if not isinstance(spec, spack.spec.Spec):
spec = spack.spec.Spec(spec)
for spec in specs:
if not isinstance(spec, spack.spec.Spec):
spec = spack.spec.Spec(spec)
if not spec.name:
# Empty specs do not have a package
return
if not spec.name or self.repository.is_virtual_safe(spec.name):
# Only non-virtual packages with name can provide virtual specs.
continue
msg = "cannot update an index passing the virtual spec '{}'".format(spec.name)
assert not self.repository.is_virtual_safe(spec.name), msg
pkg_provided = self.repository.get_pkg_class(spec.name).provided
for provider_spec_readonly, provided_specs in pkg_provided.items():
for provided_spec in provided_specs:
# TODO: fix this comment.
# We want satisfaction other than flags
provider_spec = provider_spec_readonly.copy()
provider_spec.compiler_flags = spec.compiler_flags.copy()
pkg_provided = self.repository.get_pkg_class(spec.name).provided
for provider_spec_readonly, provided_specs in pkg_provided.items():
for provided_spec in provided_specs:
# TODO: fix this comment.
# We want satisfaction other than flags
provider_spec = provider_spec_readonly.copy()
provider_spec.compiler_flags = spec.compiler_flags.copy()
if spec.intersects(provider_spec, deps=False):
provided_name = provided_spec.name
if spec.intersects(provider_spec, deps=False):
provided_name = provided_spec.name
provider_map = self.providers.setdefault(provided_name, {})
if provided_spec not in provider_map:
provider_map[provided_spec] = set()
provider_map = self.providers.setdefault(provided_name, {})
if provided_spec not in provider_map:
provider_map[provided_spec] = set()
if self.restrict:
provider_set = provider_map[provided_spec]
if self.restrict:
provider_set = provider_map[provided_spec]
# If this package existed in the index before,
# need to take the old versions out, as they're
# now more constrained.
old = {s for s in provider_set if s.name == spec.name}
provider_set.difference_update(old)
# If this package existed in the index before,
# need to take the old versions out, as they're
# now more constrained.
old = set([s for s in provider_set if s.name == spec.name])
provider_set.difference_update(old)
# Now add the new version.
provider_set.add(spec)
# Now add the new version.
provider_set.add(spec)
else:
# Before putting the spec in the map, constrain
# it so that it provides what was asked for.
constrained = spec.copy()
constrained.constrain(provider_spec)
provider_map[provided_spec].add(constrained)
else:
# Before putting the spec in the map, constrain
# it so that it provides what was asked for.
constrained = spec.copy()
constrained.constrain(provider_spec)
provider_map[provided_spec].add(constrained)
def to_json(self, stream=None):
"""Dump a JSON representation of this object.
@@ -193,14 +183,13 @@ def merge(self, other):
spdict[provided_spec] = spdict[provided_spec].union(opdict[provided_spec])
def remove_provider(self, pkg_name):
def remove_providers(self, pkgs_fullname: Set[str]):
"""Remove a provider from the ProviderIndex."""
empty_pkg_dict = []
for pkg, pkg_dict in self.providers.items():
empty_pset = []
for provided, pset in pkg_dict.items():
same_name = set(p for p in pset if p.fullname == pkg_name)
pset.difference_update(same_name)
pset.difference_update(pkgs_fullname)
if not pset:
empty_pset.append(provided)
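
This is the core of the "avoid quadratic complexity through bulk update" commit: ``remove_provider()`` scanned every provider set once per updated package, while ``remove_providers()`` makes a single pass over the index and tests membership in a set. A toy sketch of the shape of the fix (strings stand in for Spec objects, so the structure is self-consistent):

    # Single pass over the whole index; O(1) membership test per provider.
    providers = {
        "mpi": {"mpi@3": {"mpich@4.1", "openmpi@4.1"}},
        "blas": {"blas": {"openblas@0.3"}},
    }

    def remove_providers(pkgs_fullname: set) -> None:
        for pkg_dict in providers.values():
            for pset in pkg_dict.values():
                pset.difference_update(pkgs_fullname)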

View File

@@ -465,7 +465,7 @@ def read(self, stream):
"""Read this index from a provided file object."""
@abc.abstractmethod
def update(self, pkg_fullname):
def update(self, pkgs_fullname: Set[str]):
"""Update the index in memory with information about a package."""
@abc.abstractmethod
@@ -482,8 +482,8 @@ def _create(self):
def read(self, stream):
self.index = spack.tag.TagIndex.from_json(stream, self.repository)
def update(self, pkg_fullname):
self.index.update_package(pkg_fullname.split(".")[-1])
def update(self, pkgs_fullname: Set[str]):
self.index.update_packages({p.split(".")[-1] for p in pkgs_fullname})
def write(self, stream):
self.index.to_json(stream)
@@ -498,15 +498,14 @@ def _create(self):
def read(self, stream):
self.index = spack.provider_index.ProviderIndex.from_json(stream, self.repository)
def update(self, pkg_fullname):
name = pkg_fullname.split(".")[-1]
def update(self, pkgs_fullname: Set[str]):
is_virtual = (
not self.repository.exists(name) or self.repository.get_pkg_class(name).virtual
lambda name: not self.repository.exists(name)
or self.repository.get_pkg_class(name).virtual
)
if is_virtual:
return
self.index.remove_provider(pkg_fullname)
self.index.update(pkg_fullname)
non_virtual_pkgs_fullname = {p for p in pkgs_fullname if not is_virtual(p.split(".")[-1])}
self.index.remove_providers(non_virtual_pkgs_fullname)
self.index.update_packages(non_virtual_pkgs_fullname)
def write(self, stream):
self.index.to_json(stream)
@@ -531,8 +530,8 @@ def read(self, stream):
def write(self, stream):
self.index.to_json(stream)
def update(self, pkg_fullname):
self.index.update_package(pkg_fullname)
def update(self, pkgs_fullname: Set[str]):
self.index.update_packages(pkgs_fullname)
class RepoIndex:
@@ -622,9 +621,7 @@ def _build_index(self, name: str, indexer: Indexer):
if new_index_mtime != index_mtime:
needs_update = self.checker.modified_since(new_index_mtime)
for pkg_name in needs_update:
indexer.update(f"{self.namespace}.{pkg_name}")
indexer.update({f"{self.namespace}.{pkg_name}" for pkg_name in needs_update})
indexer.write(new)
return indexer.index
@@ -1041,7 +1038,7 @@ def _read_config(self) -> Dict[str, str]:
return yaml_data["repo"]
except OSError:
except IOError:
tty.die(f"Error reading {self.config_file} when opening {self.root}")
def get(self, spec: "spack.spec.Spec") -> "spack.package_base.PackageBase":
@@ -1369,7 +1366,7 @@ def create_repo(root, namespace=None, subdir=packages_dir_name):
if subdir != packages_dir_name:
config.write(f" subdirectory: '{subdir}'\n")
except OSError as e:
except (IOError, OSError) as e:
# try to clean up.
if existed:
shutil.rmtree(config_path, ignore_errors=True)

View File

@@ -1,7 +1,6 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import codecs
import collections
import hashlib
import os
@@ -14,7 +13,7 @@
import xml.sax.saxutils
from typing import Dict, Optional
from urllib.parse import urlencode
from urllib.request import Request
from urllib.request import HTTPSHandler, Request, build_opener
import llnl.util.tty as tty
from llnl.util.filesystem import working_dir
@@ -25,10 +24,10 @@
import spack.spec
import spack.tengine
import spack.util.git
import spack.util.web as web_util
from spack.error import SpackError
from spack.util.crypto import checksum
from spack.util.log_parse import parse_log_events
from spack.util.web import ssl_create_default_context
from .base import Reporter
from .extract import extract_test_parts
@@ -434,6 +433,7 @@ def upload(self, filename):
# Compute md5 checksum for the contents of this file.
md5sum = checksum(hashlib.md5, filename, block_size=8192)
opener = build_opener(HTTPSHandler(context=ssl_create_default_context()))
with open(filename, "rb") as f:
params_dict = {
"build": self.buildname,
@@ -443,21 +443,26 @@ def upload(self, filename):
}
encoded_params = urlencode(params_dict)
url = "{0}&{1}".format(self.cdash_upload_url, encoded_params)
request = Request(url, data=f, method="PUT")
request = Request(url, data=f)
request.add_header("Content-Type", "text/xml")
request.add_header("Content-Length", os.path.getsize(filename))
if self.authtoken:
request.add_header("Authorization", "Bearer {0}".format(self.authtoken))
try:
response = web_util.urlopen(request, timeout=SPACK_CDASH_TIMEOUT)
# By default, urllib2 only support GET and POST.
# CDash expects this file to be uploaded via PUT.
request.get_method = lambda: "PUT"
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
if self.current_package_name not in self.buildIds:
resp_value = codecs.getreader("utf-8")(response).read()
resp_value = response.read()
if isinstance(resp_value, bytes):
resp_value = resp_value.decode("utf-8")
match = self.buildid_regexp.search(resp_value)
if match:
buildid = match.group(1)
self.buildIds[self.current_package_name] = buildid
except Exception as e:
print(f"Upload to CDash failed: {e}")
print("Upload to CDash failed: {0}".format(e))
def finalize_report(self):
if self.buildIds:

View File

@@ -4726,10 +4726,7 @@ def __str__(self):
bool_keys = []
kv_keys = []
for key in sorted_keys:
if isinstance(self[key].value, bool):
bool_keys.append(key)
else:
kv_keys.append(key)
bool_keys.append(key) if isinstance(self[key].value, bool) else kv_keys.append(key)
# add spaces before and after key/value variants.
string = io.StringIO()

View File

@@ -5,6 +5,7 @@
import collections
import copy
from collections.abc import Mapping
from typing import Set
import spack.error
import spack.repo
@@ -110,23 +111,20 @@ def merge(self, other):
spkgs, opkgs = self.tags[tag], other.tags[tag]
self.tags[tag] = sorted(list(set(spkgs + opkgs)))
def update_package(self, pkg_name):
"""Updates a package in the tag index.
Args:
pkg_name (str): name of the package to be removed from the index
"""
pkg_cls = self.repository.get_pkg_class(pkg_name)
def update_packages(self, pkg_names: Set[str]):
"""Updates a package in the tag index."""
# Remove the package from the list of packages, if present
for pkg_list in self._tag_dict.values():
if pkg_name in pkg_list:
pkg_list.remove(pkg_name)
if pkg_names.isdisjoint(pkg_list):
continue
pkg_list[:] = [pkg for pkg in pkg_list if pkg not in pkg_names]
# Add it again under the appropriate tags
for tag in getattr(pkg_cls, "tags", []):
tag = tag.lower()
self._tag_dict[tag].append(pkg_cls.name)
for pkg_name in pkg_names:
pkg_cls = self.repository.get_pkg_class(pkg_name)
for tag in getattr(pkg_cls, "tags", []):
tag = tag.lower()
self._tag_dict[tag].append(pkg_cls.name)
class TagIndexError(spack.error.SpackError):

View File

@@ -1,10 +1,8 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import io
import os
import subprocess
from urllib.error import HTTPError
import pytest
@@ -17,7 +15,6 @@
import spack.paths as spack_paths
import spack.repo as repo
import spack.util.git
from spack.test.conftest import MockHTTPResponse
pytestmark = [pytest.mark.usefixtures("mock_packages")]
@@ -165,8 +162,38 @@ def test_import_signing_key(mock_gnupghome):
ci.import_signing_key(signing_key)
def test_download_and_extract_artifacts(tmpdir, monkeypatch):
monkeypatch.setenv("GITLAB_PRIVATE_TOKEN", "faketoken")
class FakeWebResponder:
def __init__(self, response_code=200, content_to_read=[]):
self._resp_code = response_code
self._content = content_to_read
self._read = [False for c in content_to_read]
def open(self, request, data=None, timeout=object()):
return self
def getcode(self):
return self._resp_code
def read(self, length=None):
if len(self._content) <= 0:
return None
if not self._read[-1]:
return_content = self._content[-1]
if length:
self._read[-1] = True
else:
self._read.pop()
self._content.pop()
return return_content
self._read.pop()
self._content.pop()
return None
def test_download_and_extract_artifacts(tmpdir, monkeypatch, working_env):
os.environ.update({"GITLAB_PRIVATE_TOKEN": "faketoken"})
url = "https://www.nosuchurlexists.itsfake/artifacts.zip"
working_dir = os.path.join(tmpdir.strpath, "repro")
@@ -174,13 +201,10 @@ def test_download_and_extract_artifacts(tmpdir, monkeypatch):
spack_paths.test_path, "data", "ci", "gitlab", "artifacts.zip"
)
def _urlopen_OK(*args, **kwargs):
with open(test_artifacts_path, "rb") as f:
return MockHTTPResponse(
"200", "OK", {"Content-Type": "application/zip"}, io.BytesIO(f.read())
)
with open(test_artifacts_path, "rb") as fd:
fake_responder = FakeWebResponder(content_to_read=[fd.read()])
monkeypatch.setattr(ci, "urlopen", _urlopen_OK)
monkeypatch.setattr(ci, "build_opener", lambda handler: fake_responder)
ci.download_and_extract_artifacts(url, working_dir)
@@ -190,11 +214,7 @@ def _urlopen_OK(*args, **kwargs):
found_install = fs.find(working_dir, "install.sh")
assert len(found_install) == 1
def _urlopen_500(*args, **kwargs):
raise HTTPError(url, 500, "Internal Server Error", {}, None)
monkeypatch.setattr(ci, "urlopen", _urlopen_500)
fake_responder._resp_code = 400
with pytest.raises(spack.error.SpackError):
ci.download_and_extract_artifacts(url, working_dir)
@@ -308,14 +328,16 @@ def test_get_spec_filter_list(mutable_mock_env_path, mutable_mock_repo):
e1.add("hypre")
e1.concretize()
# Concretizing the above environment results in the following graphs:
"""
Concretizing the above environment results in the following graphs:
# mpileaks -> mpich (provides mpi virtual dep of mpileaks)
# -> callpath -> dyninst -> libelf
# -> libdwarf -> libelf
# -> mpich (provides mpi dep of callpath)
mpileaks -> mpich (provides mpi virtual dep of mpileaks)
-> callpath -> dyninst -> libelf
-> libdwarf -> libelf
-> mpich (provides mpi dep of callpath)
# hypre -> openblas-with-lapack (provides lapack and blas virtual deps of hypre)
hypre -> openblas-with-lapack (provides lapack and blas virtual deps of hypre)
"""
touched = ["libdwarf"]

View File

@@ -1038,58 +1038,6 @@ def test_init_from_yaml(environment_from_manifest):
assert not e2.specs_by_hash
def test_init_from_yaml_relative_includes(tmp_path):
files = [
"relative_copied/packages.yaml",
"./relative_copied/compilers.yaml",
"repos.yaml",
"./config.yaml",
]
manifest = f"""
spack:
specs: []
include: {files}
"""
e1_path = tmp_path / "e1"
e1_manifest = e1_path / "spack.yaml"
fs.mkdirp(e1_path)
with open(e1_manifest, "w", encoding="utf-8") as f:
f.write(manifest)
for f in files:
fs.touchp(e1_path / f)
e2 = _env_create("test2", init_file=e1_manifest)
for f in files:
assert os.path.exists(os.path.join(e2.path, f))
def test_init_from_yaml_relative_includes_outside_env(tmp_path):
files = ["../outside_env_not_copied/repos.yaml"]
manifest = f"""
spack:
specs: []
include: {files}
"""
# subdir to ensure parent of environment dir is not shared
e1_path = tmp_path / "e1_subdir" / "e1"
e1_manifest = e1_path / "spack.yaml"
fs.mkdirp(e1_path)
with open(e1_manifest, "w", encoding="utf-8") as f:
f.write(manifest)
for f in files:
fs.touchp(e1_path / f)
with pytest.raises(spack.config.ConfigFileError, match="Detected 1 missing include"):
_ = _env_create("test2", init_file=e1_manifest)
def test_env_view_external_prefix(tmp_path, mutable_database, mock_packages):
fake_prefix = tmp_path / "a-prefix"
fake_bin = fake_prefix / "bin"

View File

@@ -139,7 +139,7 @@ def test_gc_except_specific_environments(mutable_database, mutable_mock_env_path
def test_gc_except_nonexisting_dir_env(mutable_database, mutable_mock_env_path, tmpdir):
output = gc("-ye", tmpdir.strpath, fail_on_error=False)
assert "No such environment" in output
assert gc.returncode == 1
gc.returncode == 1
@pytest.mark.db

View File

@@ -26,9 +26,9 @@ def test_manpath_trailing_colon(
else ("--sh", "export %s=%s", ";")
)
# Test that the commands generated by load add the MANPATH prefix
# inspections. Also test that Spack correctly preserves the default/existing
# manpath search path via a trailing colon
"""Test that the commands generated by load add the MANPATH prefix
inspections. Also test that Spack correctly preserves the default/existing
manpath search path via a trailing colon"""
install("mpileaks")
sh_out = load(shell, "mpileaks")
@@ -81,9 +81,7 @@ def extract_value(output, variable):
# Finally, do we list them in topo order?
for i, pkg in enumerate(pkgs):
assert {s.name for s in mpileaks_spec[pkg].traverse(direction="parents")}.issubset(
pkgs[: i + 1]
)
set(s.name for s in mpileaks_spec[pkg].traverse(direction="parents")) in set(pkgs[:i])
# Lastly, do we keep track that mpileaks was loaded?
assert (

View File

@@ -1,6 +1,17 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import pathlib
import pytest
import spack.concretize
import spack.config
import spack.environment as ev
import spack.paths
import spack.repo
import spack.spec
import spack.util.spack_yaml as syaml
"""
These tests include the following package DAGs:
@@ -31,18 +42,6 @@
y
"""
import pathlib
import pytest
import spack.concretize
import spack.config
import spack.environment as ev
import spack.paths
import spack.repo
import spack.spec
import spack.util.spack_yaml as syaml
@pytest.fixture
def test_repo(mutable_config, monkeypatch, mock_stage):

View File

@@ -182,7 +182,7 @@ def test_requirement_adds_version_satisfies(
# Sanity check: early version of T does not include U
s0 = spack.concretize.concretize_one("t@2.0")
assert "u" not in s0
assert not ("u" in s0)
conf_str = """\
packages:

View File

@@ -165,8 +165,10 @@ def test_installed_upstream(upstream_and_downstream_db, tmpdir):
upstream_db._read()
for dep in spec.traverse(root=False):
assert downstream_db.get_by_hash(dep.dag_hash()) is not None
assert upstream_db.get_by_hash(dep.dag_hash()) is not None
record = downstream_db.get_by_hash(dep.dag_hash())
assert record is not None
with pytest.raises(spack.database.ForbiddenLockError):
upstream_db.get_by_hash(dep.dag_hash())
new_spec = spack.concretize.concretize_one("w")
downstream_db.add(new_spec)
@@ -256,7 +258,7 @@ def test_cannot_write_upstream(tmp_path, mock_packages, config):
# Create it as an upstream
db = spack.database.Database(str(tmp_path), is_upstream=True)
with pytest.raises(spack.database.UpstreamDatabaseLockingError):
with pytest.raises(spack.database.ForbiddenLockError):
db.add(spack.concretize.concretize_one("pkg-a"))

View File

@@ -458,7 +458,7 @@ def test_log_install_without_build_files(install_mockery):
spec = spack.concretize.concretize_one("trivial-install-test-package")
# Attempt installing log without the build log file
with pytest.raises(OSError, match="No such file or directory"):
with pytest.raises(IOError, match="No such file or directory"):
spack.installer.log(spec.package)

View File

@@ -470,7 +470,7 @@ def _repoerr(repo, name):
# The call to install_tree will raise the exception since not mocking
# creation of dependency package files within *install* directories.
with pytest.raises(OSError, match=path if sys.platform != "win32" else ""):
with pytest.raises(IOError, match=path if sys.platform != "win32" else ""):
inst.dump_packages(spec, path)
# Now try the error path, which requires the mock directory structure

View File

@@ -82,7 +82,7 @@ def test_non_existing_src(self, stage):
"""Test using a non-existing source."""
with fs.working_dir(str(stage)):
with pytest.raises(OSError, match="No such file or directory"):
with pytest.raises(IOError, match="No such file or directory"):
fs.copy("source/none", "dest")
def test_multiple_src_file_dest(self, stage):
@@ -139,7 +139,7 @@ def test_non_existing_src(self, stage):
"""Test using a non-existing source."""
with fs.working_dir(str(stage)):
with pytest.raises(OSError, match="No such file or directory"):
with pytest.raises(IOError, match="No such file or directory"):
fs.install("source/none", "dest")
def test_multiple_src_file_dest(self, stage):
@@ -220,7 +220,7 @@ def test_non_existing_src(self, stage):
"""Test using a non-existing source."""
with fs.working_dir(str(stage)):
with pytest.raises(OSError, match="No such file or directory"):
with pytest.raises(IOError, match="No such file or directory"):
fs.copy_tree("source/none", "dest")
def test_parent_dir(self, stage):
@@ -301,7 +301,7 @@ def test_non_existing_src(self, stage):
"""Test using a non-existing source."""
with fs.working_dir(str(stage)):
with pytest.raises(OSError, match="No such file or directory"):
with pytest.raises(IOError, match="No such file or directory"):
fs.install_tree("source/none", "dest")
def test_parent_dir(self, stage):

View File

@@ -93,26 +93,28 @@
pass
#: This is a list of filesystem locations to test locks in. Paths are
#: expanded so that %u is replaced with the current username. '~' is also
#: legal and will be expanded to the user's home directory.
#:
#: Tests are skipped for directories that don't exist, so you'll need to
#: update this with the locations of NFS, Lustre, and other mounts on your
#: system.
"""This is a list of filesystem locations to test locks in. Paths are
expanded so that %u is replaced with the current username. '~' is also
legal and will be expanded to the user's home directory.
Tests are skipped for directories that don't exist, so you'll need to
update this with the locations of NFS, Lustre, and other mounts on your
system.
"""
locations = [
tempfile.gettempdir(),
os.path.join("/nfs/tmp2/", getpass.getuser()),
os.path.join("/p/lscratch*/", getpass.getuser()),
]
#: This is the longest a failed multiproc test will take.
#: Barriers will time out and raise an exception after this interval.
#: In MPI mode, barriers don't time out (they hang). See mpi_multiproc_test.
"""This is the longest a failed multiproc test will take.
Barriers will time out and raise an exception after this interval.
In MPI mode, barriers don't time out (they hang). See mpi_multiproc_test.
"""
barrier_timeout = 5
#: This is the lock timeout for expected failures.
#: This may need to be higher for some filesystems.
"""This is the lock timeout for expected failures.
This may need to be higher for some filesystems."""
lock_fail_timeout = 0.1
@@ -284,8 +286,9 @@ def wait(self):
comm.Barrier() # barrier after each MPI test.
#: ``multiproc_test()`` should be called by tests below.
#: ``multiproc_test()`` will work for either MPI runs or for local runs.
"""``multiproc_test()`` should be called by tests below.
``multiproc_test()`` will work for either MPI runs or for local runs.
"""
multiproc_test = mpi_multiproc_test if mpi else local_multiproc_test
@@ -1336,7 +1339,7 @@ def test_poll_lock_exception(tmpdir, monkeypatch, err_num, err_msg):
"""Test poll lock exception handling."""
def _lockf(fd, cmd, len, start, whence):
raise OSError(err_num, err_msg)
raise IOError(err_num, err_msg)
with tmpdir.as_cwd():
lockfile = "lockfile"
@@ -1348,7 +1351,7 @@ def _lockf(fd, cmd, len, start, whence):
if err_num in [errno.EAGAIN, errno.EACCES]:
assert not lock._poll_lock(fcntl.LOCK_EX)
else:
with pytest.raises(OSError, match=err_msg):
with pytest.raises(IOError, match=err_msg):
lock._poll_lock(fcntl.LOCK_EX)
monkeypatch.undo()
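The errno branch in this test mirrors the usual non-blocking lock dance: EAGAIN and EACCES signal contention (poll again later), while any other errno is a genuine failure. A hedged sketch of that dispatch, assuming POSIX fcntl semantics:

import errno
import fcntl

def try_lock(fd: int) -> bool:
    """Attempt a non-blocking exclusive lock; True on success."""
    try:
        fcntl.lockf(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        return True
    except OSError as e:
        if e.errno in (errno.EAGAIN, errno.EACCES):
            return False  # lock held elsewhere: poll again later
        raise             # real error (e.g. EBADF): propagate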

View File

@@ -238,7 +238,10 @@ def test_exclude(self, modulefile_content, module_configuration, host_architectu
assert len([x for x in content if "module load " in x]) == 1
with pytest.raises(FileNotFoundError):
# Catch "Exception" to avoid using FileNotFoundError on Python 3
# and IOError on Python 2 or common bases like EnvironmentError
# which are not officially documented
with pytest.raises(Exception):
modulefile_content(f"callpath target={host_architecture_str}")
content = modulefile_content(f"zmpi target={host_architecture_str}")

View File

@@ -132,8 +132,7 @@ def test_reporters_extract_skipped(state):
parts = spack.reporters.extract.extract_test_parts("fake", outputs)
assert len(parts) == 1
assert parts[0]["completed"] == spack.reporters.extract.completed["skipped"]
parts[0]["completed"] == expected
def test_reporters_skip_new():
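One side of the pair above is a bare comparison rather than an assertion: the expression is evaluated and its result thrown away, so the test can no longer fail (linters such as flake8-bugbear flag this pattern). The same pattern recurs in the sbang and stage hunks further down. A minimal demonstration:

def test_silently_passes():
    (1 + 1) == 3         # False, but the result is discarded: no failure

def test_actually_checks():
    assert (1 + 1) == 3  # raises AssertionError: the test fails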

View File

@@ -198,7 +198,7 @@ def script_dir(sbang_line):
],
)
def test_shebang_interpreter_regex(shebang, interpreter):
assert sbang.get_interpreter(shebang) == interpreter
sbang.get_interpreter(shebang) == interpreter
def test_shebang_handling(script_dir, sbang_line):

View File

@@ -428,29 +428,31 @@ def test_copy_through_spec_build_interface(self):
c2 = s["mpileaks"]["mpileaks"].copy()
assert c0 == c1 == c2 == s
# Here is the graph with deptypes labeled (assume all packages have a 'dt'
# prefix). Arrows are marked with the deptypes ('b' for 'build', 'l' for
# 'link', 'r' for 'run').
"""
Here is the graph with deptypes labeled (assume all packages have a 'dt'
prefix). Arrows are marked with the deptypes ('b' for 'build', 'l' for
'link', 'r' for 'run').
# use -bl-> top
use -bl-> top
# top -b-> build1
# top -bl-> link1
# top -r-> run1
top -b-> build1
top -bl-> link1
top -r-> run1
# build1 -b-> build2
# build1 -bl-> link2
# build1 -r-> run2
build1 -b-> build2
build1 -bl-> link2
build1 -r-> run2
# link1 -bl-> link3
link1 -bl-> link3
# run1 -bl-> link5
# run1 -r-> run3
run1 -bl-> link5
run1 -r-> run3
# link3 -b-> build2
# link3 -bl-> link4
link3 -b-> build2
link3 -bl-> link4
# run3 -b-> build3
run3 -b-> build3
"""
@pytest.mark.parametrize(
"spec_str,deptypes,expected",

View File

@@ -125,7 +125,7 @@ def check_expand_archive(stage, stage_name, expected_file_list):
assert os.path.isfile(fn)
with open(fn, encoding="utf-8") as _file:
assert _file.read() == contents
_file.read() == contents
def check_fetch(stage, stage_name):

View File

@@ -154,7 +154,6 @@ def test_tag_no_tags(mock_packages):
def test_tag_update_package(mock_packages):
mock_index = mock_packages.tag_index
index = spack.tag.TagIndex(repository=mock_packages)
for name in spack.repo.all_package_names():
index.update_package(name)
index.update_packages(set(spack.repo.all_package_names()))
ensure_tags_results_equal(mock_index.tags, index.tags)
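This call site pairs with the update_packages bulk API referenced in the "avoid quadratic complexity through bulk update" commit message: N separate update_package calls make the index reconcile its tag mappings N times, while one bulk call reconciles once. An illustrative sketch of the difference (a toy index, not Spack's implementation):

class ToyTagIndex:
    def __init__(self):
        self.tags = {}  # tag -> list of package names

    def update_packages(self, names, tags_of):
        """Bulk update: purge stale entries once, then insert each name once."""
        for pkgs in self.tags.values():
            pkgs[:] = [p for p in pkgs if p not in names]  # single sweep
        for name in names:
            for tag in tags_of(name):
                self.tags.setdefault(tag, []).append(name)

# One update_packages(all_names) call sweeps the index once; calling a
# single-package variant N times would repeat that sweep N times.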

View File

@@ -20,7 +20,12 @@
datadir = os.path.join(spack_root, "lib", "spack", "spack", "test", "data", "compression")
ext_archive = {ext: f"Foo.{ext}" for ext in llnl.url.ALLOWED_ARCHIVE_TYPES if "TAR" not in ext}
ext_archive = {}
[
ext_archive.update({ext: ".".join(["Foo", ext])})
for ext in llnl.url.ALLOWED_ARCHIVE_TYPES
if "TAR" not in ext
]
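Both spellings above build the same mapping, but the list-comprehension form runs purely for its side effects and materializes a throwaway list of None values; the dict comprehension states the intent directly:

exts = ["gz", "bz2", "xz"]  # stand-in for llnl.url.ALLOWED_ARCHIVE_TYPES

side_effect = {}
junk = [side_effect.update({e: f"Foo.{e}"}) for e in exts]
assert junk == [None, None, None]  # the list itself is waste

direct = {e: f"Foo.{e}" for e in exts}
assert direct == side_effect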
# Spack does not use Python native handling for tarballs or zip
# Don't test tarballs or zip in native test
native_archive_list = [

View File

@@ -204,13 +204,13 @@ def test_no_editor():
def assert_exec(exe, args):
assert False
with pytest.raises(OSError, match=r"No text editor found.*"):
with pytest.raises(EnvironmentError, match=r"No text editor found.*"):
ed.editor("/path/to/file", exec_fn=assert_exec)
def assert_exec(exe, args):
return False
with pytest.raises(OSError, match=r"No text editor found.*"):
with pytest.raises(EnvironmentError, match=r"No text editor found.*"):
ed.editor("/path/to/file", exec_fn=assert_exec)
@@ -220,5 +220,5 @@ def test_exec_fn_executable(editor_var, good_exe, bad_exe):
assert ed.editor(exec_fn=ed.executable)
os.environ[editor_var] = bad_exe
with pytest.raises(OSError, match=r"No text editor found.*"):
with pytest.raises(EnvironmentError, match=r"No text editor found.*"):
ed.editor(exec_fn=ed.executable)

View File

@@ -9,7 +9,7 @@
defined by the EDITOR environment variable if VISUAL is not set or the
specified editor fails (e.g. no DISPLAY for a graphical editor). If
neither variable is set, we fall back to one of several common editors,
raising an OSError if we are unable to find one.
raising an EnvironmentError if we are unable to find one.
"""
import os
import shlex
@@ -141,7 +141,7 @@ def try_env_var(var):
return True
# Fail if nothing could be found
raise OSError(
raise EnvironmentError(
"No text editor found! Please set the VISUAL and/or EDITOR "
"environment variable(s) to your preferred text editor."
)
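The module docstring above spells out the selection protocol: try $VISUAL, fall back to $EDITOR, then probe a list of common editors, and raise only when everything fails. A hedged, self-contained sketch of that protocol (function name and candidate list are assumptions, not Spack's code):

import os
import shutil

def pick_editor(candidates=("nano", "vim", "vi", "emacs")) -> str:
    for var in ("VISUAL", "EDITOR"):         # environment overrides first
        words = os.environ.get(var, "").split()
        if words and shutil.which(words[0]):
            return " ".join(words)
    for exe in candidates:                   # then common editors on PATH
        if shutil.which(exe):
            return exe
    raise OSError(
        "No text editor found! Please set the VISUAL and/or EDITOR "
        "environment variable(s) to your preferred text editor."
    )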

View File

@@ -46,7 +46,7 @@ def _process_ld_so_conf_queue(queue):
try:
with open(p, "rb") as f:
lines = f.readlines()
except OSError:
except (IOError, OSError):
continue
for line in lines:
@@ -132,7 +132,7 @@ def host_dynamic_linker_search_paths():
if os.path.exists(possible_conf):
conf_file = possible_conf
except (OSError, elf_utils.ElfParsingError):
except (IOError, OSError, elf_utils.ElfParsingError):
pass
# Note: ld_so_conf doesn't error if the file does not exist.
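Both hunks in this file only widen the except clause; on Python 3, except (IOError, OSError) is equivalent to except OSError, since the names alias the same class. The defensive-read idiom being guarded is:

def read_config_lines(path: str) -> list:
    """Return the file's lines, or [] if it is missing or unreadable."""
    try:
        with open(path, "rb") as f:
            return f.readlines()
    except OSError:  # covers the same errors as (IOError, OSError)
        return []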

View File

@@ -436,8 +436,8 @@ def _dump_annotated(handler, data, stream=None):
width = max(clen(a) for a in _ANNOTATIONS)
formats = ["%%-%ds %%s\n" % (width + cextra(a)) for a in _ANNOTATIONS]
for fmt, annotation, line in zip(formats, _ANNOTATIONS, lines):
stream.write(fmt % (annotation, line))
for f, a, l in zip(formats, _ANNOTATIONS, lines):
stream.write(f % (a, l))
if getvalue:
return getvalue()
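The zip above pads each annotation to a common column before its source line; the clen/cextra helpers exist because ANSI color escapes inflate len() without taking up display width. A plain-text approximation of the column layout (ignoring the color-aware length handling):

annotations = ["repo.py:10", "pkg.py:7"]
lines = ["x = 1", "y = 2"]

width = max(len(a) for a in annotations)     # widest annotation
for note, line in zip(annotations, lines):
    print("%-*s %s" % (width, note, line))   # left-justified column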

View File

@@ -1,7 +1,6 @@
[project]
name="spack"
description="The spack package manager"
requires-python=">=3.6"
dependencies=[
"clingo",
"setuptools",
@@ -68,42 +67,9 @@ features = [
"ci",
]
[tool.ruff]
line-length = 99
extend-include = ["bin/spack"]
extend-exclude = ["lib/spack/external", "*.pyi"]
[tool.ruff.format]
skip-magic-trailing-comma = true
[tool.ruff.lint]
extend-select = ["I"]
ignore = ["E731", "E203"]
[tool.ruff.lint.isort]
split-on-trailing-comma = false
section-order = [
"future",
"standard-library",
"third-party",
"archspec",
"llnl",
"spack",
"first-party",
"local-folder",
]
[tool.ruff.lint.isort.sections]
spack = ["spack"]
archspec = ["archspec"]
llnl = ["llnl"]
[tool.ruff.lint.per-file-ignores]
"var/spack/repos/*/package.py" = ["F403", "F405", "F811", "F821"]
"*-ci-package.py" = ["F403", "F405", "F821"]
[tool.black]
line-length = 99
target-version = ['py36', 'py37', 'py38', 'py39', 'py310']
include = '(lib/spack|var/spack/repos)/.*\.pyi?$|bin/spack$'
extend-exclude = 'lib/spack/external'
skip_magic_trailing_comma = true

View File

@@ -59,6 +59,11 @@ default:
SPACK_TARGET_PLATFORM: "linux"
SPACK_TARGET_ARCH: "aarch64"
.linux_power:
variables:
SPACK_TARGET_PLATFORM: "linux"
SPACK_TARGET_ARCH: "ppc64le"
.win64-msvc2019:
variables:
SPACK_TARGET_PLATFORM: "win64"
@@ -347,6 +352,35 @@ e4s-oneapi-build:
- artifacts: True
job: e4s-oneapi-generate
########################################
# E4S on Power
########################################
.e4s-power-generate-tags-and-image:
image: { "name": "ghcr.io/spack/ubuntu20.04-runner-ppc64-gcc-11.4:2023.08.01", "entrypoint": [""] }
tags: ["spack", "public", "large", "ppc64le"]
.e4s-power:
extends: [".linux_power"]
variables:
SPACK_CI_STACK_NAME: e4s-power
e4s-power-generate:
extends: [ ".e4s-power", ".generate-x86_64", ".e4s-power-generate-tags-and-image"]
variables:
# Override concretization pool for metal runners
SPACK_CONCRETIZE_JOBS: 16
e4s-power-build:
extends: [ ".e4s-power", ".build" ]
trigger:
include:
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
job: e4s-power-generate
strategy: depend
needs:
- artifacts: True
job: e4s-power-generate
#########################################
# Build tests for different build-systems
#########################################
@@ -539,6 +573,57 @@ data-vis-sdk-build:
- artifacts: True
job: data-vis-sdk-generate
########################################
# AWS ISC Applications (x86_64)
########################################
# Call this AFTER .*-generate
.aws-isc-overrides:
# This controls the image for the generate step; the build step is controlled by spack.yaml.
# Note that the generator emits OS info for the build, so these should be the same.
image: { "name": "ghcr.io/spack/e4s-amazonlinux-2:v2023-03-09", "entrypoint": [""] }
.aws-isc:
extends: [ ".linux_x86_64_v3" ]
variables:
SPACK_CI_STACK_NAME: aws-isc
aws-isc-generate:
extends: [ ".aws-isc", ".generate-x86_64", ".aws-isc-overrides", ".tags-x86_64_v4" ]
aws-isc-build:
extends: [ ".aws-isc", ".build" ]
trigger:
include:
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
job: aws-isc-generate
strategy: depend
needs:
- artifacts: True
job: aws-isc-generate
# Parallel Pipeline for aarch64 (reuses override image, but generates and builds on aarch64)
.aws-isc-aarch64:
extends: [ ".linux_aarch64" ]
variables:
SPACK_CI_STACK_NAME: aws-isc-aarch64
aws-isc-aarch64-generate:
extends: [ ".aws-isc-aarch64", ".generate-aarch64", ".aws-isc-overrides" ]
aws-isc-aarch64-build:
extends: [ ".aws-isc-aarch64", ".build" ]
trigger:
include:
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
job: aws-isc-aarch64-generate
strategy: depend
needs:
- artifacts: True
job: aws-isc-aarch64-generate
########################################
# Spack Tutorial
########################################

View File

@@ -0,0 +1,22 @@
ci:
pipeline-gen:
- build-job:
tags: ["ppc64le"]
# Power runners override the default script
# - don't download make
# - no intermediate keys
script::
- uname -a || true
- grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true
- nproc
- . "./share/spack/setup-env.sh"
- spack --version
- spack arch
- cd ${SPACK_CONCRETE_ENV_DIR}
- spack env activate --without-view .
- if [ -n "$SPACK_BUILD_JOBS" ]; then spack config add "config:build_jobs:$SPACK_BUILD_JOBS"; fi
- spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
- mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data
- if [[ -r /mnt/key/e4s.gpg ]]; then spack gpg trust /mnt/key/e4s.gpg; fi
- if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi
- spack --color=always --backtrace ci rebuild > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt) 2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)

View File

@@ -0,0 +1,3 @@
concretizer:
targets:
granularity: generic

View File

@@ -0,0 +1,141 @@
spack:
view: false
packages:
all:
providers:
blas:
- openblas
mkl:
- intel-oneapi-mkl
mpi:
- openmpi
- mpich
variants: +mpi
tbb:
require: intel-tbb
binutils:
variants: +ld +gold +headers +libiberty ~nls
version:
- 2.36.1
doxygen:
version:
- 1.8.20
elfutils:
variants: ~nls
hdf5:
variants: +fortran +hl +shared
libfabric:
variants: fabrics=efa,tcp,udp,sockets,verbs,shm,mrail,rxd,rxm
libunwind:
variants: +pic +xz
mesa:
variants: ~llvm
mpich:
variants: ~wrapperrpath netmod=ofi device=ch4
ncurses:
variants: +termlib
openblas:
variants: threads=openmp
openmpi:
variants: fabrics=ofi +legacylaunchers
openturns:
version:
- '1.18'
relion:
variants: ~mklfft
# texlive:
# version: [20210325]
trilinos:
variants: +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext
+ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu
+nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos
+teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
xz:
variants: +pic
definitions:
- compiler_specs:
- gcc@11.2
# Licensing OK?
# - intel-oneapi-compilers@2022.1
# - nvhpc
- app_specs:
- bwa
# Depends on simde which requires newer compiler?
#- bowtie2
# Requires x86_64 specific ASM
#- cistem
- cromwell
- fastqc
- flux-sched
- flux-core
- flux-pmix
- gatk
- gromacs
- lammps
- wrf build_type=dm+sm
- mfem
- mpas-model ^parallelio+pnetcdf
- nextflow
- octave
- openfoam
- osu-micro-benchmarks
- parallel
# - paraview
- picard
- quantum-espresso
- raja
# Depends on bowtie2 -> simde which requires newer compiler?
#- rsem
# Errors on texlive
#- rstudio
- salmon
- samtools
- seqtk
- snakemake
- star
# Requires gcc@9:
#- ufs-weather-model
# requires LLVM which fails without constraint
#- visit
- lib_specs:
- openmpi fabrics=ofi
- openmpi fabrics=ofi +legacylaunchers
- openmpi fabrics=auto
- mpich
- libfabric
- compiler:
- '%gcc@7.3.1'
- target:
- target=aarch64
specs:
- matrix:
- - $app_specs
- - $compiler
- - $target
- matrix:
- - $lib_specs
- - $compiler
- - $target
- matrix:
- - $compiler_specs
- - $compiler
- - $target
ci:
pipeline-gen:
- build-job:
image: {name: ghcr.io/spack/e4s-amazonlinux-2:v2023-03-09, entrypoint: ['']}
cdash:
build-group: AWS Packages

View File

@@ -0,0 +1,153 @@
spack:
view: false
packages:
all:
providers:
blas:
- openblas
mkl:
- intel-oneapi-mkl
mpi:
- openmpi
- mpich
variants: +mpi
tbb:
require: intel-tbb
binutils:
variants: +ld +gold +headers +libiberty ~nls
version:
- 2.36.1
doxygen:
version:
- 1.8.20
elfutils:
variants: ~nls
hdf5:
variants: +fortran +hl +shared
libfabric:
variants: fabrics=efa,tcp,udp,sockets,verbs,shm,mrail,rxd,rxm
libunwind:
variants: +pic +xz
mesa:
variants: ~llvm
mpich:
variants: ~wrapperrpath netmod=ofi device=ch4
ncurses:
variants: +termlib
openblas:
variants: threads=openmp
openmpi:
variants: fabrics=ofi +legacylaunchers
openturns:
version:
- '1.18'
relion:
variants: ~mklfft
# texlive:
# version: [20210325]
trilinos:
variants: +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext
+ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu
+nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos
+teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
xz:
variants: +pic
definitions:
- compiler_specs:
- gcc@11.2
# Licensing OK?
# - intel-oneapi-compilers@2022.1
# - nvhpc
- cuda_specs:
# Disabled for consistency with aarch64
#- relion +cuda cuda_arch=70
- raja +cuda cuda_arch=70
- mfem +cuda cuda_arch=70
- app_specs:
- bwa
# Disabled for consistency with aarch64
#- bowtie2
# Disabled for consistency with aarch64
#- cistem
- cromwell
- fastqc
- flux-sched
- flux-core
- flux-pmix
- gatk
- gromacs
- lammps
- wrf build_type=dm+sm
- mfem
- mpas-model ^parallelio+pnetcdf
- nextflow
- octave
- openfoam
- osu-micro-benchmarks
- parallel
# - paraview
- picard
- quantum-espresso
# Build broken for gcc@7.3.1 x86_64_v4 (error: '_mm512_loadu_epi32' was not declared in this scope)
#- raja
# Disabled for consistency with aarch64
#- rsem
# Errors on texlive
#- rstudio
- salmon
- samtools
- seqtk
- snakemake
- star
# Requires gcc@9:
#- ufs-weather-model
# Disabled for consistency with aarch64
#- visit
- lib_specs:
- openmpi fabrics=ofi
- openmpi fabrics=ofi +legacylaunchers
- openmpi fabrics=auto
- mpich
- libfabric
- compiler:
- '%gcc@7.3.1'
- target:
- target=x86_64_v3
specs:
- matrix:
- - $cuda_specs
- - $compiler
- - $target
- matrix:
- - $app_specs
- - $compiler
- - $target
- matrix:
- - $lib_specs
- - $compiler
- - $target
- matrix:
- - $compiler_specs
- - $compiler
- - $target
ci:
pipeline-gen:
- build-job:
image: {name: ghcr.io/spack/e4s-amazonlinux-2:v2023-03-09, entrypoint: ['']}
cdash:
build-group: AWS Packages

View File

@@ -0,0 +1,271 @@
spack:
view: false
concretizer:
reuse: false
unify: false
packages:
all:
require: "%gcc@9.4.0 target=ppc64le"
compiler: [gcc@9.4.0]
providers:
blas: [openblas]
mpi: [mpich]
variants: +mpi cuda_arch=70
binutils:
variants: +ld +gold +headers +libiberty ~nls
hdf5:
variants: +fortran +hl +shared
libfabric:
variants: fabrics=sockets,tcp,udp,rxm
openblas:
variants: threads=openmp
trilinos:
variants: +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext
+ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu
+nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos
+teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
xz:
variants: +pic
mpi:
require: mpich
mpich:
require: '~wrapperrpath ~hwloc %gcc@9.4.0 target=ppc64le'
ncurses:
require: '@6.3 +termlib %gcc@9.4.0 target=ppc64le'
faodel:
require: "~tcmalloc %gcc@9.4.0 target=ppc64le"
tbb:
require: intel-tbb
vtk-m:
require: "+examples %gcc@9.4.0 target=ppc64le"
cuda:
require: "@11.4.4 %gcc@9.4.0 target=ppc64le"
paraview:
require: "+examples %gcc@9.4.0 target=ppc64le"
specs:
# CPU
- adios
- alquimia
- aml
- amrex
- arborx
- argobots
- axom
- bolt
- boost
- bricks
- butterflypack
- cabana
- caliper
- chai
- chapel ~rocm ~cuda
- charliecloud
- conduit
- cp2k +mpi
- datatransferkit
- drishti
- dxt-explorer
- dyninst
- exaworks
- fftx
- flecsi
- flit
- flux-core
- fortrilinos
- gasnet
- ginkgo
- globalarrays
- gmp
- gotcha
- gptune
- gromacs +cp2k ^cp2k +mpi build_system=cmake
- h5bench
- hdf5-vol-async
- hdf5-vol-cache
- hdf5-vol-log
- heffte +fftw
- hpctoolkit
- hpx networking=mpi
- hypre
- kokkos +openmp
- kokkos-kernels +openmp
- laghos
- lammps
- lbann
- legion
- libnrm
- libquo
- libunwind
- loki
- mercury
- metall
- mfem
- mgard +serial +openmp +timing +unstructured ~cuda
- mpark-variant
- mpifileutils ~xattr
- nccmp
- nco
- netlib-scalapack
- nrm
- nvhpc
- nwchem
- omega-h
- openfoam
- openmpi
- openpmd-api
- papi
- papyrus
- paraview ~cuda ~rocm
- parsec ~cuda
- pdt
- petsc
- plasma
- plumed
- precice
- pruners-ninja
- pumi
- py-amrex
- py-h5py
- py-jupyterhub
- py-libensemble
- py-petsc4py
- qthreads scheduler=distrib
- quantum-espresso
- raja
- rempi
- scr
- slate ~cuda
- slepc
- stc
- strumpack ~slate
- sundials
- superlu
- superlu-dist
- swig@4.0.2-fortran
- sz3
- tasmanian
- tau +mpi +python # +syscall fails: https://github.com/spack/spack/pull/40830#issuecomment-1790799772; tau: has issue with `spack env depfile` build
- trilinos +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
- turbine
- umap
- umpire
- upcxx
- wannier90
- warpx +python
- wps
- wrf
- xyce +mpi +shared +pymi +pymi_static_tpls
# INCLUDED IN ECP DAV CPU
- adios2
- ascent
- darshan-runtime
- darshan-util
- faodel
- hdf5
- libcatalyst
- parallel-netcdf
- py-cinemasci
- sz
- unifyfs
- veloc
# - visit # libext, libxkbfile, libxrender, libxt, silo (https://github.com/spack/spack/issues/39538), cairo
- vtk-m
- zfp
# - ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 ~paraview +pnetcdf +sz +unifyfs +veloc ~visit +vtkm +zfp # +visit: libext, libxkbfile, libxrender, libxt, silo (https://github.com/spack/spack/issues/39538), cairo
# --
# - dealii # fltk: https://github.com/spack/spack/issues/38791
# - geopm-runtime # cairo: *** No autoreconf found, please install it ***
# - glvis # glvis: https://github.com/spack/spack/issues/42839
# - libpressio +bitgrooming +bzip2 ~cuda ~cusz +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp # py-numcodecs: gcc: error: unrecognized command line option '-mno-sse2'; did you mean '-mno-isel'? gcc: error: unrecognized command line option '-mno-avx2'
# - phist +mpi # ghost@develop: gcc-9: error: unrecognized command line option '-march=native'; did you mean '-mcpu=native'?
# - variorum # variorum: https://github.com/spack/spack/issues/38786
# PYTHON PACKAGES
- opencv +python3
- py-jax
- py-jupyterlab
- py-matplotlib
- py-mpi4py
- py-notebook
- py-numba
- py-numpy
- py-openai
- py-pandas
- py-plotly
- py-pooch
- py-pytest
- py-scikit-learn
- py-scipy
- py-seaborn
# - py-horovod # py-torch, py-tensorflow
# - py-tensorflow # error
# - py-torch # error
# CUDA NOARCH
- bricks +cuda
- cabana +cuda ^kokkos +wrapper +cuda_lambda +cuda cuda_arch=70
- flux-core +cuda
- hpctoolkit +cuda
- papi +cuda
- tau +mpi +cuda
# --
# - legion +cuda # legion: needs NVIDIA driver
# CUDA 70
- amrex +cuda cuda_arch=70
- arborx +cuda cuda_arch=70 ^kokkos +wrapper
- caliper +cuda cuda_arch=70
- chai +cuda cuda_arch=70 ^umpire ~shared
- ecp-data-vis-sdk ~rocm +adios2 ~ascent +hdf5 +vtkm +zfp ~paraview +cuda cuda_arch=70
- exago +mpi +python +raja +hiop ~rocm +cuda cuda_arch=70 ~ipopt ^hiop@1.0.0 ~sparse +mpi +raja ~rocm +cuda cuda_arch=70 #^raja@0.14.0
- flecsi +cuda cuda_arch=70
- ginkgo +cuda cuda_arch=70
- gromacs +cuda cuda_arch=70
- heffte +cuda cuda_arch=70
- hpx +cuda cuda_arch=70
- hypre +cuda cuda_arch=70
- kokkos +wrapper +cuda cuda_arch=70
- kokkos-kernels +cuda cuda_arch=70 ^kokkos +wrapper +cuda cuda_arch=70
- magma +cuda cuda_arch=70
- mfem +cuda cuda_arch=70
- mgard +serial +openmp +timing +unstructured +cuda cuda_arch=70
- omega-h +cuda cuda_arch=70
- parsec +cuda cuda_arch=70
- petsc +cuda cuda_arch=70
- raja +cuda cuda_arch=70
- slate +cuda cuda_arch=70
- slepc +cuda cuda_arch=70
- strumpack ~slate +cuda cuda_arch=70
- sundials +cuda cuda_arch=70
- superlu-dist +cuda cuda_arch=70
- tasmanian +cuda cuda_arch=70
- umpire ~shared +cuda cuda_arch=70
# INCLUDED IN ECP DAV CUDA
- adios2 +cuda cuda_arch=70
# - ascent +cuda cuda_arch=70 # ascent: https://github.com/spack/spack/issues/38045
- paraview +cuda cuda_arch=70
- vtk-m +cuda cuda_arch=70
- zfp +cuda cuda_arch=70
# --
# - axom +cuda cuda_arch=70 # axom: https://github.com/spack/spack/issues/29520
# - cp2k +mpi +cuda cuda_arch=70 # dbcsr
# - cusz +cuda cuda_arch=70 # cusz: https://github.com/spack/spack/issues/38787
# - dealii +cuda cuda_arch=70 # fltk: https://github.com/spack/spack/issues/38791
# - lammps +cuda cuda_arch=70 # lammps: needs NVIDIA driver
# - lbann +cuda cuda_arch=70 # lbann: https://github.com/spack/spack/issues/38788
# - libpressio +bitgrooming +bzip2 +fpzip +hdf5 +libdistributed +lua +openmp +python +sz +sz3 +unix +zfp +json +remote +netcdf +cusz +mgard +cuda cuda_arch=70 ^cusz +cuda cuda_arch=70 # depends_on("cuda@11.7.1:", when="+cuda")
# - py-torch +cuda cuda_arch=70 # skipped
# - trilinos +cuda cuda_arch=70 # trilinos: https://github.com/trilinos/Trilinos/issues/11630
# - upcxx +cuda cuda_arch=70 # upcxx: needs NVIDIA driver
ci:
pipeline-gen:
- build-job:
image: ghcr.io/spack/ubuntu20.04-runner-ppc64-gcc-11.4:2023.08.01
cdash:
build-group: E4S Power

View File

@@ -83,7 +83,7 @@ spack:
- pythia8 +evtgen +fastjet +hdf5 +hepmc +hepmc3 +lhapdf ~madgraph5amc +python +rivet ~root # pythia8 and root circularly depend
- rivet hepmc=3
- root +davix +dcache +examples +fftw +fits +fortran +gdml +graphviz +gsl +http +math +minuit +mlp +mysql +opengl +postgres +pythia8 +python +r +roofit +root7 +rpath ~shadow +spectrum +sqlite +ssl +tbb +threads +tmva +unuran +vc +vdt +veccore +webgui +x +xml +xrootd
- sherpa +analysis ~blackhat +gzip +hepmc3 +hepmc3root +lhapdf +lhole +openloops +pythia ~python ~recola ~rivet +root +ufo cxxstd=20
- sherpa +analysis ~blackhat +gzip +hepmc3 +hepmc3root +lhapdf +lhole +openloops +pythia ~python ~recola ~rivet +root +ufo
- tauola +hepmc3 +lhapdf cxxstd=20
- thepeg hepmc=3 ~rivet
- vecgeom +gdml +geant4 +root

View File

@@ -18,7 +18,7 @@ spack:
- hdf5+hl+mpi ^mpich
- trilinos
- trilinos +hdf5 ^hdf5+hl+mpi ^mpich
- gcc@12
- gcc@12.3.0
- mpileaks
- lmod@8.7.18
- environment-modules

View File

@@ -100,8 +100,10 @@ def install(self, spec, prefix):
for ext in exts:
glob_str = os.path.join(pth, ext)
files = glob.glob(glob_str)
for x in files:
[
shutil.copy(
os.path.join(self._7z_src_dir, x),
os.path.join(prefix, os.path.basename(x)),
)
for x in files
]

View File

@@ -179,7 +179,7 @@ def configure_args(self):
if spec.satisfies("@:8"):
oapp("--with-dft-flavor=atompaw+libxc")
else:
oapp("--without-wannier90")
"--without-wannier90",
if spec.satisfies("+mpi"):
oapp(f"CC={spec['mpi'].mpicc}")

View File

@@ -32,7 +32,7 @@ def edit(self, spec, prefix):
# Dictionary mapping: compiler-name : ACE config-label
supported = {"intel": "_icc", "gcc": ""}
if self.compiler.name not in supported:
if not (self.compiler.name in supported):
raise Exception(
"compiler " + self.compiler.name + " not supported in ace spack-package"
)

View File

@@ -5,8 +5,11 @@
import inspect
import os
import llnl.util.tty as tty
import spack.pkg.builtin.openfoam as openfoam
from spack.package import *
from spack.version import Version
class Additivefoam(Package):

View File

@@ -202,8 +202,8 @@ def configure_args(self):
args.append("--enable-void-return-complex")
if spec.satisfies("@3.0:3.1 %aocc"):
# To enable Fortran to C calling convention for complex types when compiling with
# aocc flang
"""To enabled Fortran to C calling convention for
complex types when compiling with aocc flang"""
args.append("--enable-f2c-dotc")
if spec.satisfies("@3.0.1: +ilp64"):

View File

@@ -25,7 +25,6 @@ class Amrex(CMakePackage, CudaPackage, ROCmPackage):
license("BSD-3-Clause")
version("develop", branch="development")
version("25.02", sha256="2680a5a9afba04e211cd48d27799c5a25abbb36c6c3d2b6c13cd4757c7176b23")
version("25.01", sha256="29eb35cf67d66b0fd0654282454c210abfadf27fcff8478b256e3196f237c74f")
version("24.12", sha256="ca4b41ac73fabb9cf3600b530c9823eb3625f337d9b7b9699c1089e81c67fc67")
version("24.11", sha256="31cc37b39f15e02252875815f6066046fc56a479bf459362b9889b0d6a202df6")
@@ -361,7 +360,7 @@ def cmake_args(self):
args.append("-DAMReX_GPU_BACKEND=SYCL")
# SYCL GPU backend only supported with Intel's oneAPI or DPC++ compilers
sycl_compatible_compilers = ["icpx"]
if os.path.basename(self.compiler.cxx) not in sycl_compatible_compilers:
if not (os.path.basename(self.compiler.cxx) in sycl_compatible_compilers):
raise InstallError(
"AMReX's SYCL GPU Backend requires the oneAPI CXX (icpx) compiler."
)

View File

@@ -6,6 +6,7 @@
from os.path import split
from spack.package import *
from spack.util.environment import EnvironmentModifications
class Anaconda3(Package):

View File

@@ -2,6 +2,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from llnl.util import tty
from spack.package import *
from spack.pkg.builtin.llvm import LlvmDetection

View File

@@ -5,6 +5,7 @@
import os
from spack.package import *
from spack.util.environment import EnvironmentModifications
class AoclDa(CMakePackage):

View File

@@ -6,6 +6,8 @@
import os
import socket
import llnl.util.tty as tty
from spack.build_systems.cmake import CMakeBuilder
from spack.package import *

View File

@@ -3,6 +3,8 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import llnl.util.tty as tty
from spack.package import *
from spack.util.environment import set_env

View File

@@ -9,6 +9,8 @@
import sys
from os import environ as env
import llnl.util.tty as tty
from spack.package import *

View File

@@ -5,6 +5,8 @@
import glob
import os
import llnl.util.tty as tty
from spack.package import *
from spack.pkg.builtin.boost import Boost

View File

@@ -118,7 +118,7 @@ class Berkeleygw(MakefilePackage):
def edit(self, spec, prefix):
# archive is a tar file, despite the .gz extension
tar = which("tar")
tar("-x", "-o", "-f", self.stage.archive_file, "--strip-components=1")
tar("-x", "-f", self.stage.archive_file, "--strip-components=1")
# get generic arch.mk template
if spec.satisfies("+mpi"):

View File

@@ -160,11 +160,8 @@ class Binutils(AutotoolsPackage, GNUMirrorPackage):
with when("platform=darwin"):
conflicts("+gold", msg="Binutils cannot build linkers on macOS")
# 2.41 doesn't seem to have any problems.
conflicts(
"libs=shared",
when="@2.37:2.40,2.42:",
msg="https://github.com/spack/spack/issues/35817",
"libs=shared", when="@2.37:2.40", msg="https://github.com/spack/spack/issues/35817"
)
conflicts(

View File

@@ -4,6 +4,8 @@
import os
import llnl.util.tty as tty
from spack.package import *
from spack.package_test import compare_output
from spack.pkg.builtin.boost import Boost

View File

@@ -19,11 +19,6 @@ class Cdo(AutotoolsPackage):
maintainers("skosukhin", "Try2Code")
version(
"2.5.0",
sha256="e865c05c1b52fd76b80e33421554db81b38b75210820bdc40e8690f4552f68e2",
url="https://code.mpimet.mpg.de/attachments/download/29786/cdo-2.5.0.tar.gz",
)
version(
"2.4.4",
sha256="49f50bd18dacd585e9518cfd4f55548f692426edfb3b27ddcd1c653eab53d063",

View File

@@ -276,7 +276,7 @@ def charmarch(self):
# build-target=LIBS backend={0}'.format(b))
def install(self, spec, prefix):
if "backend=mpi" not in self.spec or "backend=netlrts" not in self.spec:
if not ("backend=mpi" in self.spec) or not ("backend=netlrts" in self.spec):
if self.spec.satisfies("+pthreads"):
raise InstallError(
"The pthreads option is only available on the Netlrts and MPI network layers."
@@ -396,7 +396,7 @@ def install(self, spec, prefix):
copy(filepath, tmppath)
os.remove(filepath)
os.rename(tmppath, filepath)
except OSError:
except (IOError, OSError):
pass
tmp_path = join_path(builddir, "tmp")

View File

@@ -8,6 +8,7 @@
import spack.user_environment
from spack.package import *
from spack.pkg.builtin.clingo import Clingo
from spack.util.environment import EnvironmentModifications
class ClingoBootstrap(Clingo):

View File

@@ -30,13 +30,11 @@ class Cmake(Package):
license("BSD-3-Clause")
version("master", branch="master")
version("3.31.5", sha256="66fb53a145648be56b46fa9e8ccade3a4d0dfc92e401e52ce76bdad1fea43d27")
version("3.31.4", sha256="a6130bfe75f5ba5c73e672e34359f7c0a1931521957e8393a5c2922c8b0f7f25")
version("3.31.3", sha256="fac45bc6d410b49b3113ab866074888d6c9e9dc81a141874446eb239ac38cb87")
version("3.31.2", sha256="42abb3f48f37dbd739cdfeb19d3712db0c5935ed5c2aef6c340f9ae9114238a2")
version("3.31.1", sha256="c4fc2a9bd0cd5f899ccb2fb81ec422e175090bc0de5d90e906dd453b53065719")
version("3.31.0", sha256="300b71db6d69dcc1ab7c5aae61cbc1aa2778a3e00cbd918bc720203e311468c3")
version("3.30.7", sha256="470e44d9c7caa3bd869ef953071b84f565b5d378d0a9eccbbbcd72031f21b9de")
version("3.30.6", sha256="a7aa25cdd8545156fe0fec95ebbd53cb2b5173a8717e227f6e8a755185c168cf")
version("3.30.5", sha256="9f55e1a40508f2f29b7e065fa08c29f82c402fa0402da839fffe64a25755a86d")
version("3.30.4", sha256="c759c97274f1e7aaaafcb1f0d261f9de9bf3a5d6ecb7e2df616324a46fe704b2")

View File

@@ -3,6 +3,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
from spack.util.environment import EnvironmentModifications
class Conda4aarch64(Package):

View File

@@ -8,6 +8,8 @@
import socket
from os import environ as env
import llnl.util.tty as tty
from spack.package import *

View File

@@ -38,11 +38,10 @@ class Cp2k(MakefilePackage, CMakePackage, CudaPackage, ROCmPackage):
git = "https://github.com/cp2k/cp2k.git"
list_url = "https://github.com/cp2k/cp2k/releases"
maintainers("dev-zero", "mtaillefumier", "RMeli", "abussy")
maintainers("dev-zero", "mtaillefumier")
license("GPL-2.0-or-later")
version("2025.1", sha256="65c8ad5488897b0f995919b9fa77f2aba4b61677ba1e3c19bb093d5c08a8ce1d")
version("2024.3", sha256="a6eeee773b6b1fb417def576e4049a89a08a0ed5feffcd7f0b33c7d7b48f19ba")
version("2024.2", sha256="cc3e56c971dee9e89b705a1103765aba57bf41ad39a11c89d3de04c8b8cdf473")
version("2024.1", sha256="a7abf149a278dfd5283dc592a2c4ae803b37d040df25d62a5e35af5c4557668f")
@@ -116,9 +115,7 @@ class Cp2k(MakefilePackage, CMakePackage, CudaPackage, ROCmPackage):
)
variant("pytorch", default=False, description="Enable libtorch support")
variant("quip", default=False, description="Enable quip support")
variant("dftd4", when="@2024.2:", default=False, description="Enable DFT-D4 support")
variant("mpi_f08", default=False, description="Use MPI F08 module")
variant("smeagol", default=False, description="Enable libsmeagol support", when="@2025.2:")
variant(
"enable_regtests",
@@ -159,7 +156,6 @@ class Cp2k(MakefilePackage, CMakePackage, CudaPackage, ROCmPackage):
)
depends_on("python@3", type="build")
depends_on("pkgconfig", type="build", when="build_system=cmake")
depends_on("blas")
depends_on("lapack")
@@ -197,14 +193,13 @@ class Cp2k(MakefilePackage, CMakePackage, CudaPackage, ROCmPackage):
)
with when("+libxc"):
depends_on("pkgconfig", type="build", when="@7.0: ^libxc@:6")
depends_on("pkgconfig", type="build", when="@7.0:")
depends_on("libxc@4.0.3:4", when="@7.0:8.1")
depends_on("libxc@5.1.3:5.1", when="@8.2:8")
depends_on("libxc@5.1.7:5.1", when="@9:2022.2")
depends_on("libxc@6.1:", when="@2023.1:")
depends_on("libxc@6.2:", when="@2023.2:")
depends_on("libxc@:6", when="@:2024.3")
depends_on("libxc@7 build_system=cmake", when="@2025.2:")
with when("+spla"):
depends_on("spla+cuda+fortran", when="+cuda")
@@ -265,8 +260,6 @@ class Cp2k(MakefilePackage, CMakePackage, CudaPackage, ROCmPackage):
depends_on("plumed+mpi", when="+mpi")
depends_on("plumed~mpi", when="~mpi")
depends_on("libsmeagol", when="+smeagol")
# while we link statically against PEXSI, its own deps may be linked in
# dynamically, therefore can't set this as pure build-type dependency.
depends_on("pexsi+fortran@0.10.0:", when="+pexsi")
@@ -297,8 +290,6 @@ class Cp2k(MakefilePackage, CMakePackage, CudaPackage, ROCmPackage):
depends_on("spglib", when="+spglib")
depends_on("dftd4@3.6.0: build_system=cmake", when="+dftd4")
with when("build_system=cmake"):
depends_on("cmake@3.22:", type="build")
@@ -630,18 +621,6 @@ def edit(self, pkg, spec, prefix):
ldflags += [spglib.search_flags]
libs.append(spglib.ld_flags)
if spec.satisfies("+dftd4"):
cppflags += ["-D__DFTD4"]
dftd4 = spec["dftd4"].libs
ldflags += [dftd4.search_flags]
libs.append(dftd4.ld_flags)
if spec.satisfies("+smeagol"):
cppflags += ["-D__SMEAGOL"]
smeagol = spec["libsmeagol"].libs
ldflags += [smeagol.search_flags]
libs.append(smeagol.ld_flags)
cc = spack_cc if "~mpi" in spec else spec["mpi"].mpicc
cxx = spack_cxx if "~mpi" in spec else spec["mpi"].mpicxx
fc = spack_fc if "~mpi" in spec else spec["mpi"].mpifc
@@ -784,8 +763,8 @@ def edit(self, pkg, spec, prefix):
"Point environment variable LIBSMM_PATH to "
"the absolute path of the libsmm.a file"
)
except OSError:
raise OSError(
except IOError:
raise IOError(
"The file LIBSMM_PATH pointed to does not "
"exist. Note that it must be absolute path."
)
@@ -1015,9 +994,7 @@ def cmake_args(self):
self.define_from_variant("CP2K_USE_VORI", "libvori"),
self.define_from_variant("CP2K_USE_SPLA", "spla"),
self.define_from_variant("CP2K_USE_QUIP", "quip"),
self.define_from_variant("CP2K_USE_DFTD4", "dftd4"),
self.define_from_variant("CP2K_USE_MPI_F08", "mpi_f08"),
self.define_from_variant("CP2K_USE_LIBSMEAGOL", "smeagol"),
]
# we force the use elpa openmp threading support. might need to be revisited though

View File

@@ -15,7 +15,6 @@ class Cppgsl(CMakePackage):
license("MIT")
version("main", branch="main")
version("4.1.0", sha256="0a227fc9c8e0bf25115f401b9a46c2a68cd28f299d24ab195284eb3f1d7794bd")
version("4.0.0", sha256="f0e32cb10654fea91ad56bde89170d78cfbf4363ee0b01d8f097de2ba49f6ce9")
version("3.1.0", sha256="d3234d7f94cea4389e3ca70619b82e8fb4c2f33bb3a070799f1e18eef500a083")
version("2.1.0", sha256="ef73814657b073e1be86c8f7353718771bf4149b482b6cb54f99e79b23ff899d")
@@ -33,7 +32,6 @@ class Cppgsl(CMakePackage):
)
depends_on("cmake@3.1.3:", type="build")
depends_on("cmake@3.14:", type="build", when="@4.1:")
def cmake_args(self):
return [

View File

@@ -25,7 +25,7 @@ class Creduce(CMakePackage):
depends_on("flex")
depends_on("libxml2")
depends_on("llvm")
depends_on("llvm@8.0", when="@:2.10")
depends_on("llvm@8.0.0", when="@:2.10")
depends_on("perl")
depends_on("perl-exporter-lite")
depends_on("perl-file-which")

View File

@@ -7,6 +7,8 @@
import re
from glob import glob
import llnl.util.tty as tty
from spack.package import *
# FIXME Remove hack for polymorphic versions
@@ -21,16 +23,6 @@
# format returned by platform.system() and 'arch' by platform.machine()
_versions = {
"12.8.0": {
"Linux-aarch64": (
"5bc211f00c4f544da6e3fc3a549b3eb0a7e038439f5f3de71caa688f2f6b132c",
"https://developer.download.nvidia.com/compute/cuda/12.8.0/local_installers/cuda_12.8.0_570.86.10_linux_sbsa.run",
),
"Linux-x86_64": (
"610867dcd6d94c4e36c4924f1d01b9db28ec08164e8af6c764f21b84200695f8",
"https://developer.download.nvidia.com/compute/cuda/12.8.0/local_installers/cuda_12.8.0_570.86.10_linux.run",
),
},
"12.6.3": {
"Linux-aarch64": (
"213ea63a6357020978a8b0a79a8c9d12a2a5941afa1cdc69d5a3f933fa8bed04",
@@ -749,7 +741,7 @@ def install(self, spec, prefix):
os.remove("/tmp/cuda-installer.log")
except OSError:
if spec.satisfies("@10.1:"):
raise InstallError(
tty.die(
"The cuda installer will segfault due to the "
"presence of /tmp/cuda-installer.log "
"please remove the file and try again "

View File

@@ -19,10 +19,7 @@ class Cusz(CMakePackage, CudaPackage):
conflicts("cuda_arch=none", when="+cuda")
version("develop", branch="develop")
version("0.14.0", commit="e57fd7cd9df923164af9dd307b0b3d37dd9df137")
version("0.9.0rc3", commit="c3c3a74d006c6de3c145255241fb181682bd1492")
# 0.9.0rc1 was listed as 0.6.0 for a while in spack
version("0.9.0rc1", commit="cafed521dc338fe2159ebb5b09a36fc318524bf7")
version("0.6.0", commit="cafed521dc338fe2159ebb5b09a36fc318524bf7")
version("0.3.1", commit="02be3cbd07db467decaf45ec9eb593ba6173c809")
version("0.3", sha256="0feb4f7fd64879fe147624dd5ad164adf3983f79b2e0383d35724f8d185dcb11")
@@ -34,10 +31,6 @@ class Cusz(CMakePackage, CudaPackage):
depends_on("cub", when="^cuda@:10.2.89")
patch("thrust-includes.patch", when="@0.10:0.14 ^cuda@12.8:")
patch("thrust-includes-0.9.patch", when="@0.9 ^cuda@12.8:")
conflicts("^cuda@12.8:", when="@:0.8")
def cmake_args(self):
cuda_arch = self.spec.variants["cuda_arch"].value
args = ["-DBUILD_TESTING=OFF", ("-DCMAKE_CUDA_ARCHITECTURES=%s" % cuda_arch)]

View File

@@ -1,27 +0,0 @@
diff --git a/src/stat/detail/compare.thrust.inl b/src/stat/detail/compare.thrust.inl
index ce49408..ec8d650 100644
--- a/src/stat/detail/compare.thrust.inl
+++ b/src/stat/detail/compare.thrust.inl
@@ -18,6 +18,9 @@
// #include <thrust/iterator/constant_iterator.h>
#include <thrust/device_ptr.h>
#include <thrust/execution_policy.h>
+#include <thrust/functional.h>
+#include <thrust/iterator/zip_iterator.h>
+#include <thrust/transform_reduce.h>
#include <thrust/tuple.h>
#include "cusz/type.h"
diff --git a/src/stat/detail/maxerr.thrust.inl b/src/stat/detail/maxerr.thrust.inl
index 2415655..9b31e88 100644
--- a/src/stat/detail/maxerr.thrust.inl
+++ b/src/stat/detail/maxerr.thrust.inl
@@ -18,6 +18,8 @@
// #include <thrust/iterator/constant_iterator.h>
#include <thrust/device_ptr.h>
#include <thrust/execution_policy.h>
+#include <thrust/extrema.h>
+#include <thrust/transform.h>
#include "cusz/type.h"

View File

@@ -1,27 +0,0 @@
diff --git a/psz/src/stat/detail/compare.thrust.inl b/psz/src/stat/detail/compare.thrust.inl
index f35c7df..719d68f 100644
--- a/psz/src/stat/detail/compare.thrust.inl
+++ b/psz/src/stat/detail/compare.thrust.inl
@@ -11,6 +11,9 @@
#include <thrust/device_ptr.h>
#include <thrust/execution_policy.h>
+#include <thrust/functional.h>
+#include <thrust/iterator/zip_iterator.h>
+#include <thrust/transform_reduce.h>
#include <thrust/tuple.h>
#include "cusz/type.h"
diff --git a/psz/src/stat/detail/maxerr.thrust.inl b/psz/src/stat/detail/maxerr.thrust.inl
index f7a4db5..ce7925a 100644
--- a/psz/src/stat/detail/maxerr.thrust.inl
+++ b/psz/src/stat/detail/maxerr.thrust.inl
@@ -11,6 +11,8 @@
#include <thrust/device_ptr.h>
#include <thrust/execution_policy.h>
+#include <thrust/extrema.h>
+#include <thrust/transform.h>
#include "cusz/type.h"
#include "port.hh"

View File

@@ -30,7 +30,7 @@ def dav_sdk_depends_on(spec, when=None, propagate=None):
# Map the propagated variants to the dependency variant. Some packages may need
# overrides to propagate a dependency as something else, e.g., {"visit": "libsim"}.
# Most call-sites will just use a list.
if type(propagate) is not dict:
if not type(propagate) is dict:
propagate = dict([(v, v) for v in propagate])
# Determine the base variant
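propagate is accepted either as a list (forward variants under their own names) or as a dict (rename on the way down, e.g. {"visit": "libsim"}); normalizing the list into an identity mapping lets the rest of the function treat both shapes uniformly. A sketch of that normalization:

def normalize_propagate(propagate):
    """List -> identity dict; dicts pass through unchanged."""
    if not isinstance(propagate, dict):
        propagate = {v: v for v in propagate}
    return propagate

assert normalize_propagate(["mpi", "hdf5"]) == {"mpi": "mpi", "hdf5": "hdf5"}
assert normalize_propagate({"visit": "libsim"}) == {"visit": "libsim"}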

View File

@@ -25,7 +25,6 @@ class Dd4hep(CMakePackage):
license("LGPL-3.0-or-later")
version("master", branch="master")
version("1.31", sha256="9c06a1b4462fc1b51161404889c74b37350162d0b0ac2154db27e3f102670bd1")
version("1.30", sha256="02de46151e945eff58cffd84b4b86d35051f4436608199c3efb4d2e1183889fe")
version("1.29", sha256="435d25a7ef093d8bf660f288b5a89b98556b4c1c293c55b93bf641fb4cba77e9")
version("1.28", sha256="b28d671eda0154073873a044a384486e66f1f200065deca99537aa84f07328ad")
@@ -119,16 +118,13 @@ class Dd4hep(CMakePackage):
depends_on("tbb", when="+tbb")
depends_on("intel-tbb@:2020.3", when="+tbb @:1.23")
depends_on("lcio", when="+lcio")
depends_on("edm4hep", when="+edm4hep")
depends_on("podio", when="+edm4hep")
depends_on("podio@:0.16.03", when="@:1.23 +edm4hep")
depends_on("podio@0.16:", when="@1.24: +edm4hep")
depends_on("podio@0.16.3:", when="@1.26: +edm4hep")
depends_on("podio@:0", when="@:1.29 +edm4hep")
depends_on("py-pytest", type=("build", "test"))
with when("+edm4hep"):
depends_on("edm4hep")
depends_on("edm4hep@0.10.5:", when="@1.31:")
depends_on("podio")
depends_on("podio@:0.16.03", when="@:1.23")
depends_on("podio@:0", when="@:1.29")
depends_on("podio@0.16:", when="@1.24:")
depends_on("podio@0.16.3:", when="@1.26:")
depends_on("podio@0.16.7:", when="@1.31:")
# See https://github.com/AIDASoft/DD4hep/pull/771 and https://github.com/AIDASoft/DD4hep/pull/876
conflicts(

View File

@@ -73,7 +73,7 @@ class DlaFuture(CMakePackage, CudaPackage, ROCmPackage):
generator("ninja")
depends_on("cmake@3.22:", type="build")
depends_on("pkgconfig", type=("build", "link"))
depends_on("pkgconfig", type="build")
depends_on("doxygen", type="build", when="+doc")
depends_on("mpi")

Some files were not shown because too many files have changed in this diff.