Compare commits

..

1 Commit

Author SHA1 Message Date
Todd Gamblin
3cdf4e7ccf packages: eliminate unnecessary implicit string concatenation
Python lets you do things like

```python
"these are " "one string"

'so are' "these"
```

This can be useful for breaking strings over multiple lines, but it also often happens
unintentionally and can indicate subtle errors in the code.

A lot of variant descriptions contain harmless implicit concatenation left over from
refactors, e.g.:

```python
    variant("myvariant", default=True, description="this used to be" "on two lines")
```
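
In cases like the variant above, the fix is usually just to merge the pieces into a single
literal, or to make the join explicit so the intent is visible. A sketch of the two options
(illustrative only, not taken from the diff; the space is what the join presumably intended):

```python
# merged into one literal (with the space the join presumably intended)
variant("myvariant", default=True, description="this used to be on two lines")

# or an explicit join, if the line really must be split
variant("myvariant", default=True, description="this used to be " + "on two lines")
```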

But there are also real bugs, like this, where the author probably omitted a comma and
didn't notice that `black` reformatted the implicit concatenation onto one line:

```python
args = [
     "--with-thing",
     "--with-second-thing" "--with-third-thing",
]
```

And other bugs like this, where the author probably intended to add a space, but didn't:

```python
options = "${CFLAGS}" "${SPECIAL_PIC_OPTION}"
```
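
Without a separator the two expansions fuse into a single token; the intended value
presumably needs a space between them (hypothetical fix, not taken from the diff):

```python
# hypothetical fix: a single literal with the intended space
options = "${CFLAGS} ${SPECIAL_PIC_OPTION}"

# or, keeping the pieces separate and joining explicitly
options = " ".join(["${CFLAGS}", "${SPECIAL_PIC_OPTION}"])
```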

Some things are harmless but confusing:

```python
"first part of string {0} " "second part {1}".format("zero", "one")
```

It's not broken. String concatenation happens *before* the `format()` call, and the
whole string is formatted. But it sure is hard to read.
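
Since the concatenation is folded into one literal before `format()` runs, an equivalent
but easier-to-read spelling is simply the single string (a sketch, not from the diff):

```python
# same result, but the scope of format() is obvious
"first part of string {0} second part {1}".format("zero", "one")
```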

Unfortunately, you can't detect this with an AST pass, because implicit concatenation
happens at parse time, before the AST is built. I had to detect this with grep:

```console
> grep -l '^[^"]*"[^"]*" "' */package.py
> grep -l "^[^']*'[^']*' '" */package.py
```
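
The grep works because the offending pattern is two quoted literals on one line. For a more
programmatic check (not what this change used, just a sketch with made-up names): the AST
can't help, since the parser folds adjacent literals into a single constant, but Python's
`tokenize` module still sees the individual `STRING` tokens, so two string tokens in a row
can be flagged. Caveat: this is a rough illustration; on newer Pythons, f-strings tokenize
differently and aren't covered.

```python
# illustrative helper, not part of Spack or this change
import sys
import tokenize

# tokens that may legitimately appear between the two halves of an implicit
# concatenation inside brackets: line breaks (NL) and comments
SKIP = {tokenize.NL, tokenize.COMMENT}


def implicit_concat_lines(path):
    """Yield line numbers where two string literals sit directly next to each other."""
    prev = None
    with open(path, "rb") as f:
        for tok in tokenize.tokenize(f.readline):
            if tok.type in SKIP:
                continue
            if prev is not None and prev.type == tokenize.STRING and tok.type == tokenize.STRING:
                yield tok.start[0]
            prev = tok


if __name__ == "__main__":
    for filename in sys.argv[1:]:
        for lineno in implicit_concat_lines(filename):
            print(f"{filename}:{lineno}: implicit string concatenation")
```

Unlike the grep, this also flags concatenations split across lines inside brackets, which may
or may not be desirable, since those are often intentional line breaks.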

- [x] Get rid of nearly all implicit string concatenation in packages

Signed-off-by: Todd Gamblin <tgamblin@llnl.gov>
2025-01-31 20:35:21 -08:00
578 changed files with 4160 additions and 7976 deletions

View File

@@ -81,10 +81,6 @@ jobs:
with:
with_coverage: ${{ needs.changes.outputs.core }}
import-check:
needs: [ changes ]
uses: ./.github/workflows/import-check.yaml
all-prechecks:
needs: [ prechecks ]
if: ${{ always() }}

View File

@@ -33,4 +33,3 @@ jobs:
with:
verbose: true
fail_ci_if_error: false
token: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -1,49 +0,0 @@
name: import-check
on:
workflow_call:
jobs:
# Check we don't make the situation with circular imports worse
import-check:
runs-on: ubuntu-latest
steps:
- uses: julia-actions/setup-julia@v2
with:
version: '1.10'
- uses: julia-actions/cache@v2
# PR: use the base of the PR as the old commit
- name: Checkout PR base commit
if: github.event_name == 'pull_request'
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
ref: ${{ github.event.pull_request.base.sha }}
path: old
# not a PR: use the previous commit as the old commit
- name: Checkout previous commit
if: github.event_name != 'pull_request'
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
fetch-depth: 2
path: old
- name: Checkout previous commit
if: github.event_name != 'pull_request'
run: git -C old reset --hard HEAD^
- name: Checkout new commit
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
path: new
- name: Install circular import checker
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
repository: haampie/circular-import-fighter
ref: 4cdb0bf15f04ab6b49041d5ef1bfd9644cce7f33
path: circular-import-fighter
- name: Install dependencies
working-directory: circular-import-fighter
run: make -j dependencies
- name: Circular import check
working-directory: circular-import-fighter
run: make -j compare "SPACK_ROOT=../old ../new"

View File

@@ -1,7 +1,7 @@
black==25.1.0
black==24.10.0
clingo==5.7.1
flake8==7.1.2
isort==6.0.0
flake8==7.1.1
isort==5.13.2
mypy==1.11.2
types-six==1.17.0.20241205
vermin==1.6.0

View File

@@ -86,6 +86,66 @@ jobs:
spack -d bootstrap now --dev
spack -d style -t black
spack unit-test -V
# Check we don't make the situation with circular imports worse
import-check:
runs-on: ubuntu-latest
steps:
- uses: julia-actions/setup-julia@v2
with:
version: '1.10'
- uses: julia-actions/cache@v2
# PR: use the base of the PR as the old commit
- name: Checkout PR base commit
if: github.event_name == 'pull_request'
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
ref: ${{ github.event.pull_request.base.sha }}
path: old
# not a PR: use the previous commit as the old commit
- name: Checkout previous commit
if: github.event_name != 'pull_request'
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
fetch-depth: 2
path: old
- name: Checkout previous commit
if: github.event_name != 'pull_request'
run: git -C old reset --hard HEAD^
- name: Checkout new commit
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
path: new
- name: Install circular import checker
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
repository: haampie/circular-import-fighter
ref: b5d6ce9be35f602cca7d5a6aa0259fca10639cca
path: circular-import-fighter
- name: Install dependencies
working-directory: circular-import-fighter
run: make -j dependencies
- name: Problematic imports before
working-directory: circular-import-fighter
run: make SPACK_ROOT=../old SUFFIX=.old
- name: Problematic imports after
working-directory: circular-import-fighter
run: make SPACK_ROOT=../new SUFFIX=.new
- name: Compare import cycles
working-directory: circular-import-fighter
run: |
edges_before="$(head -n1 solution.old)"
edges_after="$(head -n1 solution.new)"
if [ "$edges_after" -gt "$edges_before" ]; then
printf '\033[1;31mImport check failed: %s imports need to be deleted, ' "$edges_after"
printf 'previously this was %s\033[0m\n' "$edges_before"
printf 'Compare \033[1;97m"Problematic imports before"\033[0m and '
printf '\033[1;97m"Problematic imports after"\033[0m.\n'
exit 1
else
printf '\033[1;32mImport check passed: %s <= %s\033[0m\n' "$edges_after" "$edges_before"
fi
# Further style checks from pylint
pylint:

View File

@@ -43,22 +43,6 @@ concretizer:
# (e.g. py-setuptools, cmake etc.)
# "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
strategy: minimal
# Maximum number of duplicates in a DAG, when using a strategy that allows duplicates. "default" is the
# number used if there isn't a more specific alternative
max_dupes:
default: 1
# Virtuals
c: 2
cxx: 2
fortran: 1
# Regular packages
cmake: 2
gmake: 2
py-cython: 2
py-flit-core: 2
py-setuptools: 2
gcc: 2
llvm: 2
# Option to specify compatibility between operating systems for reuse of compilers and packages
# Specified as a key: [list] where the key is the os that is being targeted, and the list contains the OS's
# it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's
@@ -79,7 +63,3 @@ concretizer:
# Setting this to false yields unreproducible results, so we advise to use that value only
# for debugging purposes (e.g. check which constraints can help Spack concretize faster).
error_on_timeout: true
# Static analysis may reduce the concretization time by generating smaller ASP problems, in
# cases where there are requirements that prevent part of the search space to be explored.
static_analysis: false

View File

@@ -1,5 +1,5 @@
config:
locks: false
build_stage::
- '$user_cache_path/stage'
- '$spack/.staging'
stage_name: '{name}-{version}-{hash:7}'

View File

@@ -820,69 +820,6 @@ presence of a ``SPACK_CDASH_AUTH_TOKEN`` environment variable during the
build group on CDash called "Release Testing" (that group will be created if
it didn't already exist).
.. _ci_artifacts:
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
CI Artifacts Directory Layout
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
When running the CI build using the command ``spack ci rebuild`` a number of directories are created for
storing data generated during the CI job. The default root directory for artifacts is ``job_scratch_root``.
This can be overridden by passing the argument ``--artifacts-root`` to the ``spack ci generate`` command
or by setting the ``SPACK_ARTIFACTS_ROOT`` environment variable in the build job scripts.
The top level directories under the artifact root are ``concrete_environment``, ``logs``, ``reproduction``,
``tests``, and ``user_data``. Spack does not restrict what is written to any of these directories nor does
it require user specified files be written to any specific directory.
------------------------
``concrete_environment``
------------------------
The directory ``concrete_environment`` is used to communicate the ci generate processed ``spack.yaml`` and
the concrete ``spack.lock`` for the CI environment.
--------
``logs``
--------
The directory ``logs`` contains the spack build log, ``spack-build-out.txt``, and the spack build environment
modification file, ``spack-build-mod-env.txt``. Additionally all files specified by the packages ``Builder``
property ``archive_files`` are also copied here (ie. ``CMakeCache.txt`` in ``CMakeBuilder``).
----------------
``reproduction``
----------------
The directory ``reproduction`` is used to store the files needed by the ``spack reproduce-build`` command.
This includes ``repro.json``, copies of all of the files in ``concrete_environment``, the concrete spec
JSON file for the current spec being built, and all of the files written in the artifacts root directory.
The ``repro.json`` file is not versioned and is only designed to work with the version of spack CI was run with.
An example of what a ``repro.json`` may look like is here.
.. code:: json
{
"job_name": "adios2@2.9.2 /feaevuj %gcc@11.4.0 arch=linux-ubuntu20.04-x86_64_v3 E4S ROCm External",
"job_spec_json": "adios2.json",
"ci_project_dir": "/builds/spack/spack"
}
---------
``tests``
---------
The directory ``tests`` is used to store output from running ``spack test <job spec>``. This may or may not have
data in it depending on the package that was built and the availability of tests.
-------------
``user_data``
-------------
The directory ``user_data`` is used to store everything else that shouldn't be copied to the ``reproduction`` direcotory.
Users may use this to store additional logs or metrics or other types of files generated by the build job.
-------------------------------------
Using a custom spack in your pipeline
-------------------------------------

View File

@@ -1,4 +1,4 @@
sphinx==8.2.0
sphinx==8.1.3
sphinxcontrib-programoutput==0.18
sphinx_design==0.6.1
sphinx-rtd-theme==3.0.2
@@ -7,7 +7,7 @@ docutils==0.21.2
pygments==2.19.1
urllib3==2.3.0
pytest==8.3.4
isort==6.0.0
black==25.1.0
flake8==7.1.2
isort==5.13.2
black==24.10.0
flake8==7.1.1
mypy==1.11.1

View File

@@ -41,16 +41,6 @@ def __init__(self, dst, src_a=None, src_b=None):
self.src_a = src_a
self.src_b = src_b
def __repr__(self) -> str:
return f"MergeConflict(dst={self.dst!r}, src_a={self.src_a!r}, src_b={self.src_b!r})"
def _samefile(a: str, b: str):
try:
return os.path.samefile(a, b)
except OSError:
return False
class SourceMergeVisitor(BaseDirectoryVisitor):
"""
@@ -60,14 +50,9 @@ class SourceMergeVisitor(BaseDirectoryVisitor):
- A list of merge conflicts in dst/
"""
def __init__(
self, ignore: Optional[Callable[[str], bool]] = None, normalize_paths: bool = False
):
def __init__(self, ignore: Optional[Callable[[str], bool]] = None):
self.ignore = ignore if ignore is not None else lambda f: False
# On case-insensitive filesystems, normalize paths to detect duplications
self.normalize_paths = normalize_paths
# When mapping <src root> to <dst root>/<projection>, we need to prepend the <projection>
# bit to the relative path in the destination dir.
self.projection: str = ""
@@ -86,88 +71,10 @@ def __init__(
# and can run mkdir in order.
self.directories: Dict[str, Tuple[str, str]] = {}
# If the visitor is configured to normalize paths, keep a map of
# normalized path to: original path, root directory + relative path
self._directories_normalized: Dict[str, Tuple[str, str, str]] = {}
# Files to link. Maps dst_rel to (src_root, src_rel). This is an ordered dict, where files
# are guaranteed to be grouped by src_root in the order they were visited.
self.files: Dict[str, Tuple[str, str]] = {}
# If the visitor is configured to normalize paths, keep a map of
# normalized path to: original path, root directory + relative path
self._files_normalized: Dict[str, Tuple[str, str, str]] = {}
def _in_directories(self, proj_rel_path: str) -> bool:
"""
Check if a path is already in the directory list
"""
if self.normalize_paths:
return proj_rel_path.lower() in self._directories_normalized
else:
return proj_rel_path in self.directories
def _directory(self, proj_rel_path: str) -> Tuple[str, str, str]:
"""
Get the directory that is mapped to a path
"""
if self.normalize_paths:
return self._directories_normalized[proj_rel_path.lower()]
else:
return (proj_rel_path, *self.directories[proj_rel_path])
def _del_directory(self, proj_rel_path: str):
"""
Remove a directory from the list of directories
"""
del self.directories[proj_rel_path]
if self.normalize_paths:
del self._directories_normalized[proj_rel_path.lower()]
def _add_directory(self, proj_rel_path: str, root: str, rel_path: str):
"""
Add a directory to the list of directories.
Also stores the normalized version for later lookups
"""
self.directories[proj_rel_path] = (root, rel_path)
if self.normalize_paths:
self._directories_normalized[proj_rel_path.lower()] = (proj_rel_path, root, rel_path)
def _in_files(self, proj_rel_path: str) -> bool:
"""
Check if a path is already in the files list
"""
if self.normalize_paths:
return proj_rel_path.lower() in self._files_normalized
else:
return proj_rel_path in self.files
def _file(self, proj_rel_path: str) -> Tuple[str, str, str]:
"""
Get the file that is mapped to a path
"""
if self.normalize_paths:
return self._files_normalized[proj_rel_path.lower()]
else:
return (proj_rel_path, *self.files[proj_rel_path])
def _del_file(self, proj_rel_path: str):
"""
Remove a file from the list of files
"""
del self.files[proj_rel_path]
if self.normalize_paths:
del self._files_normalized[proj_rel_path.lower()]
def _add_file(self, proj_rel_path: str, root: str, rel_path: str):
"""
Add a file to the list of files
Also stores the normalized version for later lookups
"""
self.files[proj_rel_path] = (root, rel_path)
if self.normalize_paths:
self._files_normalized[proj_rel_path.lower()] = (proj_rel_path, root, rel_path)
def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
"""
Register a directory if dst / rel_path is not blocked by a file or ignored.
@@ -177,28 +84,23 @@ def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
if self.ignore(rel_path):
# Don't recurse when dir is ignored.
return False
elif self._in_files(proj_rel_path):
# A file-dir conflict is fatal except if they're the same file (symlinked dir).
src_a = os.path.join(*self._file(proj_rel_path))
src_b = os.path.join(root, rel_path)
if not _samefile(src_a, src_b):
self.fatal_conflicts.append(
MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
elif proj_rel_path in self.files:
# Can't create a dir where a file is.
src_a_root, src_a_relpath = self.files[proj_rel_path]
self.fatal_conflicts.append(
MergeConflict(
dst=proj_rel_path,
src_a=os.path.join(src_a_root, src_a_relpath),
src_b=os.path.join(root, rel_path),
)
return False
# Remove the link in favor of the dir.
existing_proj_rel_path, _, _ = self._file(proj_rel_path)
self._del_file(existing_proj_rel_path)
self._add_directory(proj_rel_path, root, rel_path)
return True
elif self._in_directories(proj_rel_path):
)
return False
elif proj_rel_path in self.directories:
# No new directory, carry on.
return True
else:
# Register new directory.
self._add_directory(proj_rel_path, root, rel_path)
self.directories[proj_rel_path] = (root, rel_path)
return True
def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
@@ -230,7 +132,7 @@ def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bo
if handle_as_dir:
return self.before_visit_dir(root, rel_path, depth)
self.visit_file(root, rel_path, depth, symlink=True)
self.visit_file(root, rel_path, depth)
return False
def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = False) -> None:
@@ -238,23 +140,30 @@ def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = Fa
if self.ignore(rel_path):
pass
elif self._in_directories(proj_rel_path):
# Can't create a file where a dir is, unless they are the same file (symlinked dir),
# in which case we simply drop the symlink in favor of the actual dir.
src_a = os.path.join(*self._directory(proj_rel_path))
src_b = os.path.join(root, rel_path)
if not symlink or not _samefile(src_a, src_b):
self.fatal_conflicts.append(
MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
elif proj_rel_path in self.directories:
# Can't create a file where a dir is; fatal error
self.fatal_conflicts.append(
MergeConflict(
dst=proj_rel_path,
src_a=os.path.join(*self.directories[proj_rel_path]),
src_b=os.path.join(root, rel_path),
)
elif self._in_files(proj_rel_path):
)
elif proj_rel_path in self.files:
# When two files project to the same path, they conflict iff they are distinct.
# If they are the same (i.e. one links to the other), register regular files rather
# than symlinks. The reason is that in copy-type views, we need a copy of the actual
# file, not the symlink.
src_a = os.path.join(*self._file(proj_rel_path))
src_a = os.path.join(*self.files[proj_rel_path])
src_b = os.path.join(root, rel_path)
if not _samefile(src_a, src_b):
try:
samefile = os.path.samefile(src_a, src_b)
except OSError:
samefile = False
if not samefile:
# Distinct files produce a conflict.
self.file_conflicts.append(
MergeConflict(dst=proj_rel_path, src_a=src_a, src_b=src_b)
@@ -264,12 +173,12 @@ def visit_file(self, root: str, rel_path: str, depth: int, *, symlink: bool = Fa
if not symlink:
# Remove the link in favor of the actual file. The del is necessary to maintain the
# order of the files dict, which is grouped by root.
existing_proj_rel_path, _, _ = self._file(proj_rel_path)
self._del_file(existing_proj_rel_path)
self._add_file(proj_rel_path, root, rel_path)
del self.files[proj_rel_path]
self.files[proj_rel_path] = (root, rel_path)
else:
# Otherwise register this file to be linked.
self._add_file(proj_rel_path, root, rel_path)
self.files[proj_rel_path] = (root, rel_path)
def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None:
# Treat symlinked files as ordinary files (without "dereferencing")
@@ -288,11 +197,11 @@ def set_projection(self, projection: str) -> None:
path = ""
for part in self.projection.split(os.sep):
path = os.path.join(path, part)
if not self._in_files(path):
self._add_directory(path, "<projection>", path)
if path not in self.files:
self.directories[path] = ("<projection>", path)
else:
# Can't create a dir where a file is.
_, src_a_root, src_a_relpath = self._file(path)
src_a_root, src_a_relpath = self.files[path]
self.fatal_conflicts.append(
MergeConflict(
dst=path,
@@ -318,8 +227,8 @@ def __init__(self, source_merge_visitor: SourceMergeVisitor):
def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
# If destination dir is a file in a src dir, add a conflict,
# and don't traverse deeper
if self.src._in_files(rel_path):
_, src_a_root, src_a_relpath = self.src._file(rel_path)
if rel_path in self.src.files:
src_a_root, src_a_relpath = self.src.files[rel_path]
self.src.fatal_conflicts.append(
MergeConflict(
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
@@ -329,9 +238,8 @@ def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
# If destination dir was also a src dir, remove the mkdir
# action, and traverse deeper.
if self.src._in_directories(rel_path):
existing_proj_rel_path, _, _ = self.src._directory(rel_path)
self.src._del_directory(existing_proj_rel_path)
if rel_path in self.src.directories:
del self.src.directories[rel_path]
return True
# If the destination dir does not appear in the src dir,
@@ -344,24 +252,38 @@ def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bo
be seen as files; we should not accidentally merge
source dir with a symlinked dest dir.
"""
self.visit_file(root, rel_path, depth)
# Never descend into symlinked target dirs.
return False
def visit_file(self, root: str, rel_path: str, depth: int) -> None:
# Can't merge a file if target already exists
if self.src._in_directories(rel_path):
_, src_a_root, src_a_relpath = self.src._directory(rel_path)
# Always conflict
if rel_path in self.src.directories:
src_a_root, src_a_relpath = self.src.directories[rel_path]
self.src.fatal_conflicts.append(
MergeConflict(
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
)
)
elif self.src._in_files(rel_path):
_, src_a_root, src_a_relpath = self.src._file(rel_path)
if rel_path in self.src.files:
src_a_root, src_a_relpath = self.src.files[rel_path]
self.src.fatal_conflicts.append(
MergeConflict(
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
)
)
# Never descend into symlinked target dirs.
return False
def visit_file(self, root: str, rel_path: str, depth: int) -> None:
# Can't merge a file if target already exists
if rel_path in self.src.directories:
src_a_root, src_a_relpath = self.src.directories[rel_path]
self.src.fatal_conflicts.append(
MergeConflict(
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
)
)
elif rel_path in self.src.files:
src_a_root, src_a_relpath = self.src.files[rel_path]
self.src.fatal_conflicts.append(
MergeConflict(
rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)

View File

@@ -269,7 +269,7 @@ def __init__(
@staticmethod
def _poll_interval_generator(
_wait_times: Optional[Tuple[float, float, float]] = None,
_wait_times: Optional[Tuple[float, float, float]] = None
) -> Generator[float, None, None]:
"""This implements a backoff scheme for polling a contended resource
by suggesting a succession of wait times between polls.

View File

@@ -2,7 +2,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Utility classes for logging the output of blocks of code."""
"""Utility classes for logging the output of blocks of code.
"""
import atexit
import ctypes
import errno
@@ -343,6 +344,26 @@ def close(self):
self.file.close()
@contextmanager
def replace_environment(env):
"""Replace the current environment (`os.environ`) with `env`.
If `env` is empty (or None), this unsets all current environment
variables.
"""
env = env or {}
old_env = os.environ.copy()
try:
os.environ.clear()
for name, val in env.items():
os.environ[name] = val
yield
finally:
os.environ.clear()
for name, val in old_env.items():
os.environ[name] = val
def log_output(*args, **kwargs):
"""Context manager that logs its output to a file.
@@ -426,6 +447,7 @@ def __init__(
self.echo = echo
self.debug = debug
self.buffer = buffer
self.env = env # the environment to use for _writer_daemon
self.filter_fn = filter_fn
self._active = False # used to prevent re-entry
@@ -497,20 +519,21 @@ def __enter__(self):
# just don't forward input if this fails
pass
self.process = multiprocessing.Process(
target=_writer_daemon,
args=(
input_fd,
read_fd,
self.write_fd,
self.echo,
self.log_file,
child_pipe,
self.filter_fn,
),
)
self.process.daemon = True # must set before start()
self.process.start()
with replace_environment(self.env):
self.process = multiprocessing.Process(
target=_writer_daemon,
args=(
input_fd,
read_fd,
self.write_fd,
self.echo,
self.log_file,
child_pipe,
self.filter_fn,
),
)
self.process.daemon = True # must set before start()
self.process.start()
finally:
if input_fd:
@@ -706,7 +729,10 @@ class winlog:
Does not support the use of 'v' toggling as nixlog does.
"""
def __init__(self, file_like=None, echo=False, debug=0, buffer=False, filter_fn=None):
def __init__(
self, file_like=None, echo=False, debug=0, buffer=False, env=None, filter_fn=None
):
self.env = env
self.debug = debug
self.echo = echo
self.logfile = file_like
@@ -763,10 +789,11 @@ def background_reader(reader, echo_writer, _kill):
reader.close()
self._active = True
self._thread = Thread(
target=background_reader, args=(self.reader, self.echo_writer, self._kill)
)
self._thread.start()
with replace_environment(self.env):
self._thread = Thread(
target=background_reader, args=(self.reader, self.echo_writer, self._kill)
)
self._thread.start()
return self
def __exit__(self, exc_type, exc_val, exc_tb):

View File

@@ -10,7 +10,7 @@
import spack.util.git
#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
__version__ = "1.0.0.dev0"
__version__ = "0.24.0.dev0"
spack_version = __version__

View File

@@ -1010,7 +1010,7 @@ def _issues_in_depends_on_directive(pkgs, error_cls):
for dep_name, dep in deps_by_name.items():
def check_virtual_with_variants(spec, msg):
if not spack.repo.PATH.is_virtual(spec.name) or not spec.variants:
if not spec.virtual or not spec.variants:
return
error = error_cls(
f"{pkg_name}: {msg}",

View File

@@ -923,7 +923,7 @@ class FileTypes:
UNKNOWN = 2
NOT_ISO8859_1_TEXT = re.compile(b"[\x00\x7f-\x9f]")
NOT_ISO8859_1_TEXT = re.compile(b"[\x00\x7F-\x9F]")
def file_type(f: IO[bytes]) -> int:
@@ -2529,10 +2529,10 @@ def install_root_node(
allow_missing: when true, allows installing a node with missing dependencies
"""
# Early termination
if spec.external or not spec.concrete:
warnings.warn("Skipping external or abstract spec {0}".format(spec.format()))
if spec.external or spec.virtual:
warnings.warn("Skipping external or virtual package {0}".format(spec.format()))
return
elif spec.installed and not force:
elif spec.concrete and spec.installed and not force:
warnings.warn("Package for spec {0} already installed.".format(spec.format()))
return

View File

@@ -292,12 +292,7 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
# Install the spec that should make the module importable
with spack.config.override(self.mirror_scope):
PackageInstaller(
[concrete_spec.package],
fail_fast=True,
package_use_cache=False,
dependencies_use_cache=False,
).install()
PackageInstaller([concrete_spec.package], fail_fast=True).install()
if _try_import_from_store(module, query_spec=concrete_spec, query_info=info):
self.last_search = info
@@ -367,7 +362,6 @@ def ensure_module_importable_or_raise(module: str, abstract_spec: Optional[str]
for current_config in bootstrapping_sources():
if not source_is_enabled(current_config):
continue
with exception_handler.forward(current_config["name"], Exception):
if create_bootstrapper(current_config).try_import(module, abstract_spec):
return

View File

@@ -12,7 +12,6 @@
import spack.phase_callbacks
import spack.spec
import spack.util.prefix
from spack.directives import depends_on
from .cmake import CMakeBuilder, CMakePackage
@@ -372,10 +371,6 @@ class CachedCMakePackage(CMakePackage):
CMakeBuilder = CachedCMakeBuilder
# These dependencies are assumed in the builder
depends_on("c", type="build")
depends_on("cxx", type="build")
def flag_handler(self, name, flags):
if name in ("cflags", "cxxflags", "cppflags", "fflags"):
return None, None, None # handled in the cmake cache

View File

@@ -70,16 +70,10 @@ def build_directory(self):
"""Return the directory containing the main Cargo.toml."""
return self.pkg.stage.source_path
@property
def std_build_args(self):
"""Standard arguments for ``cargo build`` provided as a property for
convenience of package writers."""
return ["-j", str(self.pkg.module.make_jobs)]
@property
def build_args(self):
"""Arguments for ``cargo build``."""
return []
return ["-j", str(self.pkg.module.make_jobs)]
@property
def check_args(self):
@@ -94,9 +88,7 @@ def build(
) -> None:
"""Runs ``cargo install`` in the source directory"""
with fs.working_dir(self.build_directory):
pkg.module.cargo(
"install", "--root", "out", "--path", ".", *self.std_build_args, *self.build_args
)
pkg.module.cargo("install", "--root", "out", "--path", ".", *self.build_args)
def install(
self, pkg: CargoPackage, spec: spack.spec.Spec, prefix: spack.util.prefix.Prefix

View File

@@ -11,7 +11,6 @@
from typing import Any, List, Optional, Tuple
import llnl.util.filesystem as fs
from llnl.util import tty
from llnl.util.lang import stable_partition
import spack.builder
@@ -459,23 +458,19 @@ def cmake(
) -> None:
"""Runs ``cmake`` in the build directory"""
if spec.is_develop:
# skip cmake phase if it is an incremental develop build
# Determine the files that will re-run CMake that are generated from a successful
# configure step based on state
primary_generator = _extract_primary_generator(self.generator)
# skip cmake phase if it is an incremental develop build
# These are the files that will re-run CMake that are generated from a successful
# configure step
primary_generator = _extract_primary_generator(self.generator)
if primary_generator == "Unix Makefiles":
configure_artifact = "Makefile"
if primary_generator == "Ninja":
configure_artifact = "ninja.build"
elif primary_generator == "Ninja":
configure_artifact = "ninja.build"
if os.path.isfile(os.path.join(self.build_directory, configure_artifact)):
tty.msg(
"Incremental build criteria satisfied."
"Skipping CMake configure step. To force configuration run"
f" `spack clean {pkg.name}`"
)
return
if spec.is_develop and os.path.isfile(
os.path.join(self.build_directory, configure_artifact)
):
return
options = self.std_cmake_args
options += self.cmake_args()

View File

@@ -15,7 +15,7 @@ class CudaPackage(PackageBase):
"""Auxiliary class which contains CUDA variant, dependencies and conflicts
and is meant to unify and facilitate its usage.
Maintainers: ax3l, Rombur, davidbeckingsale, pauleonix
Maintainers: ax3l, Rombur, davidbeckingsale
"""
# https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#gpu-feature-list
@@ -47,12 +47,6 @@ class CudaPackage(PackageBase):
"89",
"90",
"90a",
"100",
"100a",
"101",
"101a",
"120",
"120a",
)
# FIXME: keep cuda and cuda_arch separate to make usage easier until
@@ -105,56 +99,39 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
# CUDA version vs Architecture
# https://en.wikipedia.org/wiki/CUDA#GPUs_supported
# https://docs.nvidia.com/cuda/cuda-toolkit-release-notes/index.html#deprecated-features
# Tesla support:
depends_on("cuda@:6.0", when="cuda_arch=10")
depends_on("cuda@:6.5", when="cuda_arch=11")
depends_on("cuda@2.1:6.5", when="cuda_arch=12")
depends_on("cuda@2.1:6.5", when="cuda_arch=13")
# Fermi support:
depends_on("cuda@3.0:8.0", when="cuda_arch=20")
depends_on("cuda@3.2:8.0", when="cuda_arch=21")
# Kepler support:
depends_on("cuda@5.0:10.2", when="cuda_arch=30")
depends_on("cuda@5.0:10.2", when="cuda_arch=32")
depends_on("cuda@5.0:11.8", when="cuda_arch=35")
depends_on("cuda@6.5:11.8", when="cuda_arch=37")
# Maxwell support:
depends_on("cuda@6.0:", when="cuda_arch=50")
depends_on("cuda@6.5:", when="cuda_arch=52")
depends_on("cuda@6.5:", when="cuda_arch=53")
# Pascal support:
depends_on("cuda@8.0:", when="cuda_arch=60")
depends_on("cuda@8.0:", when="cuda_arch=61")
depends_on("cuda@8.0:", when="cuda_arch=62")
# Volta support:
depends_on("cuda@9.0:", when="cuda_arch=70")
# Turing support:
depends_on("cuda@9.0:", when="cuda_arch=72")
depends_on("cuda@10.0:", when="cuda_arch=75")
# Ampere support:
depends_on("cuda@11.0:", when="cuda_arch=80")
depends_on("cuda@11.1:", when="cuda_arch=86")
depends_on("cuda@11.4:", when="cuda_arch=87")
# Ada support:
depends_on("cuda@11.8:", when="cuda_arch=89")
# Hopper support:
depends_on("cuda@12.0:", when="cuda_arch=90")
depends_on("cuda@12.0:", when="cuda_arch=90a")
# Blackwell support:
depends_on("cuda@12.8:", when="cuda_arch=100")
depends_on("cuda@12.8:", when="cuda_arch=100a")
depends_on("cuda@12.8:", when="cuda_arch=101")
depends_on("cuda@12.8:", when="cuda_arch=101a")
depends_on("cuda@12.8:", when="cuda_arch=120")
depends_on("cuda@12.8:", when="cuda_arch=120a")
# From the NVIDIA install guide we know of conflicts for particular
# platforms (linux, darwin), architectures (x86, powerpc) and compilers
# (gcc, clang). We don't restrict %gcc and %clang conflicts to
@@ -186,7 +163,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
conflicts("%gcc@14:", when="+cuda ^cuda@:12.6")
conflicts("%gcc@15:", when="+cuda ^cuda@:12.8")
conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
@@ -195,7 +171,6 @@ def compute_capabilities(arch_list: Iterable[str]) -> List[str]:
conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
conflicts("%clang@18:", when="+cuda ^cuda@:12.5")
conflicts("%clang@19:", when="+cuda ^cuda@:12.6")
conflicts("%clang@20:", when="+cuda ^cuda@:12.8")
# https://gist.github.com/ax3l/9489132#gistcomment-3860114
conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")

View File

@@ -48,9 +48,6 @@ class MesonPackage(spack.package_base.PackageBase):
variant("strip", default=False, description="Strip targets on install")
depends_on("meson", type="build")
depends_on("ninja", type="build")
# Meson uses pkg-config for dependency detection, and this dependency is
# often overlooked by packages that use meson as a build system.
depends_on("pkgconfig", type="build")
# Python detection in meson requires distutils to be importable, but distutils no longer
# exists in Python 3.12. In Spack, we can't use setuptools as distutils replacement,
# because the distutils-precedence.pth startup file that setuptools ships with is not run

View File

@@ -142,7 +142,7 @@ def setup_run_environment(self, env):
$ source {prefix}/{component}/{version}/env/vars.sh
"""
# Only if environment modifications are desired (default is +envmods)
if "+envmods" in self.spec:
if "~envmods" not in self.spec:
env.extend(
EnvironmentModifications.from_sourcing_file(
self.component_prefix.env.join("vars.sh"), *self.env_script_args

View File

@@ -14,9 +14,8 @@
import zipfile
from collections import namedtuple
from typing import Callable, Dict, List, Set
from urllib.request import Request
from urllib.request import HTTPHandler, Request, build_opener
import llnl.path
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.tty.color import cescape, colorize
@@ -63,8 +62,6 @@
PushResult = namedtuple("PushResult", "success url")
urlopen = web_util.urlopen # alias for mocking in tests
def get_change_revisions():
"""If this is a git repo get the revisions to use when checking
@@ -84,9 +81,6 @@ def get_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
whether or not the stack was changed. Returns True if the environment
manifest changed between the provided revisions (or additionally if the
`.gitlab-ci.yml` file itself changed). Returns False otherwise."""
# git returns posix paths always, normalize input to be comptaible
# with that
env_path = llnl.path.convert_to_posix_path(env_path)
git = spack.util.git.git()
if git:
with fs.working_dir(spack.paths.prefix):
@@ -616,7 +610,7 @@ def copy_test_logs_to_artifacts(test_stage, job_test_dir):
copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir)
def download_and_extract_artifacts(url, work_dir) -> str:
def download_and_extract_artifacts(url, work_dir):
"""Look for gitlab artifacts.zip at the given url, and attempt to download
and extract the contents into the given work_dir
@@ -624,10 +618,6 @@ def download_and_extract_artifacts(url, work_dir) -> str:
url (str): Complete url to artifacts.zip file
work_dir (str): Path to destination where artifacts should be extracted
Output:
Artifacts root path relative to the archive root
"""
tty.msg(f"Fetching artifacts from: {url}")
@@ -637,33 +627,31 @@ def download_and_extract_artifacts(url, work_dir) -> str:
if token:
headers["PRIVATE-TOKEN"] = token
request = Request(url, headers=headers, method="GET")
opener = build_opener(HTTPHandler)
request = Request(url, headers=headers)
request.get_method = lambda: "GET"
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
response_code = response.getcode()
if response_code != 200:
msg = f"Error response code ({response_code}) in reproduce_ci_job"
raise SpackError(msg)
artifacts_zip_path = os.path.join(work_dir, "artifacts.zip")
os.makedirs(work_dir, exist_ok=True)
try:
response = urlopen(request, timeout=SPACK_CDASH_TIMEOUT)
with open(artifacts_zip_path, "wb") as out_file:
shutil.copyfileobj(response, out_file)
if not os.path.exists(work_dir):
os.makedirs(work_dir)
with zipfile.ZipFile(artifacts_zip_path) as zip_file:
zip_file.extractall(work_dir)
# Get the artifact root
artifact_root = ""
for f in zip_file.filelist:
if "spack.lock" in f.filename:
artifact_root = os.path.dirname(os.path.dirname(f.filename))
break
except OSError as e:
raise SpackError(f"Error fetching artifacts: {e}")
finally:
try:
os.remove(artifacts_zip_path)
except FileNotFoundError:
# If the file doesn't exist we are already raising
pass
with open(artifacts_zip_path, "wb") as out_file:
shutil.copyfileobj(response, out_file)
return artifact_root
zip_file = zipfile.ZipFile(artifacts_zip_path)
zip_file.extractall(work_dir)
zip_file.close()
os.remove(artifacts_zip_path)
def get_spack_info():
@@ -777,7 +765,7 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
return True
def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head):
def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime):
"""Given a url to gitlab artifacts.zip from a failed 'spack ci rebuild' job,
attempt to setup an environment in which the failure can be reproduced
locally. This entails the following:
@@ -791,11 +779,8 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head)
commands to run to reproduce the build once inside the container.
"""
work_dir = os.path.realpath(work_dir)
if os.path.exists(work_dir) and os.listdir(work_dir):
raise SpackError(f"Cannot run reproducer in non-emptry working dir:\n {work_dir}")
platform_script_ext = "ps1" if IS_WINDOWS else "sh"
artifact_root = download_and_extract_artifacts(url, work_dir)
download_and_extract_artifacts(url, work_dir)
gpg_path = None
if gpg_url:
@@ -857,9 +842,6 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head)
with open(repro_file, encoding="utf-8") as fd:
repro_details = json.load(fd)
spec_file = fs.find(work_dir, repro_details["job_spec_json"])[0]
reproducer_spec = spack.spec.Spec.from_specfile(spec_file)
repro_dir = os.path.dirname(repro_file)
rel_repro_dir = repro_dir.replace(work_dir, "").lstrip(os.path.sep)
@@ -920,20 +902,17 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head)
commit_regex = re.compile(r"commit\s+([^\s]+)")
merge_commit_regex = re.compile(r"Merge\s+([^\s]+)\s+into\s+([^\s]+)")
if use_local_head:
commit_1 = "HEAD"
# Try the more specific merge commit regex first
m = merge_commit_regex.search(spack_info)
if m:
# This was a merge commit and we captured the parents
commit_1 = m.group(1)
commit_2 = m.group(2)
else:
# Try the more specific merge commit regex first
m = merge_commit_regex.search(spack_info)
# Not a merge commit, just get the commit sha
m = commit_regex.search(spack_info)
if m:
# This was a merge commit and we captured the parents
commit_1 = m.group(1)
commit_2 = m.group(2)
else:
# Not a merge commit, just get the commit sha
m = commit_regex.search(spack_info)
if m:
commit_1 = m.group(1)
setup_result = False
if commit_1:
@@ -1008,8 +987,6 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head)
"entrypoint", entrypoint_script, work_dir, run=False, exit_on_failure=False
)
# Attempt to create a unique name for the reproducer container
container_suffix = "_" + reproducer_spec.dag_hash() if reproducer_spec else ""
docker_command = [
runtime,
"run",
@@ -1017,14 +994,14 @@ def reproduce_ci_job(url, work_dir, autostart, gpg_url, runtime, use_local_head)
"-t",
"--rm",
"--name",
f"spack_reproducer{container_suffix}",
"spack_reproducer",
"-v",
":".join([work_dir, mounted_workdir, "Z"]),
"-v",
":".join(
[
os.path.join(work_dir, artifact_root),
os.path.join(mount_as_dir, artifact_root),
os.path.join(work_dir, "jobs_scratch_dir"),
os.path.join(mount_as_dir, "jobs_scratch_dir"),
"Z",
]
),

View File

@@ -4,7 +4,7 @@
import re
import sys
from typing import Dict, Optional, Tuple
from typing import Dict, Optional
import llnl.string
import llnl.util.lang
@@ -181,11 +181,7 @@ def checksum(parser, args):
print()
if args.add_to_package:
path = spack.repo.PATH.filename_for_package_name(pkg.name)
num_versions_added = add_versions_to_pkg(path, version_lines)
tty.msg(f"Added {num_versions_added} new versions to {pkg.name} in {path}")
if not args.batch and sys.stdin.isatty():
editor(path)
add_versions_to_package(pkg, version_lines, args.batch)
def print_checksum_status(pkg: PackageBase, version_hashes: dict):
@@ -231,9 +227,20 @@ def print_checksum_status(pkg: PackageBase, version_hashes: dict):
tty.die("Invalid checksums found.")
def _update_version_statements(package_src: str, version_lines: str) -> Tuple[int, str]:
"""Returns a tuple of number of versions added and the package's modified contents."""
def add_versions_to_package(pkg: PackageBase, version_lines: str, is_batch: bool):
"""
Add checksumed versions to a package's instructions and open a user's
editor so they may double check the work of the function.
Args:
pkg (spack.package_base.PackageBase): A package class for a given package in Spack.
version_lines (str): A string of rendered version lines.
"""
# Get filename and path for package
filename = spack.repo.PATH.filename_for_package_name(pkg.name)
num_versions_added = 0
version_statement_re = re.compile(r"([\t ]+version\([^\)]*\))")
version_re = re.compile(r'[\t ]+version\(\s*"([^"]+)"[^\)]*\)')
@@ -245,34 +252,33 @@ def _update_version_statements(package_src: str, version_lines: str) -> Tuple[in
if match:
new_versions.append((Version(match.group(1)), ver_line))
split_contents = version_statement_re.split(package_src)
with open(filename, "r+", encoding="utf-8") as f:
contents = f.read()
split_contents = version_statement_re.split(contents)
for i, subsection in enumerate(split_contents):
# If there are no more versions to add we should exit
if len(new_versions) <= 0:
break
for i, subsection in enumerate(split_contents):
# If there are no more versions to add we should exit
if len(new_versions) <= 0:
break
# Check if the section contains a version
contents_version = version_re.match(subsection)
if contents_version is not None:
parsed_version = Version(contents_version.group(1))
# Check if the section contains a version
contents_version = version_re.match(subsection)
if contents_version is not None:
parsed_version = Version(contents_version.group(1))
if parsed_version < new_versions[0][0]:
split_contents[i:i] = [new_versions.pop(0)[1], " # FIXME", "\n"]
num_versions_added += 1
if parsed_version < new_versions[0][0]:
split_contents[i:i] = [new_versions.pop(0)[1], " # FIXME", "\n"]
num_versions_added += 1
elif parsed_version == new_versions[0][0]:
new_versions.pop(0)
elif parsed_version == new_versions[0][0]:
new_versions.pop(0)
return num_versions_added, "".join(split_contents)
# Seek back to the start of the file so we can rewrite the file contents.
f.seek(0)
f.writelines("".join(split_contents))
tty.msg(f"Added {num_versions_added} new versions to {pkg.name}")
tty.msg(f"Open {filename} to review the additions.")
def add_versions_to_pkg(path: str, version_lines: str) -> int:
"""Add new versions to a package.py file. Returns the number of versions added."""
with open(path, "r", encoding="utf-8") as f:
package_src = f.read()
num_versions_added, package_src = _update_version_statements(package_src, version_lines)
if num_versions_added > 0:
with open(path, "w", encoding="utf-8") as f:
f.write(package_src)
return num_versions_added
if sys.stdout.isatty() and not is_batch:
editor(filename)

View File

@@ -176,11 +176,6 @@ def setup_parser(subparser):
reproduce.add_argument(
"-s", "--autostart", help="Run docker reproducer automatically", action="store_true"
)
reproduce.add_argument(
"--use-local-head",
help="Use the HEAD of the local Spack instead of reproducing a commit",
action="store_true",
)
gpg_group = reproduce.add_mutually_exclusive_group(required=False)
gpg_group.add_argument(
"--gpg-file", help="Path to public GPG key for validating binary cache installs"
@@ -613,12 +608,7 @@ def ci_reproduce(args):
gpg_key_url = None
return spack_ci.reproduce_ci_job(
args.job_url,
args.working_dir,
args.autostart,
gpg_key_url,
args.runtime,
args.use_local_head,
args.job_url, args.working_dir, args.autostart, gpg_key_url, args.runtime
)

View File

@@ -2,11 +2,23 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import platform
import re
import sys
from datetime import datetime
from glob import glob
import llnl.util.tty as tty
from llnl.util.filesystem import working_dir
import spack
import spack.paths
import spack.platforms
import spack.spec
import spack.store
import spack.util.git
from spack.util.executable import which
description = "debugging commands for troubleshooting Spack"
section = "developer"
@@ -15,9 +27,63 @@
def setup_parser(subparser):
sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="debug_command")
sp.add_parser("create-db-tarball", help="create a tarball of Spack's installation metadata")
sp.add_parser("report", help="print information useful for bug reports")
def _debug_tarball_suffix():
now = datetime.now()
suffix = now.strftime("%Y-%m-%d-%H%M%S")
git = spack.util.git.git()
if not git:
return "nobranch-nogit-%s" % suffix
with working_dir(spack.paths.prefix):
if not os.path.isdir(".git"):
return "nobranch.nogit.%s" % suffix
# Get symbolic branch name and strip any special chars (mainly '/')
symbolic = git("rev-parse", "--abbrev-ref", "--short", "HEAD", output=str).strip()
symbolic = re.sub(r"[^\w.-]", "-", symbolic)
# Get the commit hash too.
commit = git("rev-parse", "--short", "HEAD", output=str).strip()
if symbolic == commit:
return "nobranch.%s.%s" % (commit, suffix)
else:
return "%s.%s.%s" % (symbolic, commit, suffix)
def create_db_tarball(args):
tar = which("tar")
tarball_name = "spack-db.%s.tar.gz" % _debug_tarball_suffix()
tarball_path = os.path.abspath(tarball_name)
base = os.path.basename(str(spack.store.STORE.root))
transform_args = []
# Currently --transform and -s are not supported by Windows native tar
if "GNU" in tar("--version", output=str):
transform_args = ["--transform", "s/^%s/%s/" % (base, tarball_name)]
elif sys.platform != "win32":
transform_args = ["-s", "/^%s/%s/" % (base, tarball_name)]
wd = os.path.dirname(str(spack.store.STORE.root))
with working_dir(wd):
files = [spack.store.STORE.db._index_path]
files += glob("%s/*/*/*/.spack/spec.json" % base)
files += glob("%s/*/*/*/.spack/spec.yaml" % base)
files = [os.path.relpath(f) for f in files]
args = ["-czf", tarball_path]
args += transform_args
args += files
tar(*args)
tty.msg("Created %s" % tarball_name)
def report(args):
host_platform = spack.platforms.host()
host_os = host_platform.default_operating_system()
@@ -29,5 +95,5 @@ def report(args):
def debug(parser, args):
if args.debug_command == "report":
report(args)
action = {"create-db-tarball": create_db_tarball, "report": report}
action[args.debug_command](args)

View File

@@ -9,9 +9,9 @@
import spack.cmd
import spack.environment as ev
import spack.package_base
import spack.store
from spack.cmd.common import arguments
from spack.solver.input_analysis import create_graph_analyzer
description = "show dependencies of a package"
section = "basic"
@@ -68,17 +68,15 @@ def dependencies(parser, args):
else:
spec = specs[0]
dependencies, virtuals, _ = create_graph_analyzer().possible_dependencies(
dependencies = spack.package_base.possible_dependencies(
spec,
transitive=args.transitive,
expand_virtuals=args.expand_virtuals,
allowed_deps=args.deptype,
depflag=args.deptype,
)
if not args.expand_virtuals:
dependencies.update(virtuals)
if spec.name in dependencies:
dependencies.remove(spec.name)
del dependencies[spec.name]
if dependencies:
colify(sorted(dependencies))

View File

@@ -125,7 +125,7 @@ def develop(parser, args):
version = spec.versions.concrete_range_as_version
if not version:
# look up the maximum version so infintiy versions are preferred for develop
version = max(spack.repo.PATH.get_pkg_class(spec.fullname).versions.keys())
version = max(spec.package_class.versions.keys())
tty.msg(f"Defaulting to highest version: {spec.name}@{version}")
spec.versions = spack.version.VersionList([version])

View File

@@ -545,7 +545,7 @@ def _not_license_excluded(self, x):
package does not explicitly forbid redistributing source."""
if self.private:
return True
elif spack.repo.PATH.get_pkg_class(x.fullname).redistribute_source(x):
elif x.package_class.redistribute_source(x):
return True
else:
tty.debug(

View File

@@ -41,11 +41,7 @@ def providers(parser, args):
specs = spack.cmd.parse_specs(args.virtual_package)
# Check prerequisites
non_virtual = [
str(s)
for s in specs
if not spack.repo.PATH.is_virtual(s.name) or s.name not in valid_virtuals
]
non_virtual = [str(s) for s in specs if not s.virtual or s.name not in valid_virtuals]
if non_virtual:
msg = "non-virtual specs cannot be part of the query "
msg += "[{0}]\n".format(", ".join(non_virtual))

View File

@@ -6,7 +6,7 @@
import os
import re
import sys
from itertools import islice, zip_longest
from itertools import zip_longest
from typing import Dict, List, Optional
import llnl.util.tty as tty
@@ -423,8 +423,7 @@ def _run_import_check(
continue
for m in is_abs_import.finditer(contents):
# Find at most two occurences: the first is the import itself, the second is its usage.
if len(list(islice(re.finditer(rf"{re.escape(m.group(1))}(?!\w)", contents), 2))) == 1:
if contents.count(m.group(1)) == 1:
to_remove.append(m.group(0))
exit_code = 1
print(f"{pretty_path}: redundant import: {m.group(1)}", file=out)
@@ -439,7 +438,7 @@ def _run_import_check(
module = _module_part(root, m.group(0))
if not module or module in to_add:
continue
if re.search(rf"import {re.escape(module)}(?!\w|\.)", contents):
if re.search(rf"import {re.escape(module)}\b(?!\.)", contents):
continue
to_add.add(module)
exit_code = 1

View File

@@ -252,9 +252,7 @@ def has_test_and_tags(pkg_class):
hashes = env.all_hashes() if env else None
specs = spack.store.STORE.db.query(hashes=hashes)
specs = list(
filter(lambda s: has_test_and_tags(spack.repo.PATH.get_pkg_class(s.fullname)), specs)
)
specs = list(filter(lambda s: has_test_and_tags(s.package_class), specs))
spack.cmd.display_specs(specs, long=True)

View File

@@ -216,7 +216,7 @@ def unit_test(parser, args, unknown_args):
# Ensure clingo is available before switching to the
# mock configuration used by unit tests
with spack.bootstrap.ensure_bootstrap_configuration():
spack.bootstrap.ensure_clingo_importable_or_raise()
spack.bootstrap.ensure_core_dependencies()
if pytest is None:
spack.bootstrap.ensure_environment_dependencies()
import pytest

View File

@@ -220,7 +220,7 @@ def concretize_one(spec: Union[str, Spec], tests: TestsType = False) -> Spec:
opt, i, answer = min(result.answers)
name = spec.name
# TODO: Consolidate this code with similar code in solve.py
if spack.repo.PATH.is_virtual(spec.name):
if spec.virtual:
providers = [s.name for s in answer.values() if s.package.provides(name)]
name = providers[0]

View File

@@ -57,7 +57,7 @@ def validate(configuration_file):
# Set the default value of the concretization strategy to unify and
# warn if the user explicitly set another value
env_dict.setdefault("concretizer", {"unify": True})
if env_dict["concretizer"]["unify"] is not True:
if not env_dict["concretizer"]["unify"] is True:
warnings.warn(
'"concretizer:unify" is not set to "true", which means the '
"generated image may contain different variants of the same "

View File

@@ -41,8 +41,6 @@
Union,
)
import spack.repo
try:
import uuid
@@ -1558,12 +1556,7 @@ def _query(
# If we did fine something, the query spec can't be virtual b/c we matched an actual
# package installation, so skip the virtual check entirely. If we *didn't* find anything,
# check all the deferred specs *if* the query is virtual.
if (
not results
and query_spec is not None
and deferred
and spack.repo.PATH.is_virtual(query_spec.name)
):
if not results and query_spec is not None and deferred and query_spec.virtual:
results = [spec for spec in deferred if spec.satisfies(query_spec)]
return results

View File

@@ -310,7 +310,7 @@ def find_windows_kit_roots() -> List[str]:
@staticmethod
def find_windows_kit_bin_paths(
kit_base: Union[Optional[str], Optional[list]] = None,
kit_base: Union[Optional[str], Optional[list]] = None
) -> List[str]:
"""Returns Windows kit bin directory per version"""
kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base
@@ -325,7 +325,7 @@ def find_windows_kit_bin_paths(
@staticmethod
def find_windows_kit_lib_paths(
kit_base: Union[Optional[str], Optional[list]] = None,
kit_base: Union[Optional[str], Optional[list]] = None
) -> List[str]:
"""Returns Windows kit lib directory per version"""
kit_base = WindowsKitExternalPaths.find_windows_kit_roots() if not kit_base else kit_base

View File

@@ -243,7 +243,7 @@ def prefix_from_path(self, *, path: str) -> str:
raise NotImplementedError("must be implemented by derived classes")
def detect_specs(
self, *, pkg: Type["spack.package_base.PackageBase"], paths: Iterable[str]
self, *, pkg: Type["spack.package_base.PackageBase"], paths: List[str]
) -> List["spack.spec.Spec"]:
"""Given a list of files matching the search patterns, returns a list of detected specs.
@@ -259,8 +259,6 @@ def detect_specs(
)
return []
from spack.repo import PATH as repo_path
result = []
for candidate_path, items_in_prefix in _group_by_prefix(
llnl.util.lang.dedupe(paths)
@@ -307,10 +305,7 @@ def detect_specs(
resolved_specs[spec] = candidate_path
try:
# Validate the spec calling a package specific method
pkg_cls = repo_path.get_pkg_class(spec.name)
validate_fn = getattr(pkg_cls, "validate_detected_spec", lambda x, y: None)
validate_fn(spec, spec.extra_attributes)
spec.validate_detection()
except Exception as e:
msg = (
f'"{spec}" has been detected on the system but will '

View File

@@ -568,7 +568,7 @@ def patch(
"""
def _execute_patch(
pkg_or_dep: Union[Type[spack.package_base.PackageBase], Dependency],
pkg_or_dep: Union[Type[spack.package_base.PackageBase], Dependency]
) -> None:
pkg = pkg_or_dep.pkg if isinstance(pkg_or_dep, Dependency) else pkg_or_dep

View File

@@ -9,7 +9,6 @@
import shutil
import stat
import sys
import tempfile
from typing import Callable, Dict, Optional
from typing_extensions import Literal
@@ -709,10 +708,7 @@ def add_specs(self, *specs: spack.spec.Spec) -> None:
def skip_list(file):
return os.path.basename(file) == spack.store.STORE.layout.metadata_dir
# Determine if the root is on a case-insensitive filesystem
normalize_paths = is_folder_on_case_insensitive_filesystem(self._root)
visitor = SourceMergeVisitor(ignore=skip_list, normalize_paths=normalize_paths)
visitor = SourceMergeVisitor(ignore=skip_list)
# Gather all the directories to be made and files to be linked
for spec in specs:
@@ -888,8 +884,3 @@ def get_dependencies(specs):
class ConflictingProjectionsError(SpackError):
"""Raised when a view has a projections file and is given one manually."""
def is_folder_on_case_insensitive_filesystem(path: str) -> bool:
with tempfile.NamedTemporaryFile(dir=path, prefix=".sentinel") as sentinel:
return os.path.exists(os.path.join(path, os.path.basename(sentinel.name).upper()))

View File

@@ -42,10 +42,10 @@
import llnl.util.tty.color
import spack.deptypes as dt
import spack.repo
import spack.spec
import spack.tengine
import spack.traverse
from spack.solver.input_analysis import create_graph_analyzer
def find(seq, predicate):
@@ -537,11 +537,10 @@ def edge_entry(self, edge):
def _static_edges(specs, depflag):
for spec in specs:
*_, edges = create_graph_analyzer().possible_dependencies(
spec.name, expand_virtuals=True, allowed_deps=depflag
)
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
possible = pkg_cls.possible_dependencies(expand_virtuals=True, depflag=depflag)
for parent_name, dependencies in edges.items():
for parent_name, dependencies in possible.items():
for dependency_name in dependencies:
yield spack.spec.DependencySpec(
spack.spec.Spec(parent_name),

View File

@@ -66,8 +66,6 @@
"libudev.so.*",
# cuda driver
"libcuda.so.*",
# intel-oneapi-runtime
"libur_loader.so.*",
]

View File

@@ -566,7 +566,7 @@ def copy_test_files(pkg: Pb, test_spec: spack.spec.Spec):
# copy test data into test stage data dir
try:
pkg_cls = spack.repo.PATH.get_pkg_class(test_spec.fullname)
pkg_cls = test_spec.package_class
except spack.repo.UnknownPackageError:
tty.debug(f"{test_spec.name}: skipping test data copy since no package class found")
return
@@ -623,7 +623,7 @@ def test_functions(
vpkgs = virtuals(pkg)
for vname in vpkgs:
try:
classes.append(spack.repo.PATH.get_pkg_class(vname))
classes.append((Spec(vname)).package_class)
except spack.repo.UnknownPackageError:
tty.debug(f"{vname}: virtual does not appear to have a package file")
@@ -668,7 +668,7 @@ def process_test_parts(pkg: Pb, test_specs: List[spack.spec.Spec], verbose: bool
# grab test functions associated with the spec, which may be virtual
try:
tests = test_functions(spack.repo.PATH.get_pkg_class(spec.fullname))
tests = test_functions(spec.package_class)
except spack.repo.UnknownPackageError:
# Some virtuals don't have a package so we don't want to report
# them as not having tests when that isn't appropriate.

View File

@@ -814,7 +814,7 @@ def get_depflags(self, pkg: "spack.package_base.PackageBase") -> int:
# Include build dependencies if pkg is going to be built from sources, or
# if build deps are explicitly requested.
if include_build_deps or not (
cache_only or pkg.spec.installed and pkg.spec.dag_hash() not in self.overwrite
cache_only or pkg.spec.installed and not pkg.spec.dag_hash() in self.overwrite
):
depflag |= dt.BUILD
if self.run_tests(pkg):
@@ -2436,7 +2436,11 @@ def _real_install(self) -> None:
# DEBUGGING TIP - to debug this section, insert an IPython
# embed here, and run the sections below without log capture
log_contextmanager = log_output(
log_file, self.echo, True, filter_fn=self.filter_fn
log_file,
self.echo,
True,
env=self.unmodified_env,
filter_fn=self.filter_fn,
)
with log_contextmanager as logger:

View File

@@ -64,7 +64,7 @@ def from_local_path(path: str):
@staticmethod
def from_url(url: str):
"""Create an anonymous mirror by URL. This method validates the URL."""
if urllib.parse.urlparse(url).scheme not in supported_url_schemes:
if not urllib.parse.urlparse(url).scheme in supported_url_schemes:
raise ValueError(
f'"{url}" is not a valid mirror URL. '
f"Scheme must be one of {supported_url_schemes}."

View File

@@ -330,17 +330,18 @@ class BaseConfiguration:
default_projections = {"all": "{name}/{version}-{compiler.name}-{compiler.version}"}
def __init__(self, spec: spack.spec.Spec, module_set_name: str, explicit: bool) -> None:
# Module where type(self) is defined
m = inspect.getmodule(self)
assert m is not None # make mypy happy
self.module = m
# Spec for which we want to generate a module file
self.spec = spec
self.name = module_set_name
self.explicit = explicit
# Dictionary of configuration options that should be applied to the spec
# Dictionary of configuration options that should be applied
# to the spec
self.conf = merge_config_rules(self.module.configuration(self.name), self.spec)
@property
def module(self):
return inspect.getmodule(self)
@property
def projections(self):
"""Projection from specs to module names"""
@@ -774,6 +775,10 @@ def __init__(
) -> None:
self.spec = spec
# This class is meant to be derived. Get the module of the
# actual writer.
self.module = inspect.getmodule(self)
assert self.module is not None # make mypy happy
m = self.module
# Create the triplet of configuration/layout/context
@@ -811,10 +816,6 @@ def __init__(
name = type(self).__name__
raise ModulercHeaderNotDefined(msg.format(name))
@property
def module(self):
return inspect.getmodule(self)
def _get_template(self):
"""Gets the template that will be rendered for this spec."""
# Get templates and put them in the order of importance:

View File

@@ -209,7 +209,7 @@ def provides(self):
# All the other tokens in the hierarchy must be virtual dependencies
for x in self.hierarchy_tokens:
if self.spec.package.provides(x):
provides[x] = self.spec
provides[x] = self.spec[x]
return provides
@property

View File

@@ -383,7 +383,6 @@ def create_opener():
"""Create an opener that can handle OCI authentication."""
opener = urllib.request.OpenerDirector()
for handler in [
urllib.request.ProxyHandler(),
urllib.request.UnknownHandler(),
urllib.request.HTTPSHandler(context=spack.util.web.ssl_create_default_context()),
spack.util.web.SpackHTTPDefaultErrorHandler(),

View File

@@ -2,7 +2,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
# flake8: noqa: F401, E402
# flake8: noqa: F401
"""spack.package defines the public API for Spack packages, by re-exporting useful symbols from
other modules. Packages should import this module, instead of importing from spack.* directly
to ensure forward compatibility with future versions of Spack."""

View File

@@ -22,6 +22,7 @@
import textwrap
import time
import traceback
import typing
from typing import Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Type, TypeVar, Union
from typing_extensions import Literal
@@ -696,6 +697,9 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
#: Verbosity level, preserved across installs.
_verbose = None
#: index of patches by sha256 sum, built lazily
_patches_by_hash = None
#: Package homepage where users can find more information about the package
homepage: Optional[str] = None
@@ -709,6 +713,19 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
#: Do not include @ here in order not to unnecessarily ping the users.
maintainers: List[str] = []
#: List of attributes to be excluded from a package's hash.
metadata_attrs = [
"homepage",
"url",
"urls",
"list_url",
"extendable",
"parallel",
"make_jobs",
"maintainers",
"tags",
]
#: Set to ``True`` to indicate the stand-alone test requires a compiler.
#: It is used to ensure a compiler and build dependencies like 'cmake'
#: are available to build a custom test code.
@@ -808,6 +825,104 @@ def get_variant(self, name: str) -> spack.variant.Variant:
except StopIteration:
raise ValueError(f"No variant '{name}' on spec: {self.spec}")
@classmethod
def possible_dependencies(
cls,
transitive: bool = True,
expand_virtuals: bool = True,
depflag: dt.DepFlag = dt.ALL,
visited: Optional[dict] = None,
missing: Optional[dict] = None,
virtuals: Optional[set] = None,
) -> Dict[str, Set[str]]:
"""Return dict of possible dependencies of this package.
Args:
transitive (bool or None): return all transitive dependencies if
True, only direct dependencies if False (default True).
expand_virtuals (bool or None): expand virtual dependencies into
all possible implementations (default True)
depflag: dependency types to consider
visited (dict or None): dict of names of dependencies visited so
far, mapped to their immediate dependencies' names.
missing (dict or None): dict to populate with packages and their
*missing* dependencies.
virtuals (set): if provided, populate with virtuals seen so far.
Returns:
(dict): dictionary mapping dependency names to *their*
immediate dependencies
Each item in the returned dictionary maps a (potentially
transitive) dependency of this package to its possible
*immediate* dependencies. If ``expand_virtuals`` is ``False``,
virtual package names will be inserted as keys mapped to empty
sets of dependencies. Virtuals, if not expanded, are treated as
though they have no immediate dependencies.
Missing dependencies by default are ignored, but if a
missing dict is provided, it will be populated with package names
mapped to any dependencies they have that are not in any
repositories. This is only populated if transitive is True.
Note: the returned dict *includes* the package itself.
"""
visited = {} if visited is None else visited
missing = {} if missing is None else missing
visited.setdefault(cls.name, set())
for name, conditions in cls.dependencies_by_name(when=True).items():
# check whether this dependency could be of the type asked for
depflag_union = 0
for deplist in conditions.values():
for dep in deplist:
depflag_union |= dep.depflag
if not (depflag & depflag_union):
continue
# expand virtuals if enabled, otherwise just stop at virtuals
if spack.repo.PATH.is_virtual(name):
if virtuals is not None:
virtuals.add(name)
if expand_virtuals:
providers = spack.repo.PATH.providers_for(name)
dep_names = [spec.name for spec in providers]
else:
visited.setdefault(cls.name, set()).add(name)
visited.setdefault(name, set())
continue
else:
dep_names = [name]
# add the dependency names to the visited dict
visited.setdefault(cls.name, set()).update(set(dep_names))
# recursively traverse dependencies
for dep_name in dep_names:
if dep_name in visited:
continue
visited.setdefault(dep_name, set())
# skip the rest if not transitive
if not transitive:
continue
try:
dep_cls = spack.repo.PATH.get_pkg_class(dep_name)
except spack.repo.UnknownPackageError:
# log unknown packages
missing.setdefault(cls.name, set()).add(dep_name)
continue
dep_cls.possible_dependencies(
transitive, expand_virtuals, depflag, visited, missing, virtuals
)
return visited
@classproperty
def package_dir(cls):
"""Directory where the package.py file lives."""
@@ -2172,6 +2287,55 @@ def rpath_args(self):
build_system_flags = PackageBase.build_system_flags
def use_cray_compiler_names():
"""Compiler names for builds that rely on cray compiler names."""
os.environ["CC"] = "cc"
os.environ["CXX"] = "CC"
os.environ["FC"] = "ftn"
os.environ["F77"] = "ftn"
def possible_dependencies(
*pkg_or_spec: Union[str, spack.spec.Spec, typing.Type[PackageBase]],
transitive: bool = True,
expand_virtuals: bool = True,
depflag: dt.DepFlag = dt.ALL,
missing: Optional[dict] = None,
virtuals: Optional[set] = None,
) -> Dict[str, Set[str]]:
"""Get the possible dependencies of a number of packages.
See ``PackageBase.possible_dependencies`` for details.
"""
packages = []
for pos in pkg_or_spec:
if isinstance(pos, PackageMeta) and issubclass(pos, PackageBase):
packages.append(pos)
continue
if not isinstance(pos, spack.spec.Spec):
pos = spack.spec.Spec(pos)
if spack.repo.PATH.is_virtual(pos.name):
packages.extend(p.package_class for p in spack.repo.PATH.providers_for(pos.name))
continue
else:
packages.append(pos.package_class)
visited: Dict[str, Set[str]] = {}
for pkg in packages:
pkg.possible_dependencies(
visited=visited,
transitive=transitive,
expand_virtuals=expand_virtuals,
depflag=depflag,
missing=missing,
virtuals=virtuals,
)
return visited
def deprecated_version(pkg: PackageBase, version: Union[str, StandardVersion]) -> bool:
"""Return True iff the version is deprecated.

View File
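
A usage sketch for the module-level `possible_dependencies` helper shown above; the package name and dependency flags are illustrative, and the result depends on the repositories that are configured.

```python
import spack.deptypes as dt
import spack.package_base

missing: dict = {}
virtuals: set = set()
graph = spack.package_base.possible_dependencies(
    "mpileaks", depflag=dt.LINK | dt.RUN, missing=missing, virtuals=virtuals
)

# `graph` maps each (possibly transitive) dependency name to the names of its
# immediate possible dependencies, and includes the input package itself.
for pkg_name, deps in sorted(graph.items()):
    print(pkg_name, "->", ", ".join(sorted(deps)))
```
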

@@ -236,15 +236,22 @@ def relocate_elf_binaries(binaries: Iterable[str], prefix_to_prefix: Dict[str, s
_set_elf_rpaths_and_interpreter(path, rpaths=rpaths, interpreter=interpreter)
def _warn_if_link_cant_be_relocated(link: str, target: str):
if not os.path.isabs(target):
return
tty.warn(f'Symbolic link at "{link}" to "{target}" cannot be relocated')
def relocate_links(links: Iterable[str], prefix_to_prefix: Dict[str, str]) -> None:
"""Relocate links to a new install prefix."""
regex = re.compile("|".join(re.escape(p) for p in prefix_to_prefix.keys()))
for link in links:
old_target = readlink(link)
if not os.path.isabs(old_target):
continue
match = regex.match(old_target)
# No match.
if match is None:
_warn_if_link_cant_be_relocated(link, old_target)
continue
new_target = prefix_to_prefix[match.group()] + old_target[match.end() :]
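
The link-relocation logic above boils down to a single regex match against the old prefixes; a small illustration with made-up paths:

```python
import re

prefix_to_prefix = {"/old/prefix": "/new/prefix"}  # illustrative mapping
regex = re.compile("|".join(re.escape(p) for p in prefix_to_prefix))

old_target = "/old/prefix/lib/libfoo.so.1"
match = regex.match(old_target)
if match is not None:
    new_target = prefix_to_prefix[match.group()] + old_target[match.end():]
    print(new_target)  # /new/prefix/lib/libfoo.so.1
```
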
@@ -283,21 +290,21 @@ def relocate_text_bin(binaries: Iterable[str], prefix_to_prefix: PrefixToPrefix)
def is_macho_magic(magic: bytes) -> bool:
return (
# In order of popularity: 64-bit mach-o le/be, 32-bit mach-o le/be.
magic.startswith(b"\xcf\xfa\xed\xfe")
or magic.startswith(b"\xfe\xed\xfa\xcf")
or magic.startswith(b"\xce\xfa\xed\xfe")
or magic.startswith(b"\xfe\xed\xfa\xce")
magic.startswith(b"\xCF\xFA\xED\xFE")
or magic.startswith(b"\xFE\xED\xFA\xCF")
or magic.startswith(b"\xCE\xFA\xED\xFE")
or magic.startswith(b"\xFE\xED\xFA\xCE")
# universal binaries: 0xcafebabe be (most common?) or 0xbebafeca le (not sure if exists).
# Here we need to disambiguate mach-o and JVM class files. In mach-o the next 4 bytes are
# the number of binaries; in JVM class files it's the java version number. We assume there
# are less than 10 binaries in a universal binary.
or (magic.startswith(b"\xca\xfe\xba\xbe") and int.from_bytes(magic[4:8], "big") < 10)
or (magic.startswith(b"\xbe\xba\xfe\xca") and int.from_bytes(magic[4:8], "little") < 10)
or (magic.startswith(b"\xCA\xFE\xBA\xBE") and int.from_bytes(magic[4:8], "big") < 10)
or (magic.startswith(b"\xBE\xBA\xFE\xCA") and int.from_bytes(magic[4:8], "little") < 10)
)
def is_elf_magic(magic: bytes) -> bool:
return magic.startswith(b"\x7fELF")
return magic.startswith(b"\x7FELF")
def is_binary(filename: str) -> bool:
@@ -406,8 +413,8 @@ def fixup_macos_rpaths(spec):
entries which makes it harder to adjust with ``install_name_tool
-delete_rpath``.
"""
if spec.external or not spec.concrete:
tty.warn("external/abstract spec cannot be fixed up: {0!s}".format(spec))
if spec.external or spec.virtual:
tty.warn("external or virtual package cannot be fixed up: {0!s}".format(spec))
return False
if "platform=darwin" not in spec:

View File
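
A simplified, self-contained sketch of how the magic-byte helpers above can be driven; `classify_binary` is a hypothetical wrapper, and the universal-binary disambiguation from the diff is omitted for brevity.

```python
def is_macho_magic(magic: bytes) -> bool:
    # 64/32-bit Mach-O in either byte order (magic values as in the hunk above).
    return magic.startswith((b"\xcf\xfa\xed\xfe", b"\xfe\xed\xfa\xcf",
                             b"\xce\xfa\xed\xfe", b"\xfe\xed\xfa\xce"))


def is_elf_magic(magic: bytes) -> bool:
    return magic.startswith(b"\x7fELF")


def classify_binary(path: str) -> str:
    # Eight bytes are enough for these checks.
    with open(path, "rb") as f:
        magic = f.read(8)
    if is_macho_magic(magic):
        return "mach-o"
    if is_elf_magic(magic):
        return "elf"
    return "other"


print(classify_binary("/bin/ls"))
```
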

@@ -1179,9 +1179,8 @@ def all_package_paths(self) -> Generator[str, None, None]:
yield self.package_path(name)
def packages_with_tags(self, *tags: str) -> Set[str]:
v = set(self.tag_index[tags[0].lower()])
for tag in tags[1:]:
v.intersection_update(self.tag_index[tag.lower()])
v = set(self.all_package_names())
v.intersection_update(*(self.tag_index[tag.lower()] for tag in tags))
return v
def all_package_classes(self) -> Generator[Type["spack.package_base.PackageBase"], None, None]:

View File
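
A quick usage sketch for `packages_with_tags`; the tag names are the ones that appear elsewhere in this diff ("runtime", "build-tools"), and passing several tags yields the intersection in both implementations shown above.

```python
import spack.repo

runtime_pkgs = spack.repo.PATH.packages_with_tags("runtime")
build_tools = spack.repo.PATH.packages_with_tags("build-tools")
both = spack.repo.PATH.packages_with_tags("runtime", "build-tools")  # intersection
print(len(runtime_pkgs), len(build_tools), len(both))
```
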

@@ -1,7 +1,6 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import codecs
import collections
import hashlib
import os
@@ -14,7 +13,7 @@
import xml.sax.saxutils
from typing import Dict, Optional
from urllib.parse import urlencode
from urllib.request import Request
from urllib.request import HTTPSHandler, Request, build_opener
import llnl.util.tty as tty
from llnl.util.filesystem import working_dir
@@ -25,10 +24,10 @@
import spack.spec
import spack.tengine
import spack.util.git
import spack.util.web as web_util
from spack.error import SpackError
from spack.util.crypto import checksum
from spack.util.log_parse import parse_log_events
from spack.util.web import ssl_create_default_context
from .base import Reporter
from .extract import extract_test_parts
@@ -177,7 +176,7 @@ def build_report_for_package(self, report_dir, package, duration):
# something went wrong pre-cdash "configure" phase b/c we have an exception and only
# "update" was encounterd.
# dump the report in the configure line so teams can see what the issue is
if len(phases_encountered) == 1 and package.get("exception"):
if len(phases_encountered) == 1 and package["exception"]:
# TODO this mapping is not ideal since these are pre-configure errors
# we need to determine if a more appropriate cdash phase can be utilized
# for now we will add a message to the log explaining this
@@ -434,6 +433,7 @@ def upload(self, filename):
# Compute md5 checksum for the contents of this file.
md5sum = checksum(hashlib.md5, filename, block_size=8192)
opener = build_opener(HTTPSHandler(context=ssl_create_default_context()))
with open(filename, "rb") as f:
params_dict = {
"build": self.buildname,
@@ -443,21 +443,26 @@ def upload(self, filename):
}
encoded_params = urlencode(params_dict)
url = "{0}&{1}".format(self.cdash_upload_url, encoded_params)
request = Request(url, data=f, method="PUT")
request = Request(url, data=f)
request.add_header("Content-Type", "text/xml")
request.add_header("Content-Length", os.path.getsize(filename))
if self.authtoken:
request.add_header("Authorization", "Bearer {0}".format(self.authtoken))
try:
response = web_util.urlopen(request, timeout=SPACK_CDASH_TIMEOUT)
# By default, urllib2 only support GET and POST.
# CDash expects this file to be uploaded via PUT.
request.get_method = lambda: "PUT"
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
if self.current_package_name not in self.buildIds:
resp_value = codecs.getreader("utf-8")(response).read()
resp_value = response.read()
if isinstance(resp_value, bytes):
resp_value = resp_value.decode("utf-8")
match = self.buildid_regexp.search(resp_value)
if match:
buildid = match.group(1)
self.buildIds[self.current_package_name] = buildid
except Exception as e:
print(f"Upload to CDash failed: {e}")
print("Upload to CDash failed: {0}".format(e))
def finalize_report(self):
if self.buildIds:

View File
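
A minimal sketch of the PUT upload pattern shown in the hunk above, assuming a placeholder CDash URL and report file; the real reporter also encodes the build parameters and optional bearer token shown in the diff.

```python
import os
from urllib.request import Request, urlopen

url = "https://cdash.example.com/submit.php?project=demo"  # placeholder
filename = "Build.xml"  # placeholder report

with open(filename, "rb") as f:
    request = Request(url, data=f, method="PUT")
    request.add_header("Content-Type", "text/xml")
    request.add_header("Content-Length", str(os.path.getsize(filename)))
    # request.add_header("Authorization", "Bearer <token>")  # if required
    with urlopen(request, timeout=10) as response:
        print(response.status, response.read()[:200])
```
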

@@ -84,14 +84,9 @@
"duplicates": {
"type": "object",
"properties": {
"strategy": {"type": "string", "enum": ["none", "minimal", "full"]},
"max_dupes": {
"type": "object",
"additional_properties": {"type": "integer", "minimum": 1},
},
"strategy": {"type": "string", "enum": ["none", "minimal", "full"]}
},
},
"static_analysis": {"type": "boolean"},
"timeout": {"type": "integer", "minimum": 0},
"error_on_timeout": {"type": "boolean"},
"os_compatible": {"type": "object", "additionalProperties": {"type": "array"}},

View File

@@ -3,12 +3,12 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Schema for Cray descriptive manifest: this describes a set of
installed packages on the system and also specifies dependency
relationships between them (so this provides more information than
external entries in packages configuration).
installed packages on the system and also specifies dependency
relationships between them (so this provides more information than
external entries in packages configuration).
This does not specify a configuration - it is an input format
that is consumed and transformed into Spack DB records.
This does not specify a configuration - it is an input format
that is consumed and transformed into Spack DB records.
"""
from typing import Any, Dict

View File

@@ -6,7 +6,6 @@
import copy
import enum
import functools
import io
import itertools
import os
import pathlib
@@ -63,7 +62,7 @@
parse_files,
parse_term,
)
from .input_analysis import create_counter, create_graph_analyzer
from .counter import FullDuplicatesCounter, MinimalDuplicatesCounter, NoDuplicatesCounter
from .requirements import RequirementKind, RequirementParser, RequirementRule
from .version_order import concretization_version_order
@@ -74,19 +73,17 @@
#: Enable the addition of a runtime node
WITH_RUNTIME = sys.platform != "win32"
class OutputConfiguration(NamedTuple):
"""Data class that contains configuration on what a clingo solve should output."""
#: Print out coarse timers for different solve phases
timers: bool
#: Whether to output Clingo's internal solver statistics
stats: bool
#: Optional output stream for the generated ASP program
out: Optional[io.IOBase]
#: If True, stop after setup and don't solve
setup_only: bool
#: Data class that contains configuration on what a
#: clingo solve should output.
#:
#: Args:
#: timers (bool): Print out coarse timers for different solve phases.
#: stats (bool): Whether to output Clingo's internal solver statistics.
#: out: Optional output stream for the generated ASP program.
#: setup_only (bool): if True, stop after setup and don't solve (default False).
OutputConfiguration = collections.namedtuple(
"OutputConfiguration", ["timers", "stats", "out", "setup_only"]
)
#: Default output configuration for a solve
DEFAULT_OUTPUT_CONFIGURATION = OutputConfiguration(
@@ -274,6 +271,15 @@ def remove_node(spec: spack.spec.Spec, facts: List[AspFunction]) -> List[AspFunc
return list(filter(lambda x: x.args[0] not in ("node", "virtual_node"), facts))
def _create_counter(specs: List[spack.spec.Spec], tests: bool):
strategy = spack.config.CONFIG.get("concretizer:duplicates:strategy", "none")
if strategy == "full":
return FullDuplicatesCounter(specs, tests=tests)
if strategy == "minimal":
return MinimalDuplicatesCounter(specs, tests=tests)
return NoDuplicatesCounter(specs, tests=tests)
def all_libcs() -> Set[spack.spec.Spec]:
"""Return a set of all libc specs targeted by any configured compiler. If none, fall back to
libc determined from the current Python process if dynamically linked."""
@@ -501,7 +507,7 @@ def _compute_specs_from_answer_set(self):
# The specs must be unified to get here, so it is safe to associate any satisfying spec
# with the input. Multiple inputs may be matched to the same concrete spec
node = SpecBuilder.make_node(pkg=input_spec.name)
if spack.repo.PATH.is_virtual(input_spec.name):
if input_spec.virtual:
providers = [
spec.name for spec in answer.values() if spec.package.provides(input_spec.name)
]
@@ -1115,8 +1121,6 @@ class SpackSolverSetup:
"""Class to set up and run a Spack concretization solve."""
def __init__(self, tests: bool = False):
self.possible_graph = create_graph_analyzer()
# these are all initialized in setup()
self.gen: "ProblemInstanceBuilder" = ProblemInstanceBuilder()
self.requirement_parser = RequirementParser(spack.config.CONFIG)
@@ -2083,11 +2087,7 @@ def _spec_clauses(
f: Union[Type[_Head], Type[_Body]] = _Body if body else _Head
if spec.name:
clauses.append(
f.node(spec.name)
if not spack.repo.PATH.is_virtual(spec.name)
else f.virtual_node(spec.name)
)
clauses.append(f.node(spec.name) if not spec.virtual else f.virtual_node(spec.name))
if spec.namespace:
clauses.append(f.namespace(spec.name, spec.namespace))
@@ -2114,7 +2114,7 @@ def _spec_clauses(
for value in variant.value_as_tuple:
# ensure that the value *can* be valid for the spec
if spec.name and not spec.concrete and not spack.repo.PATH.is_virtual(spec.name):
if spec.name and not spec.concrete and not spec.virtual:
variant_defs = vt.prevalidate_variant_value(
self.pkg_class(spec.name), variant, spec
)
@@ -2397,20 +2397,38 @@ def keyfun(os):
def target_defaults(self, specs):
"""Add facts about targets and target compatibility."""
self.gen.h2("Default target")
platform = spack.platforms.host()
uarch = archspec.cpu.TARGETS.get(platform.default)
self.gen.h2("Target compatibility")
# Add targets explicitly requested from specs
candidate_targets = []
for x in self.possible_graph.candidate_targets():
if all(
self.possible_graph.unreachable(pkg_name=pkg_name, when_spec=f"target={x}")
for pkg_name in self.pkgs
):
tty.debug(f"[{__name__}] excluding target={x}, cause no package can use it")
continue
candidate_targets.append(x)
# Construct the list of targets which are compatible with the host
candidate_targets = [uarch] + uarch.ancestors
host_compatible = spack.config.CONFIG.get("concretizer:targets:host_compatible")
# Get configuration options
granularity = spack.config.get("concretizer:targets:granularity")
host_compatible = spack.config.get("concretizer:targets:host_compatible")
# Add targets which are not compatible with the current host
if not host_compatible:
additional_targets_in_family = sorted(
[
t
for t in archspec.cpu.TARGETS.values()
if (t.family.name == uarch.family.name and t not in candidate_targets)
],
key=lambda x: len(x.ancestors),
reverse=True,
)
candidate_targets += additional_targets_in_family
# Check if we want only generic architecture
if granularity == "generic":
candidate_targets = [t for t in candidate_targets if t.vendor == "generic"]
# Add targets explicitly requested from specs
for spec in specs:
if not spec.architecture or not spec.architecture.target:
continue
@@ -2426,8 +2444,6 @@ def target_defaults(self, specs):
if ancestor not in candidate_targets:
candidate_targets.append(ancestor)
platform = spack.platforms.host()
uarch = archspec.cpu.TARGETS.get(platform.default)
best_targets = {uarch.family.name}
for compiler_id, known_compiler in enumerate(self.possible_compilers):
if not known_compiler.available:
@@ -2485,6 +2501,7 @@ def target_defaults(self, specs):
self.gen.newline()
self.default_targets = list(sorted(set(self.default_targets)))
self.target_preferences()
def virtual_providers(self):
@@ -2588,14 +2605,7 @@ def define_variant_values(self):
# Tell the concretizer about possible values from specs seen in spec_clauses().
# We might want to order these facts by pkg and name if we are debugging.
for pkg_name, variant_def_id, value in self.variant_values_from_specs:
try:
vid = self.variant_ids_by_def_id[variant_def_id]
except KeyError:
tty.debug(
f"[{__name__}] cannot retrieve id of the {value} variant from {pkg_name}"
)
continue
vid = self.variant_ids_by_def_id[variant_def_id]
self.gen.fact(fn.pkg_fact(pkg_name, fn.variant_possible_value(vid, value)))
def register_concrete_spec(self, spec, possible):
@@ -2666,7 +2676,7 @@ def setup(
"""
check_packages_exist(specs)
node_counter = create_counter(specs, tests=self.tests, possible_graph=self.possible_graph)
node_counter = _create_counter(specs, tests=self.tests)
self.possible_virtuals = node_counter.possible_virtuals()
self.pkgs = node_counter.possible_dependencies()
self.libcs = sorted(all_libcs()) # type: ignore[type-var]
@@ -2674,9 +2684,7 @@ def setup(
# Fail if we already know an unreachable node is requested
for spec in specs:
missing_deps = [
str(d)
for d in spec.traverse()
if d.name not in self.pkgs and not spack.repo.PATH.is_virtual(d.name)
str(d) for d in spec.traverse() if d.name not in self.pkgs and not d.virtual
]
if missing_deps:
raise spack.spec.InvalidDependencyError(spec.name, missing_deps)
@@ -2893,11 +2901,7 @@ def literal_specs(self, specs):
pkg_name = clause.args[1]
self.gen.fact(fn.mentioned_in_literal(trigger_id, root_name, pkg_name))
requirements.append(
fn.attr(
"virtual_root" if spack.repo.PATH.is_virtual(spec.name) else "root", spec.name
)
)
requirements.append(fn.attr("virtual_root" if spec.virtual else "root", spec.name))
cache[imposed_spec_key] = (effect_id, requirements)
self.gen.fact(fn.pkg_fact(spec.name, fn.condition_effect(condition_id, effect_id)))
@@ -3485,7 +3489,7 @@ def external_spec_selected(self, node, idx):
self._specs[node].extra_attributes = spec_info.get("extra_attributes", {})
# If this is an extension, update the dependencies to include the extendee
package = spack.repo.PATH.get_pkg_class(self._specs[node].fullname)(self._specs[node])
package = self._specs[node].package_class(self._specs[node])
extendee_spec = package.extendee_spec
if extendee_spec:
@@ -4098,10 +4102,10 @@ def _check_input_and_extract_concrete_specs(specs):
reusable = []
for root in specs:
for s in root.traverse():
if s.virtual:
continue
if s.concrete:
reusable.append(s)
elif spack.repo.PATH.is_virtual(s.name):
continue
spack.spec.Spec.ensure_valid_variants(s)
return reusable

View File
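
The target-selection hunks above build the candidate list from the host microarchitecture plus its ancestors, optionally widened to the rest of the family and filtered down to generic targets; a hedged standalone sketch using archspec directly:

```python
import archspec.cpu

uarch = archspec.cpu.host()
candidate_targets = [uarch] + uarch.ancestors

# Widen to other members of the same family, newest first, mirroring the
# `host_compatible: false` branch above.
additional = sorted(
    (t for t in archspec.cpu.TARGETS.values()
     if t.family.name == uarch.family.name and t not in candidate_targets),
    key=lambda t: len(t.ancestors),
    reverse=True,
)
candidate_targets += additional

# `granularity: generic` keeps only the vendor-neutral targets.
generic_only = [t for t in candidate_targets if t.vendor == "generic"]
print([str(t) for t in generic_only])
```
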

@@ -265,7 +265,6 @@ error(100, "Cannot select a single version for virtual '{0}'", Virtual)
% If we select a deprecated version, mark the package as deprecated
attr("deprecated", node(ID, Package), Version) :-
attr("version", node(ID, Package), Version),
not external(node(ID, Package)),
pkg_fact(Package, deprecated_version(Version)).
error(100, "Package '{0}' needs the deprecated version '{1}', and this is not allowed", Package, Version)

View File

@@ -0,0 +1,179 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
from typing import List, Set
from llnl.util import lang
import spack.deptypes as dt
import spack.package_base
import spack.repo
import spack.spec
PossibleDependencies = Set[str]
class Counter:
"""Computes the possible packages and the maximum number of duplicates
allowed for each of them.
Args:
specs: abstract specs to concretize
tests: if True, add test dependencies to the list of possible packages
"""
def __init__(self, specs: List["spack.spec.Spec"], tests: bool) -> None:
runtime_pkgs = spack.repo.PATH.packages_with_tags("runtime")
runtime_virtuals = set()
for x in runtime_pkgs:
pkg_class = spack.repo.PATH.get_pkg_class(x)
runtime_virtuals.update(pkg_class.provided_virtual_names())
self.specs = specs + [spack.spec.Spec(x) for x in runtime_pkgs]
self.link_run_types: dt.DepFlag = dt.LINK | dt.RUN | dt.TEST
self.all_types: dt.DepFlag = dt.ALL
if not tests:
self.link_run_types = dt.LINK | dt.RUN
self.all_types = dt.LINK | dt.RUN | dt.BUILD
self._possible_dependencies: PossibleDependencies = set()
self._possible_virtuals: Set[str] = (
set(x.name for x in specs if x.virtual) | runtime_virtuals
)
def possible_dependencies(self) -> PossibleDependencies:
"""Returns the list of possible dependencies"""
self.ensure_cache_values()
return self._possible_dependencies
def possible_virtuals(self) -> Set[str]:
"""Returns the list of possible virtuals"""
self.ensure_cache_values()
return self._possible_virtuals
def ensure_cache_values(self) -> None:
"""Ensure the cache values have been computed"""
if self._possible_dependencies:
return
self._compute_cache_values()
def possible_packages_facts(self, gen: "spack.solver.asp.PyclingoDriver", fn) -> None:
"""Emit facts associated with the possible packages"""
raise NotImplementedError("must be implemented by derived classes")
def _compute_cache_values(self):
raise NotImplementedError("must be implemented by derived classes")
class NoDuplicatesCounter(Counter):
def _compute_cache_values(self):
result = spack.package_base.possible_dependencies(
*self.specs, virtuals=self._possible_virtuals, depflag=self.all_types
)
self._possible_dependencies = set(result)
def possible_packages_facts(self, gen, fn):
gen.h2("Maximum number of nodes (packages)")
for package_name in sorted(self.possible_dependencies()):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Maximum number of nodes (virtual packages)")
for package_name in sorted(self.possible_virtuals()):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Possible package in link-run subDAG")
for name in sorted(self.possible_dependencies()):
gen.fact(fn.possible_in_link_run(name))
gen.newline()
class MinimalDuplicatesCounter(NoDuplicatesCounter):
def __init__(self, specs, tests):
super().__init__(specs, tests)
self._link_run: PossibleDependencies = set()
self._direct_build: PossibleDependencies = set()
self._total_build: PossibleDependencies = set()
self._link_run_virtuals: Set[str] = set()
def _compute_cache_values(self):
self._link_run = set(
spack.package_base.possible_dependencies(
*self.specs, virtuals=self._possible_virtuals, depflag=self.link_run_types
)
)
self._link_run_virtuals.update(self._possible_virtuals)
for x in self._link_run:
build_dependencies = spack.repo.PATH.get_pkg_class(x).dependencies_of_type(dt.BUILD)
virtuals, reals = lang.stable_partition(
build_dependencies, spack.repo.PATH.is_virtual_safe
)
self._possible_virtuals.update(virtuals)
for virtual_dep in virtuals:
providers = spack.repo.PATH.providers_for(virtual_dep)
self._direct_build.update(str(x) for x in providers)
self._direct_build.update(reals)
self._total_build = set(
spack.package_base.possible_dependencies(
*self._direct_build, virtuals=self._possible_virtuals, depflag=self.all_types
)
)
self._possible_dependencies = set(self._link_run) | set(self._total_build)
def possible_packages_facts(self, gen, fn):
build_tools = spack.repo.PATH.packages_with_tags("build-tools")
gen.h2("Packages with at most a single node")
for package_name in sorted(self.possible_dependencies() - build_tools):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Packages with at multiple possible nodes (build-tools)")
for package_name in sorted(self.possible_dependencies() & build_tools):
gen.fact(fn.max_dupes(package_name, 2))
gen.fact(fn.multiple_unification_sets(package_name))
gen.newline()
gen.h2("Maximum number of nodes (virtual packages)")
for package_name in sorted(self.possible_virtuals()):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Possible package in link-run subDAG")
for name in sorted(self._link_run):
gen.fact(fn.possible_in_link_run(name))
gen.newline()
class FullDuplicatesCounter(MinimalDuplicatesCounter):
def possible_packages_facts(self, gen, fn):
build_tools = spack.repo.PATH.packages_with_tags("build-tools")
counter = collections.Counter(
list(self._link_run) + list(self._total_build) + list(self._direct_build)
)
gen.h2("Maximum number of nodes")
for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
count = min(count, 2)
gen.fact(fn.max_dupes(pkg, count))
gen.newline()
gen.h2("Build unification sets ")
for name in sorted(self.possible_dependencies() & build_tools):
gen.fact(fn.multiple_unification_sets(name))
gen.newline()
gen.h2("Possible package in link-run subDAG")
for name in sorted(self._link_run):
gen.fact(fn.possible_in_link_run(name))
gen.newline()
counter = collections.Counter(
list(self._link_run_virtuals) + list(self._possible_virtuals)
)
gen.h2("Maximum number of virtual nodes")
for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
gen.fact(fn.max_dupes(pkg, count))
gen.newline()

View File

@@ -1,539 +0,0 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Classes to analyze the input of a solve, and provide information to set up the ASP problem"""
import collections
from typing import Dict, List, NamedTuple, Set, Tuple, Union
import archspec.cpu
from llnl.util import lang, tty
import spack.binary_distribution
import spack.config
import spack.deptypes as dt
import spack.platforms
import spack.repo
import spack.spec
import spack.store
from spack.error import SpackError
RUNTIME_TAG = "runtime"
class PossibleGraph(NamedTuple):
real_pkgs: Set[str]
virtuals: Set[str]
edges: Dict[str, Set[str]]
class PossibleDependencyGraph:
"""Returns information needed to set up an ASP problem"""
def unreachable(self, *, pkg_name: str, when_spec: spack.spec.Spec) -> bool:
"""Returns true if the context can determine that the condition cannot ever
be met on pkg_name.
"""
raise NotImplementedError
def candidate_targets(self) -> List[archspec.cpu.Microarchitecture]:
"""Returns a list of targets that are candidate for concretization"""
raise NotImplementedError
def possible_dependencies(
self,
*specs: Union[spack.spec.Spec, str],
allowed_deps: dt.DepFlag,
transitive: bool = True,
strict_depflag: bool = False,
expand_virtuals: bool = True,
) -> PossibleGraph:
"""Returns the set of possible dependencies, and the set of possible virtuals.
Both sets always include runtime packages, which may be injected by compilers.
Args:
transitive: return transitive dependencies if True, only direct dependencies if False
allowed_deps: dependency types to consider
strict_depflag: if True, only the specific dep type is considered, if False any
deptype that intersects with allowed deptype is considered
expand_virtuals: expand virtual dependencies into all possible implementations
"""
raise NotImplementedError
class NoStaticAnalysis(PossibleDependencyGraph):
"""Implementation that tries to minimize the setup time (i.e. defaults to give fast
answers), rather than trying to reduce the ASP problem size with more complex analysis.
"""
def __init__(self, *, configuration: spack.config.Configuration, repo: spack.repo.RepoPath):
self.configuration = configuration
self.repo = repo
self.runtime_pkgs = set(self.repo.packages_with_tags(RUNTIME_TAG))
self.runtime_virtuals = set()
self._platform_condition = spack.spec.Spec(
f"platform={spack.platforms.host()} target={archspec.cpu.host().family}:"
)
for x in self.runtime_pkgs:
pkg_class = self.repo.get_pkg_class(x)
self.runtime_virtuals.update(pkg_class.provided_virtual_names())
try:
self.libc_pkgs = [x.name for x in self.providers_for("libc")]
except spack.repo.UnknownPackageError:
self.libc_pkgs = []
def is_virtual(self, name: str) -> bool:
return self.repo.is_virtual(name)
@lang.memoized
def is_allowed_on_this_platform(self, *, pkg_name: str) -> bool:
"""Returns true if a package is allowed on the current host"""
pkg_cls = self.repo.get_pkg_class(pkg_name)
for when_spec, conditions in pkg_cls.requirements.items():
if not when_spec.intersects(self._platform_condition):
continue
for requirements, _, _ in conditions:
if not any(x.intersects(self._platform_condition) for x in requirements):
tty.debug(f"[{__name__}] {pkg_name} is not for this platform")
return False
return True
def providers_for(self, virtual_str: str) -> List[spack.spec.Spec]:
"""Returns a list of possible providers for the virtual string in input."""
return self.repo.providers_for(virtual_str)
def can_be_installed(self, *, pkg_name) -> bool:
"""Returns True if a package can be installed, False otherwise."""
return True
def unreachable(self, *, pkg_name: str, when_spec: spack.spec.Spec) -> bool:
"""Returns true if the context can determine that the condition cannot ever
be met on pkg_name.
"""
return False
def candidate_targets(self) -> List[archspec.cpu.Microarchitecture]:
"""Returns a list of targets that are candidate for concretization"""
platform = spack.platforms.host()
default_target = archspec.cpu.TARGETS[platform.default]
# Construct the list of targets which are compatible with the host
candidate_targets = [default_target] + default_target.ancestors
granularity = self.configuration.get("concretizer:targets:granularity")
host_compatible = self.configuration.get("concretizer:targets:host_compatible")
# Add targets which are not compatible with the current host
if not host_compatible:
additional_targets_in_family = sorted(
[
t
for t in archspec.cpu.TARGETS.values()
if (t.family.name == default_target.family.name and t not in candidate_targets)
],
key=lambda x: len(x.ancestors),
reverse=True,
)
candidate_targets += additional_targets_in_family
# Check if we want only generic architecture
if granularity == "generic":
candidate_targets = [t for t in candidate_targets if t.vendor == "generic"]
return candidate_targets
def possible_dependencies(
self,
*specs: Union[spack.spec.Spec, str],
allowed_deps: dt.DepFlag,
transitive: bool = True,
strict_depflag: bool = False,
expand_virtuals: bool = True,
) -> PossibleGraph:
stack = [x for x in self._package_list(specs)]
virtuals: Set[str] = set()
edges: Dict[str, Set[str]] = {}
while stack:
pkg_name = stack.pop()
if pkg_name in edges:
continue
edges[pkg_name] = set()
# Since libc is not buildable, there is no need to extend the
# search space with libc dependencies.
if pkg_name in self.libc_pkgs:
continue
pkg_cls = self.repo.get_pkg_class(pkg_name=pkg_name)
for name, conditions in pkg_cls.dependencies_by_name(when=True).items():
if all(self.unreachable(pkg_name=pkg_name, when_spec=x) for x in conditions):
tty.debug(
f"[{__name__}] Not adding {name} as a dep of {pkg_name}, because "
f"conditions cannot be met"
)
continue
if not self._has_deptypes(
conditions, allowed_deps=allowed_deps, strict=strict_depflag
):
continue
if name in virtuals:
continue
dep_names = set()
if self.is_virtual(name):
virtuals.add(name)
if expand_virtuals:
providers = self.providers_for(name)
dep_names = {spec.name for spec in providers}
else:
dep_names = {name}
edges[pkg_name].update(dep_names)
if not transitive:
continue
for dep_name in dep_names:
if dep_name in edges:
continue
if not self._is_possible(pkg_name=dep_name):
continue
stack.append(dep_name)
real_packages = set(edges)
if not transitive:
# We exit early, so add children from the edges information
for root, children in edges.items():
real_packages.update(x for x in children if self._is_possible(pkg_name=x))
virtuals.update(self.runtime_virtuals)
real_packages = real_packages | self.runtime_pkgs
return PossibleGraph(real_pkgs=real_packages, virtuals=virtuals, edges=edges)
def _package_list(self, specs: Tuple[Union[spack.spec.Spec, str], ...]) -> List[str]:
stack = []
for current_spec in specs:
if isinstance(current_spec, str):
current_spec = spack.spec.Spec(current_spec)
if self.repo.is_virtual(current_spec.name):
stack.extend([p.name for p in self.providers_for(current_spec.name)])
continue
stack.append(current_spec.name)
return sorted(set(stack))
def _has_deptypes(self, dependencies, *, allowed_deps: dt.DepFlag, strict: bool) -> bool:
if strict is True:
return any(
dep.depflag == allowed_deps for deplist in dependencies.values() for dep in deplist
)
return any(
dep.depflag & allowed_deps for deplist in dependencies.values() for dep in deplist
)
def _is_possible(self, *, pkg_name):
try:
return self.is_allowed_on_this_platform(pkg_name=pkg_name) and self.can_be_installed(
pkg_name=pkg_name
)
except spack.repo.UnknownPackageError:
return False
class StaticAnalysis(NoStaticAnalysis):
"""Performs some static analysis of the configuration, store, etc. to provide more precise
answers on whether some packages can be installed, or used as a provider.
It increases the setup time, but might decrease the grounding and solve time considerably,
especially when requirements restrict the possible choices for providers.
"""
def __init__(
self,
*,
configuration: spack.config.Configuration,
repo: spack.repo.RepoPath,
store: spack.store.Store,
binary_index: spack.binary_distribution.BinaryCacheIndex,
):
super().__init__(configuration=configuration, repo=repo)
self.store = store
self.binary_index = binary_index
@lang.memoized
def providers_for(self, virtual_str: str) -> List[spack.spec.Spec]:
candidates = super().providers_for(virtual_str)
result = []
for spec in candidates:
if not self._is_provider_candidate(pkg_name=spec.name, virtual=virtual_str):
continue
result.append(spec)
return result
@lang.memoized
def buildcache_specs(self) -> List[spack.spec.Spec]:
self.binary_index.update()
return self.binary_index.get_all_built_specs()
@lang.memoized
def can_be_installed(self, *, pkg_name) -> bool:
if self.configuration.get(f"packages:{pkg_name}:buildable", True):
return True
if self.configuration.get(f"packages:{pkg_name}:externals", []):
return True
reuse = self.configuration.get("concretizer:reuse")
if reuse is not False and self.store.db.query(pkg_name):
return True
if reuse is not False and any(x.name == pkg_name for x in self.buildcache_specs()):
return True
tty.debug(f"[{__name__}] {pkg_name} cannot be installed")
return False
@lang.memoized
def _is_provider_candidate(self, *, pkg_name: str, virtual: str) -> bool:
if not self.is_allowed_on_this_platform(pkg_name=pkg_name):
return False
if not self.can_be_installed(pkg_name=pkg_name):
return False
virtual_spec = spack.spec.Spec(virtual)
if self.unreachable(pkg_name=virtual_spec.name, when_spec=pkg_name):
tty.debug(f"[{__name__}] {pkg_name} cannot be a provider for {virtual}")
return False
return True
@lang.memoized
def unreachable(self, *, pkg_name: str, when_spec: spack.spec.Spec) -> bool:
"""Returns true if the context can determine that the condition cannot ever
be met on pkg_name.
"""
candidates = self.configuration.get(f"packages:{pkg_name}:require", [])
if not candidates and pkg_name != "all":
return self.unreachable(pkg_name="all", when_spec=when_spec)
if not candidates:
return False
if isinstance(candidates, str):
candidates = [candidates]
union_requirement = spack.spec.Spec()
for c in candidates:
if not isinstance(c, str):
continue
try:
union_requirement.constrain(c)
except SpackError:
# Less optimized, but shouldn't fail
pass
if not union_requirement.intersects(when_spec):
return True
return False
def create_graph_analyzer() -> PossibleDependencyGraph:
static_analysis = spack.config.CONFIG.get("concretizer:static_analysis", False)
if static_analysis:
return StaticAnalysis(
configuration=spack.config.CONFIG,
repo=spack.repo.PATH,
store=spack.store.STORE,
binary_index=spack.binary_distribution.BINARY_INDEX,
)
return NoStaticAnalysis(configuration=spack.config.CONFIG, repo=spack.repo.PATH)
class Counter:
"""Computes the possible packages and the maximum number of duplicates
allowed for each of them.
Args:
specs: abstract specs to concretize
tests: if True, add test dependencies to the list of possible packages
"""
def __init__(
self, specs: List["spack.spec.Spec"], tests: bool, possible_graph: PossibleDependencyGraph
) -> None:
self.possible_graph = possible_graph
self.specs = specs
self.link_run_types: dt.DepFlag = dt.LINK | dt.RUN | dt.TEST
self.all_types: dt.DepFlag = dt.ALL
if not tests:
self.link_run_types = dt.LINK | dt.RUN
self.all_types = dt.LINK | dt.RUN | dt.BUILD
self._possible_dependencies: Set[str] = set()
self._possible_virtuals: Set[str] = {
x.name for x in specs if spack.repo.PATH.is_virtual(x.name)
}
def possible_dependencies(self) -> Set[str]:
"""Returns the list of possible dependencies"""
self.ensure_cache_values()
return self._possible_dependencies
def possible_virtuals(self) -> Set[str]:
"""Returns the list of possible virtuals"""
self.ensure_cache_values()
return self._possible_virtuals
def ensure_cache_values(self) -> None:
"""Ensure the cache values have been computed"""
if self._possible_dependencies:
return
self._compute_cache_values()
def possible_packages_facts(self, gen: "spack.solver.asp.ProblemInstanceBuilder", fn) -> None:
"""Emit facts associated with the possible packages"""
raise NotImplementedError("must be implemented by derived classes")
def _compute_cache_values(self) -> None:
raise NotImplementedError("must be implemented by derived classes")
class NoDuplicatesCounter(Counter):
def _compute_cache_values(self) -> None:
self._possible_dependencies, virtuals, _ = self.possible_graph.possible_dependencies(
*self.specs, allowed_deps=self.all_types
)
self._possible_virtuals.update(virtuals)
def possible_packages_facts(self, gen: "spack.solver.asp.ProblemInstanceBuilder", fn) -> None:
gen.h2("Maximum number of nodes (packages)")
for package_name in sorted(self.possible_dependencies()):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Maximum number of nodes (virtual packages)")
for package_name in sorted(self.possible_virtuals()):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Possible package in link-run subDAG")
for name in sorted(self.possible_dependencies()):
gen.fact(fn.possible_in_link_run(name))
gen.newline()
class MinimalDuplicatesCounter(NoDuplicatesCounter):
def __init__(
self, specs: List["spack.spec.Spec"], tests: bool, possible_graph: PossibleDependencyGraph
) -> None:
super().__init__(specs, tests, possible_graph)
self._link_run: Set[str] = set()
self._direct_build: Set[str] = set()
self._total_build: Set[str] = set()
self._link_run_virtuals: Set[str] = set()
def _compute_cache_values(self) -> None:
self._link_run, virtuals, _ = self.possible_graph.possible_dependencies(
*self.specs, allowed_deps=self.link_run_types
)
self._possible_virtuals.update(virtuals)
self._link_run_virtuals.update(virtuals)
for x in self._link_run:
reals, virtuals, _ = self.possible_graph.possible_dependencies(
x, allowed_deps=dt.BUILD, transitive=False, strict_depflag=True
)
self._possible_virtuals.update(virtuals)
self._direct_build.update(reals)
self._total_build, virtuals, _ = self.possible_graph.possible_dependencies(
*self._direct_build, allowed_deps=self.all_types
)
self._possible_virtuals.update(virtuals)
self._possible_dependencies = set(self._link_run) | set(self._total_build)
def possible_packages_facts(self, gen, fn):
build_tools = spack.repo.PATH.packages_with_tags("build-tools")
gen.h2("Packages with at most a single node")
for package_name in sorted(self.possible_dependencies() - build_tools):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Packages with at multiple possible nodes (build-tools)")
default = spack.config.CONFIG.get("concretizer:duplicates:max_dupes:default", 2)
for package_name in sorted(self.possible_dependencies() & build_tools):
max_dupes = spack.config.CONFIG.get(
f"concretizer:duplicates:max_dupes:{package_name}", default
)
gen.fact(fn.max_dupes(package_name, max_dupes))
if max_dupes > 1:
gen.fact(fn.multiple_unification_sets(package_name))
gen.newline()
gen.h2("Maximum number of nodes (link-run virtuals)")
for package_name in sorted(self._link_run_virtuals):
gen.fact(fn.max_dupes(package_name, 1))
gen.newline()
gen.h2("Maximum number of nodes (other virtuals)")
for package_name in sorted(self.possible_virtuals() - self._link_run_virtuals):
max_dupes = spack.config.CONFIG.get(
f"concretizer:duplicates:max_dupes:{package_name}", default
)
gen.fact(fn.max_dupes(package_name, max_dupes))
gen.newline()
gen.h2("Possible package in link-run subDAG")
for name in sorted(self._link_run):
gen.fact(fn.possible_in_link_run(name))
gen.newline()
class FullDuplicatesCounter(MinimalDuplicatesCounter):
def possible_packages_facts(self, gen, fn):
build_tools = spack.repo.PATH.packages_with_tags("build-tools")
counter = collections.Counter(
list(self._link_run) + list(self._total_build) + list(self._direct_build)
)
gen.h2("Maximum number of nodes")
for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
count = min(count, 2)
gen.fact(fn.max_dupes(pkg, count))
gen.newline()
gen.h2("Build unification sets ")
for name in sorted(self.possible_dependencies() & build_tools):
gen.fact(fn.multiple_unification_sets(name))
gen.newline()
gen.h2("Possible package in link-run subDAG")
for name in sorted(self._link_run):
gen.fact(fn.possible_in_link_run(name))
gen.newline()
counter = collections.Counter(
list(self._link_run_virtuals) + list(self._possible_virtuals)
)
gen.h2("Maximum number of virtual nodes")
for pkg, count in sorted(counter.items(), key=lambda x: (x[1], x[0])):
gen.fact(fn.max_dupes(pkg, count))
gen.newline()
def create_counter(
specs: List[spack.spec.Spec], tests: bool, possible_graph: PossibleDependencyGraph
) -> Counter:
strategy = spack.config.CONFIG.get("concretizer:duplicates:strategy", "none")
if strategy == "full":
return FullDuplicatesCounter(specs, tests=tests, possible_graph=possible_graph)
if strategy == "minimal":
return MinimalDuplicatesCounter(specs, tests=tests, possible_graph=possible_graph)
return NoDuplicatesCounter(specs, tests=tests, possible_graph=possible_graph)

View File
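
The analyzer built by the `create_graph_analyzer()` factory shown above is either `NoStaticAnalysis` or `StaticAnalysis`, depending on the `concretizer:static_analysis` setting; a hedged sketch of querying it (package name illustrative):

```python
import spack.deptypes as dt
from spack.solver.input_analysis import create_graph_analyzer

analyzer = create_graph_analyzer()
graph = analyzer.possible_dependencies("cmake", allowed_deps=dt.LINK | dt.RUN)

# `graph` is a PossibleGraph(real_pkgs, virtuals, edges) named tuple.
print(sorted(graph.virtuals))
print(sorted(graph.real_pkgs)[:10])
print(sorted(graph.edges.get("cmake", ())))
```
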

@@ -52,7 +52,6 @@
import enum
import io
import itertools
import json
import os
import pathlib
import platform
@@ -1338,20 +1337,14 @@ class SpecBuildInterface(lang.ObjectWrapper):
"command", default_handler=_command_default_handler, _indirect=True
)
def __init__(
self,
spec: "Spec",
name: str,
query_parameters: List[str],
_parent: "Spec",
is_virtual: bool,
):
def __init__(self, spec: "Spec", name: str, query_parameters: List[str], _parent: "Spec"):
super().__init__(spec)
# Adding new attributes goes after super() call since the ObjectWrapper
# resets __dict__ to behave like the passed object
original_spec = getattr(spec, "wrapped_obj", spec)
self.wrapped_obj = original_spec
self.token = original_spec, name, query_parameters, _parent, is_virtual
self.token = original_spec, name, query_parameters, _parent
is_virtual = spack.repo.PATH.is_virtual(name)
self.last_query = QueryState(
name=name, extra_parameters=query_parameters, isvirtual=is_virtual
)
@@ -1534,8 +1527,9 @@ def __init__(self, spec_like=None, *, external_path=None, external_modules=None)
self._external_path = external_path
self.external_modules = Spec._format_module_list(external_modules)
# This attribute is used to store custom information for external specs.
self.extra_attributes: dict = {}
# This attribute is used to store custom information for
# external specs. None signal that it was not set yet.
self.extra_attributes = None
# This attribute holds the original build copy of the spec if it is
# deployed differently than it was built. None signals that the spec
@@ -1912,22 +1906,10 @@ def package_class(self):
"""Internal package call gets only the class object for a package.
Use this to just get package metadata.
"""
warnings.warn(
"`Spec.package_class` is deprecated and will be removed in version 1.0.0. Use "
"`spack.repo.PATH.get_pkg_class(spec.fullname) instead.",
category=spack.error.SpackAPIWarning,
stacklevel=2,
)
return spack.repo.PATH.get_pkg_class(self.fullname)
@property
def virtual(self):
warnings.warn(
"`Spec.virtual` is deprecated and will be removed in version 1.0.0. Use "
"`spack.repo.PATH.is_virtual(spec.name)` instead.",
category=spack.error.SpackAPIWarning,
stacklevel=2,
)
return spack.repo.PATH.is_virtual(self.name)
@property
@@ -2146,9 +2128,7 @@ def spec_hash(self, hash):
if hash.override is not None:
return hash.override(self)
node_dict = self.to_node_dict(hash=hash)
json_text = json.dumps(
node_dict, ensure_ascii=True, indent=None, separators=(",", ":"), sort_keys=False
)
json_text = sjson.dump(node_dict)
# This implements "frankenhashes", preserving the last 7 characters of the
# original hash when splicing so that we can avoid relocation issues
out = spack.util.hash.b32_hash(json_text)
@@ -2371,10 +2351,15 @@ def to_node_dict(self, hash=ht.dag_hash):
)
if self.external:
if self.extra_attributes:
extra_attributes = syaml.sorted_dict(self.extra_attributes)
else:
extra_attributes = None
d["external"] = {
"path": self.external_path,
"module": self.external_modules or None,
"extra_attributes": syaml.sorted_dict(self.extra_attributes),
"module": self.external_modules,
"extra_attributes": extra_attributes,
}
if not self._concrete:
@@ -2709,7 +2694,7 @@ def name_and_dependency_types(s: str) -> Tuple[str, dt.DepFlag]:
return name, depflag
def spec_and_dependency_types(
s: Union[Spec, Tuple[Spec, str]],
s: Union[Spec, Tuple[Spec, str]]
) -> Tuple[Spec, dt.DepFlag]:
"""Given a non-string key in the literal, extracts the spec
and its dependency types.
@@ -2829,6 +2814,24 @@ def from_detection(
s.extra_attributes = extra_attributes
return s
def validate_detection(self):
"""Validate the detection of an external spec.
This method is used as part of Spack's detection protocol, and is
not meant for client code use.
"""
# Assert that _extra_attributes is a Mapping and not None,
# which likely means the spec was created with Spec.from_detection
msg = 'cannot validate "{0}" since it was not created ' "using Spec.from_detection".format(
self
)
assert isinstance(self.extra_attributes, collections.abc.Mapping), msg
# Validate the spec calling a package specific method
pkg_cls = spack.repo.PATH.get_pkg_class(self.name)
validate_fn = getattr(pkg_cls, "validate_detected_spec", lambda x, y: None)
validate_fn(self, self.extra_attributes)
def _patches_assigned(self):
"""Whether patches have been assigned to this spec by the concretizer."""
# FIXME: _patches_in_order_of_appearance is attached after concretization
@@ -2867,7 +2870,7 @@ def inject_patches_variant(root):
# Add any patches from the package to the spec.
patches = set()
for cond, patch_list in spack.repo.PATH.get_pkg_class(s.fullname).patches.items():
for cond, patch_list in s.package_class.patches.items():
if s.satisfies(cond):
for patch in patch_list:
patches.add(patch)
@@ -2880,7 +2883,7 @@ def inject_patches_variant(root):
if dspec.spec.concrete:
continue
pkg_deps = spack.repo.PATH.get_pkg_class(dspec.parent.fullname).dependencies
pkg_deps = dspec.parent.package_class.dependencies
patches = []
for cond, deps_by_name in pkg_deps.items():
@@ -3087,7 +3090,7 @@ def validate_or_raise(self):
# FIXME: raise just the first one encountered
for spec in self.traverse():
# raise an UnknownPackageError if the spec's package isn't real.
if spec.name and not spack.repo.PATH.is_virtual(spec.name):
if (not spec.virtual) and spec.name:
spack.repo.PATH.get_pkg_class(spec.fullname)
# validate compiler in addition to the package name.
@@ -3096,7 +3099,7 @@ def validate_or_raise(self):
raise UnsupportedCompilerError(spec.compiler.name)
# Ensure correctness of variants (if the spec is not virtual)
if not spack.repo.PATH.is_virtual(spec.name):
if not spec.virtual:
Spec.ensure_valid_variants(spec)
substitute_abstract_variants(spec)
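
The hunks around here swap between the `Spec.package_class`/`Spec.virtual` properties and direct `spack.repo.PATH` calls; for reference, the two spellings side by side (package name illustrative):

```python
import spack.repo
import spack.spec

s = spack.spec.Spec("zlib")

# Property spellings (deprecated on one side of this diff):
pkg_cls = s.package_class
is_virtual = s.virtual

# Repository spellings:
pkg_cls = spack.repo.PATH.get_pkg_class(s.fullname)
is_virtual = spack.repo.PATH.is_virtual(s.name)
```
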
@@ -3114,7 +3117,7 @@ def ensure_valid_variants(spec):
if spec.concrete:
return
pkg_cls = spack.repo.PATH.get_pkg_class(spec.fullname)
pkg_cls = spec.package_class
pkg_variants = pkg_cls.variant_names()
# reserved names are variants that may be set on any package
# but are not necessarily recorded by the package's class
@@ -3331,9 +3334,7 @@ def intersects(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
# If the names are different, we need to consider virtuals
if self.name != other.name and self.name and other.name:
self_virtual = spack.repo.PATH.is_virtual(self.name)
other_virtual = spack.repo.PATH.is_virtual(other.name)
if self_virtual and other_virtual:
if self.virtual and other.virtual:
# Two virtual specs intersect only if there are providers for both
lhs = spack.repo.PATH.providers_for(str(self))
rhs = spack.repo.PATH.providers_for(str(other))
@@ -3341,8 +3342,8 @@ def intersects(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
return bool(intersection)
# A provider can satisfy a virtual dependency.
elif self_virtual or other_virtual:
virtual_spec, non_virtual_spec = (self, other) if self_virtual else (other, self)
elif self.virtual or other.virtual:
virtual_spec, non_virtual_spec = (self, other) if self.virtual else (other, self)
try:
# Here we might get an abstract spec
pkg_cls = spack.repo.PATH.get_pkg_class(non_virtual_spec.fullname)
@@ -3412,20 +3413,12 @@ def _intersects_dependencies(self, other):
# These two loops handle cases where there is an overly restrictive
# vpkg in one spec for a provider in the other (e.g., mpi@3: is not
# compatible with mpich2)
for spec in self.traverse():
if (
spack.repo.PATH.is_virtual(spec.name)
and spec.name in other_index
and not other_index.providers_for(spec)
):
for spec in self.virtual_dependencies():
if spec.name in other_index and not other_index.providers_for(spec):
return False
for spec in other.traverse():
if (
spack.repo.PATH.is_virtual(spec.name)
and spec.name in self_index
and not self_index.providers_for(spec)
):
for spec in other.virtual_dependencies():
if spec.name in self_index and not self_index.providers_for(spec):
return False
return True
@@ -3455,9 +3448,7 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
# If the names are different, we need to consider virtuals
if self.name != other.name and self.name and other.name:
# A concrete provider can satisfy a virtual dependency.
if not spack.repo.PATH.is_virtual(self.name) and spack.repo.PATH.is_virtual(
other.name
):
if not self.virtual and other.virtual:
try:
# Here we might get an abstract spec
pkg_cls = spack.repo.PATH.get_pkg_class(self.fullname)
@@ -3525,7 +3516,7 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
lhs_edges: Dict[str, Set[DependencySpec]] = collections.defaultdict(set)
for rhs_edge in other.traverse_edges(root=False, cover="edges"):
# If we are checking for ^mpi we need to verify if there is any edge
if spack.repo.PATH.is_virtual(rhs_edge.spec.name):
if rhs_edge.spec.virtual:
rhs_edge.update_virtuals(virtuals=(rhs_edge.spec.name,))
if not rhs_edge.virtuals:
@@ -3569,6 +3560,10 @@ def satisfies(self, other: Union[str, "Spec"], deps: bool = True) -> bool:
for rhs in other.traverse(root=False)
)
def virtual_dependencies(self):
"""Return list of any virtual deps in this spec."""
return [spec for spec in self.traverse() if spec.virtual]
@property # type: ignore[misc] # decorated prop not supported in mypy
def patches(self):
"""Return patch objects for any patch sha256 sums on this Spec.
@@ -3758,23 +3753,30 @@ def __getitem__(self, name: str):
csv = query_parameters.pop().strip()
query_parameters = re.split(r"\s*,\s*", csv)
# Consider all direct dependencies and transitive runtime dependencies
order = itertools.chain(
self.edges_to_dependencies(depflag=dt.ALL),
self.traverse_edges(deptype=dt.LINK | dt.RUN, order="breadth", cover="edges"),
order = lambda: itertools.chain(
self.traverse_edges(deptype=dt.LINK, order="breadth", cover="edges"),
self.edges_to_dependencies(depflag=dt.BUILD | dt.RUN | dt.TEST),
self.traverse_edges(deptype=dt.ALL, order="breadth", cover="edges"),
)
# Consider runtime dependencies and direct build/test deps before transitive dependencies,
# and prefer matches closest to the root.
try:
edge = next((e for e in order if e.spec.name == name or name in e.virtuals))
except StopIteration as e:
raise KeyError(f"No spec with name {name} in {self}") from e
child: Spec = next(
e.spec
for e in itertools.chain(
(e for e in order() if e.spec.name == name or name in e.virtuals),
# for historical reasons
(e for e in order() if e.spec.concrete and e.spec.package.provides(name)),
)
)
except StopIteration:
raise KeyError(f"No spec with name {name} in {self}")
if self._concrete:
return SpecBuildInterface(
edge.spec, name, query_parameters, _parent=self, is_virtual=name in edge.virtuals
)
return SpecBuildInterface(child, name, query_parameters, _parent=self)
return edge.spec
return child
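A minimal sketch of what this lookup order means in practice, assuming a Spack session with the builtin repo (hypothetical usage, not part of the diff):

```python
import spack.concretize

# Indexing a concrete spec by name (or by a virtual it depends on) walks
# link/run edges and direct build/test edges first, preferring matches
# closest to the root.
spec = spack.concretize.concretize_one("mpileaks")
print(spec["mpi"].name)  # the concrete provider selected for the virtual "mpi"
```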
def __contains__(self, spec):
"""True if this spec or some dependency satisfies the spec.
@@ -3790,11 +3792,8 @@ def __contains__(self, spec):
# if anonymous or same name, we only have to look at the root
if not spec.name or spec.name == self.name:
return self.satisfies(spec)
try:
dep = self[spec.name]
except KeyError:
return False
return dep.satisfies(spec)
else:
return any(s.satisfies(spec) for s in self.traverse(root=False))
def eq_dag(self, other, deptypes=True, vs=None, vo=None):
"""True if the full dependency DAGs of specs are equal."""
@@ -3862,13 +3861,6 @@ def _cmp_iter(self):
for item in self._cmp_node():
yield item
# If there is ever a breaking change to hash computation, whether accidental or purposeful,
# two specs can be identical modulo DAG hash, depending on what time they were concretized.
# From the perspective of many operations in Spack (database, build cache, etc.) a different
# DAG hash means a different spec. Here we ensure that two otherwise identical specs, one
# serialized before the hash change and one after, are considered different.
yield self.dag_hash() if self.concrete else None
# This needs to be in _cmp_iter so that no specs with different process hashes
# are considered the same by `__hash__` or `__eq__`.
#
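A generic illustration (plain Python, not Spack code) of why including the DAG hash in the comparison key separates otherwise identical specs serialized under different hash schemes:

```python
# Same node data, hashes produced by two different hashing schemes:
before = ("mpileaks", "2.3", "dag-hash-scheme-1")
after = ("mpileaks", "2.3", "dag-hash-scheme-2")
print(before == after)  # False: the hash component makes them distinct
```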
@@ -4712,7 +4704,7 @@ def concrete(self):
bool: True or False
"""
return self.spec._concrete or all(
v in self for v in spack.repo.PATH.get_pkg_class(self.spec.fullname).variant_names()
v in self for v in self.spec.package_class.variant_names()
)
def copy(self) -> "VariantMap":
@@ -4772,14 +4764,14 @@ def substitute_abstract_variants(spec: Spec):
elif name in vt.reserved_names:
continue
variant_defs = spack.repo.PATH.get_pkg_class(spec.fullname).variant_definitions(name)
variant_defs = spec.package_class.variant_definitions(name)
valid_defs = []
for when, vdef in variant_defs:
if when.intersects(spec):
valid_defs.append(vdef)
if not valid_defs:
if name not in spack.repo.PATH.get_pkg_class(spec.fullname).variant_names():
if name not in spec.package_class.variant_names():
unknown.append(name)
else:
whens = [str(when) for when, _ in variant_defs]
@@ -4851,9 +4843,7 @@ def reconstruct_virtuals_on_edges(spec):
possible_virtuals = set()
for node in spec.traverse():
try:
possible_virtuals.update(
{x for x in node.package.dependencies if spack.repo.PATH.is_virtual(x)}
)
possible_virtuals.update({x for x in node.package.dependencies if Spec(x).virtual})
except Exception as e:
warnings.warn(f"cannot reconstruct virtual dependencies on package {node.name}: {e}")
continue
@@ -4918,7 +4908,7 @@ def from_node_dict(cls, node):
spec.external_modules = node["external"]["module"]
if spec.external_modules is False:
spec.external_modules = None
spec.extra_attributes = node["external"].get("extra_attributes") or {}
spec.extra_attributes = node["external"].get("extra_attributes", {})
# specs read in are concrete unless marked abstract
if node.get("concrete", True):
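A minimal illustration (plain Python, not part of the diff) of the difference between the two `extra_attributes` forms above when the serialized value is an explicit null:

```python
external = {"extra_attributes": None}  # what "extra_attributes:" with no value loads as
print(external.get("extra_attributes", {}))    # None: the default is not used
print(external.get("extra_attributes") or {})  # {}: falsy values are replaced
```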

View File

@@ -1,7 +1,7 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Test ABI-based splicing of dependencies"""
""" Test ABI-based splicing of dependencies """
from typing import List

View File

@@ -1,10 +1,8 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import io
import os
import subprocess
from urllib.error import HTTPError
import pytest
@@ -17,7 +15,6 @@
import spack.paths as spack_paths
import spack.repo as repo
import spack.util.git
from spack.test.conftest import MockHTTPResponse
pytestmark = [pytest.mark.usefixtures("mock_packages")]
@@ -165,8 +162,38 @@ def test_import_signing_key(mock_gnupghome):
ci.import_signing_key(signing_key)
def test_download_and_extract_artifacts(tmpdir, monkeypatch):
monkeypatch.setenv("GITLAB_PRIVATE_TOKEN", "faketoken")
class FakeWebResponder:
def __init__(self, response_code=200, content_to_read=[]):
self._resp_code = response_code
self._content = content_to_read
self._read = [False for c in content_to_read]
def open(self, request, data=None, timeout=object()):
return self
def getcode(self):
return self._resp_code
def read(self, length=None):
if len(self._content) <= 0:
return None
if not self._read[-1]:
return_content = self._content[-1]
if length:
self._read[-1] = True
else:
self._read.pop()
self._content.pop()
return return_content
self._read.pop()
self._content.pop()
return None
def test_download_and_extract_artifacts(tmpdir, monkeypatch, working_env):
os.environ.update({"GITLAB_PRIVATE_TOKEN": "faketoken"})
url = "https://www.nosuchurlexists.itsfake/artifacts.zip"
working_dir = os.path.join(tmpdir.strpath, "repro")
@@ -174,13 +201,10 @@ def test_download_and_extract_artifacts(tmpdir, monkeypatch):
spack_paths.test_path, "data", "ci", "gitlab", "artifacts.zip"
)
def _urlopen_OK(*args, **kwargs):
with open(test_artifacts_path, "rb") as f:
return MockHTTPResponse(
"200", "OK", {"Content-Type": "application/zip"}, io.BytesIO(f.read())
)
with open(test_artifacts_path, "rb") as fd:
fake_responder = FakeWebResponder(content_to_read=[fd.read()])
monkeypatch.setattr(ci, "urlopen", _urlopen_OK)
monkeypatch.setattr(ci, "build_opener", lambda handler: fake_responder)
ci.download_and_extract_artifacts(url, working_dir)
@@ -190,11 +214,7 @@ def _urlopen_OK(*args, **kwargs):
found_install = fs.find(working_dir, "install.sh")
assert len(found_install) == 1
def _urlopen_500(*args, **kwargs):
raise HTTPError(url, 500, "Internal Server Error", {}, None)
monkeypatch.setattr(ci, "urlopen", _urlopen_500)
fake_responder._resp_code = 400
with pytest.raises(spack.error.SpackError):
ci.download_and_extract_artifacts(url, working_dir)

View File

@@ -12,7 +12,7 @@
build_env = SpackCommand("build-env")
@pytest.mark.parametrize("pkg", [("pkg-c",), ("pkg-c", "--")])
@pytest.mark.parametrize("pkg", [("zlib",), ("zlib", "--")])
@pytest.mark.usefixtures("config", "mock_packages", "working_env")
def test_it_just_runs(pkg):
build_env(*pkg)
@@ -38,7 +38,7 @@ def test_build_env_requires_a_spec(args):
@pytest.mark.usefixtures("config", "mock_packages", "working_env")
def test_dump(shell_as, shell, tmpdir):
with tmpdir.as_cwd():
build_env("--dump", _out_file, "pkg-c")
build_env("--dump", _out_file, "zlib")
with open(_out_file, encoding="utf-8") as f:
if shell == "pwsh":
assert any(line.startswith("$Env:PATH") for line in f.readlines())
@@ -51,7 +51,7 @@ def test_dump(shell_as, shell, tmpdir):
@pytest.mark.usefixtures("config", "mock_packages", "working_env")
def test_pickle(tmpdir):
with tmpdir.as_cwd():
build_env("--pickle", _out_file, "pkg-c")
build_env("--pickle", _out_file, "zlib")
environment = pickle.load(open(_out_file, "rb"))
assert isinstance(environment, dict)
assert "PATH" in environment

View File

@@ -148,7 +148,7 @@ def test_update_key_index(
s = spack.concretize.concretize_one("libdwarf")
# Install a package
install("--fake", s.name)
install(s.name)
# Put installed package in the buildcache, which, because we're signing
# it, should result in the public key getting pushed to the buildcache
@@ -178,7 +178,7 @@ def test_buildcache_autopush(tmp_path, install_mockery, mock_fetch):
s = spack.concretize.concretize_one("libdwarf")
# Install and generate build cache index
PackageInstaller([s.package], fake=True, explicit=True).install()
PackageInstaller([s.package], explicit=True).install()
metadata_file = spack.binary_distribution.tarball_name(s, ".spec.json")
@@ -220,7 +220,7 @@ def verify_mirror_contents():
# Install a package and put it in the buildcache
s = spack.concretize.concretize_one(out_env_pkg)
install("--fake", s.name)
install(s.name)
buildcache("push", "-u", "-f", src_mirror_url, s.name)
env("create", "test")

View File

@@ -3,7 +3,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import pathlib
import pytest
@@ -23,15 +22,7 @@
@pytest.fixture
def no_add(monkeypatch):
def add_versions_to_pkg(pkg, version_lines, open_in_editor):
raise AssertionError("Should not be called")
monkeypatch.setattr(spack.cmd.checksum, "add_versions_to_pkg", add_versions_to_pkg)
@pytest.fixture
def can_fetch_versions(monkeypatch, no_add):
def can_fetch_versions(monkeypatch):
"""Fake successful version detection."""
def fetch_remote_versions(pkg, concurrency):
@@ -54,7 +45,7 @@ def url_exists(url, curl=None):
@pytest.fixture
def cannot_fetch_versions(monkeypatch, no_add):
def cannot_fetch_versions(monkeypatch):
"""Fake unsuccessful version detection."""
def fetch_remote_versions(pkg, concurrency):
@@ -97,6 +88,7 @@ def test_checksum_args(arguments, expected):
(["--batch", "preferred-test"], "version of preferred-test"),
(["--latest", "preferred-test"], "Found 1 version"),
(["--preferred", "preferred-test"], "Found 1 version"),
(["--add-to-package", "preferred-test"], "Added 0 new versions to"),
(["--verify", "preferred-test"], "Verified 1 of 1"),
(["--verify", "zlib", "1.2.13"], "1.2.13 [-] No previous checksum"),
],
@@ -279,12 +271,15 @@ def test_checksum_interactive_unrecognized_command():
assert interactive_version_filter(v.copy(), input=input) == v
def test_checksum_versions(mock_packages, can_fetch_versions, monkeypatch):
def test_checksum_versions(mock_packages, can_fetch_versions):
pkg_cls = spack.repo.PATH.get_pkg_class("zlib")
versions = [str(v) for v in pkg_cls.versions]
output = spack_checksum("zlib", *versions)
assert "Found 3 versions" in output
assert "version(" in output
output = spack_checksum("--add-to-package", "zlib", *versions)
assert "Found 3 versions" in output
assert "Added 0 new versions to" in output
def test_checksum_missing_version(mock_packages, cannot_fetch_versions):
@@ -292,6 +287,7 @@ def test_checksum_missing_version(mock_packages, cannot_fetch_versions):
assert "Could not find any remote versions" in output
output = spack_checksum("--add-to-package", "preferred-test", "99.99.99", fail_on_error=False)
assert "Could not find any remote versions" in output
assert "Added 1 new versions to" not in output
def test_checksum_deprecated_version(mock_packages, can_fetch_versions):
@@ -301,6 +297,8 @@ def test_checksum_deprecated_version(mock_packages, can_fetch_versions):
"--add-to-package", "deprecated-versions", "1.1.0", fail_on_error=False
)
assert "Version 1.1.0 is deprecated" in output
# TODO alecbcs: broken assertion.
# assert "Added 0 new versions to" not in output
def test_checksum_url(mock_packages, config):
@@ -339,52 +337,3 @@ def test_checksum_manual_download_fails(mock_packages, monkeypatch):
monkeypatch.setattr(spack.package_base.PackageBase, "download_instr", error)
with pytest.raises(ManualDownloadRequiredError, match=error):
spack_checksum(name, *versions)
def test_upate_package_contents(tmp_path: pathlib.Path):
"""Test that the package.py file is updated with the new versions."""
pkg_path = tmp_path / "package.py"
pkg_path.write_text(
"""\
from spack.package import *
class Zlib(Package):
homepage = "http://zlib.net"
url = "http://zlib.net/fossils/zlib-1.2.11.tar.gz"
version("1.2.11", sha256="c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1")
version("1.2.8", sha256="36658cb768a54c1d4dec43c3116c27ed893e88b02ecfcb44f2166f9c0b7f2a0d")
version("1.2.3", sha256="1795c7d067a43174113fdf03447532f373e1c6c57c08d61d9e4e9be5e244b05e")
variant("pic", default=True, description="test")
def install(self, spec, prefix):
make("install")
"""
)
version_lines = """\
version("1.2.13", sha256="abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890")
version("1.2.5", sha256="abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890")
version("1.2.3", sha256="1795c7d067a43174113fdf03447532f373e1c6c57c08d61d9e4e9be5e244b05e")
"""
# two new versions are added
assert spack.cmd.checksum.add_versions_to_pkg(str(pkg_path), version_lines) == 2
assert (
pkg_path.read_text()
== """\
from spack.package import *
class Zlib(Package):
homepage = "http://zlib.net"
url = "http://zlib.net/fossils/zlib-1.2.11.tar.gz"
version("1.2.13", sha256="abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890") # FIXME
version("1.2.11", sha256="c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1")
version("1.2.8", sha256="36658cb768a54c1d4dec43c3116c27ed893e88b02ecfcb44f2166f9c0b7f2a0d")
version("1.2.5", sha256="abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890") # FIXME
version("1.2.3", sha256="1795c7d067a43174113fdf03447532f373e1c6c57c08d61d9e4e9be5e244b05e")
variant("pic", default=True, description="test")
def install(self, spec, prefix):
make("install")
"""
)

View File

@@ -28,7 +28,6 @@
from spack.ci.generator_registry import generator
from spack.cmd.ci import FAILED_CREATE_BUILDCACHE_CODE
from spack.database import INDEX_JSON_FILE
from spack.error import SpackError
from spack.schema.buildcache_spec import schema as specfile_schema
from spack.schema.database_index import schema as db_idx_schema
from spack.spec import Spec
@@ -171,9 +170,7 @@ def test_ci_generate_with_env(ci_generate_test, tmp_path, mock_binary_index):
url: https://my.fake.cdash
project: Not used
site: Nothing
""",
"--artifacts-root",
str(tmp_path / "my_artifacts_root"),
"""
)
yaml_contents = syaml.load(outputfile.read_text())
@@ -195,7 +192,7 @@ def test_ci_generate_with_env(ci_generate_test, tmp_path, mock_binary_index):
assert "variables" in yaml_contents
assert "SPACK_ARTIFACTS_ROOT" in yaml_contents["variables"]
assert yaml_contents["variables"]["SPACK_ARTIFACTS_ROOT"] == "my_artifacts_root"
assert yaml_contents["variables"]["SPACK_ARTIFACTS_ROOT"] == "jobs_scratch_dir"
def test_ci_generate_with_env_missing_section(ci_generate_test, tmp_path, mock_binary_index):
@@ -1065,7 +1062,7 @@ def test_ci_rebuild_index(
with open(tmp_path / "spec.json", "w", encoding="utf-8") as f:
f.write(concrete_spec.to_json(hash=ht.dag_hash))
install_cmd("--fake", "--add", "-f", str(tmp_path / "spec.json"))
install_cmd("--add", "-f", str(tmp_path / "spec.json"))
buildcache_cmd("push", "-u", "-f", mirror_url, "callpath")
ci_cmd("rebuild-index")
@@ -1325,50 +1322,44 @@ def test_ci_reproduce(
env.concretize()
env.write()
def fake_download_and_extract_artifacts(url, work_dir, merge_commit_test=True):
with working_dir(tmp_path), ev.Environment(".") as env:
if not os.path.exists(repro_dir):
repro_dir.mkdir()
repro_dir.mkdir()
job_spec = env.concrete_roots()[0]
with open(repro_dir / "archivefiles.json", "w", encoding="utf-8") as f:
f.write(job_spec.to_json(hash=ht.dag_hash))
artifacts_root = repro_dir / "jobs_scratch_dir"
pipeline_path = artifacts_root / "pipeline.yml"
job_spec = env.concrete_roots()[0]
with open(repro_dir / "archivefiles.json", "w", encoding="utf-8") as f:
f.write(job_spec.to_json(hash=ht.dag_hash))
ci_cmd(
"generate",
"--output-file",
str(pipeline_path),
"--artifacts-root",
str(artifacts_root),
artifacts_root = repro_dir / "scratch_dir"
pipeline_path = artifacts_root / "pipeline.yml"
ci_cmd(
"generate",
"--output-file",
str(pipeline_path),
"--artifacts-root",
str(artifacts_root),
)
job_name = gitlab_generator.get_job_name(job_spec)
with open(repro_dir / "repro.json", "w", encoding="utf-8") as f:
f.write(
json.dumps(
{
"job_name": job_name,
"job_spec_json": "archivefiles.json",
"ci_project_dir": str(repro_dir),
}
)
)
job_name = gitlab_generator.get_job_name(job_spec)
with open(repro_dir / "install.sh", "w", encoding="utf-8") as f:
f.write("#!/bin/sh\n\n#fake install\nspack install blah\n")
with open(repro_dir / "repro.json", "w", encoding="utf-8") as f:
f.write(
json.dumps(
{
"job_name": job_name,
"job_spec_json": "archivefiles.json",
"ci_project_dir": str(repro_dir),
}
)
)
with open(repro_dir / "spack_info.txt", "w", encoding="utf-8") as f:
f.write(f"\nMerge {last_two_git_commits[1]} into {last_two_git_commits[0]}\n\n")
with open(repro_dir / "install.sh", "w", encoding="utf-8") as f:
f.write("#!/bin/sh\n\n#fake install\nspack install blah\n")
with open(repro_dir / "spack_info.txt", "w", encoding="utf-8") as f:
if merge_commit_test:
f.write(
f"\nMerge {last_two_git_commits[1]} into {last_two_git_commits[0]}\n\n"
)
else:
f.write(f"\ncommit {last_two_git_commits[1]}\n\n")
return "jobs_scratch_dir"
def fake_download_and_extract_artifacts(url, work_dir):
pass
monkeypatch.setattr(ci, "download_and_extract_artifacts", fake_download_and_extract_artifacts)
rep_out = ci_cmd(
@@ -1384,64 +1375,6 @@ def fake_download_and_extract_artifacts(url, work_dir, merge_commit_test=True):
# Make sure we tell the user where it is when not in interactive mode
assert f"$ {repro_dir}/start.sh" in rep_out
# Ensure the correct commits are used
assert f"checkout_commit: {last_two_git_commits[0]}" in rep_out
assert f"merge_commit: {last_two_git_commits[1]}" in rep_out
# Test re-running in dirty working dir
with pytest.raises(SpackError, match=f"{repro_dir}"):
rep_out = ci_cmd(
"reproduce-build",
"https://example.com/api/v1/projects/1/jobs/2/artifacts",
"--working-dir",
str(repro_dir),
output=str,
)
# Cleanup between tests
shutil.rmtree(repro_dir)
# Test --use-local-head
rep_out = ci_cmd(
"reproduce-build",
"https://example.com/api/v1/projects/1/jobs/2/artifacts",
"--use-local-head",
"--working-dir",
str(repro_dir),
output=str,
)
# Make sure we are checking out the HEAD commit without a merge commit
assert "checkout_commit: HEAD" in rep_out
assert "merge_commit: None" in rep_out
# Test the case where the spack_info.txt is not a merge commit
monkeypatch.setattr(
ci,
"download_and_extract_artifacts",
lambda url, wd: fake_download_and_extract_artifacts(url, wd, False),
)
# Cleanup between tests
shutil.rmtree(repro_dir)
rep_out = ci_cmd(
"reproduce-build",
"https://example.com/api/v1/projects/1/jobs/2/artifacts",
"--working-dir",
str(repro_dir),
output=str,
)
# Make sure the script was generated
assert (repro_dir / "start.sh").exists()
# Make sure we tell the user where it is when not in interactive mode
assert f"$ {repro_dir}/start.sh" in rep_out
# Ensure the correct commit is used (different than HEAD)
assert f"checkout_commit: {last_two_git_commits[1]}" in rep_out
assert "merge_commit: None" in rep_out
@pytest.mark.parametrize(
"url_in,url_out",

View File

@@ -335,7 +335,7 @@ def test_config_add_override_leaf_from_file(mutable_empty_config, tmpdir):
def test_config_add_update_dict_from_file(mutable_empty_config, tmpdir):
config("add", "packages:all:require:['%gcc']")
config("add", "packages:all:compiler:[gcc]")
# contents to add to file
contents = """spack:
@@ -357,7 +357,7 @@ def test_config_add_update_dict_from_file(mutable_empty_config, tmpdir):
expected = """packages:
all:
target: [x86_64]
require: ['%gcc']
compiler: [gcc]
"""
assert expected == output
@@ -606,6 +606,7 @@ def test_config_prefer_upstream(
packages = syaml.load(open(cfg_file, encoding="utf-8"))["packages"]
# Make sure only the non-default variants are set.
assert packages["all"] == {"compiler": ["gcc@=10.2.1"]}
assert packages["boost"] == {"variants": "+debug +graph", "version": ["1.63.0"]}
assert packages["dependency-install"] == {"version": ["2.0"]}
# Ensure that neither variant gets listed for hdf5, since they conflict

View File

@@ -2,16 +2,52 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import platform
import pytest
import spack
import spack.platforms
import spack.spec
from spack.database import INDEX_JSON_FILE
from spack.main import SpackCommand
from spack.util.executable import which
debug = SpackCommand("debug")
@pytest.mark.db
def test_create_db_tarball(tmpdir, database):
with tmpdir.as_cwd():
debug("create-db-tarball")
# get the first non-dotfile to avoid coverage files in the directory
files = os.listdir(os.getcwd())
tarball_name = next(
f for f in files if not f.startswith(".") and not f.startswith("tests")
)
# debug command made an archive
assert os.path.exists(tarball_name)
# print contents of archive
tar = which("tar")
contents = tar("tzf", tarball_name, output=str)
# DB file is included
assert INDEX_JSON_FILE in contents
# specfiles from all installs are included
for spec in database.query():
# externals won't have a specfile
if spec.external:
continue
spec_suffix = "%s/.spack/spec.json" % spec.dag_hash()
assert spec_suffix in contents
def test_report():
out = debug("report")
host_platform = spack.platforms.host()

View File

@@ -24,24 +24,32 @@
mpi_deps = ["fake"]
@pytest.mark.parametrize(
"cli_args,expected",
[
(["mpileaks"], set(["callpath"] + mpis)),
(
["--transitive", "mpileaks"],
set(["callpath", "dyninst", "libdwarf", "libelf"] + mpis + mpi_deps),
),
(["--transitive", "--deptype=link,run", "dtbuild1"], {"dtlink2", "dtrun2"}),
(["--transitive", "--deptype=build", "dtbuild1"], {"dtbuild2", "dtlink2"}),
(["--transitive", "--deptype=link", "dtbuild1"], {"dtlink2"}),
],
)
def test_direct_dependencies(cli_args, expected, mock_runtimes):
out = dependencies(*cli_args)
result = set(re.split(r"\s+", out.strip()))
expected.update(mock_runtimes)
assert expected == result
def test_direct_dependencies(mock_packages):
out = dependencies("mpileaks")
actual = set(re.split(r"\s+", out.strip()))
expected = set(["callpath"] + mpis)
assert expected == actual
def test_transitive_dependencies(mock_packages):
out = dependencies("--transitive", "mpileaks")
actual = set(re.split(r"\s+", out.strip()))
expected = set(["callpath", "dyninst", "libdwarf", "libelf"] + mpis + mpi_deps)
assert expected == actual
def test_transitive_dependencies_with_deptypes(mock_packages):
out = dependencies("--transitive", "--deptype=link,run", "dtbuild1")
deps = set(re.split(r"\s+", out.strip()))
assert set(["dtlink2", "dtrun2"]) == deps
out = dependencies("--transitive", "--deptype=build", "dtbuild1")
deps = set(re.split(r"\s+", out.strip()))
assert set(["dtbuild2", "dtlink2"]) == deps
out = dependencies("--transitive", "--deptype=link", "dtbuild1")
deps = set(re.split(r"\s+", out.strip()))
assert set(["dtlink2"]) == deps
@pytest.mark.db

View File

@@ -17,16 +17,16 @@
def test_deprecate(mock_packages, mock_archive, mock_fetch, install_mockery):
install("--fake", "libelf@0.8.13")
install("--fake", "libelf@0.8.10")
install("libelf@0.8.13")
install("libelf@0.8.10")
all_installed = spack.store.STORE.db.query("libelf")
all_installed = spack.store.STORE.db.query()
assert len(all_installed) == 2
deprecate("-y", "libelf@0.8.10", "libelf@0.8.13")
non_deprecated = spack.store.STORE.db.query("libelf")
all_available = spack.store.STORE.db.query("libelf", installed=InstallRecordStatus.ANY)
non_deprecated = spack.store.STORE.db.query()
all_available = spack.store.STORE.db.query(installed=InstallRecordStatus.ANY)
assert all_available == all_installed
assert non_deprecated == spack.store.STORE.db.query("libelf@0.8.13")
@@ -39,24 +39,24 @@ def test_deprecate_fails_no_such_package(mock_packages, mock_archive, mock_fetch
output = deprecate("-y", "libelf@0.8.10", "libelf@0.8.13", fail_on_error=False)
assert "Spec 'libelf@0.8.10' matches no installed packages" in output
install("--fake", "libelf@0.8.10")
install("libelf@0.8.10")
output = deprecate("-y", "libelf@0.8.10", "libelf@0.8.13", fail_on_error=False)
assert "Spec 'libelf@0.8.13' matches no installed packages" in output
def test_deprecate_install(mock_packages, mock_archive, mock_fetch, install_mockery, monkeypatch):
"""Tests that the -i option allows us to deprecate in favor of a spec
that is not yet installed.
"""
install("--fake", "libelf@0.8.10")
to_deprecate = spack.store.STORE.db.query("libelf")
def test_deprecate_install(mock_packages, mock_archive, mock_fetch, install_mockery):
"""Tests that the ```-i`` option allows us to deprecate in favor of a spec
that is not yet installed."""
install("libelf@0.8.10")
to_deprecate = spack.store.STORE.db.query()
assert len(to_deprecate) == 1
deprecate("-y", "-i", "libelf@0.8.10", "libelf@0.8.13")
non_deprecated = spack.store.STORE.db.query("libelf")
deprecated = spack.store.STORE.db.query("libelf", installed=InstallRecordStatus.DEPRECATED)
non_deprecated = spack.store.STORE.db.query()
deprecated = spack.store.STORE.db.query(installed=InstallRecordStatus.DEPRECATED)
assert deprecated == to_deprecate
assert len(non_deprecated) == 1
assert non_deprecated[0].satisfies("libelf@0.8.13")
@@ -64,8 +64,8 @@ def test_deprecate_install(mock_packages, mock_archive, mock_fetch, install_mock
def test_deprecate_deps(mock_packages, mock_archive, mock_fetch, install_mockery):
"""Test that the deprecate command deprecates all dependencies properly."""
install("--fake", "libdwarf@20130729 ^libelf@0.8.13")
install("--fake", "libdwarf@20130207 ^libelf@0.8.10")
install("libdwarf@20130729 ^libelf@0.8.13")
install("libdwarf@20130207 ^libelf@0.8.10")
new_spec = spack.concretize.concretize_one("libdwarf@20130729^libelf@0.8.13")
old_spec = spack.concretize.concretize_one("libdwarf@20130207^libelf@0.8.10")
@@ -81,14 +81,14 @@ def test_deprecate_deps(mock_packages, mock_archive, mock_fetch, install_mockery
assert all_available == all_installed
assert sorted(all_available) == sorted(deprecated + non_deprecated)
assert sorted(non_deprecated) == sorted(new_spec.traverse())
assert sorted(deprecated) == sorted([old_spec, old_spec["libelf"]])
assert sorted(non_deprecated) == sorted(list(new_spec.traverse()))
assert sorted(deprecated) == sorted(list(old_spec.traverse()))
def test_uninstall_deprecated(mock_packages, mock_archive, mock_fetch, install_mockery):
"""Tests that we can still uninstall deprecated packages."""
install("--fake", "libelf@0.8.13")
install("--fake", "libelf@0.8.10")
install("libelf@0.8.13")
install("libelf@0.8.10")
deprecate("-y", "libelf@0.8.10", "libelf@0.8.13")
@@ -104,9 +104,9 @@ def test_uninstall_deprecated(mock_packages, mock_archive, mock_fetch, install_m
def test_deprecate_already_deprecated(mock_packages, mock_archive, mock_fetch, install_mockery):
"""Tests that we can re-deprecate a spec to change its deprecator."""
install("--fake", "libelf@0.8.13")
install("--fake", "libelf@0.8.12")
install("--fake", "libelf@0.8.10")
install("libelf@0.8.13")
install("libelf@0.8.12")
install("libelf@0.8.10")
deprecated_spec = spack.concretize.concretize_one("libelf@0.8.10")
@@ -117,8 +117,8 @@ def test_deprecate_already_deprecated(mock_packages, mock_archive, mock_fetch, i
deprecate("-y", "libelf@0.8.10", "libelf@0.8.13")
non_deprecated = spack.store.STORE.db.query("libelf")
all_available = spack.store.STORE.db.query("libelf", installed=InstallRecordStatus.ANY)
non_deprecated = spack.store.STORE.db.query()
all_available = spack.store.STORE.db.query(installed=InstallRecordStatus.ANY)
assert len(non_deprecated) == 2
assert len(all_available) == 3
@@ -129,9 +129,9 @@ def test_deprecate_already_deprecated(mock_packages, mock_archive, mock_fetch, i
def test_deprecate_deprecator(mock_packages, mock_archive, mock_fetch, install_mockery):
"""Tests that when a deprecator spec is deprecated, its deprecatee specs
are updated to point to the new deprecator."""
install("--fake", "libelf@0.8.13")
install("--fake", "libelf@0.8.12")
install("--fake", "libelf@0.8.10")
install("libelf@0.8.13")
install("libelf@0.8.12")
install("libelf@0.8.10")
first_deprecated_spec = spack.concretize.concretize_one("libelf@0.8.10")
second_deprecated_spec = spack.concretize.concretize_one("libelf@0.8.12")
@@ -144,8 +144,8 @@ def test_deprecate_deprecator(mock_packages, mock_archive, mock_fetch, install_m
deprecate("-y", "libelf@0.8.12", "libelf@0.8.13")
non_deprecated = spack.store.STORE.db.query("libelf")
all_available = spack.store.STORE.db.query("libelf", installed=InstallRecordStatus.ANY)
non_deprecated = spack.store.STORE.db.query()
all_available = spack.store.STORE.db.query(installed=InstallRecordStatus.ANY)
assert len(non_deprecated) == 1
assert len(all_available) == 3
@@ -158,8 +158,8 @@ def test_deprecate_deprecator(mock_packages, mock_archive, mock_fetch, install_m
def test_concretize_deprecated(mock_packages, mock_archive, mock_fetch, install_mockery):
"""Tests that the concretizer throws an error if we concretize to a
deprecated spec"""
install("--fake", "libelf@0.8.13")
install("--fake", "libelf@0.8.10")
install("libelf@0.8.13")
install("libelf@0.8.10")
deprecate("-y", "libelf@0.8.10", "libelf@0.8.13")

View File

@@ -127,15 +127,16 @@ def test_dev_build_before_until(tmpdir, install_mockery):
assert not_installed in out
def _print_spack_short_spec(*args):
print(f"SPACK_SHORT_SPEC={os.environ['SPACK_SHORT_SPEC']}")
def print_spack_cc(*args):
# Eat arguments and print environment variable to test
print(os.environ.get("CC", ""))
def test_dev_build_drop_in(tmpdir, mock_packages, monkeypatch, install_mockery, working_env):
monkeypatch.setattr(os, "execvp", _print_spack_short_spec)
monkeypatch.setattr(os, "execvp", print_spack_cc)
with tmpdir.as_cwd():
output = dev_build("-b", "edit", "--drop-in", "sh", "dev-build-test-install@0.0.0")
assert "SPACK_SHORT_SPEC=dev-build-test-install@0.0.0" in output
assert os.path.join("lib", "spack", "env") in output
def test_dev_build_fails_already_installed(tmpdir, install_mockery):

View File

@@ -194,7 +194,7 @@ def test_diff_cmd(install_mockery, mock_fetch, mock_archive, mock_packages):
def test_load_first(install_mockery, mock_fetch, mock_archive, mock_packages):
"""Test with and without the --first option"""
install_cmd("--fake", "mpileaks")
install_cmd("mpileaks")
# Only one version of mpileaks will work
diff_cmd("mpileaks", "mpileaks")

View File

@@ -1750,10 +1750,7 @@ def check_stage(spec):
spec = spack.concretize.concretize_one(spec)
for dep in spec.traverse():
stage_name = f"{stage_prefix}{dep.name}-{dep.version}-{dep.dag_hash()}"
if dep.external:
assert not os.path.exists(os.path.join(root, stage_name))
else:
assert os.path.isdir(os.path.join(root, stage_name))
assert os.path.isdir(os.path.join(root, stage_name))
check_stage("mpileaks")
check_stage("zmpi")
@@ -3078,10 +3075,11 @@ def test_stack_view_activate_from_default(
assert "FOOBAR=mpileaks" in shell
def test_envvar_set_in_activate(tmp_path, mock_packages, install_mockery):
spack_yaml = tmp_path / "spack.yaml"
spack_yaml.write_text(
"""
def test_envvar_set_in_activate(tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery):
filename = str(tmpdir.join("spack.yaml"))
with open(filename, "w", encoding="utf-8") as f:
f.write(
"""\
spack:
specs:
- cmake%gcc
@@ -3089,21 +3087,21 @@ def test_envvar_set_in_activate(tmp_path, mock_packages, install_mockery):
set:
ENVAR_SET_IN_ENV_LOAD: "True"
"""
)
)
with tmpdir.as_cwd():
env("create", "test", "./spack.yaml")
with ev.read("test"):
install()
env("create", "test", str(spack_yaml))
with ev.read("test"):
install("--fake")
test_env = ev.read("test")
output = env("activate", "--sh", "test")
test_env = ev.read("test")
output = env("activate", "--sh", "test")
assert "ENVAR_SET_IN_ENV_LOAD=True" in output
assert "ENVAR_SET_IN_ENV_LOAD=True" in output
with test_env:
with spack.util.environment.set_env(ENVAR_SET_IN_ENV_LOAD="True"):
output = env("deactivate", "--sh")
assert "unset ENVAR_SET_IN_ENV_LOAD" in output
with test_env:
with spack.util.environment.set_env(ENVAR_SET_IN_ENV_LOAD="True"):
output = env("deactivate", "--sh")
assert "unset ENVAR_SET_IN_ENV_LOAD" in output
def test_stack_view_no_activate_without_default(

View File

@@ -233,27 +233,21 @@ def test_display_json_deps(database, capsys):
@pytest.mark.db
def test_find_format(database, config):
output = find("--format", "{name}-{^mpi.name}", "mpileaks")
assert set(output.strip().split("\n")) == {
"mpileaks-zmpi",
"mpileaks-mpich",
"mpileaks-mpich2",
}
assert set(output.strip().split("\n")) == set(
["mpileaks-zmpi", "mpileaks-mpich", "mpileaks-mpich2"]
)
output = find("--format", "{name}-{version}-{compiler.name}-{^mpi.name}", "mpileaks")
assert "installed package" not in output
assert set(output.strip().split("\n")) == {
"mpileaks-2.3-gcc-zmpi",
"mpileaks-2.3-gcc-mpich",
"mpileaks-2.3-gcc-mpich2",
}
assert set(output.strip().split("\n")) == set(
["mpileaks-2.3-gcc-zmpi", "mpileaks-2.3-gcc-mpich", "mpileaks-2.3-gcc-mpich2"]
)
output = find("--format", "{name}-{^mpi.name}-{hash:7}", "mpileaks")
elements = output.strip().split("\n")
assert set(e[:-7] for e in elements) == {
"mpileaks-zmpi-",
"mpileaks-mpich-",
"mpileaks-mpich2-",
}
assert set(e[:-7] for e in elements) == set(
["mpileaks-zmpi-", "mpileaks-mpich-", "mpileaks-mpich2-"]
)
# hashes are in base32
for e in elements:
@@ -354,7 +348,7 @@ def test_find_prefix_in_env(
"""Test `find` formats requiring concrete specs work in environments."""
env("create", "test")
with ev.read("test"):
install("--fake", "--add", "mpileaks")
install("--add", "mpileaks")
find("-p")
find("-l")
find("-L")

View File

@@ -304,8 +304,6 @@ def test_run_import_check(tmp_path: pathlib.Path):
contents = '''
import spack.cmd
import spack.config # do not drop this import because of this comment
import spack.repo
import spack.repo_utils
# this comment about spack.error should not be removed
class Example(spack.build_systems.autotools.AutotoolsPackage):
@@ -316,7 +314,6 @@ def foo(config: "spack.error.SpackError"):
# the type hint is quoted, so it should not be removed
spack.util.executable.Executable("example")
print(spack.__version__)
print(spack.repo_utils.__file__)
'''
file.write_text(contents)
root = str(tmp_path)
@@ -332,7 +329,6 @@ def foo(config: "spack.error.SpackError"):
output = output_buf.getvalue()
assert "issues.py: redundant import: spack.cmd" in output
assert "issues.py: redundant import: spack.repo" in output
assert "issues.py: redundant import: spack.config" not in output # comment prevents removal
assert "issues.py: missing import: spack" in output # used by spack.__version__
assert "issues.py: missing import: spack.build_systems.autotools" in output

View File

@@ -1243,7 +1243,7 @@ def test_transitive_conditional_virtual_dependency(self, mutable_config):
def test_conditional_provides_or_depends_on(self):
# Check that we can concretize correctly a spec that can either
# provide a virtual or depend on it based on the value of a variant
s = spack.concretize.concretize_one("v1-consumer ^conditional-provider +disable-v1")
s = spack.concretize.concretize_one("conditional-provider +disable-v1")
assert "v1-provider" in s
assert s["v1"].name == "v1-provider"
assert s["v2"].name == "conditional-provider"

View File

@@ -259,7 +259,7 @@ def test_develop(self):
def test_external_mpi(self):
# make sure this doesn't give us an external first.
spec = spack.concretize.concretize_one("mpi")
assert not spec.external and spec.package.provides("mpi")
assert not spec["mpi"].external
# load config
conf = syaml.load_config(
@@ -293,7 +293,7 @@ def mock_module(cmd, module):
monkeypatch.setattr(spack.util.module_cmd, "module", mock_module)
spec = spack.concretize.concretize_one("mpi")
assert not spec.external and spec.package.provides("mpi")
assert not spec["mpi"].external
# load config
conf = syaml.load_config(

View File

@@ -1,6 +1,7 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import pathlib
import pytest
@@ -181,7 +182,7 @@ def test_requirement_adds_version_satisfies(
# Sanity check: early version of T does not include U
s0 = spack.concretize.concretize_one("t@2.0")
assert "u" not in s0
assert not ("u" in s0)
conf_str = """\
packages:
@@ -199,11 +200,11 @@ def test_requirement_adds_version_satisfies(
@pytest.mark.parametrize("require_checksum", (True, False))
def test_requirement_adds_git_hash_version(
require_checksum, concretize_scope, test_repo, mock_git_version_info, monkeypatch
require_checksum, concretize_scope, test_repo, mock_git_version_info, monkeypatch, working_env
):
# A full commit sha is a checksummed version, so this test should pass in both cases
if require_checksum:
monkeypatch.setenv("SPACK_CONCRETIZER_REQUIRE_CHECKSUM", "yes")
os.environ["SPACK_CONCRETIZER_REQUIRE_CHECKSUM"] = "yes"
repo_path, filename, commits = mock_git_version_info
monkeypatch.setattr(

View File

@@ -2171,8 +2171,3 @@ def getcode(self):
def info(self):
return self.headers
@pytest.fixture()
def mock_runtimes(config, mock_packages):
return mock_packages.packages_with_tags("runtime")

View File

@@ -206,7 +206,7 @@ def test_repo(_create_test_repo, monkeypatch, mock_stage):
)
def test_redistribute_directive(test_repo, spec_str, distribute_src, distribute_bin):
spec = spack.spec.Spec(spec_str)
assert spack.repo.PATH.get_pkg_class(spec.fullname).redistribute_source(spec) == distribute_src
assert spec.package_class.redistribute_source(spec) == distribute_src
concretized_spec = spack.concretize.concretize_one(spec)
assert concretized_spec.package.redistribute_binary == distribute_bin

View File

@@ -190,7 +190,8 @@ def test_environment_cant_modify_environments_root(tmpdir):
@pytest.mark.parametrize(
"original_content",
[
"""\
(
"""\
spack:
specs:
- matrix:
@@ -198,6 +199,7 @@ def test_environment_cant_modify_environments_root(tmpdir):
- - a
concretizer:
unify: false"""
)
],
)
def test_roundtrip_spack_yaml_with_comments(original_content, mock_packages, config, tmp_path):

View File

@@ -557,7 +557,7 @@ def test_combine_phase_logs(tmpdir):
def test_combine_phase_logs_does_not_care_about_encoding(tmpdir):
# this is invalid utf-8 at a minimum
data = b"\x00\xf4\xbf\x00\xbf\xbf"
data = b"\x00\xF4\xBF\x00\xBF\xBF"
input = [str(tmpdir.join("a")), str(tmpdir.join("b"))]
output = str(tmpdir.join("c"))

View File

@@ -341,53 +341,39 @@ def test_destination_merge_visitor_file_dir_clashes(tmpdir):
assert b_to_a.fatal_conflicts[0].dst == "example"
@pytest.mark.parametrize("normalize", [True, False])
def test_source_merge_visitor_handles_same_file_gracefully(
tmp_path: pathlib.Path, normalize: bool
):
"""Symlinked files/dirs from one prefix to the other are not file or fatal conflicts, they are
resolved by taking the underlying file/dir, and this does not depend on the order prefixes
are visited."""
def test_source_merge_visitor_does_not_register_identical_file_conflicts(tmp_path: pathlib.Path):
"""Tests whether the SourceMergeVisitor does not register identical file conflicts.
but instead registers the file that triggers the potential conflict."""
(tmp_path / "dir_bottom").mkdir()
(tmp_path / "dir_bottom" / "file").write_bytes(b"hello")
def u(path: str) -> str:
return path.upper() if normalize else path
(tmp_path / "dir_top").mkdir()
(tmp_path / "dir_top" / "file").symlink_to(tmp_path / "dir_bottom" / "file")
(tmp_path / "dir_top" / "zzzz").write_bytes(b"hello")
(tmp_path / "a").mkdir()
(tmp_path / "a" / "file").write_bytes(b"hello")
(tmp_path / "a" / "dir").mkdir()
(tmp_path / "a" / "dir" / "foo").write_bytes(b"hello")
visitor = SourceMergeVisitor()
visitor.set_projection(str(tmp_path / "view"))
(tmp_path / "b").mkdir()
(tmp_path / "b" / u("file")).symlink_to(tmp_path / "a" / "file")
(tmp_path / "b" / u("dir")).symlink_to(tmp_path / "a" / "dir")
(tmp_path / "b" / "bar").write_bytes(b"hello")
visit_directory_tree(str(tmp_path / "dir_top"), visitor)
visitor_1 = SourceMergeVisitor(normalize_paths=normalize)
visitor_1.set_projection(str(tmp_path / "view"))
for p in ("a", "b"):
visit_directory_tree(str(tmp_path / p), visitor_1)
# After visiting the top dir, we should have `file` and `zzzz` listed, in that order. Using
# .items() to test order.
assert list(visitor.files.items()) == [
(str(tmp_path / "view" / "file"), (str(tmp_path / "dir_top"), "file")),
(str(tmp_path / "view" / "zzzz"), (str(tmp_path / "dir_top"), "zzzz")),
]
visitor_2 = SourceMergeVisitor(normalize_paths=normalize)
visitor_2.set_projection(str(tmp_path / "view"))
for p in ("b", "a"):
visit_directory_tree(str(tmp_path / p), visitor_2)
assert not visitor_1.file_conflicts and not visitor_2.file_conflicts
assert not visitor_1.fatal_conflicts and not visitor_2.fatal_conflicts
assert (
sorted(visitor_1.files.items())
== sorted(visitor_2.files.items())
== [
(str(tmp_path / "view" / "bar"), (str(tmp_path / "b"), "bar")),
(str(tmp_path / "view" / "dir" / "foo"), (str(tmp_path / "a"), f"dir{os.sep}foo")),
(str(tmp_path / "view" / "file"), (str(tmp_path / "a"), "file")),
]
)
assert visitor_1.directories[str(tmp_path / "view" / "dir")] == (str(tmp_path / "a"), "dir")
assert visitor_2.directories[str(tmp_path / "view" / "dir")] == (str(tmp_path / "a"), "dir")
# Then after visiting the bottom dir, the "conflict" should be resolved, and `file` should
# come from the bottom dir.
visit_directory_tree(str(tmp_path / "dir_bottom"), visitor)
assert not visitor.file_conflicts
assert list(visitor.files.items()) == [
(str(tmp_path / "view" / "zzzz"), (str(tmp_path / "dir_top"), "zzzz")),
(str(tmp_path / "view" / "file"), (str(tmp_path / "dir_bottom"), "file")),
]
def test_source_merge_visitor_deals_with_dangling_symlinks(tmp_path: pathlib.Path):
def test_source_merge_visitor_does_deals_with_dangling_symlinks(tmp_path: pathlib.Path):
"""When a file and a dangling symlink conflict, this should be handled like a file conflict."""
(tmp_path / "dir_a").mkdir()
os.symlink("non-existent", str(tmp_path / "dir_a" / "file"))
@@ -410,127 +396,3 @@ def test_source_merge_visitor_deals_with_dangling_symlinks(tmp_path: pathlib.Pat
# The first file encountered should be listed.
assert visitor.files == {str(tmp_path / "view" / "file"): (str(tmp_path / "dir_a"), "file")}
@pytest.mark.parametrize("normalize", [True, False])
def test_source_visitor_file_file(tmp_path: pathlib.Path, normalize: bool):
(tmp_path / "a").mkdir()
(tmp_path / "b").mkdir()
(tmp_path / "a" / "file").write_bytes(b"")
(tmp_path / "b" / "FILE").write_bytes(b"")
v = SourceMergeVisitor(normalize_paths=normalize)
for p in ("a", "b"):
visit_directory_tree(str(tmp_path / p), v)
if normalize:
assert len(v.files) == 1
assert len(v.directories) == 0
assert "file" in v.files # first file wins
assert len(v.file_conflicts) == 1
else:
assert len(v.files) == 2
assert len(v.directories) == 0
assert "file" in v.files and "FILE" in v.files
assert not v.fatal_conflicts
assert not v.file_conflicts
@pytest.mark.parametrize("normalize", [True, False])
def test_source_visitor_file_dir(tmp_path: pathlib.Path, normalize: bool):
(tmp_path / "a").mkdir()
(tmp_path / "a" / "file").write_bytes(b"")
(tmp_path / "b").mkdir()
(tmp_path / "b" / "FILE").mkdir()
v1 = SourceMergeVisitor(normalize_paths=normalize)
for p in ("a", "b"):
visit_directory_tree(str(tmp_path / p), v1)
v2 = SourceMergeVisitor(normalize_paths=normalize)
for p in ("b", "a"):
visit_directory_tree(str(tmp_path / p), v2)
assert not v1.file_conflicts and not v2.file_conflicts
if normalize:
assert len(v1.fatal_conflicts) == len(v2.fatal_conflicts) == 1
else:
assert len(v1.files) == len(v2.files) == 1
assert "file" in v1.files and "file" in v2.files
assert len(v1.directories) == len(v2.directories) == 1
assert "FILE" in v1.directories and "FILE" in v2.directories
assert not v1.fatal_conflicts and not v2.fatal_conflicts
@pytest.mark.parametrize("normalize", [True, False])
def test_source_visitor_dir_dir(tmp_path: pathlib.Path, normalize: bool):
(tmp_path / "a").mkdir()
(tmp_path / "a" / "dir").mkdir()
(tmp_path / "b").mkdir()
(tmp_path / "b" / "DIR").mkdir()
v = SourceMergeVisitor(normalize_paths=normalize)
for p in ("a", "b"):
visit_directory_tree(str(tmp_path / p), v)
assert not v.files
assert not v.fatal_conflicts
assert not v.file_conflicts
if normalize:
assert len(v.directories) == 1
assert "dir" in v.directories
else:
assert len(v.directories) == 2
assert "DIR" in v.directories and "dir" in v.directories
@pytest.mark.parametrize("normalize", [True, False])
def test_dst_visitor_file_file(tmp_path: pathlib.Path, normalize: bool):
(tmp_path / "a").mkdir()
(tmp_path / "b").mkdir()
(tmp_path / "a" / "file").write_bytes(b"")
(tmp_path / "b" / "FILE").write_bytes(b"")
src = SourceMergeVisitor(normalize_paths=normalize)
visit_directory_tree(str(tmp_path / "a"), src)
visit_directory_tree(str(tmp_path / "b"), DestinationMergeVisitor(src))
assert len(src.files) == 1
assert len(src.directories) == 0
assert "file" in src.files
assert not src.file_conflicts
if normalize:
assert len(src.fatal_conflicts) == 1
assert "FILE" in [c.dst for c in src.fatal_conflicts]
else:
assert not src.fatal_conflicts
@pytest.mark.parametrize("normalize", [True, False])
def test_dst_visitor_file_dir(tmp_path: pathlib.Path, normalize: bool):
(tmp_path / "a").mkdir()
(tmp_path / "a" / "file").write_bytes(b"")
(tmp_path / "b").mkdir()
(tmp_path / "b" / "FILE").mkdir()
src1 = SourceMergeVisitor(normalize_paths=normalize)
visit_directory_tree(str(tmp_path / "a"), src1)
visit_directory_tree(str(tmp_path / "b"), DestinationMergeVisitor(src1))
src2 = SourceMergeVisitor(normalize_paths=normalize)
visit_directory_tree(str(tmp_path / "b"), src2)
visit_directory_tree(str(tmp_path / "a"), DestinationMergeVisitor(src2))
assert len(src1.files) == 1
assert "file" in src1.files
assert not src1.directories
assert not src2.file_conflicts
assert len(src2.directories) == 1
if normalize:
assert len(src1.fatal_conflicts) == 1
assert "FILE" in [c.dst for c in src1.fatal_conflicts]
assert not src2.files
assert len(src2.fatal_conflicts) == 1
assert "file" in [c.dst for c in src2.fatal_conflicts]
else:
assert not src1.fatal_conflicts and not src2.fatal_conflicts
assert not src1.file_conflicts and not src2.file_conflicts

View File

@@ -2,7 +2,6 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import pickle
import stat
import pytest
@@ -224,10 +223,3 @@ def test_check_module_set_name(mutable_config):
with pytest.raises(spack.error.ConfigError, match=msg):
spack.cmd.modules.check_module_set_name("third")
@pytest.mark.parametrize("module_type", ["tcl", "lmod"])
def test_module_writers_are_pickleable(default_mock_concretization, module_type):
s = default_mock_concretization("mpileaks")
writer = spack.modules.module_types[module_type](s, "default")
assert pickle.loads(pickle.dumps(writer)).spec == s

View File

@@ -1,6 +1,7 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Test class methods on Package objects.
This doesn't include methods on package *instances* (like do_patch(),
@@ -15,7 +16,6 @@
import llnl.util.filesystem as fs
import spack.binary_distribution
import spack.compilers
import spack.concretize
import spack.deptypes as dt
@@ -23,11 +23,15 @@
import spack.install_test
import spack.package
import spack.package_base
import spack.repo
import spack.spec
import spack.store
from spack.build_systems.generic import Package
from spack.error import InstallError
from spack.solver.input_analysis import NoStaticAnalysis, StaticAnalysis
@pytest.fixture(scope="module")
def mpi_names(mock_repo_path):
return [spec.name for spec in mock_repo_path.providers_for("mpi")]
@pytest.fixture()
@@ -49,94 +53,78 @@ def mpileaks_possible_deps(mock_packages, mpi_names):
return possible
@pytest.fixture(params=[NoStaticAnalysis, StaticAnalysis])
def mock_inspector(config, mock_packages, request):
inspector_cls = request.param
if inspector_cls is NoStaticAnalysis:
return inspector_cls(configuration=config, repo=mock_packages)
return inspector_cls(
configuration=config,
repo=mock_packages,
store=spack.store.STORE,
binary_index=spack.binary_distribution.BINARY_INDEX,
def test_possible_dependencies(mock_packages, mpileaks_possible_deps):
pkg_cls = spack.repo.PATH.get_pkg_class("mpileaks")
expanded_possible_deps = pkg_cls.possible_dependencies(expand_virtuals=True)
assert mpileaks_possible_deps == expanded_possible_deps
assert {
"callpath": {"dyninst", "mpi"},
"dyninst": {"libdwarf", "libelf"},
"libdwarf": {"libelf"},
"libelf": set(),
"mpi": set(),
"mpileaks": {"callpath", "mpi"},
} == pkg_cls.possible_dependencies(expand_virtuals=False)
def test_possible_direct_dependencies(mock_packages, mpileaks_possible_deps):
pkg_cls = spack.repo.PATH.get_pkg_class("mpileaks")
deps = pkg_cls.possible_dependencies(transitive=False, expand_virtuals=False)
assert {"callpath": set(), "mpi": set(), "mpileaks": {"callpath", "mpi"}} == deps
def test_possible_dependencies_virtual(mock_packages, mpi_names):
expected = dict(
(name, set(dep for dep in spack.repo.PATH.get_pkg_class(name).dependencies_by_name()))
for name in mpi_names
)
# only one mock MPI has a dependency
expected["fake"] = set()
assert expected == spack.package_base.possible_dependencies("mpi", transitive=False)
def test_possible_dependencies_missing(mock_packages):
pkg_cls = spack.repo.PATH.get_pkg_class("missing-dependency")
missing = {}
pkg_cls.possible_dependencies(transitive=True, missing=missing)
assert {"this-is-a-missing-dependency"} == missing["missing-dependency"]
def test_possible_dependencies_with_deptypes(mock_packages):
dtbuild1 = spack.repo.PATH.get_pkg_class("dtbuild1")
assert {
"dtbuild1": {"dtrun2", "dtlink2"},
"dtlink2": set(),
"dtrun2": set(),
} == dtbuild1.possible_dependencies(depflag=dt.LINK | dt.RUN)
assert {
"dtbuild1": {"dtbuild2", "dtlink2"},
"dtbuild2": set(),
"dtlink2": set(),
} == dtbuild1.possible_dependencies(depflag=dt.BUILD)
assert {"dtbuild1": {"dtlink2"}, "dtlink2": set()} == dtbuild1.possible_dependencies(
depflag=dt.LINK
)
@pytest.fixture
def mpi_names(mock_inspector):
return [spec.name for spec in mock_inspector.providers_for("mpi")]
@pytest.mark.parametrize(
"pkg_name,fn_kwargs,expected",
[
(
"mpileaks",
{"expand_virtuals": True, "allowed_deps": dt.ALL},
{
"fake",
"mpileaks",
"multi-provider-mpi",
"callpath",
"dyninst",
"mpich2",
"libdwarf",
"zmpi",
"low-priority-provider",
"intel-parallel-studio",
"mpich",
"libelf",
},
),
(
"mpileaks",
{"expand_virtuals": False, "allowed_deps": dt.ALL},
{"callpath", "dyninst", "libdwarf", "libelf", "mpileaks"},
),
(
"mpileaks",
{"expand_virtuals": False, "allowed_deps": dt.ALL, "transitive": False},
{"callpath", "mpileaks"},
),
("dtbuild1", {"allowed_deps": dt.LINK | dt.RUN}, {"dtbuild1", "dtrun2", "dtlink2"}),
("dtbuild1", {"allowed_deps": dt.BUILD}, {"dtbuild1", "dtbuild2", "dtlink2"}),
("dtbuild1", {"allowed_deps": dt.LINK}, {"dtbuild1", "dtlink2"}),
],
)
def test_possible_dependencies(pkg_name, fn_kwargs, expected, mock_runtimes, mock_inspector):
"""Tests possible nodes of mpileaks, under different scenarios."""
expected.update(mock_runtimes)
result, *_ = mock_inspector.possible_dependencies(pkg_name, **fn_kwargs)
assert expected == result
def test_possible_dependencies_virtual(mock_inspector, mock_packages, mock_runtimes, mpi_names):
expected = set(mpi_names)
for name in mpi_names:
expected.update(dep for dep in mock_packages.get_pkg_class(name).dependencies_by_name())
expected.update(mock_runtimes)
real_pkgs, *_ = mock_inspector.possible_dependencies(
"mpi", transitive=False, allowed_deps=dt.ALL
)
assert expected == real_pkgs
def test_possible_dependencies_missing(mock_inspector):
result, *_ = mock_inspector.possible_dependencies("missing-dependency", allowed_deps=dt.ALL)
assert "this-is-a-missing-dependency" not in result
def test_possible_dependencies_with_multiple_classes(
mock_inspector, mock_packages, mpileaks_possible_deps
):
def test_possible_dependencies_with_multiple_classes(mock_packages, mpileaks_possible_deps):
pkgs = ["dt-diamond", "mpileaks"]
expected = set(mpileaks_possible_deps)
expected.update({"dt-diamond", "dt-diamond-left", "dt-diamond-right", "dt-diamond-bottom"})
expected.update(mock_packages.packages_with_tags("runtime"))
expected = mpileaks_possible_deps.copy()
expected.update(
{
"dt-diamond": set(["dt-diamond-left", "dt-diamond-right"]),
"dt-diamond-left": set(["dt-diamond-bottom"]),
"dt-diamond-right": set(["dt-diamond-bottom"]),
"dt-diamond-bottom": set(),
}
)
real_pkgs, *_ = mock_inspector.possible_dependencies(*pkgs, allowed_deps=dt.ALL)
assert set(expected) == real_pkgs
assert expected == spack.package_base.possible_dependencies(*pkgs)
def setup_install_test(source_paths, test_root):

View File

@@ -65,21 +65,12 @@ def test_repo_unknown_pkg(mutable_mock_repo):
mutable_mock_repo.get_pkg_class("builtin.mock.nonexistentpackage")
def test_repo_last_mtime(mock_packages):
mtime_with_package_py = [
(os.path.getmtime(p.module.__file__), p.module.__file__)
for p in spack.repo.PATH.all_package_classes()
]
repo_mtime = spack.repo.PATH.last_mtime()
max_mtime, max_file = max(mtime_with_package_py)
if max_mtime > repo_mtime:
modified_after = "\n ".join(
f"{path} ({mtime})" for mtime, path in mtime_with_package_py if mtime > repo_mtime
)
assert (
max_mtime <= repo_mtime
), f"the following files were modified while running tests:\n {modified_after}"
assert max_mtime == repo_mtime, f"last_mtime incorrect for {max_file}"
@pytest.mark.maybeslow
def test_repo_last_mtime():
latest_mtime = max(
os.path.getmtime(p.module.__file__) for p in spack.repo.PATH.all_package_classes()
)
assert spack.repo.PATH.last_mtime() == latest_mtime
def test_repo_invisibles(mutable_mock_repo, extra_repo):
@@ -100,13 +91,13 @@ def test_namespace_hasattr(attr_name, exists, mutable_mock_repo):
@pytest.mark.regression("24552")
def test_all_package_names_is_cached_correctly(mock_packages):
def test_all_package_names_is_cached_correctly():
assert "mpi" in spack.repo.all_package_names(include_virtuals=True)
assert "mpi" not in spack.repo.all_package_names(include_virtuals=False)
@pytest.mark.regression("29203")
def test_use_repositories_doesnt_change_class(mock_packages):
def test_use_repositories_doesnt_change_class():
"""Test that we don't create the same package module and class multiple times
when swapping repositories.
"""
@@ -175,25 +166,18 @@ def test_repo_dump_virtuals(tmpdir, mutable_mock_repo, mock_packages, ensure_deb
assert "package.py" in os.listdir(tmpdir), "Expected the virtual's package to be copied"
@pytest.mark.parametrize("repos", [["mock"], ["extra"], ["mock", "extra"], ["extra", "mock"]])
def test_repository_construction_doesnt_use_globals(nullify_globals, tmp_path, repos):
def _repo_paths(repos):
repo_paths, namespaces = [], []
for entry in repos:
if entry == "mock":
repo_paths.append(spack.paths.mock_packages_path)
namespaces.append("builtin.mock")
if entry == "extra":
name = "extra.mock"
repo_dir = tmp_path / name
repo_dir.mkdir()
_ = spack.repo.MockRepositoryBuilder(repo_dir, name)
repo_paths.append(str(repo_dir))
namespaces.append(name)
return repo_paths, namespaces
repo_paths, namespaces = _repo_paths(repos)
@pytest.mark.parametrize(
"repo_paths,namespaces",
[
([spack.paths.packages_path], ["builtin"]),
([spack.paths.mock_packages_path], ["builtin.mock"]),
([spack.paths.packages_path, spack.paths.mock_packages_path], ["builtin", "builtin.mock"]),
([spack.paths.mock_packages_path, spack.paths.packages_path], ["builtin.mock", "builtin"]),
],
)
def test_repository_construction_doesnt_use_globals(
nullify_globals, tmp_path, repo_paths, namespaces
):
repo_cache = spack.util.file_cache.FileCache(str(tmp_path / "cache"))
repo_path = spack.repo.RepoPath(*repo_paths, cache=repo_cache)
assert len(repo_path.repos) == len(namespaces)
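The `test_repo_last_mtime` hunk in this file checks a cached repository mtime against the newest `package.py` and, on mismatch, lists the files modified after the cache was built. A minimal standalone sketch of that reporting pattern (the helper name is hypothetical and nothing from Spack is imported):

```python
import os
from typing import Iterable, Tuple


def newest_mtime_report(paths: Iterable[str], cached_mtime: float) -> Tuple[float, str]:
    """Return the newest mtime among ``paths`` plus a message naming files
    newer than ``cached_mtime``."""
    mtimes = [(os.path.getmtime(p), p) for p in paths]
    max_mtime, max_file = max(mtimes)
    offenders = "\n  ".join(
        f"{path} ({mtime})" for mtime, path in mtimes if mtime > cached_mtime
    )
    if offenders:
        return max_mtime, f"files modified after the cached mtime:\n  {offenders}"
    return max_mtime, f"cache is current (newest file: {max_file})"


if __name__ == "__main__":
    # Example usage: check this script against an obviously stale cache value.
    newest, message = newest_mtime_report([os.path.abspath(__file__)], cached_mtime=0.0)
    print(newest, message, sep="\n")
```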

View File

@@ -585,9 +585,8 @@ def test_construct_spec_with_deptypes(self):
assert s["b"].edges_to_dependencies(name="c")[0].depflag == dt.BUILD
assert s["d"].edges_to_dependencies(name="e")[0].depflag == dt.BUILD | dt.LINK
assert s["e"].edges_to_dependencies(name="f")[0].depflag == dt.RUN
# The subscript follows link/run transitive deps or direct build/test deps, therefore
# we need an extra step to get to "c"
assert s["b"]["c"].edges_from_dependents(name="b")[0].depflag == dt.BUILD
assert s["c"].edges_from_dependents(name="b")[0].depflag == dt.BUILD
assert s["e"].edges_from_dependents(name="d")[0].depflag == dt.BUILD | dt.LINK
assert s["f"].edges_from_dependents(name="e")[0].depflag == dt.RUN
@@ -899,9 +898,8 @@ def test_adding_same_deptype_with_the_same_name_raises(
@pytest.mark.regression("33499")
def test_indexing_prefers_direct_or_transitive_link_deps():
"""Tests whether spec indexing prefers direct/transitive link/run type deps over deps of
build/test deps.
"""
# Test whether spec indexing prefers direct/transitive link type deps over deps of
# build/run/test deps, and whether it does fall back to a full dag search.
root = Spec("root")
# Use a and z to since we typically traverse by edges sorted alphabetically.
@@ -914,7 +912,7 @@ def test_indexing_prefers_direct_or_transitive_link_deps():
z3_flavor_1 = Spec("z3 +through_a1")
z3_flavor_2 = Spec("z3 +through_z1")
root.add_dependency_edge(a1, depflag=dt.BUILD | dt.TEST, virtuals=())
root.add_dependency_edge(a1, depflag=dt.BUILD | dt.RUN | dt.TEST, virtuals=())
# unique package as a dep of a build/run/test type dep.
a1.add_dependency_edge(a2, depflag=dt.ALL, virtuals=())
@@ -929,14 +927,8 @@ def test_indexing_prefers_direct_or_transitive_link_deps():
assert "through_z1" in root["z3"].variants
assert "through_a1" in a1["z3"].variants
# Ensure that only the runtime sub-DAG can be searched
with pytest.raises(KeyError):
root["a2"]
# Check consistency of __contains__ with __getitem__
assert "z3 +through_z1" in root
assert "z3 +through_a1" in a1
assert "a2" not in root
# Ensure that the full DAG is still searched
assert root["a2"]
def test_getitem_sticks_to_subdag():
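The `__getitem__`-related hunks above hinge on which edges spec indexing may follow: one side searches only the link/run sub-DAG (so pure build deps need an extra subscript), the other falls back to a full DAG search. Here is a Spack-free sketch of a "prefer some edge types, then search everything" lookup; the `Node` and `Edge` classes are invented for illustration.

```python
from dataclasses import dataclass, field
from typing import List, Optional


@dataclass
class Node:
    name: str
    edges: List["Edge"] = field(default_factory=list)


@dataclass
class Edge:
    child: Node
    deptypes: frozenset  # e.g. frozenset({"link"}) or frozenset({"build"})


def find(root: Node, name: str) -> Optional[Node]:
    """BFS that tries the link/run sub-DAG first, then any edge type."""
    for allowed in ({"link", "run"}, None):  # None means "no filter"
        seen, queue = {id(root)}, [root]
        while queue:
            node = queue.pop(0)
            if node.name == name:
                return node
            for edge in node.edges:
                if allowed is not None and not (edge.deptypes & allowed):
                    continue
                if id(edge.child) not in seen:
                    seen.add(id(edge.child))
                    queue.append(edge.child)
    return None


# "a2" sits behind a build-type edge, so the link/run pass misses it and the
# unfiltered fallback pass finds it.
a2 = Node("a2")
a1 = Node("a1", [Edge(a2, frozenset({"link"}))])
root = Node("root", [Edge(a1, frozenset({"build"}))])
assert find(root, "a2") is a2
```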

View File

@@ -700,9 +700,9 @@ def test_unsatisfiable_multi_value_variant(self, default_mock_concretization):
def test_copy_satisfies_transitive(self):
spec = spack.concretize.concretize_one("dttop")
copy = spec.copy()
for s, t in zip(spec.traverse(), copy.traverse()):
assert s.satisfies(t)
assert t.satisfies(s)
for s in spec.traverse():
assert s.satisfies(copy[s.name])
assert copy[s.name].satisfies(s)
def test_intersects_virtual(self):
assert Spec("mpich").intersects(Spec("mpi"))
@@ -1989,26 +1989,3 @@ def test_equality_discriminate_on_propagation(lhs, rhs):
def test_comparison_multivalued_variants():
assert Spec("x=a") < Spec("x=a,b") < Spec("x==a,b") < Spec("x==a,b,c")
def test_comparison_after_breaking_hash_change():
# We simulate a breaking change in DAG hash computation in Spack. We have two specs that are
# entirely equal modulo DAG hash. When deserializing these specs, we don't want them to compare
# as equal, because DAG hash is used throughout in Spack to distinguish between specs
# (e.g. database, build caches, install dir).
s = Spec("example@=1.0")
s._mark_concrete(True)
# compute the dag hash and a change to it
dag_hash = s.dag_hash()
new_dag_hash = f"{'b' if dag_hash[0] == 'a' else 'a'}{dag_hash[1:]}"
before_breakage = s.to_dict()
after_breakage = s.to_dict()
after_breakage["spec"]["nodes"][0]["hash"] = new_dag_hash
assert before_breakage != after_breakage
x = Spec.from_dict(before_breakage)
y = Spec.from_dict(after_breakage)
assert x != y
assert len({x, y}) == 2
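The `test_comparison_after_breaking_hash_change` hunk expects two spec payloads that differ only in their DAG hash to compare, and hash, as distinct objects. A tiny standalone illustration of keying equality on an identifier field; the class and field names are made up:

```python
from dataclasses import dataclass


@dataclass(frozen=True)
class NodeRecord:
    """Frozen dataclass: __eq__ and __hash__ include every field, so records
    differing only in ``dag_hash`` are distinct set members."""

    name: str
    version: str
    dag_hash: str


before = NodeRecord("example", "1.0", "a" * 32)
after = NodeRecord("example", "1.0", "b" + "a" * 31)

assert before != after
assert len({before, after}) == 2
```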

View File

@@ -201,15 +201,3 @@ def test_drop_redundant_rpath(tmpdir, binary_with_rpaths):
new_rpaths = elf.get_rpaths(binary)
assert set(existing_dirs).issubset(new_rpaths)
assert set(non_existing_dirs).isdisjoint(new_rpaths)
def test_elf_invalid_e_shnum(tmp_path):
# from llvm/test/Object/Inputs/invalid-e_shnum.elf
path = tmp_path / "invalid-e_shnum.elf"
with open(path, "wb") as file:
file.write(
b"\x7fELF\x02\x010000000000\x03\x00>\x0000000000000000000000"
b"\x00\x00\x00\x00\x00\x00\x00\x000000000000@\x000000"
)
with open(path, "rb") as file, pytest.raises(elf.ElfParsingError):
elf.parse_elf(file)

View File

@@ -8,6 +8,7 @@
import pytest
import spack.concretize
import spack.directives
import spack.directives_meta
import spack.paths
import spack.repo

View File

@@ -2,7 +2,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Non-fixture utilities for test code. Must be imported."""
"""Non-fixture utilities for test code. Must be imported.
"""
from spack.main import make_argument_parser

View File

@@ -195,10 +195,7 @@ def parse_program_headers(f: BinaryIO, elf: ElfFile) -> None:
elf: ELF file parser data
"""
# Forward to the program header
try:
f.seek(elf.elf_hdr.e_phoff)
except OSError:
raise ElfParsingError("Could not seek to program header")
f.seek(elf.elf_hdr.e_phoff)
# Here we have to make a mapping from virtual address to offset in the file.
ph_fmt = elf.byte_order + ("LLQQQQQQ" if elf.is_64_bit else "LLLLLLLL")
@@ -248,10 +245,7 @@ def parse_pt_interp(f: BinaryIO, elf: ElfFile) -> None:
f: file handle
elf: ELF file parser data
"""
try:
f.seek(elf.pt_interp_p_offset)
except OSError:
raise ElfParsingError("Could not seek to PT_INTERP entry")
f.seek(elf.pt_interp_p_offset)
data = read_exactly(f, elf.pt_interp_p_filesz, "Malformed PT_INTERP entry")
elf.pt_interp_str = parse_c_string(data)
@@ -270,10 +264,7 @@ def find_strtab_size_at_offset(f: BinaryIO, elf: ElfFile, offset: int) -> int:
"""
section_hdr_fmt = elf.byte_order + ("LLQQQQLLQQ" if elf.is_64_bit else "LLLLLLLLLL")
section_hdr_size = calcsize(section_hdr_fmt)
try:
f.seek(elf.elf_hdr.e_shoff)
except OSError:
raise ElfParsingError("Could not seek to section header table")
f.seek(elf.elf_hdr.e_shoff)
for _ in range(elf.elf_hdr.e_shnum):
data = read_exactly(f, section_hdr_size, "Malformed section header")
sh = SectionHeader(*unpack(section_hdr_fmt, data))
@@ -295,10 +286,7 @@ def retrieve_strtab(f: BinaryIO, elf: ElfFile, offset: int) -> bytes:
Returns: file offset
"""
size = find_strtab_size_at_offset(f, elf, offset)
try:
f.seek(offset)
except OSError:
raise ElfParsingError("Could not seek to string table")
f.seek(offset)
return read_exactly(f, size, "Could not read string table")
@@ -331,10 +319,7 @@ def parse_pt_dynamic(f: BinaryIO, elf: ElfFile) -> None:
count_runpath = 0
count_strtab = 0
try:
f.seek(elf.pt_dynamic_p_offset)
except OSError:
raise ElfParsingError("Could not seek to PT_DYNAMIC entry")
f.seek(elf.pt_dynamic_p_offset)
# In case of broken ELF files, don't read beyond the advertized size.
for _ in range(elf.pt_dynamic_p_filesz // dynamic_array_size):
@@ -493,10 +478,7 @@ def get_interpreter(path: str) -> Optional[str]:
def _delete_dynamic_array_entry(
f: BinaryIO, elf: ElfFile, should_delete: Callable[[int, int], bool]
) -> None:
try:
f.seek(elf.pt_dynamic_p_offset)
except OSError:
raise ElfParsingError("Could not seek to PT_DYNAMIC entry")
f.seek(elf.pt_dynamic_p_offset)
dynamic_array_fmt = elf.byte_order + ("qQ" if elf.is_64_bit else "lL")
dynamic_array_size = calcsize(dynamic_array_fmt)
new_offset = elf.pt_dynamic_p_offset # points to the new dynamic array
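The hunks in this file toggle between bare `f.seek(...)` calls and a defensive form that converts `OSError` into a parser-specific exception. Below is a standalone sketch of that wrapper pattern on an in-memory file; the helper and exception names are illustrative, not Spack's API.

```python
import io
from typing import BinaryIO


class ParsingError(Exception):
    """Raised when the input is truncated or cannot be navigated."""


def seek_or_raise(f: BinaryIO, offset: int, what: str) -> None:
    """Seek to ``offset``, translating a low-level OSError into ParsingError."""
    try:
        f.seek(offset)
    except OSError as e:
        raise ParsingError(f"Could not seek to {what}") from e


def read_exactly(f: BinaryIO, size: int, what: str) -> bytes:
    """Read exactly ``size`` bytes or raise ParsingError."""
    data = f.read(size)
    if len(data) != size:
        raise ParsingError(f"Malformed {what}")
    return data


# Example usage against an in-memory "file".
buf = io.BytesIO(b"\x7fELF" + b"\x00" * 12)
seek_or_raise(buf, 4, "bytes after the magic number")
print(read_exactly(buf, 4, "header padding"))
```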

View File

@@ -8,6 +8,7 @@
import spack.directives_meta
import spack.error
import spack.fetch_strategy
import spack.package_base
import spack.repo
import spack.spec
import spack.util.hash
@@ -60,18 +61,10 @@ class RemoveDirectives(ast.NodeTransformer):
"""
def __init__(self, spec):
#: List of attributes to be excluded from a package's hash.
self.metadata_attrs = [s.url_attr for s in spack.fetch_strategy.all_strategies] + [
"homepage",
"url",
"urls",
"list_url",
"extendable",
"parallel",
"make_jobs",
"maintainers",
"tags",
]
# list of URL attributes and metadata attributes
# these will be removed from packages.
self.metadata_attrs = [s.url_attr for s in spack.fetch_strategy.all_strategies]
self.metadata_attrs += spack.package_base.PackageBase.metadata_attrs
self.spec = spec
self.in_classdef = False # used to avoid nested classdefs
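Whichever side of the `RemoveDirectives` hunk you read, the transformer's job is to drop metadata assignments from a package's AST before the source is hashed. A self-contained sketch of that technique with `ast.NodeTransformer` (Python 3.9+ for `ast.unparse`; the attribute list here is an arbitrary example, not Spack's):

```python
import ast

METADATA_ATTRS = {"homepage", "url", "urls", "maintainers", "tags"}


class StripMetadata(ast.NodeTransformer):
    """Drop simple ``name = value`` assignments whose target is a metadata attribute."""

    def visit_Assign(self, node: ast.Assign):
        targets = {t.id for t in node.targets if isinstance(t, ast.Name)}
        if targets & METADATA_ATTRS:
            return None  # returning None removes the statement
        return node


source = '''
class ExamplePackage:
    homepage = "https://example.com"
    url = "https://example.com/example-1.0.tar.gz"
    parallel = False
'''

tree = StripMetadata().visit(ast.parse(source))
ast.fix_missing_locations(tree)
print(ast.unparse(tree))  # only ``parallel = False`` survives
```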

View File

@@ -2,7 +2,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Simple wrapper around JSON to guarantee consistent use of load/dump."""
"""Simple wrapper around JSON to guarantee consistent use of load/dump. """
import json
from typing import Any, Dict, Optional

View File

@@ -436,8 +436,8 @@ def _dump_annotated(handler, data, stream=None):
width = max(clen(a) for a in _ANNOTATIONS)
formats = ["%%-%ds %%s\n" % (width + cextra(a)) for a in _ANNOTATIONS]
for fmt, annotation, line in zip(formats, _ANNOTATIONS, lines):
stream.write(fmt % (annotation, line))
for f, a, l in zip(formats, _ANNOTATIONS, lines):
stream.write(f % (a, l))
if getvalue:
return getvalue()
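The `_dump_annotated` hunk above only renames loop variables, but the underlying trick (pad every annotation to a common column width before writing the line) is easy to miss in diff form. A small sketch with plain ASCII annotations; the real code also compensates for color escape sequences when computing widths:

```python
import io

_ANNOTATIONS = ["[spec]", "[env]", "[user]"]
lines = ["name: zlib", "version: '1.3'", "variants: +shared"]

stream = io.StringIO()
width = max(len(a) for a in _ANNOTATIONS)
for annotation, line in zip(_ANNOTATIONS, lines):
    # Left-justify each annotation in a fixed-width column so the data lines up.
    stream.write("%-*s %s\n" % (width, annotation, line))

print(stream.getvalue(), end="")
```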

View File

@@ -10,7 +10,7 @@
COMMIT_VERSION = re.compile(r"^[a-f0-9]{40}$")
# Infinity-like versions. The order in the list implies the comparison rules
infinity_versions = ["stable", "nightly", "trunk", "head", "master", "main", "develop"]
infinity_versions = ["stable", "trunk", "head", "master", "main", "develop"]
iv_min_len = min(len(s) for s in infinity_versions)
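The one-line change above edits the `infinity_versions` list, whose in-list order drives the comparison rules for these special version names. A minimal standalone sketch of ranking names by list position (illustrative only, not Spack's comparison code):

```python
infinity_versions = ["stable", "nightly", "trunk", "head", "master", "main", "develop"]


def infinity_rank(name: str) -> int:
    """Position in the list; names not in the list rank after all of them."""
    try:
        return infinity_versions.index(name)
    except ValueError:
        return len(infinity_versions)


assert sorted(["develop", "main", "stable"], key=infinity_rank) == ["stable", "main", "develop"]
```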

View File

@@ -1,7 +1,6 @@
[project]
name="spack"
description="The spack package manager"
requires-python=">=3.6"
dependencies=[
"clingo",
"setuptools",
@@ -68,42 +67,9 @@ features = [
"ci",
]
[tool.ruff]
line-length = 99
extend-include = ["bin/spack"]
extend-exclude = ["lib/spack/external", "*.pyi"]
[tool.ruff.format]
skip-magic-trailing-comma = true
[tool.ruff.lint]
extend-select = ["I"]
ignore = ["E731", "E203"]
[tool.ruff.lint.isort]
split-on-trailing-comma = false
section-order = [
"future",
"standard-library",
"third-party",
"archspec",
"llnl",
"spack",
"first-party",
"local-folder",
]
[tool.ruff.lint.isort.sections]
spack = ["spack"]
archspec = ["archspec"]
llnl = ["llnl"]
[tool.ruff.lint.per-file-ignores]
"var/spack/repos/*/package.py" = ["F403", "F405", "F811", "F821"]
"*-ci-package.py" = ["F403", "F405", "F821"]
[tool.black]
line-length = 99
target-version = ['py36', 'py37', 'py38', 'py39', 'py310']
include = '(lib/spack|var/spack/repos)/.*\.pyi?$|bin/spack$'
extend-exclude = 'lib/spack/external'
skip_magic_trailing_comma = true

View File

@@ -539,6 +539,57 @@ data-vis-sdk-build:
- artifacts: True
job: data-vis-sdk-generate
########################################
# AWS ISC Applications (x86_64)
########################################
# Call this AFTER .*-generate
.aws-isc-overrides:
# This controls image for generate step; build step is controlled by spack.yaml
# Note that generator emits OS info for build so these should be the same.
image: { "name": "ghcr.io/spack/e4s-amazonlinux-2:v2023-03-09", "entrypoint": [""] }
.aws-isc:
extends: [ ".linux_x86_64_v3" ]
variables:
SPACK_CI_STACK_NAME: aws-isc
aws-isc-generate:
extends: [ ".aws-isc", ".generate-x86_64", ".aws-isc-overrides", ".tags-x86_64_v4" ]
aws-isc-build:
extends: [ ".aws-isc", ".build" ]
trigger:
include:
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
job: aws-isc-generate
strategy: depend
needs:
- artifacts: True
job: aws-isc-generate
# Parallel Pipeline for aarch64 (reuses override image, but generates and builds on aarch64)
.aws-isc-aarch64:
extends: [ ".linux_aarch64" ]
variables:
SPACK_CI_STACK_NAME: aws-isc-aarch64
aws-isc-aarch64-generate:
extends: [ ".aws-isc-aarch64", ".generate-aarch64", ".aws-isc-overrides" ]
aws-isc-aarch64-build:
extends: [ ".aws-isc-aarch64", ".build" ]
trigger:
include:
- artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
job: aws-isc-aarch64-generate
strategy: depend
needs:
- artifacts: True
job: aws-isc-aarch64-generate
########################################
# Spack Tutorial
########################################

View File

@@ -0,0 +1,141 @@
spack:
view: false
packages:
all:
providers:
blas:
- openblas
mkl:
- intel-oneapi-mkl
mpi:
- openmpi
- mpich
variants: +mpi
tbb:
require: intel-tbb
binutils:
variants: +ld +gold +headers +libiberty ~nls
version:
- 2.36.1
doxygen:
version:
- 1.8.20
elfutils:
variants: ~nls
hdf5:
variants: +fortran +hl +shared
libfabric:
variants: fabrics=efa,tcp,udp,sockets,verbs,shm,mrail,rxd,rxm
libunwind:
variants: +pic +xz
mesa:
variants: ~llvm
mpich:
variants: ~wrapperrpath netmod=ofi device=ch4
ncurses:
variants: +termlib
openblas:
variants: threads=openmp
openmpi:
variants: fabrics=ofi +legacylaunchers
openturns:
version:
- '1.18'
relion:
variants: ~mklfft
# texlive:
# version: [20210325]
trilinos:
variants: +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext
+ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu
+nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos
+teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
xz:
variants: +pic
definitions:
- compiler_specs:
- gcc@11.2
# Licensing OK?
# - intel-oneapi-compilers@2022.1
# - nvhpc
- app_specs:
- bwa
# Depends on simde which requires newer compiler?
#- bowtie2
# Requires x86_64 specific ASM
#- cistem
- cromwell
- fastqc
- flux-sched
- flux-core
- flux-pmix
- gatk
- gromacs
- lammps
- wrf build_type=dm+sm
- mfem
- mpas-model ^parallelio+pnetcdf
- nextflow
- octave
- openfoam
- osu-micro-benchmarks
- parallel
# - paraview
- picard
- quantum-espresso
- raja
# Depends on bowtie2 -> simde which requires newer compiler?
#- rsem
# Errors on texlive
#- rstudio
- salmon
- samtools
- seqtk
- snakemake
- star
# Requires gcc@9:
#- ufs-weather-model
# requires LLVM which fails without constraint
#- visit
- lib_specs:
- openmpi fabrics=ofi
- openmpi fabrics=ofi +legacylaunchers
- openmpi fabrics=auto
- mpich
- libfabric
- compiler:
- '%gcc@7.3.1'
- target:
- target=aarch64
specs:
- matrix:
- - $app_specs
- - $compiler
- - $target
- matrix:
- - $lib_specs
- - $compiler
- - $target
- matrix:
- - $compiler_specs
- - $compiler
- - $target
ci:
pipeline-gen:
- build-job:
image: {name: ghcr.io/spack/e4s-amazonlinux-2:v2023-03-09, entrypoint: ['']}
cdash:
build-group: AWS Packages

View File

@@ -0,0 +1,153 @@
spack:
view: false
packages:
all:
providers:
blas:
- openblas
mkl:
- intel-oneapi-mkl
mpi:
- openmpi
- mpich
variants: +mpi
tbb:
require: intel-tbb
binutils:
variants: +ld +gold +headers +libiberty ~nls
version:
- 2.36.1
doxygen:
version:
- 1.8.20
elfutils:
variants: ~nls
hdf5:
variants: +fortran +hl +shared
libfabric:
variants: fabrics=efa,tcp,udp,sockets,verbs,shm,mrail,rxd,rxm
libunwind:
variants: +pic +xz
mesa:
variants: ~llvm
mpich:
variants: ~wrapperrpath netmod=ofi device=ch4
ncurses:
variants: +termlib
openblas:
variants: threads=openmp
openmpi:
variants: fabrics=ofi +legacylaunchers
openturns:
version:
- '1.18'
relion:
variants: ~mklfft
# texlive:
# version: [20210325]
trilinos:
variants: +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext
+ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu
+nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos
+teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
xz:
variants: +pic
definitions:
- compiler_specs:
- gcc@11.2
# Licensing OK?
# - intel-oneapi-compilers@2022.1
# - nvhpc
- cuda_specs:
# Disabled for consistency with aarch64
#- relion +cuda cuda_arch=70
- raja +cuda cuda_arch=70
- mfem +cuda cuda_arch=70
- app_specs:
- bwa
# Disabled for consistency with aarch64
#- bowtie2
# Disabled for consistency with aarch64
#- cistem
- cromwell
- fastqc
- flux-sched
- flux-core
- flux-pmix
- gatk
- gromacs
- lammps
- wrf build_type=dm+sm
- mfem
- mpas-model ^parallelio+pnetcdf
- nextflow
- octave
- openfoam
- osu-micro-benchmarks
- parallel
# - paraview
- picard
- quantum-espresso
# Build broken for gcc@7.3.1 x86_64_v4 (error: '_mm512_loadu_epi32' was not declared in this scope)
#- raja
# Disabled for consistency with aarch64
#- rsem
# Errors on texlive
#- rstudio
- salmon
- samtools
- seqtk
- snakemake
- star
# Requires gcc@9:
#- ufs-weather-model
# Disabled for consistency with aarch64
#- visit
- lib_specs:
- openmpi fabrics=ofi
- openmpi fabrics=ofi +legacylaunchers
- openmpi fabrics=auto
- mpich
- libfabric
- compiler:
- '%gcc@7.3.1'
- target:
- target=x86_64_v3
specs:
- matrix:
- - $cuda_specs
- - $compiler
- - $target
- matrix:
- - $app_specs
- - $compiler
- - $target
- matrix:
- - $lib_specs
- - $compiler
- - $target
- matrix:
- - $compiler_specs
- - $compiler
- - $target
ci:
pipeline-gen:
- build-job:
image: {name: ghcr.io/spack/e4s-amazonlinux-2:v2023-03-09, entrypoint: ['']}
cdash:
build-group: AWS Packages

Some files were not shown because too many files have changed in this diff.