Compare commits
1 commit
backports/...
hs/fix/aut
Commit: e1b0f282c4

26 .github/workflows/ci.yaml (vendored)

@@ -83,17 +83,10 @@ jobs:
  all-prechecks:
    needs: [ prechecks ]
    if: ${{ always() }}
    runs-on: ubuntu-latest
    steps:
    - name: Success
      run: |
        if [ "${{ needs.prechecks.result }}" == "failure" ] || [ "${{ needs.prechecks.result }}" == "canceled" ]; then
          echo "Unit tests failed."
          exit 1
        else
          exit 0
        fi
      run: "true"

  coverage:
    needs: [ unit-tests, prechecks ]

@@ -101,19 +94,8 @@ jobs:
    secrets: inherit

  all:
    needs: [ unit-tests, coverage, bootstrap ]
    if: ${{ always() }}
    needs: [ coverage, bootstrap ]
    runs-on: ubuntu-latest
    # See https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/accessing-contextual-information-about-workflow-runs#needs-context
    steps:
    - name: Status summary
      run: |
        if [ "${{ needs.unit-tests.result }}" == "failure" ] || [ "${{ needs.unit-tests.result }}" == "canceled" ]; then
          echo "Unit tests failed."
          exit 1
        elif [ "${{ needs.bootstrap.result }}" == "failure" ] || [ "${{ needs.bootstrap.result }}" == "canceled" ]; then
          echo "Bootstrap tests failed."
          exit 1
        else
          exit 0
        fi
    - name: Success
      run: "true"

@@ -3,5 +3,5 @@ clingo==5.7.1
flake8==7.1.1
isort==5.13.2
mypy==1.8.0
types-six==1.16.21.20241105
types-six==1.16.21.20241009
vermin==1.6.0

20 .github/workflows/unit_tests.yaml (vendored)

@@ -52,13 +52,7 @@ jobs:
          # Needed for unit tests
          sudo apt-get -y install \
              coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build \
              cmake bison libbison-dev subversion
          # On ubuntu 24.04, kcov was removed. It may come back in some future Ubuntu
      - name: Set up Homebrew
        id: set-up-homebrew
        uses: Homebrew/actions/setup-homebrew@40e9946c182a64b3db1bf51be0dcb915f7802aa9
      - name: Install kcov with brew
        run: "brew install kcov"
              cmake bison libbison-dev kcov
      - name: Install Python packages
        run: |
          pip install --upgrade pip setuptools pytest pytest-xdist pytest-cov

@@ -105,13 +99,7 @@ jobs:
        run: |
          sudo apt-get -y update
          # Needed for shell tests
          sudo apt-get install -y coreutils csh zsh tcsh fish dash bash subversion
          # On ubuntu 24.04, kcov was removed. It may come back in some future Ubuntu
      - name: Set up Homebrew
        id: set-up-homebrew
        uses: Homebrew/actions/setup-homebrew@40e9946c182a64b3db1bf51be0dcb915f7802aa9
      - name: Install kcov with brew
        run: "brew install kcov"
          sudo apt-get install -y coreutils kcov csh zsh tcsh fish dash bash
      - name: Install Python packages
        run: |
          pip install --upgrade pip setuptools pytest coverage[toml] pytest-xdist

@@ -146,7 +134,7 @@ jobs:
      - name: Setup repo and non-root user
        run: |
          git --version
          git config --global --add safe.directory '*'
          git config --global --add safe.directory /__w/spack/spack
          git fetch --unshallow
          . .github/workflows/bin/setup_git.sh
          useradd spack-test

@@ -186,7 +174,7 @@ jobs:
          spack bootstrap disable github-actions-v0.6
          spack bootstrap status
          spack solve zlib
          spack unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml lib/spack/spack/test/concretization/core.py
          spack unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml lib/spack/spack/test/concretize.py
      - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
        with:
          name: coverage-clingo-cffi

2 .github/workflows/valid-style.yml (vendored)

@@ -74,7 +74,7 @@ jobs:
      - name: Setup repo and non-root user
        run: |
          git --version
          git config --global --add safe.directory '*'
          git config --global --add safe.directory /__w/spack/spack
          git fetch --unshallow
          . .github/workflows/bin/setup_git.sh
          useradd spack-test

394 CHANGELOG.md
@@ -1,395 +1,3 @@
# v0.23.1 (2025-02-19)

## Bugfixes
- Fix a correctness issue of `ArchSpec.intersects` (#48741)
- Make `extra_attributes` order independent in Spec hashing (#48615, #48854)
- Fix issue where system proxy settings were not respected in OCI build caches (#48783)
- Fix an issue where the `--test` concretizer flag was not forwarded correctly (#48417)
- Fix an issue where `codesign` and `install_name_tool` would not preserve hardlinks on
  Darwin (#47808)
- Fix an issue on Darwin where codesign would run on unmodified binaries (#48568)
- Patch configure scripts generated with libtool < 2.5.4, to avoid redundant flags when
  creating shared libraries on Darwin (#48671)
- Fix issue related to mirror URL paths on Windows (#47898)
- Ensure proper UTF-8 encoding/decoding in logging (#48005)
- Fix issues related to `filter_file` (#48038, #48108)
- Fix issue related to creating bootstrap source mirrors (#48235)
- Fix issue where command line config arguments were not always top level (#48255)
- Fix an incorrect typehint of `concretized()` (#48504)
- Improve mention of next Spack version in warning (#47887)
- Tests: fix forward compatibility with Python 3.13 (#48209)
- Docs: encourage use of `--oci-username-variable` and `--oci-password-variable` (#48189)
- Docs: ensure Getting Started has bootstrap list output in correct place (#48281)
- CI: allow GitHub Actions to run on forks of Spack with a different project name (#48041)
- CI: make unit tests work on Ubuntu 24.04 (#48151)
- CI: re-enable Cray pipelines (#47697)

## Package updates
- `qt-base`: fix rpath for dependents (#47424)
- `gdk-pixbuf`: fix outdated URL (#47825)

# v0.23.0 (2024-11-13)

`v0.23.0` is a major feature release.

We are planning to make this the last major release before Spack `v1.0`
in June 2025. Alongside `v0.23`, we will be making pre-releases (alpha,
beta, etc.) of `v1.0`, and we encourage users to try them and send us
feedback, either on GitHub or on Slack. You can track the road to
`v1.0` here:

* https://github.com/spack/spack/releases
* https://github.com/spack/spack/discussions/30634

## Features in this Release

1. **Language virtuals**

   Your packages can now explicitly depend on the languages they require.
   Historically, Spack has considered C, C++, and Fortran compiler
   dependencies to be implicit. In `v0.23`, you should ensure that
   new packages add relevant C, C++, and Fortran dependencies like this:

   ```python
   depends_on("c", type="build")
   depends_on("cxx", type="build")
   depends_on("fortran", type="build")
   ```

   We encourage you to add these annotations to your packages now, to prepare
   for Spack `v1.0.0`. In `v1.0.0`, these annotations will be necessary for
   your package to use C, C++, and Fortran compilers. Note that you should
   *not* add language dependencies to packages that don't need them, e.g.,
   pure Python packages.

   We have already auto-generated these dependencies for packages in the
   `builtin` repository (see #45217), based on the types of source files
   present in each package's source code. We *may* have added too many or too
   few language dependencies, so please submit pull requests to correct
   packages if you find that the language dependencies are incorrect.

   Note that we have also backported support for these dependencies to
   `v0.21.3` and `v0.22.2`, to make all of them forward-compatible with
   `v0.23`. This should allow you to move easily between older and newer Spack
   releases without breaking your packages.

2. **Spec splicing**

   We are working to make binary installation more seamless in Spack. `v0.23`
   introduces "splicing", which allows users to deploy binaries using local,
   optimized versions of a binary interface, even if they were not built with
   that interface. For example, this would allow you to build binaries in the
   cloud using `mpich` and install them on a system using a local, optimized
   version of `mvapich2` *without rebuilding*. Spack preserves full provenance
   for the installed packages and knows that they were built one way but
   deployed another.

   Our intent is to leverage this across many key HPC binary packages,
   e.g. MPI, CUDA, ROCm, and libfabric.

   Fundamentally, splicing allows Spack to redeploy an existing spec with
   different dependencies than how it was built. There are two interfaces to
   splicing.

   a. Explicit Splicing

      #39136 introduced the explicit splicing interface. In the
      concretizer config, you can specify a target spec and a replacement
      by hash:

      ```yaml
      concretizer:
        splice:
          explicit:
          - target: mpi
            replacement: mpich/abcdef
      ```

      Here, every installation that would normally use the target spec will
      instead use its replacement. Above, any spec using *any* `mpi` will be
      spliced to depend on the specific `mpich` installation requested. This
      *can* go wrong if you try to replace something built with, e.g.,
      `openmpi` with `mpich`, and it is on the user to ensure ABI
      compatibility between target and replacement specs. This currently
      requires some expertise to use, but it will allow users to reuse the
      binaries they create across more machines and environments.

   b. Automatic Splicing (experimental)

      #46729 introduced automatic splicing. In the concretizer config, enable
      automatic splicing:

      ```yaml
      concretizer:
        splice:
          automatic: true
      ```

      or run:

      ```console
      spack config add concretizer:splice:automatic:true
      ```

      The concretizer will select splices for ABI compatibility to maximize
      package reuse. Packages can denote ABI compatibility using the
      `can_splice` directive. No packages in Spack yet use this directive, so
      if you want to use this feature you will need to add `can_splice`
      annotations to your packages. We are working on ways to add more ABI
      compatibility information to the Spack package repository, and this
      directive may change in the future.

   See the documentation for more details:
   * https://spack.readthedocs.io/en/latest/build_settings.html#splicing
   * https://spack.readthedocs.io/en/latest/packaging_guide.html#specifying-abi-compatibility

3. Broader variant propagation

   Since #42931, you can specify propagated variants like `hdf5
   build_type==RelWithDebInfo` or `trilinos ++openmp` to propagate a variant
   to all dependencies for which it is relevant. This is valid *even* if the
   variant does not exist on the package or its dependencies.

   See https://spack.readthedocs.io/en/latest/basic_usage.html#variants.
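
   As a concrete sketch (reusing only the example specs from the paragraph
   above), propagation uses the doubled operator on the command line:

   ```console
   # propagate the openmp variant to every dependency that has it
   spack install trilinos ++openmp
   # propagate a build_type value to all dependencies for which it is relevant
   spack spec hdf5 build_type==RelWithDebInfo
   ```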

4. Query specs by namespace

   #45416 allows a package's namespace (indicating the repository it came from)
   to be treated like a variant. You can request packages from particular repos
   like this:

   ```console
   spack find zlib namespace=builtin
   spack find zlib namespace=myrepo
   ```

   Previously, the spec syntax only allowed namespaces to be prefixes of spec
   names, e.g. `builtin.zlib`. The previous syntax still works.

5. `spack spec` respects environment settings and `unify:true`

   `spack spec` did not previously respect environment lockfiles or
   unification settings, which made it difficult to see exactly how a spec
   would concretize within an environment. Now it does, so the output you get
   with `spack spec` will be *the same* as what your environment will
   concretize to when you run `spack concretize`. Similarly, if you provide
   multiple specs on the command line with `spack spec`, it will concretize
   them together if `unify:true` is set.

   See #47556 and #44843.
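
   A quick sketch of the usage (the environment path and specs here are only
   illustrative):

   ```console
   # with unify:true, both specs are concretized together within the environment
   spack -e /path/to/my-env spec hdf5 zlib
   ```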

6. Less noisy `spack spec` output

   `spack spec` previously showed output like this:

   ```console
   > spack spec /v5fn6xo
   Input spec
   --------------------------------
    - /v5fn6xo

   Concretized
   --------------------------------
   [+] openssl@3.3.1%apple-clang@16.0.0~docs+shared arch=darwin-sequoia-m1
   ...
   ```

   But the input spec is redundant, and we know we run `spack spec` to concretize
   the input spec. `spack spec` now *only* shows the concretized spec. See #47574.

7. Better output for `spack find -c`

   In an environment, `spack find -c` lets you search the concretized, but not
   yet installed, specs, just as you would the installed ones. As with `spack
   spec`, this should make it easier for you to see what *will* be built
   before building and installing it. See #44713.
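
   A minimal sketch of the workflow (the environment name is hypothetical):

   ```console
   spack env activate myenv
   spack concretize
   spack find -c   # shows concretized specs that are not yet installed
   ```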

8. `spack -C <env>`: use an environment's configuration without activation

   Spack environments allow you to associate:
   1. a set of (possibly concretized) specs, and
   2. configuration

   When you activate an environment, you're using both of these. Previously, we
   supported:
   * `spack -e <env>` to run spack in the context of a specific environment, and
   * `spack -C <directory>` to run spack using a directory with configuration files.

   You can now also pass an environment to `spack -C` to use *only* the environment's
   configuration, but not the specs or lockfile. See #45046.
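
   A short sketch of the difference (the environment name is hypothetical):

   ```console
   spack -e myenv install     # use the environment's specs and configuration
   spack -C myenv spec hdf5   # use only the environment's configuration
   ```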

## New commands, options, and directives

* The new `spack env track` command (#41897) takes a non-managed Spack
  environment and adds a symlink to Spack's `$environments_root` directory, so
  that it will be included for reference counting for commands like
  `spack uninstall` and `spack gc`. If you use free-standing directory
  environments, this is useful for preventing Spack from removing things
  required by your environments. You can undo this tracking with the
  `spack env untrack` command.
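
  A rough sketch of the round trip (the path and name are hypothetical):

  ```console
  spack env track /path/to/my-env   # start reference-counting this environment
  spack env untrack my-env          # stop tracking it again
  ```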

* Add `-t` short option for `spack --backtrace` (#47227)

  `spack -d / --debug` enables backtraces on error, but it can be very
  verbose, and sometimes you just want the backtrace. `spack -t / --backtrace`
  provides that option.
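
  For example, to see just the backtrace for a failing command without the
  full debug output (the spec here is only illustrative):

  ```console
  spack -t install my-broken-package
  ```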

* `gc`: restrict to specific specs (#46790)

  If you only want to garbage-collect specific packages, you can now provide
  them on the command line. This gives users finer-grained control over what
  is uninstalled.
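
  A sketch of the narrowed form (the spec is only illustrative):

  ```console
  spack gc hdf5   # garbage-collect only unneeded installations matching hdf5
  ```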

* oci buildcaches now support `--only=package`. You can now push *just* a
  package and not its dependencies to an OCI registry. This allows dependents
  of non-redistributable specs to be stored in OCI registries without an
  error. See #45775.
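
  A sketch of the push (the mirror name and spec are only illustrative):

  ```console
  spack buildcache push --only=package my_registry hdf5
  ```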

## Notable refactors
* Variants are now fully conditional

  The `variants` dictionary on packages was previously keyed by variant name,
  and allowed only one definition of any given variant. Spack is now smart
  enough to understand that variants may have different values and defaults
  for different versions. For example, `warpx` prior to `23.06` only supported
  builds for one dimensionality, and newer `warpx` versions could be built
  with support for many different dimensions:

  ```python
  variant(
      "dims",
      default="3",
      values=("1", "2", "3", "rz"),
      multi=False,
      description="Number of spatial dimensions",
      when="@:23.05",
  )
  variant(
      "dims",
      default="1,2,rz,3",
      values=("1", "2", "3", "rz"),
      multi=True,
      description="Number of spatial dimensions",
      when="@23.06:",
  )
  ```

  Previously, the default for the old version of `warpx` was not respected and
  had to be specified manually. Now, Spack will select the right variant
  definition for each version at concretization time. This allows variants to
  evolve more smoothly over time. See #44425 for details.

## Highlighted bugfixes

1. Externals no longer override the preferred provider (#45025).

   External definitions could interfere with package preferences. Now, if
   `openmpi` is the preferred `mpi`, and an external `mpich` is defined, a new
   `openmpi` *will* be built if building it is possible. Previously we would
   prefer `mpich` despite the preference.

2. Composable `cflags` (#41049).

   This release fixes a longstanding bug where concretization would fail if
   different `cflags` were specified in `packages.yaml`, `compilers.yaml`,
   or on the CLI. Flags and their ordering are now tracked
   in the concretizer and flags from multiple sources will be merged.
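
   For instance, flags given on the command line are now merged with flags
   coming from configuration rather than conflicting with them (the spec and
   flag values below are only illustrative):

   ```console
   spack install zlib cflags="-O3 -g"
   ```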

3. Fix concretizer unification for included environments (#45139).

## Deprecations, removals, and syntax changes

1. The old concretizer has been removed from Spack, along with the
   `config:concretizer` config option. Spack will emit a warning if the option
   is present in user configuration, since it now has no effect. Spack now
   uses a simpler bootstrapping mechanism, where a JSON prototype is tweaked
   slightly to get an initial concrete spec to download. See #45215.

2. Best-effort expansion of spec matrices has been removed. This feature did
   not work with the "new" ASP-based concretizer, and did not work with
   `unify: True` or `unify: when_possible`. Use the
   [exclude key](https://spack.readthedocs.io/en/latest/environments.html#spec-matrices)
   for the environment to exclude invalid components, or use multiple spec
   matrices to combine the list of specs for which the constraint is valid and
   the list of specs for which it is not. See #40792.

3. The old Cray `platform` (based on Cray PE modules) has been removed, and
   `platform=cray` is no longer supported. Since `v0.19`, Spack has handled
   Cray machines like Linux clusters with extra packages, and we have
   encouraged using this option to support Cray. The new approach allows us to
   correctly handle Cray machines with non-SLES operating systems, and it is
   much more reliable than making assumptions about Cray modules. See the
   `v0.19` release notes and #43796 for more details.

4. The `config:install_missing_compilers` config option has been deprecated,
   and it is a no-op when set in `v0.23`. Our new compiler dependency model
   will replace it with a much more reliable and robust mechanism in `v1.0`.
   See #46237.

5. Config options that were deprecated in `v0.21` have been removed in `v0.23`.
   You can now only specify preferences for `compilers`, `targets`, and
   `providers` globally via the `packages:all:` section. Similarly, you can
   only specify `versions:` locally for a specific package. See #44061 and
   #31261 for details.

6. Spack's old test interface has been removed (#45752), having been
   deprecated in `v0.22.0` (#34236). All `builtin` packages have been updated
   to use the new interface. See the [stand-alone test documentation](
   https://spack.readthedocs.io/en/latest/packaging_guide.html#stand-alone-tests).

7. The `spack versions --safe-only` option, deprecated since `v0.21.0`, has
   been removed. See #45765.

8. The `--dependencies` and `--optimize` arguments to `spack ci` have been
   deprecated. See #45005.

## Binary caches

1. Public binary caches now include an ML stack for Linux/aarch64 (#39666). We
   now build an ML stack for Linux/aarch64 for all pull requests and on
   develop. The ML stack includes both CPU-only and CUDA builds for Horovod,
   Hugging Face, JAX, Keras, PyTorch, scikit-learn, TensorBoard, and
   TensorFlow, and related packages. The CPU-only stack also includes XGBoost.
   See https://cache.spack.io/tag/develop/?stack=ml-linux-aarch64-cuda.

2. There is also now a stack of developer tools for macOS (#46910), which is
   analogous to the Linux devtools stack. You can use this to avoid building
   many common build dependencies. See
   https://cache.spack.io/tag/develop/?stack=developer-tools-darwin.

## Architecture support
* archspec has been updated to `v0.2.5`, with support for `zen5`
* Spack's CUDA package now supports the Grace Hopper `9.0a` compute capability (#45540)

## Windows
* Windows bootstrapping: `file` and `gpg` (#41810)
* `scripts` directory added to PATH on Windows for python extensions (#45427)
* Fix `spack load --list` and `spack unload` on Windows (#35720)

## Other notable changes
* Bugfix: `spack find -x` in environments (#46798)
* Spec splices are now robust to duplicate nodes with the same name in a spec (#46382)
* Cache per-compiler libc calculations for performance (#47213)
* Fixed a bug in external detection for openmpi (#47541)
* Mirror configuration allows username/password as environment variables (#46549)
* Default library search caps maximum depth (#41945)
* Unify interface for `spack spec` and `spack solve` commands (#47182)
* Spack no longer RPATHs directories in the default library search path (#44686)
* Improved performance of Spack database (#46554)
* Enable package reuse for packages with versions from git refs (#43859)
* Improved handling for `uuid` virtual on macOS (#43002)
* Improved tracking of task queueing/requeueing in the installer (#46293)

## Spack community stats

* Over 2,000 pull requests updated package recipes
* 8,307 total packages, 329 new since `v0.22.0`
  * 140 new Python packages
  * 14 new R packages
* 373 people contributed to this release
  * 357 committers to packages
  * 60 committers to core


# v0.22.2 (2024-09-21)

## Bugfixes

@@ -811,7 +419,7 @@ feedback, either on GitHub or on Slack. You can track the road to
- spack graph: fix coloring with environments (#41240)
- spack info: sort variants in --variants-by-name (#41389)
- Spec.format: error on old style format strings (#41934)
- ASP-based solver:
- ASP-based solver:
  - fix infinite recursion when computing concretization errors (#41061)
  - don't error for type mismatch on preferences (#41138)
  - don't emit spurious debug output (#41218)

@@ -39,19 +39,11 @@ concretizer:
  # Option to deal with possible duplicate nodes (i.e. different nodes from the same package) in the DAG.
  duplicates:
    # "none": allows a single node for any package in the DAG.
    # "minimal": allows the duplication of 'build-tools' nodes only
    # (e.g. py-setuptools, cmake etc.)
    # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
    # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
    strategy: minimal
  # Option to specify compatibility between operating systems for reuse of compilers and packages
  # Specified as a key: [list] where the key is the os that is being targeted, and the list contains the OS's
  # it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's
  # Option to specify compatiblity between operating systems for reuse of compilers and packages
  # Specified as a key: [list] where the key is the os that is being targeted, and the list contains the OS's
  # it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's
  # requires two entries i.e. os_compatible: {sonoma: [monterey], monterey: [sonoma]}
  os_compatible: {}

  # Option to specify whether to support splicing. Splicing allows for
  # the relinking of concrete package dependencies in order to better
  # reuse already built packages with ABI compatible dependencies
  splice:
    explicit: []
    automatic: false

@@ -40,9 +40,9 @@ packages:
    jpeg: [libjpeg-turbo, libjpeg]
    lapack: [openblas, amdlibflame]
    libc: [glibc, musl]
    libgfortran: [gcc-runtime]
    libgfortran: [ gcc-runtime ]
    libglx: [mesa+glx]
    libifcore: [intel-oneapi-runtime]
    libifcore: [ intel-oneapi-runtime ]
    libllvm: [llvm]
    lua-lang: [lua, lua-luajit-openresty, lua-luajit]
    luajit: [lua-luajit-openresty, lua-luajit]

@@ -1359,10 +1359,6 @@ For example, for the ``stackstart`` variant:
   mpileaks stackstart==4 # variant will be propagated to dependencies
   mpileaks stackstart=4 # only mpileaks will have this variant value

Spack also allows variants to be propagated from a package that does
not have that variant.

^^^^^^^^^^^^^^
Compiler Flags
^^^^^^^^^^^^^^

@@ -265,30 +265,25 @@ infrastructure, or to cache Spack built binaries in Github Actions and
GitLab CI.

To get started, configure an OCI mirror using ``oci://`` as the scheme,
and optionally specify variables that hold the username and password (or
personal access token) for the registry:
and optionally specify a username and password (or personal access token):

.. code-block:: console

    $ spack mirror add --oci-username-variable REGISTRY_USER \
                       --oci-password-variable REGISTRY_TOKEN \
                       my_registry oci://example.com/my_image
    $ spack mirror add --oci-username username --oci-password password my_registry oci://example.com/my_image

Spack follows the naming conventions of Docker, with Dockerhub as the default
registry. To use Dockerhub, you can omit the registry domain:

.. code-block:: console

    $ spack mirror add ... my_registry oci://username/my_image
    $ spack mirror add --oci-username username --oci-password password my_registry oci://username/my_image

From here, you can use the mirror as any other build cache:

.. code-block:: console

    $ export REGISTRY_USER=...
    $ export REGISTRY_TOKEN=...
    $ spack buildcache push my_registry <specs...>  # push to the registry
    $ spack install <specs...>  # or install from the registry
    $ spack install <specs...>  # install from the registry

A unique feature of buildcaches on top of OCI registries is that it's incredibly
easy to get a runnable container image with the binaries installed. This

@@ -237,35 +237,3 @@ is optional -- by default, splices will be transitive.
``mpich/abcdef`` instead of ``mvapich2`` as the MPI provider. Spack
will warn the user in this case, but will not fail the
concretization.

.. _automatic_splicing:

^^^^^^^^^^^^^^^^^^
Automatic Splicing
^^^^^^^^^^^^^^^^^^

The Spack solver can be configured to do automatic splicing for
ABI-compatible packages. Automatic splices are enabled in the concretizer
config section

.. code-block:: yaml

   concretizer:
     splice:
       automatic: True

Packages can include ABI-compatibility information using the
``can_splice`` directive. See :ref:`the packaging
guide<abi_compatibility>` for instructions on specifying ABI
compatibility using the ``can_splice`` directive.

.. note::

   The ``can_splice`` directive is experimental and may be changed in
   future versions.

When automatic splicing is enabled, the concretizer will combine any
number of ABI-compatible specs if possible to reuse installed packages
and packages available from binary caches. The end result of these
specs is equivalent to a series of transitive/intransitive splices,
but the series may be non-obvious.

@@ -214,14 +214,12 @@ def setup(sphinx):
    # Spack classes that intersphinx is unable to resolve
    ("py:class", "spack.version.StandardVersion"),
    ("py:class", "spack.spec.DependencySpec"),
    ("py:class", "spack.spec.ArchSpec"),
    ("py:class", "spack.spec.InstallStatus"),
    ("py:class", "spack.spec.SpecfileReaderBase"),
    ("py:class", "spack.install_test.Pb"),
    ("py:class", "spack.filesystem_view.SimpleFilesystemView"),
    ("py:class", "spack.traverse.EdgeAndDepth"),
    ("py:class", "archspec.cpu.microarchitecture.Microarchitecture"),
    ("py:class", "spack.compiler.CompilerCache"),
    # TypeVar that is not handled correctly
    ("py:class", "llnl.util.lang.T"),
]

@@ -38,11 +38,9 @@ just have to configure an OCI registry and run ``spack buildcache push``.

   spack -e . install

   # Configure the registry
   spack -e . mirror add --oci-username-variable REGISTRY_USER \
                         --oci-password-variable REGISTRY_TOKEN \
                         container-registry oci://example.com/name/image
   spack -e . mirror add --oci-username ... --oci-password ... container-registry oci://example.com/name/image

   # Push the image (do set REGISTRY_USER and REGISTRY_TOKEN)
   # Push the image
   spack -e . buildcache push --update-index --base-image ubuntu:22.04 --tag my_env container-registry

The resulting container image can then be run as follows:

@@ -184,7 +184,7 @@ Style Tests

Spack uses `Flake8 <http://flake8.pycqa.org/en/latest/>`_ to test for
`PEP 8 <https://www.python.org/dev/peps/pep-0008/>`_ conformance and
`mypy <https://mypy.readthedocs.io/en/stable/>`_ for type checking. PEP 8 is
`mypy <https://mypy.readthedocs.io/en/stable/>` for type checking. PEP 8 is
a series of style guides for Python that provide suggestions for everything
from variable naming to indentation. In order to limit the number of PRs that
were mostly style changes, we decided to enforce PEP 8 conformance. Your PR

@@ -333,9 +333,13 @@ inserting them at different places in the spack code base. Whenever a hook
type triggers by way of a function call, we find all the hooks of that type,
and run them.

Spack defines hooks by way of a module in the ``lib/spack/spack/hooks`` directory.
This module has to be registered in ``__init__.py`` so that Spack is aware of it.
This section will cover the basic kind of hooks, and how to write them.
Spack defines hooks by way of a module at ``lib/spack/spack/hooks`` where we can define
types of hooks in the ``__init__.py``, and then python files in that folder
can use hook functions. The files are automatically parsed, so if you write
a new file for some integration (e.g., ``lib/spack/spack/hooks/myintegration.py``)
you can then write hook functions in that file that will be automatically detected,
and run whenever your hook is called. This section will cover the basic kind
of hooks, and how to write them.

^^^^^^^^^^^^^^
Types of Hooks

@@ -35,7 +35,7 @@ A build matrix showing which packages are working on which systems is shown belo

      .. code-block:: console

         apt update
         apt install bzip2 ca-certificates file g++ gcc gfortran git gzip lsb-release patch python3 tar unzip xz-utils zstd
         apt install build-essential ca-certificates coreutils curl environment-modules gfortran git gpg lsb-release python3 python3-distutils python3-venv unzip zip

   .. tab-item:: RHEL

@@ -43,14 +43,14 @@ A build matrix showing which packages are working on which systems is shown belo

         dnf install epel-release
         dnf group install "Development Tools"
         dnf install gcc-gfortran redhat-lsb-core python3 unzip
         dnf install curl findutils gcc-gfortran gnupg2 hostname iproute redhat-lsb-core python3 python3-pip python3-setuptools unzip python3-boto3

   .. tab-item:: macOS Brew

      .. code-block:: console

         brew update
         brew install gcc git zip
         brew install curl gcc git gnupg zip

------------
Installation

@@ -148,22 +148,20 @@ The first time you concretize a spec, Spack will bootstrap automatically:
   --------------------------------
   zlib@1.2.13%gcc@9.4.0+optimize+pic+shared build_system=makefile arch=linux-ubuntu20.04-icelake

The default bootstrap behavior is to use pre-built binaries. You can verify the
active bootstrap repositories with:

.. command-output:: spack bootstrap list

If for security concerns you cannot bootstrap ``clingo`` from pre-built
binaries, you have to disable fetching the binaries we generated with Github Actions.

.. code-block:: console

   $ spack bootstrap disable github-actions-v0.6
   ==> "github-actions-v0.6" is now disabled and will not be used for bootstrapping
   $ spack bootstrap disable github-actions-v0.5
   ==> "github-actions-v0.5" is now disabled and will not be used for bootstrapping
   $ spack bootstrap disable github-actions-v0.4
   ==> "github-actions-v0.4" is now disabled and will not be used for bootstrapping
   $ spack bootstrap disable github-actions-v0.3
   ==> "github-actions-v0.3" is now disabled and will not be used for bootstrapping

You can verify that the new settings are effective with:

.. command-output:: spack bootstrap list

You can verify that the new settings are effective with ``spack bootstrap list``.

.. note::

@@ -12,6 +12,10 @@
Spack
===================

.. epigraph::

   `These are docs for the Spack package manager. For sphere packing, see` `pyspack <https://pyspack.readthedocs.io>`_.

Spack is a package management tool designed to support multiple
versions and configurations of software on a wide variety of platforms
and environments. It was designed for large supercomputing centers,

@@ -1267,7 +1267,7 @@ Git fetching supports the following parameters to ``version``:
  This feature requires ``git`` to be version ``2.25.0`` or later but is useful for
  large repositories that have separate portions that can be built independently.
  If paths provided are directories then all the subdirectories and associated files
  will also be cloned.
  will also be cloned.

Only one of ``tag``, ``branch``, or ``commit`` can be used at a time.

@@ -1367,8 +1367,8 @@ Submodules
  git-submodule``.

Sparse-Checkout
  You can supply ``git_sparse_paths`` at the package or version level to utilize git's
  sparse-checkout feature. This will only clone the paths that are specified in the
  You can supply ``git_sparse_paths`` at the package or version level to utilize git's
  sparse-checkout feature. This will only clone the paths that are specified in the
  ``git_sparse_paths`` attribute for the package along with the files in the top level directory.
  This feature allows you to only clone what you need from a large repository.
  Note that this is a newer feature in git and requires git ``2.25.0`` or greater.

@@ -2392,7 +2392,7 @@ by the ``--jobs`` option:
   .. code-block:: python
      :emphasize-lines: 7, 11
      :linenos:

      class Xios(Package):
         ...
         def install(self, spec, prefix):

@@ -2503,14 +2503,15 @@ with. For example, suppose that in the ``libdwarf`` package you write:

   depends_on("libelf@0.8")

Now ``libdwarf`` will require ``libelf`` in the range ``0.8``, which
includes patch versions ``0.8.1``, ``0.8.2``, etc. Apart from version
restrictions, you can also specify variants if this package requires
optional features of the dependency.
Now ``libdwarf`` will require ``libelf`` at *exactly* version ``0.8``.
You can also specify a requirement for a particular variant or for
specific compiler flags:

.. code-block:: python

   depends_on("libelf@0.8 +parser +pic")
   depends_on("libelf@0.8+debug")
   depends_on("libelf debug=True")
   depends_on("libelf cppflags='-fPIC'")

Both users *and* package authors can use the same spec syntax to refer
to different package configurations. Users use the spec syntax on the

@@ -2518,82 +2519,46 @@ command line to find installed packages or to install packages with
particular constraints, and package authors can use specs to describe
relationships between packages.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Specifying backward and forward compatibility
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
^^^^^^^^^^^^^^
Version ranges
^^^^^^^^^^^^^^

Packages are often compatible with a range of versions of their
dependencies. This is typically referred to as backward and forward
compatibility. Spack allows you to specify this in the ``depends_on``
directive using version ranges.

**Backwards compatibility** means that the package requires at least a
certain version of its dependency:
Although some packages require a specific version for their dependencies,
most can be built with a range of versions. For example, if you are
writing a package for a legacy Python module that only works with Python
2.4 through 2.6, this would look like:

.. code-block:: python

   depends_on("python@3.10:")
   depends_on("python@2.4:2.6")

In this case, the package requires Python 3.10 or newer.

Commonly, packages drop support for older versions of a dependency as
they release new versions. In Spack you can conveniently add every
backward compatibility rule as a separate line:
Version ranges in Spack are *inclusive*, so ``2.4:2.6`` means any version
greater than or equal to ``2.4`` and up to and including any ``2.6.x``. If
you want to specify that a package works with any version of Python 3 (or
higher), this would look like:

.. code-block:: python

   # backward compatibility with Python
   depends_on("python@3.8:")
   depends_on("python@3.9:", when="@1.2:")
   depends_on("python@3.10:", when="@1.4:")
   depends_on("python@3:")

This means that in general we need Python 3.8 or newer; from version
1.2 onwards we need Python 3.9 or newer; from version 1.4 onwards we
need Python 3.10 or newer. Notice that it's fine to have overlapping
ranges in the ``when`` clauses.

**Forward compatibility** means that the package requires at most a
certain version of its dependency. Forward compatibility rules are
necessary when there are breaking changes in the dependency that the
package cannot handle. In Spack we often add forward compatibility
bounds only at the time a new, breaking version of a dependency is
released. As with backward compatibility, it is typical to see a list
of forward compatibility bounds in a package file as separate lines:
Here we leave out the upper bound. If you want to say that a package
requires Python 2, you can similarly leave out the lower bound:

.. code-block:: python

   # forward compatibility with Python
   depends_on("python@:3.12", when="@:1.10")
   depends_on("python@:3.13", when="@:1.12")
   depends_on("python@:2")

Notice how the ``:`` now appears before the version number both in the
dependency and in the ``when`` clause. This tells Spack that in general
we need Python 3.13 or older up to version ``1.12.x``, and up to version
``1.10.x`` we need Python 3.12 or older. Said differently, forward compatibility
with Python 3.13 was added in version 1.11, while version 1.13 added forward
compatibility with Python 3.14.
Notice that we didn't use ``@:3``. Version ranges are *inclusive*, so
``@:3`` means "up to and including any 3.x version".

Notice that a version range ``@:3.12`` includes *any* patch version
number ``3.12.x``, which is often useful when specifying forward compatibility
bounds.

So far we have seen open-ended version ranges, which is by far the most
common use case. It is also possible to specify both a lower and an upper bound
on the version of a dependency, like this:
You can also simply write

.. code-block:: python

   depends_on("python@3.10:3.12")
   depends_on("python@2.7")

There is short syntax to specify that a package is compatible with say any
``3.x`` version:

.. code-block:: python

   depends_on("python@3")

The above is equivalent to ``depends_on("python@3:3")``, which means at least
Python version 3 and at most any version ``3.x.y``.
to tell Spack that the package needs Python 2.7.x. This is equivalent to
``@2.7:2.7``.

In very rare cases, you may need to specify an exact version, for example
if you need to distinguish between ``3.2`` and ``3.2.1``:

@@ -5420,7 +5385,7 @@ by build recipes. Examples of checking :ref:`variant settings <variants>` and
determine whether it needs to also set up build dependencies (see
:ref:`test-build-tests`).

The ``MyPackage`` package below provides two basic test examples:
The ``MyPackage`` package below provides two basic test examples:
``test_example`` and ``test_example2``. The first runs the installed
``example`` and ensures its output contains an expected string. The second
runs ``example2`` without checking output so is only concerned with confirming

@@ -5737,7 +5702,7 @@ subdirectory of the installation prefix. They are automatically copied to
the appropriate relative paths under the test stage directory prior to
executing stand-alone tests.

.. tip::
.. tip::

   *Perform test-related conversions once when copying files.*

@@ -7113,46 +7078,6 @@ might write:
   CXXFLAGS += -I$DWARF_PREFIX/include
   CXXFLAGS += -L$DWARF_PREFIX/lib

.. _abi_compatibility:

----------------------------
Specifying ABI Compatibility
----------------------------

Packages can include ABI-compatibility information using the
``can_splice`` directive. For example, if ``Foo`` version 1.1 can
always replace version 1.0, then the package could have:

.. code-block:: python

   can_splice("foo@1.0", when="@1.1")

For virtual packages, packages can also specify ABI-compatibility with
other packages providing the same virtual. For example, ``zlib-ng``
could specify:

.. code-block:: python

   can_splice("zlib@1.3.1", when="@2.2+compat")

Some packages have ABI-compatibility that is dependent on matching
variant values, either for all variants or for some set of
ABI-relevant variants. In those cases, it is not necessary to specify
the full combinatorial explosion. The ``match_variants`` keyword can
cover all single-value variants.

.. code-block:: python

   can_splice("foo@1.1", when="@1.2", match_variants=["bar"])  # any value for bar as long as they're the same
   can_splice("foo@1.2", when="@1.3", match_variants="*")  # any variant values if all single-value variants match

The concretizer will use ABI compatibility to determine automatic
splices when :ref:`automatic splicing<automatic_splicing>` is enabled.

.. note::

   The ``can_splice`` directive is experimental, and may be replaced
   by a higher-level interface in future versions of Spack.

.. _package_class_structure:

@@ -2,8 +2,8 @@ sphinx==8.1.3
sphinxcontrib-programoutput==0.17
sphinx_design==0.6.1
sphinx-rtd-theme==3.0.1
python-levenshtein==0.26.1
docutils==0.21.2
python-levenshtein==0.26.0
docutils==0.20.1
pygments==2.18.0
urllib3==2.2.3
pytest==8.3.3

238 lib/spack/env/cc (vendored)

@@ -101,9 +101,10 @@ setsep() {
    esac
}

# prepend LISTNAME ELEMENT
# prepend LISTNAME ELEMENT [SEP]
#
# Prepend ELEMENT to the list stored in the variable LISTNAME.
# Prepend ELEMENT to the list stored in the variable LISTNAME,
# assuming the list is separated by SEP.
# Handles empty lists and single-element lists.
prepend() {
    varname="$1"

@@ -237,36 +238,6 @@ esac
}
"

# path_list functions. Path_lists have 3 parts: spack_store_<list>, <list> and system_<list>,
# which are used to prioritize paths when assembling the final command line.

# init_path_lists LISTNAME
# Set <LISTNAME>, spack_store_<LISTNAME>, and system_<LISTNAME> to "".
init_path_lists() {
    eval "spack_store_$1=\"\""
    eval "$1=\"\""
    eval "system_$1=\"\""
}

# assign_path_lists LISTNAME1 LISTNAME2
# Copy contents of LISTNAME2 into LISTNAME1, for each path_list prefix.
assign_path_lists() {
    eval "spack_store_$1=\"\${spack_store_$2}\""
    eval "$1=\"\${$2}\""
    eval "system_$1=\"\${system_$2}\""
}

# append_path_lists LISTNAME ELT
# Append the provided ELT to the appropriate list, based on the result of path_order().
append_path_lists() {
    path_order "$2"
    case $? in
        0) eval "append spack_store_$1 \"\$2\"" ;;
        1) eval "append $1 \"\$2\"" ;;
        2) eval "append system_$1 \"\$2\"" ;;
    esac
}
# Check if optional parameters are defined
|
||||
# If we aren't asking for debug flags, don't add them
|
||||
if [ -z "${SPACK_ADD_DEBUG_FLAGS:-}" ]; then
|
||||
@@ -499,7 +470,12 @@ input_command="$*"
|
||||
parse_Wl() {
|
||||
while [ $# -ne 0 ]; do
|
||||
if [ "$wl_expect_rpath" = yes ]; then
|
||||
append_path_lists return_rpath_dirs_list "$1"
|
||||
path_order "$1"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$1" ;;
|
||||
1) append return_rpath_dirs_list "$1" ;;
|
||||
2) append return_system_rpath_dirs_list "$1" ;;
|
||||
esac
|
||||
wl_expect_rpath=no
|
||||
else
|
||||
case "$1" in
|
||||
@@ -508,14 +484,24 @@ parse_Wl() {
|
||||
if [ -z "$arg" ]; then
|
||||
shift; continue
|
||||
fi
|
||||
append_path_lists return_rpath_dirs_list "$arg"
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$arg" ;;
|
||||
1) append return_rpath_dirs_list "$arg" ;;
|
||||
2) append return_system_rpath_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
--rpath=*)
|
||||
arg="${1#--rpath=}"
|
||||
if [ -z "$arg" ]; then
|
||||
shift; continue
|
||||
fi
|
||||
append_path_lists return_rpath_dirs_list "$arg"
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$arg" ;;
|
||||
1) append return_rpath_dirs_list "$arg" ;;
|
||||
2) append return_system_rpath_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
-rpath|--rpath)
|
||||
wl_expect_rpath=yes
|
||||
@@ -523,7 +509,8 @@ parse_Wl() {
|
||||
"$dtags_to_strip")
|
||||
;;
|
||||
-Wl)
|
||||
# Nested -Wl,-Wl means we're in NAG compiler territory. We don't support it.
|
||||
# Nested -Wl,-Wl means we're in NAG compiler territory, we don't support
|
||||
# it.
|
||||
return 1
|
||||
;;
|
||||
*)
|
||||
@@ -542,10 +529,21 @@ categorize_arguments() {
|
||||
return_other_args_list=""
|
||||
return_isystem_was_used=""
|
||||
|
||||
init_path_lists return_isystem_include_dirs_list
|
||||
init_path_lists return_include_dirs_list
|
||||
init_path_lists return_lib_dirs_list
|
||||
init_path_lists return_rpath_dirs_list
|
||||
return_isystem_spack_store_include_dirs_list=""
|
||||
return_isystem_system_include_dirs_list=""
|
||||
return_isystem_include_dirs_list=""
|
||||
|
||||
return_spack_store_include_dirs_list=""
|
||||
return_system_include_dirs_list=""
|
||||
return_include_dirs_list=""
|
||||
|
||||
return_spack_store_lib_dirs_list=""
|
||||
return_system_lib_dirs_list=""
|
||||
return_lib_dirs_list=""
|
||||
|
||||
return_spack_store_rpath_dirs_list=""
|
||||
return_system_rpath_dirs_list=""
|
||||
return_rpath_dirs_list=""
|
||||
|
||||
# Global state for keeping track of -Wl,-rpath -Wl,/path
|
||||
wl_expect_rpath=no
|
||||
@@ -611,17 +609,32 @@ categorize_arguments() {
|
||||
arg="${1#-isystem}"
|
||||
return_isystem_was_used=true
|
||||
if [ -z "$arg" ]; then shift; arg="$1"; fi
|
||||
append_path_lists return_isystem_include_dirs_list "$arg"
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_isystem_spack_store_include_dirs_list "$arg" ;;
|
||||
1) append return_isystem_include_dirs_list "$arg" ;;
|
||||
2) append return_isystem_system_include_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
-I*)
|
||||
arg="${1#-I}"
|
||||
if [ -z "$arg" ]; then shift; arg="$1"; fi
|
||||
append_path_lists return_include_dirs_list "$arg"
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_include_dirs_list "$arg" ;;
|
||||
1) append return_include_dirs_list "$arg" ;;
|
||||
2) append return_system_include_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
-L*)
|
||||
arg="${1#-L}"
|
||||
if [ -z "$arg" ]; then shift; arg="$1"; fi
|
||||
append_path_lists return_lib_dirs_list "$arg"
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_lib_dirs_list "$arg" ;;
|
||||
1) append return_lib_dirs_list "$arg" ;;
|
||||
2) append return_system_lib_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
-l*)
|
||||
# -loopopt=0 is generated erroneously in autoconf <= 2.69,
|
||||
@@ -654,17 +667,32 @@ categorize_arguments() {
|
||||
break
|
||||
elif [ "$xlinker_expect_rpath" = yes ]; then
|
||||
# Register the path of -Xlinker -rpath <other args> -Xlinker <path>
|
||||
append_path_lists return_rpath_dirs_list "$1"
|
||||
path_order "$1"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$1" ;;
|
||||
1) append return_rpath_dirs_list "$1" ;;
|
||||
2) append return_system_rpath_dirs_list "$1" ;;
|
||||
esac
|
||||
xlinker_expect_rpath=no
|
||||
else
|
||||
case "$1" in
|
||||
-rpath=*)
|
||||
arg="${1#-rpath=}"
|
||||
append_path_lists return_rpath_dirs_list "$arg"
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$arg" ;;
|
||||
1) append return_rpath_dirs_list "$arg" ;;
|
||||
2) append return_system_rpath_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
--rpath=*)
|
||||
arg="${1#--rpath=}"
|
||||
append_path_lists return_rpath_dirs_list "$arg"
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$arg" ;;
|
||||
1) append return_rpath_dirs_list "$arg" ;;
|
||||
2) append return_system_rpath_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
-rpath|--rpath)
|
||||
xlinker_expect_rpath=yes
|
||||
@@ -681,36 +709,7 @@ categorize_arguments() {
|
||||
"$dtags_to_strip")
|
||||
;;
|
||||
*)
|
||||
# if mode is not ld, we can just add to other args
|
||||
if [ "$mode" != "ld" ]; then
|
||||
append return_other_args_list "$1"
|
||||
shift
|
||||
continue
|
||||
fi
|
||||
|
||||
# if we're in linker mode, we need to parse raw RPATH args
|
||||
case "$1" in
|
||||
-rpath=*)
|
||||
arg="${1#-rpath=}"
|
||||
append_path_lists return_rpath_dirs_list "$arg"
|
||||
;;
|
||||
--rpath=*)
|
||||
arg="${1#--rpath=}"
|
||||
append_path_lists return_rpath_dirs_list "$arg"
|
||||
;;
|
||||
-rpath|--rpath)
|
||||
if [ $# -eq 1 ]; then
|
||||
# -rpath without value: let the linker raise an error.
|
||||
append return_other_args_list "$1"
|
||||
break
|
||||
fi
|
||||
shift
|
||||
append_path_lists return_rpath_dirs_list "$1"
|
||||
;;
|
||||
*)
|
||||
append return_other_args_list "$1"
|
||||
;;
|
||||
esac
|
||||
append return_other_args_list "$1"
|
||||
;;
|
||||
esac
|
||||
shift
|
||||
@@ -732,10 +731,21 @@ categorize_arguments() {
|
||||
|
||||
categorize_arguments "$@"
|
||||
|
||||
assign_path_lists isystem_include_dirs_list return_isystem_include_dirs_list
|
||||
assign_path_lists include_dirs_list return_include_dirs_list
|
||||
assign_path_lists lib_dirs_list return_lib_dirs_list
|
||||
assign_path_lists rpath_dirs_list return_rpath_dirs_list
|
||||
spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
|
||||
system_include_dirs_list="$return_system_include_dirs_list"
|
||||
include_dirs_list="$return_include_dirs_list"
|
||||
|
||||
spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
|
||||
system_lib_dirs_list="$return_system_lib_dirs_list"
|
||||
lib_dirs_list="$return_lib_dirs_list"
|
||||
|
||||
spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
|
||||
system_rpath_dirs_list="$return_system_rpath_dirs_list"
|
||||
rpath_dirs_list="$return_rpath_dirs_list"
|
||||
|
||||
isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
|
||||
isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
|
||||
isystem_include_dirs_list="$return_isystem_include_dirs_list"
|
||||
|
||||
isystem_was_used="$return_isystem_was_used"
|
||||
other_args_list="$return_other_args_list"
|
||||
@@ -811,10 +821,21 @@ IFS="$lsep"
|
||||
categorize_arguments $spack_flags_list
|
||||
unset IFS
|
||||
|
||||
assign_path_lists spack_flags_isystem_include_dirs_list return_isystem_include_dirs_list
|
||||
assign_path_lists spack_flags_include_dirs_list return_include_dirs_list
|
||||
assign_path_lists spack_flags_lib_dirs_list return_lib_dirs_list
|
||||
assign_path_lists spack_flags_rpath_dirs_list return_rpath_dirs_list
|
||||
spack_flags_isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
|
||||
spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
|
||||
spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"
|
||||
|
||||
spack_flags_spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
|
||||
spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
|
||||
spack_flags_include_dirs_list="$return_include_dirs_list"
|
||||
|
||||
spack_flags_spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
|
||||
spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
|
||||
spack_flags_lib_dirs_list="$return_lib_dirs_list"
|
||||
|
||||
spack_flags_spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
|
||||
spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
|
||||
spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
|
||||
|
||||
spack_flags_isystem_was_used="$return_isystem_was_used"
|
||||
spack_flags_other_args_list="$return_other_args_list"
|
||||
@@ -873,7 +894,7 @@ esac
|
||||
case "$mode" in
|
||||
cpp|cc|as|ccld)
|
||||
if [ "$spack_flags_isystem_was_used" = "true" ] || [ "$isystem_was_used" = "true" ]; then
|
||||
extend spack_store_isystem_include_dirs_list SPACK_STORE_INCLUDE_DIRS
|
||||
extend isystem_spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
|
||||
extend isystem_include_dirs_list SPACK_INCLUDE_DIRS
|
||||
else
|
||||
extend spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
|
||||
@@ -889,63 +910,64 @@ args_list="$flags_list"
|
||||
|
||||
# Include search paths partitioned by (in store, non-system, system)
|
||||
# NOTE: adding ${lsep} to the prefix here turns every added element into two
|
||||
extend args_list spack_store_spack_flags_include_dirs_list -I
|
||||
extend args_list spack_flags_spack_store_include_dirs_list -I
|
||||
extend args_list spack_store_include_dirs_list -I
|
||||
|
||||
extend args_list spack_flags_include_dirs_list -I
|
||||
extend args_list include_dirs_list -I
|
||||
|
||||
extend args_list spack_store_spack_flags_isystem_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list spack_store_isystem_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list spack_flags_isystem_spack_store_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list isystem_spack_store_include_dirs_list "-isystem${lsep}"
|
||||
|
||||
extend args_list spack_flags_isystem_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list isystem_include_dirs_list "-isystem${lsep}"
|
||||
|
||||
extend args_list system_spack_flags_include_dirs_list -I
|
||||
extend args_list spack_flags_system_include_dirs_list -I
|
||||
extend args_list system_include_dirs_list -I
|
||||
|
||||
extend args_list system_spack_flags_isystem_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list system_isystem_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list spack_flags_isystem_system_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list isystem_system_include_dirs_list "-isystem${lsep}"
|
||||
|
||||
# Library search paths partitioned by (in store, non-system, system)
|
||||
extend args_list spack_store_spack_flags_lib_dirs_list "-L"
|
||||
extend args_list spack_flags_spack_store_lib_dirs_list "-L"
|
||||
extend args_list spack_store_lib_dirs_list "-L"
|
||||
|
||||
extend args_list spack_flags_lib_dirs_list "-L"
|
||||
extend args_list lib_dirs_list "-L"
|
||||
|
||||
extend args_list system_spack_flags_lib_dirs_list "-L"
|
||||
extend args_list spack_flags_system_lib_dirs_list "-L"
|
||||
extend args_list system_lib_dirs_list "-L"
|
||||
|
||||
# RPATH arguments
|
||||
rpath_prefix=""
|
||||
case "$mode" in
|
||||
ccld)
|
||||
if [ -n "$dtags_to_add" ] ; then
|
||||
append args_list "$linker_arg$dtags_to_add"
|
||||
fi
|
||||
rpath_prefix="$rpath"
|
||||
extend args_list spack_flags_spack_store_rpath_dirs_list "$rpath"
|
||||
extend args_list spack_store_rpath_dirs_list "$rpath"
|
||||
|
||||
extend args_list spack_flags_rpath_dirs_list "$rpath"
|
||||
extend args_list rpath_dirs_list "$rpath"
|
||||
|
||||
extend args_list spack_flags_system_rpath_dirs_list "$rpath"
|
||||
extend args_list system_rpath_dirs_list "$rpath"
|
||||
;;
|
||||
ld)
|
||||
if [ -n "$dtags_to_add" ] ; then
|
||||
append args_list "$dtags_to_add"
|
||||
fi
|
||||
rpath_prefix="-rpath${lsep}"
|
||||
extend args_list spack_flags_spack_store_rpath_dirs_list "-rpath${lsep}"
|
||||
extend args_list spack_store_rpath_dirs_list "-rpath${lsep}"
|
||||
|
||||
extend args_list spack_flags_rpath_dirs_list "-rpath${lsep}"
|
||||
extend args_list rpath_dirs_list "-rpath${lsep}"
|
||||
|
||||
extend args_list spack_flags_system_rpath_dirs_list "-rpath${lsep}"
|
||||
extend args_list system_rpath_dirs_list "-rpath${lsep}"
|
||||
;;
|
||||
esac
|
||||
|
||||
# if mode is ccld or ld, extend RPATH lists with the prefix determined above
|
||||
if [ -n "$rpath_prefix" ]; then
|
||||
extend args_list spack_store_spack_flags_rpath_dirs_list "$rpath_prefix"
|
||||
extend args_list spack_store_rpath_dirs_list "$rpath_prefix"
|
||||
|
||||
extend args_list spack_flags_rpath_dirs_list "$rpath_prefix"
|
||||
extend args_list rpath_dirs_list "$rpath_prefix"
|
||||
|
||||
extend args_list system_spack_flags_rpath_dirs_list "$rpath_prefix"
|
||||
extend args_list system_rpath_dirs_list "$rpath_prefix"
|
||||
fi
|
||||
|
||||
# Other arguments from the input command
|
||||
extend args_list other_args_list
|
||||
extend args_list spack_flags_other_args_list
|
||||
|
@@ -20,24 +20,11 @@
|
||||
import tempfile
|
||||
from contextlib import contextmanager
|
||||
from itertools import accumulate
|
||||
from typing import (
|
||||
Callable,
|
||||
Deque,
|
||||
Dict,
|
||||
Generator,
|
||||
Iterable,
|
||||
List,
|
||||
Match,
|
||||
Optional,
|
||||
Sequence,
|
||||
Set,
|
||||
Tuple,
|
||||
Union,
|
||||
)
|
||||
from typing import Callable, Iterable, List, Match, Optional, Tuple, Union
|
||||
|
||||
import llnl.util.symlink
|
||||
from llnl.util import tty
|
||||
from llnl.util.lang import dedupe, fnmatch_translate_multiple, memoized
|
||||
from llnl.util.lang import dedupe, memoized
|
||||
from llnl.util.symlink import islink, readlink, resolve_link_target_relative_to_the_link, symlink
|
||||
|
||||
from ..path import path_to_os_path, system_path_filter
|
||||
@@ -98,8 +85,6 @@
|
||||
"visit_directory_tree",
|
||||
]
|
||||
|
||||
Path = Union[str, pathlib.Path]
|
||||
|
||||
if sys.version_info < (3, 7, 4):
|
||||
# monkeypatch shutil.copystat to fix PermissionError when copying read-only
|
||||
# files on Lustre when using Python < 3.7.4
|
||||
@@ -301,32 +286,35 @@ def filter_file(
|
||||
ignore_absent: bool = False,
|
||||
start_at: Optional[str] = None,
|
||||
stop_at: Optional[str] = None,
|
||||
encoding: Optional[str] = "utf-8",
|
||||
) -> None:
|
||||
r"""Like sed, but uses python regular expressions.
|
||||
|
||||
Filters every line of each file through regex and replaces the file with a filtered version.
|
||||
Preserves mode of filtered files.
|
||||
Filters every line of each file through regex and replaces the file
|
||||
with a filtered version. Preserves mode of filtered files.
|
||||
|
||||
As with re.sub, ``repl`` can be either a string or a callable. If it is a callable, it is
|
||||
passed the match object and should return a suitable replacement string. If it is a string, it
|
||||
can contain ``\1``, ``\2``, etc. to represent back-substitution as sed would allow.
|
||||
As with re.sub, ``repl`` can be either a string or a callable.
|
||||
If it is a callable, it is passed the match object and should
|
||||
return a suitable replacement string. If it is a string, it
|
||||
can contain ``\1``, ``\2``, etc. to represent back-substitution
|
||||
as sed would allow.
|
||||
|
||||
Args:
|
||||
regex: The regular expression to search for
|
||||
repl: The string to replace matches with
|
||||
*filenames: One or more files to search and replace
string: Treat regex as a plain string. Default is False
backup: Make backup file(s) suffixed with ``~``. Default is False
|
||||
ignore_absent: Ignore any files that don't exist. Default is False
|
||||
start_at: Marker used to start applying the replacements. If a text line matches this
|
||||
marker filtering is started at the next line. All contents before the marker and the
|
||||
marker itself are copied verbatim. Default is to start filtering from the first line of
|
||||
the file.
|
||||
stop_at: Marker used to stop scanning the file further. If a text line matches this marker
|
||||
filtering is stopped and the rest of the file is copied verbatim. Default is to filter
|
||||
until the end of the file.
|
||||
encoding: The encoding to use when reading and writing the files. Default is ``utf-8``;
passing ``None`` uses the system's default encoding.
|
||||
regex (str): The regular expression to search for
|
||||
repl (str): The string to replace matches with
|
||||
*filenames: One or more files to search and replace
|
||||
string (bool): Treat regex as a plain string. Default it False
|
||||
backup (bool): Make backup file(s) suffixed with ``~``. Default is False
|
||||
ignore_absent (bool): Ignore any files that don't exist.
|
||||
Default is False
|
||||
start_at (str): Marker used to start applying the replacements. If a
|
||||
text line matches this marker filtering is started at the next line.
|
||||
All contents before the marker and the marker itself are copied
|
||||
verbatim. Default is to start filtering from the first line of the
|
||||
file.
|
||||
stop_at (str): Marker used to stop scanning the file further. If a text
|
||||
line matches this marker filtering is stopped and the rest of the
|
||||
file is copied verbatim. Default is to filter until the end of the
|
||||
file.
|
||||
"""
|
||||
# Allow strings to use \1, \2, etc. for replacement, like sed
|
||||
if not callable(repl):
|
||||
@@ -342,56 +330,72 @@ def groupid_to_group(x):
|
||||
|
||||
if string:
|
||||
regex = re.escape(regex)
|
||||
regex_compiled = re.compile(regex)
|
||||
for path in path_to_os_path(*filenames):
|
||||
if ignore_absent and not os.path.exists(path):
|
||||
tty.debug(f'FILTER FILE: file "{path}" not found. Skipping to next file.')
|
||||
for filename in path_to_os_path(*filenames):
|
||||
msg = 'FILTER FILE: {0} [replacing "{1}"]'
|
||||
tty.debug(msg.format(filename, regex))
|
||||
|
||||
backup_filename = filename + "~"
|
||||
tmp_filename = filename + ".spack~"
|
||||
|
||||
if ignore_absent and not os.path.exists(filename):
|
||||
msg = 'FILTER FILE: file "{0}" not found. Skipping to next file.'
|
||||
tty.debug(msg.format(filename))
|
||||
continue
|
||||
else:
|
||||
tty.debug(f'FILTER FILE: {path} [replacing "{regex}"]')
|
||||
|
||||
fd, temp_path = tempfile.mkstemp(
|
||||
prefix=f"{os.path.basename(path)}.", dir=os.path.dirname(path)
|
||||
)
|
||||
os.close(fd)
|
||||
# Create backup file. Don't overwrite an existing backup
|
||||
# file in case this file is being filtered multiple times.
|
||||
if not os.path.exists(backup_filename):
|
||||
shutil.copy(filename, backup_filename)
|
||||
|
||||
shutil.copy(path, temp_path)
|
||||
errored = False
|
||||
# Create a temporary file to read from. We cannot use backup_filename
|
||||
# in case filter_file is invoked multiple times on the same file.
|
||||
shutil.copy(filename, tmp_filename)
|
||||
|
||||
try:
|
||||
# Open as a text file and filter until the end of the file is reached, or we found a
|
||||
# marker in the line if it was specified. To avoid translating line endings (\n to
|
||||
# \r\n and vice-versa) use newline="".
|
||||
with open(
|
||||
temp_path, mode="r", errors="surrogateescape", newline="", encoding=encoding
|
||||
) as input_file, open(
|
||||
path, mode="w", errors="surrogateescape", newline="", encoding=encoding
|
||||
) as output_file:
|
||||
if start_at is None and stop_at is None: # common case, avoids branching in loop
|
||||
for line in input_file:
|
||||
output_file.write(re.sub(regex_compiled, repl, line))
|
||||
else:
|
||||
# state is -1 before start_at; 0 between; 1 after stop_at
|
||||
state = 0 if start_at is None else -1
|
||||
for line in input_file:
|
||||
if state == 0:
|
||||
# Open as a text file and filter until the end of the file is
|
||||
# reached, or we found a marker in the line if it was specified
|
||||
#
|
||||
# To avoid translating line endings (\n to \r\n and vice-versa)
|
||||
# we force os.open to ignore translations and use the line endings
|
||||
# the file comes with
|
||||
with open(tmp_filename, mode="r", errors="surrogateescape", newline="") as input_file:
|
||||
with open(filename, mode="w", errors="surrogateescape", newline="") as output_file:
|
||||
do_filtering = start_at is None
|
||||
# Using iter and readline is a workaround needed not to
|
||||
# disable input_file.tell(), which will happen if we call
|
||||
# input_file.next() implicitly via the for loop
|
||||
for line in iter(input_file.readline, ""):
|
||||
if stop_at is not None:
|
||||
current_position = input_file.tell()
|
||||
if stop_at == line.strip():
|
||||
state = 1
|
||||
else:
|
||||
line = re.sub(regex_compiled, repl, line)
|
||||
elif state == -1 and start_at == line.strip():
|
||||
state = 0
|
||||
output_file.write(line)
|
||||
output_file.write(line)
|
||||
break
|
||||
if do_filtering:
|
||||
filtered_line = re.sub(regex, repl, line)
|
||||
output_file.write(filtered_line)
|
||||
else:
|
||||
do_filtering = start_at == line.strip()
|
||||
output_file.write(line)
|
||||
else:
|
||||
current_position = None
|
||||
|
||||
# If we stopped filtering at some point, reopen the file in
|
||||
# binary mode and copy verbatim the remaining part
|
||||
if current_position and stop_at:
|
||||
with open(tmp_filename, mode="rb") as input_binary_buffer:
|
||||
input_binary_buffer.seek(current_position)
|
||||
with open(filename, mode="ab") as output_binary_buffer:
|
||||
output_binary_buffer.writelines(input_binary_buffer.readlines())
|
||||
|
||||
except BaseException:
|
||||
# restore the original file
|
||||
os.rename(temp_path, path)
|
||||
errored = True
|
||||
# clean up the original file on failure.
|
||||
shutil.move(backup_filename, filename)
|
||||
raise
|
||||
|
||||
finally:
|
||||
if not errored and not backup:
|
||||
os.unlink(temp_path)
|
||||
os.remove(tmp_filename)
|
||||
if not backup and os.path.exists(backup_filename):
|
||||
os.remove(backup_filename)
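A minimal usage sketch of the filter_file behavior described above, with filter_file imported from llnl.util.filesystem; the file name and the marker strings are hypothetical, not taken from this diff:

    import llnl.util.filesystem as fs

    # Replace "foo" with "bar" only between the two marker lines; everything
    # before the start marker and after the stop marker is copied verbatim,
    # and a "config.ini~" backup is kept because backup=True.
    fs.filter_file(
        r"foo",
        "bar",
        "config.ini",        # hypothetical file
        backup=True,
        start_at="# BEGIN",  # filtering starts on the line after this marker
        stop_at="# END",     # filtering stops once this marker is seen
    )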
|
||||
|
||||
|
||||
class FileFilter:
|
||||
@@ -1669,203 +1673,105 @@ def find_first(root: str, files: Union[Iterable[str], str], bfs_depth: int = 2)
|
||||
return FindFirstFile(root, *files, bfs_depth=bfs_depth).find()
|
||||
|
||||
|
||||
def find(
|
||||
root: Union[Path, Sequence[Path]],
|
||||
files: Union[str, Sequence[str]],
|
||||
recursive: bool = True,
|
||||
max_depth: Optional[int] = None,
|
||||
) -> List[str]:
|
||||
"""Finds all files matching the patterns from ``files`` starting from ``root``. This function
|
||||
returns a deterministic result for the same input and directory structure when run multiple
|
||||
times. Symlinked directories are followed, and unique directories are searched only once. Each
|
||||
matching file is returned only once at lowest depth in case multiple paths exist due to
|
||||
symlinked directories.
|
||||
def find(root, files, recursive=True):
|
||||
"""Search for ``files`` starting from the ``root`` directory.
|
||||
|
||||
Like GNU/BSD find but written entirely in Python.
|
||||
|
||||
Examples:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ find /usr -name python
|
||||
|
||||
is equivalent to:
|
||||
|
||||
>>> find('/usr', 'python')
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ find /usr/local/bin -maxdepth 1 -name python
|
||||
|
||||
is equivalent to:
|
||||
|
||||
>>> find('/usr/local/bin', 'python', recursive=False)
|
||||
|
||||
Accepts any glob characters accepted by fnmatch:
|
||||
|
||||
========== ====================================
|
||||
Pattern Meaning
|
||||
========== ====================================
|
||||
``*`` matches one or more characters
|
||||
``*`` matches everything
|
||||
``?`` matches any single character
|
||||
``[seq]`` matches any character in ``seq``
|
||||
``[!seq]`` matches any character not in ``seq``
|
||||
========== ====================================
|
||||
|
||||
Examples:
|
||||
|
||||
>>> find("/usr", "*.txt", recursive=True, max_depth=2)
|
||||
|
||||
finds all files with the extension ``.txt`` in the directory ``/usr`` and subdirectories up to
|
||||
depth 2.
|
||||
|
||||
>>> find(["/usr", "/var"], ["*.txt", "*.log"], recursive=True)
|
||||
|
||||
finds all files with the extension ``.txt`` or ``.log`` in the directories ``/usr`` and
|
||||
``/var`` at any depth.
|
||||
|
||||
>>> find("/usr", "GL/*.h", recursive=True)
|
||||
|
||||
finds all header files in a directory GL at any depth in the directory ``/usr``.
|
||||
|
||||
Parameters:
|
||||
root: One or more root directories to start searching from
|
||||
files: One or more filename patterns to search for
|
||||
recursive: if False search only root, if True descends from roots. Defaults to True.
|
||||
max_depth: if set, don't search below this depth. Cannot be set if recursive is False
|
||||
root (str): The root directory to start searching from
|
||||
files (str or collections.abc.Sequence): Library name(s) to search for
|
||||
recursive (bool): if False search only root folder,
|
||||
if True descends top-down from the root. Defaults to True.
|
||||
|
||||
Returns a list of absolute, matching file paths.
|
||||
Returns:
|
||||
list: The files that have been found
|
||||
"""
|
||||
if isinstance(root, (str, pathlib.Path)):
|
||||
root = [root]
|
||||
elif not isinstance(root, collections.abc.Sequence):
|
||||
raise TypeError(f"'root' arg must be a path or a sequence of paths, not '{type(root)}']")
|
||||
|
||||
if isinstance(files, str):
|
||||
files = [files]
|
||||
elif not isinstance(files, collections.abc.Sequence):
|
||||
raise TypeError(f"'files' arg must be str or a sequence of str, not '{type(files)}']")
|
||||
|
||||
# If recursive is false, max_depth can only be None or 0
|
||||
if max_depth and not recursive:
|
||||
raise ValueError(f"max_depth ({max_depth}) cannot be set if recursive is False")
|
||||
if recursive:
|
||||
tty.debug(f"Find (recursive): {root} {str(files)}")
|
||||
result = _find_recursive(root, files)
|
||||
else:
|
||||
tty.debug(f"Find (not recursive): {root} {str(files)}")
|
||||
result = _find_non_recursive(root, files)
|
||||
|
||||
tty.debug(f"Find (max depth = {max_depth}): {root} {files}")
|
||||
if not recursive:
|
||||
max_depth = 0
|
||||
elif max_depth is None:
|
||||
max_depth = sys.maxsize
|
||||
result = _find_max_depth(root, files, max_depth)
|
||||
tty.debug(f"Find complete: {root} {files}")
|
||||
tty.debug(f"Find complete: {root} {str(files)}")
|
||||
return result
|
||||
|
||||
|
||||
def _log_file_access_issue(e: OSError, path: str) -> None:
|
||||
errno_name = errno.errorcode.get(e.errno, "UNKNOWN")
|
||||
tty.debug(f"find must skip {path}: {errno_name} {e}")
|
||||
@system_path_filter
|
||||
def _find_recursive(root, search_files):
|
||||
# The variable here is **on purpose** a defaultdict. The idea is that
|
||||
# we want to poke the filesystem as little as possible, but still maintain
|
||||
# stability in the order of the answer. Thus we are recording each library
|
||||
# found in a key, and reconstructing the stable order later.
|
||||
found_files = collections.defaultdict(list)
|
||||
|
||||
# Make the path absolute to have os.walk also return an absolute path
|
||||
root = os.path.abspath(root)
|
||||
for path, _, list_files in os.walk(root):
|
||||
for search_file in search_files:
|
||||
matches = glob.glob(os.path.join(path, search_file))
|
||||
matches = [os.path.join(path, x) for x in matches]
|
||||
found_files[search_file].extend(matches)
|
||||
|
||||
answer = []
|
||||
for search_file in search_files:
|
||||
answer.extend(found_files[search_file])
|
||||
|
||||
return answer
|
||||
|
||||
|
||||
def _file_id(s: os.stat_result) -> Tuple[int, int]:
|
||||
# Note: on windows, st_ino is the file index and st_dev is the volume serial number. See
|
||||
# https://github.com/python/cpython/blob/3.9/Python/fileutils.c
|
||||
return (s.st_ino, s.st_dev)
|
||||
@system_path_filter
|
||||
def _find_non_recursive(root, search_files):
|
||||
# The variable here is **on purpose** a defaultdict as os.list_dir
|
||||
# can return files in any order (does not preserve stability)
|
||||
found_files = collections.defaultdict(list)
|
||||
|
||||
# Make the path absolute to have absolute path returned
|
||||
root = os.path.abspath(root)
|
||||
|
||||
def _dedupe_files(paths: List[str]) -> List[str]:
    """Deduplicate files by inode and device, dropping files that cannot be accessed."""
    unique_files: List[str] = []
    # tuple of (inode, device) for each file without following symlinks
    visited: Set[Tuple[int, int]] = set()
    for path in paths:
        try:
            stat_info = os.lstat(path)
        except OSError as e:
            _log_file_access_issue(e, path)
            continue
        file_id = _file_id(stat_info)
        if file_id not in visited:
            unique_files.append(path)
            visited.add(file_id)
    return unique_files
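A self-contained illustration of the (st_ino, st_dev) identity used above; this snippet is not part of the diff, it only shows why two directory entries that are hard links to the same file collapse to a single result:

    import os
    import tempfile

    def file_id(path):
        # identity of the underlying inode, without following symlinks
        s = os.lstat(path)
        return (s.st_ino, s.st_dev)

    with tempfile.TemporaryDirectory() as d:
        a = os.path.join(d, "a.txt")
        b = os.path.join(d, "b.txt")
        open(a, "w").close()
        os.link(a, b)  # hard link: same (inode, device) as a
        seen, unique = set(), []
        for p in (a, b):
            fid = file_id(p)
            if fid not in seen:
                seen.add(fid)
                unique.append(p)
        assert unique == [a]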
|
||||
for search_file in search_files:
|
||||
matches = glob.glob(os.path.join(root, search_file))
|
||||
matches = [os.path.join(root, x) for x in matches]
|
||||
found_files[search_file].extend(matches)
|
||||
|
||||
answer = []
|
||||
for search_file in search_files:
|
||||
answer.extend(found_files[search_file])
|
||||
|
||||
def _find_max_depth(
|
||||
roots: Sequence[Path], globs: Sequence[str], max_depth: int = sys.maxsize
|
||||
) -> List[str]:
|
||||
"""See ``find`` for the public API."""
|
||||
# We optimize for the common case of simple filename only patterns: a single, combined regex
|
||||
# is used. For complex patterns that include path components, we use a slower glob call from
|
||||
# every directory we visit within max_depth.
|
||||
filename_only_patterns = {
|
||||
f"pattern_{i}": os.path.normcase(x) for i, x in enumerate(globs) if "/" not in x
|
||||
}
|
||||
complex_patterns = {f"pattern_{i}": x for i, x in enumerate(globs) if "/" in x}
|
||||
regex = re.compile(fnmatch_translate_multiple(filename_only_patterns))
|
||||
# Ordered dictionary that keeps track of what pattern found which files
|
||||
matched_paths: Dict[str, List[str]] = {f"pattern_{i}": [] for i, _ in enumerate(globs)}
|
||||
# Ensure returned paths are always absolute
|
||||
roots = [os.path.abspath(r) for r in roots]
|
||||
# Breadth-first search queue. Each element is a tuple of (depth, dir)
|
||||
dir_queue: Deque[Tuple[int, str]] = collections.deque()
|
||||
# Set of visited directories. Each element is a tuple of (inode, device)
|
||||
visited_dirs: Set[Tuple[int, int]] = set()
|
||||
|
||||
for root in roots:
|
||||
try:
|
||||
stat_root = os.stat(root)
|
||||
except OSError as e:
|
||||
_log_file_access_issue(e, root)
|
||||
continue
|
||||
dir_id = _file_id(stat_root)
|
||||
if dir_id not in visited_dirs:
|
||||
dir_queue.appendleft((0, root))
|
||||
visited_dirs.add(dir_id)
|
||||
|
||||
while dir_queue:
|
||||
depth, curr_dir = dir_queue.pop()
|
||||
try:
|
||||
dir_iter = os.scandir(curr_dir)
|
||||
except OSError as e:
|
||||
_log_file_access_issue(e, curr_dir)
|
||||
continue
|
||||
|
||||
# Use glob.glob for complex patterns.
|
||||
for pattern_name, pattern in complex_patterns.items():
|
||||
matched_paths[pattern_name].extend(
|
||||
path for path in glob.glob(os.path.join(curr_dir, pattern))
|
||||
)
|
||||
|
||||
# List of subdirectories by path and (inode, device) tuple
|
||||
subdirs: List[Tuple[str, Tuple[int, int]]] = []
|
||||
|
||||
with dir_iter:
|
||||
for dir_entry in dir_iter:
|
||||
|
||||
# Match filename only patterns
|
||||
if filename_only_patterns:
|
||||
m = regex.match(os.path.normcase(dir_entry.name))
|
||||
if m:
|
||||
for pattern_name in filename_only_patterns:
|
||||
if m.group(pattern_name):
|
||||
matched_paths[pattern_name].append(dir_entry.path)
|
||||
break
|
||||
|
||||
# Collect subdirectories
|
||||
if depth >= max_depth:
|
||||
continue
|
||||
|
||||
try:
|
||||
if not dir_entry.is_dir(follow_symlinks=True):
|
||||
continue
|
||||
if sys.platform == "win32":
|
||||
# Note: st_ino/st_dev on DirEntry.stat are not set on Windows, so we have
|
||||
# to call os.stat
|
||||
stat_info = os.stat(dir_entry.path, follow_symlinks=True)
|
||||
else:
|
||||
stat_info = dir_entry.stat(follow_symlinks=True)
|
||||
except OSError as e:
|
||||
# Possible permission issue, or a symlink that cannot be resolved (ELOOP).
|
||||
_log_file_access_issue(e, dir_entry.path)
|
||||
continue
|
||||
|
||||
subdirs.append((dir_entry.path, _file_id(stat_info)))
|
||||
|
||||
# Enqueue subdirectories in a deterministic order
|
||||
if subdirs:
|
||||
subdirs.sort(key=lambda s: os.path.basename(s[0]))
|
||||
for subdir, subdir_id in subdirs:
|
||||
if subdir_id not in visited_dirs:
|
||||
dir_queue.appendleft((depth + 1, subdir))
|
||||
visited_dirs.add(subdir_id)
|
||||
|
||||
# Sort the matched paths for deterministic output
|
||||
for paths in matched_paths.values():
|
||||
paths.sort()
|
||||
all_matching_paths = [path for paths in matched_paths.values() for path in paths]
|
||||
|
||||
# We only dedupe files if we have any complex patterns, since only they can match the same file
|
||||
# multiple times
|
||||
return _dedupe_files(all_matching_paths) if complex_patterns else all_matching_paths
|
||||
return answer
|
||||
|
||||
|
||||
# Utilities for libraries and headers
|
||||
@@ -2304,9 +2210,7 @@ def find_system_libraries(libraries, shared=True):
|
||||
return libraries_found
|
||||
|
||||
|
||||
def find_libraries(
|
||||
libraries, root, shared=True, recursive=False, runtime=True, max_depth: Optional[int] = None
|
||||
):
|
||||
def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
|
||||
"""Returns an iterable of full paths to libraries found in a root dir.
|
||||
|
||||
Accepts any glob characters accepted by fnmatch:
|
||||
@@ -2327,8 +2231,6 @@ def find_libraries(
|
||||
otherwise for static. Defaults to True.
|
||||
recursive (bool): if False search only root folder,
|
||||
if True descends top-down from the root. Defaults to False.
|
||||
max_depth (int): if set, don't search below this depth. Cannot be set
|
||||
if recursive is False
|
||||
runtime (bool): Windows only option, no-op elsewhere. If true,
|
||||
search for runtime shared libs (.DLL), otherwise, search
|
||||
for .Lib files. If shared is false, this has no meaning.
|
||||
@@ -2337,7 +2239,6 @@ def find_libraries(
|
||||
Returns:
|
||||
LibraryList: The libraries that have been found
|
||||
"""
|
||||
|
||||
if isinstance(libraries, str):
|
||||
libraries = [libraries]
|
||||
elif not isinstance(libraries, collections.abc.Sequence):
|
||||
@@ -2370,10 +2271,8 @@ def find_libraries(
|
||||
libraries = ["{0}.{1}".format(lib, suffix) for lib in libraries for suffix in suffixes]
|
||||
|
||||
if not recursive:
|
||||
if max_depth:
|
||||
raise ValueError(f"max_depth ({max_depth}) cannot be set if recursive is False")
|
||||
# If not recursive, look for the libraries directly in root
|
||||
return LibraryList(find(root, libraries, recursive=False))
|
||||
return LibraryList(find(root, libraries, False))
|
||||
|
||||
# To speedup the search for external packages configured e.g. in /usr,
|
||||
# perform first non-recursive search in root/lib then in root/lib64 and
|
||||
@@ -2391,7 +2290,7 @@ def find_libraries(
|
||||
if found_libs:
|
||||
break
|
||||
else:
|
||||
found_libs = find(root, libraries, recursive=True, max_depth=max_depth)
|
||||
found_libs = find(root, libraries, True)
|
||||
|
||||
return LibraryList(found_libs)
|
||||
|
||||
@@ -2820,25 +2719,6 @@ def temporary_dir(
|
||||
remove_directory_contents(tmp_dir)
|
||||
|
||||
|
||||
@contextmanager
def edit_in_place_through_temporary_file(file_path: str) -> Generator[str, None, None]:
    """Context manager for modifying ``file_path`` in place, preserving its inode and hardlinks,
    for functions or external tools that do not support in-place editing. Notice that this function
    is unsafe in that it works with paths instead of file descriptors, but this is by design,
    since we assume the call site will create a new inode at the same path."""
    tmp_fd, tmp_path = tempfile.mkstemp(
        dir=os.path.dirname(file_path), prefix=f"{os.path.basename(file_path)}."
    )
    # windows cannot replace a file with open fds, so close since the call site needs to replace.
    os.close(tmp_fd)
    try:
        shutil.copyfile(file_path, tmp_path, follow_symlinks=True)
        yield tmp_path
        shutil.copyfile(tmp_path, file_path, follow_symlinks=True)
    finally:
        os.unlink(tmp_path)
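A sketch of how a call site might use this context manager; the path and the sed invocation are illustrative, not taken from the diff. The external tool edits the temporary copy, and the final copyfile writes the result back through the original path, so the original inode, hard links, and permissions survive:

    import subprocess
    from llnl.util.filesystem import edit_in_place_through_temporary_file

    with edit_in_place_through_temporary_file("/opt/app/config.ini") as tmp:
        # the tool may replace `tmp` with a new inode; only its contents matter,
        # because they are copied back to the original path afterwards
        subprocess.run(["sed", "-i", "s/foo/bar/g", tmp], check=True)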
|
||||
|
||||
|
||||
def filesummary(path, print_bytes=16) -> Tuple[int, bytes]:
|
||||
"""Create a small summary of the given file. Does not error
|
||||
when file does not exist.
|
||||
|
@@ -5,17 +5,15 @@
|
||||
|
||||
import collections.abc
|
||||
import contextlib
|
||||
import fnmatch
|
||||
import functools
|
||||
import itertools
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import traceback
|
||||
import typing
|
||||
import warnings
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Callable, Dict, Iterable, List, Tuple, TypeVar
|
||||
from typing import Callable, Iterable, List, Tuple, TypeVar
|
||||
|
||||
# Ignore emacs backups when listing modules
|
||||
ignore_modules = r"^\.#|~$"
|
||||
@@ -861,19 +859,6 @@ def elide_list(line_list: List[str], max_num: int = 10) -> List[str]:
|
||||
return line_list
|
||||
|
||||
|
||||
if sys.version_info >= (3, 9):
    PatternStr = re.Pattern[str]
else:
    PatternStr = typing.Pattern[str]


def fnmatch_translate_multiple(named_patterns: Dict[str, str]) -> str:
    """Similar to ``fnmatch.translate``, but takes an ordered dictionary where keys are pattern
    names, and values are filename patterns. The output is a regex that matches any of the
    patterns in order, and named capture groups are used to identify which pattern matched."""
    return "|".join(f"(?P<{n}>{fnmatch.translate(p)})" for n, p in named_patterns.items())
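A small sketch of how the combined regex is meant to be used, mirroring the filename-only fast path in _find_max_depth above; the glob patterns are illustrative:

    import fnmatch
    import re

    def fnmatch_translate_multiple(named_patterns):
        # same one-liner as above, inlined so the sketch is self-contained
        return "|".join(f"(?P<{n}>{fnmatch.translate(p)})" for n, p in named_patterns.items())

    patterns = {"pattern_0": "*.so", "pattern_1": "lib*.a"}  # illustrative globs
    regex = re.compile(fnmatch_translate_multiple(patterns))

    m = regex.match("libfoo.a")
    if m:
        # the named group that is non-empty tells us which pattern matched
        matched = next(name for name in patterns if m.group(name))
        assert matched == "pattern_1"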
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def nullcontext(*args, **kwargs):
|
||||
"""Empty context manager.
|
||||
@@ -886,6 +871,15 @@ class UnhashableArguments(TypeError):
|
||||
"""Raise when an @memoized function receives unhashable arg or kwarg values."""
|
||||
|
||||
|
||||
def enum(**kwargs):
|
||||
"""Return an enum-like class.
|
||||
|
||||
Args:
|
||||
**kwargs: explicit dictionary of enums
|
||||
"""
|
||||
return type("Enum", (object,), kwargs)
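For illustration, the helper can be used like this (the names and values are made up):

    # illustrative use of the enum() helper defined above
    Color = enum(RED=1, GREEN=2, BLUE=3)
    assert Color.GREEN == 2          # attributes come straight from the kwargs
    assert isinstance(Color, type)   # it is a plain class, not an enum.Enum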
|
||||
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
|
||||
|
@@ -879,13 +879,10 @@ def _writer_daemon(
|
||||
write_fd.close()
|
||||
|
||||
# 1. Use line buffering (3rd param = 1) since Python 3 has a bug
|
||||
# that prevents unbuffered text I/O. [needs citation]
|
||||
# 2. Enforce a UTF-8 interpretation of build process output with errors replaced by '?'.
|
||||
# The downside is that the log file will not contain the exact output of the build process.
|
||||
# that prevents unbuffered text I/O.
|
||||
# 2. Python 3.x before 3.7 does not open with UTF-8 encoding by default
|
||||
# 3. closefd=False because Connection has "ownership"
|
||||
read_file = os.fdopen(
|
||||
read_fd.fileno(), "r", 1, encoding="utf-8", errors="replace", closefd=False
|
||||
)
|
||||
read_file = os.fdopen(read_fd.fileno(), "r", 1, encoding="utf-8", closefd=False)
|
||||
|
||||
if stdin_fd:
|
||||
stdin_file = os.fdopen(stdin_fd.fileno(), closefd=False)
|
||||
@@ -931,7 +928,11 @@ def _writer_daemon(
|
||||
try:
|
||||
while line_count < 100:
|
||||
# Handle output from the calling process.
|
||||
line = _retry(read_file.readline)()
|
||||
try:
|
||||
line = _retry(read_file.readline)()
|
||||
except UnicodeDecodeError:
|
||||
# installs like --test=root gpgme produce non-UTF8 logs
|
||||
line = "<line lost: output was not encoded as UTF-8>\n"
|
||||
|
||||
if not line:
|
||||
return
|
||||
@@ -945,13 +946,6 @@ def _writer_daemon(
|
||||
output_line = clean_line
|
||||
if filter_fn:
|
||||
output_line = filter_fn(clean_line)
|
||||
enc = sys.stdout.encoding
|
||||
if enc != "utf-8":
|
||||
# On Python 3.6 and 3.7-3.14 with non-{utf-8,C} locale stdout
|
||||
# may not be able to handle utf-8 output. We do an inefficient
|
||||
# dance of re-encoding with errors replaced, so stdout.write
|
||||
# does not raise.
|
||||
output_line = output_line.encode(enc, "replace").decode(enc)
|
||||
sys.stdout.write(output_line)
|
||||
|
||||
# Stripped output to log file.
|
||||
|
@@ -11,21 +11,9 @@
|
||||
import spack.util.git
|
||||
|
||||
#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
|
||||
__version__ = "0.23.2.dev0"
|
||||
__version__ = "0.23.0.dev0"
|
||||
spack_version = __version__
|
||||
|
||||
#: The current Package API version implemented by this version of Spack. The Package API defines
|
||||
#: the Python interface for packages as well as the layout of package repositories. The minor
|
||||
#: version is incremented when the package API is extended in a backwards-compatible way. The major
|
||||
#: version is incremented upon breaking changes. This version is changed independently from the
|
||||
#: Spack version.
|
||||
package_api_version = (1, 0)
|
||||
|
||||
#: The minimum Package API version that this version of Spack is compatible with. This should
|
||||
#: always be a tuple of the form ``(major, 0)``, since compatibility with vX.Y implies
|
||||
#: compatibility with vX.0.
|
||||
min_package_api_version = (1, 0)
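The comments above imply a simple compatibility rule: the major versions must match, and the requested minor version must fall within what this Spack implements, since minor bumps are backwards compatible. A hedged sketch of that check; this helper is illustrative and not part of the diff:

    def supports_package_api(required, implemented=(1, 0), minimum=(1, 0)):
        # Illustrative only: majors must match, and the requested minor version
        # must lie between the minimum supported and the implemented API.
        return required[0] == implemented[0] and minimum[1] <= required[1] <= implemented[1]

    assert supports_package_api((1, 0))
    assert not supports_package_api((2, 0))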
|
||||
|
||||
|
||||
def __try_int(v):
|
||||
try:
|
||||
@@ -92,6 +80,4 @@ def get_short_version() -> str:
|
||||
"get_version",
|
||||
"get_spack_commit",
|
||||
"get_short_version",
|
||||
"package_api_version",
|
||||
"min_package_api_version",
|
||||
]
|
||||
|
@@ -714,9 +714,9 @@ def _ensure_env_methods_are_ported_to_builders(pkgs, error_cls):
|
||||
for pkg_name in pkgs:
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)
|
||||
|
||||
# values are either ConditionalValue objects or the values themselves
|
||||
# values are either Value objects (for conditional values) or the values themselves
|
||||
build_system_names = set(
|
||||
v.value if isinstance(v, spack.variant.ConditionalValue) else v
|
||||
v.value if isinstance(v, spack.variant.Value) else v
|
||||
for _, variant in pkg_cls.variant_definitions("build_system")
|
||||
for v in variant.values
|
||||
)
|
||||
@@ -805,7 +805,7 @@ def _uses_deprecated_globals(pkgs, error_cls):
|
||||
|
||||
file = spack.repo.PATH.filename_for_package_name(pkg_name)
|
||||
tree = ast.parse(open(file).read())
|
||||
visitor = DeprecatedMagicGlobals(("std_cmake_args", "std_meson_args", "std_pip_args"))
|
||||
visitor = DeprecatedMagicGlobals(("std_cmake_args", "std_meson_args"))
|
||||
visitor.visit(tree)
|
||||
if visitor.references_to_globals:
|
||||
errors.append(
|
||||
@@ -1366,8 +1366,14 @@ def _test_detection_by_executable(pkgs, debug_log, error_cls):
|
||||
|
||||
def _compare_extra_attribute(_expected, _detected, *, _spec):
|
||||
result = []
|
||||
# Check items are of the same type
|
||||
if not isinstance(_detected, type(_expected)):
|
||||
_summary = f'{pkg_name}: error when trying to detect "{_expected}"'
|
||||
_details = [f"{_detected} was detected instead"]
|
||||
return [error_cls(summary=_summary, details=_details)]
|
||||
|
||||
# If they are string expected is a regex
|
||||
if isinstance(_expected, str) and isinstance(_detected, str):
|
||||
if isinstance(_expected, str):
|
||||
try:
|
||||
_regex = re.compile(_expected)
|
||||
except re.error:
|
||||
@@ -1383,7 +1389,7 @@ def _compare_extra_attribute(_expected, _detected, *, _spec):
|
||||
_details = [f"{_detected} does not match the regex"]
|
||||
return [error_cls(summary=_summary, details=_details)]
|
||||
|
||||
elif isinstance(_expected, dict) and isinstance(_detected, dict):
|
||||
if isinstance(_expected, dict):
|
||||
_not_detected = set(_expected.keys()) - set(_detected.keys())
|
||||
if _not_detected:
|
||||
_summary = f"{pkg_name}: cannot detect some attributes for spec {_spec}"
|
||||
@@ -1398,10 +1404,6 @@ def _compare_extra_attribute(_expected, _detected, *, _spec):
|
||||
result.extend(
|
||||
_compare_extra_attribute(_expected[_key], _detected[_key], _spec=_spec)
|
||||
)
|
||||
else:
|
||||
_summary = f'{pkg_name}: error when trying to detect "{_expected}"'
|
||||
_details = [f"{_detected} was detected instead"]
|
||||
return [error_cls(summary=_summary, details=_details)]
|
||||
|
||||
return result
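To make the recursion above concrete, an illustrative expected/detected pair from a package's detection test might look like this; string values on the expected side are treated as regexes, dictionaries are compared key by key:

    import re

    expected = {"compilers": {"c": r".*/clang-\d+$"}}    # illustrative expectation
    detected = {"compilers": {"c": "/usr/bin/clang-15"}}  # illustrative detection result

    # the dict branch recurses on the shared key "compilers" -> "c",
    # then the str branch checks the regex against the detected value
    assert re.compile(expected["compilers"]["c"]).match(detected["compilers"]["c"])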
|
||||
|
||||
|
@@ -1182,9 +1182,6 @@ def __init__(self, mirror: spack.mirror.Mirror, force: bool, update_index: bool)
|
||||
self.tmpdir: str
|
||||
self.executor: concurrent.futures.Executor
|
||||
|
||||
# Verify if the mirror meets the requirements to push
|
||||
self.mirror.ensure_mirror_usable("push")
|
||||
|
||||
def __enter__(self):
|
||||
self._tmpdir = tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root())
|
||||
self._executor = spack.util.parallel.make_concurrent_executor()
|
||||
@@ -2332,9 +2329,7 @@ def is_backup_file(file):
|
||||
if not codesign:
|
||||
return
|
||||
for binary in changed_files:
|
||||
# preserve the original inode by running codesign on a copy
|
||||
with fsys.edit_in_place_through_temporary_file(binary) as tmp_binary:
|
||||
codesign("-fs-", tmp_binary)
|
||||
codesign("-fs-", binary)
|
||||
|
||||
# If we are installing back to the same location
|
||||
# relocate the sbang location if the spack directory changed
|
||||
|
@@ -357,13 +357,6 @@ def _do_patch_libtool_configure(self):
|
||||
)
|
||||
# Support Libtool 2.4.2 and older:
|
||||
x.filter(regex=r'^(\s*test \$p = "-R")(; then\s*)$', repl=r'\1 || test x-l = x"$p"\2')
|
||||
# Configure scripts generated with libtool < 2.5.4 have a faulty test for the
|
||||
# -single_module linker flag. A deprecation warning makes it think the default is
|
||||
# -multi_module, triggering it to use problematic linker flags (such as ld -r). The
|
||||
# linker default is `-single_module` from (ancient) macOS 10.4, so override by setting
|
||||
# `lt_cv_apple_cc_single_mod=yes`. See the fix in libtool commit
|
||||
# 82f7f52123e4e7e50721049f7fa6f9b870e09c9d.
|
||||
x.filter("lt_cv_apple_cc_single_mod=no", "lt_cv_apple_cc_single_mod=yes", string=True)
|
||||
|
||||
@spack.builder.run_after("configure")
|
||||
def _do_patch_libtool(self):
|
||||
|
@@ -5,6 +5,7 @@
|
||||
|
||||
"""Caches used by Spack to store data"""
|
||||
import os
|
||||
from typing import Union
|
||||
|
||||
import llnl.util.lang
|
||||
from llnl.util.filesystem import mkdirp
|
||||
@@ -31,8 +32,12 @@ def _misc_cache():
|
||||
return spack.util.file_cache.FileCache(path)
|
||||
|
||||
|
||||
FileCacheType = Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton]
|
||||
|
||||
#: Spack's cache for small data
|
||||
MISC_CACHE: spack.util.file_cache.FileCache = llnl.util.lang.Singleton(_misc_cache) # type: ignore
|
||||
MISC_CACHE: Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton] = (
|
||||
llnl.util.lang.Singleton(_misc_cache)
|
||||
)
|
||||
|
||||
|
||||
def fetch_cache_location():
|
||||
@@ -69,4 +74,6 @@ def store(self, fetcher, relative_dest):
|
||||
|
||||
|
||||
#: Spack's local cache for downloaded source archives
|
||||
FETCH_CACHE: spack.fetch_strategy.FsCache = llnl.util.lang.Singleton(_fetch_cache) # type: ignore
|
||||
FETCH_CACHE: Union[spack.fetch_strategy.FsCache, llnl.util.lang.Singleton] = (
|
||||
llnl.util.lang.Singleton(_fetch_cache)
|
||||
)
|
||||
|
@@ -8,7 +8,6 @@
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from collections import Counter
|
||||
from typing import List, Union
|
||||
|
||||
import llnl.string
|
||||
@@ -18,14 +17,12 @@
|
||||
from llnl.util.tty.colify import colify
|
||||
from llnl.util.tty.color import colorize
|
||||
|
||||
import spack.concretize
|
||||
import spack.config # breaks a cycle.
|
||||
import spack.environment as ev
|
||||
import spack.error
|
||||
import spack.extensions
|
||||
import spack.parser
|
||||
import spack.paths
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
import spack.store
|
||||
import spack.traverse as traverse
|
||||
@@ -167,9 +164,7 @@ def quote_kvp(string: str) -> str:
|
||||
|
||||
|
||||
def parse_specs(
|
||||
args: Union[str, List[str]],
|
||||
concretize: bool = False,
|
||||
tests: spack.concretize.TestsType = False,
|
||||
args: Union[str, List[str]], concretize: bool = False, tests: bool = False
|
||||
) -> List[spack.spec.Spec]:
|
||||
"""Convenience function for parsing arguments from specs. Handles common
|
||||
exceptions and dies if there are errors.
|
||||
@@ -178,68 +173,10 @@ def parse_specs(
|
||||
arg_string = " ".join([quote_kvp(arg) for arg in args])
|
||||
|
||||
specs = spack.parser.parse(arg_string)
|
||||
if not concretize:
|
||||
return specs
|
||||
|
||||
to_concretize: List[spack.concretize.SpecPairInput] = [(s, None) for s in specs]
|
||||
return _concretize_spec_pairs(to_concretize, tests=tests)
|
||||
|
||||
|
||||
def _concretize_spec_pairs(
|
||||
to_concretize: List[spack.concretize.SpecPairInput], tests: spack.concretize.TestsType = False
|
||||
) -> List[spack.spec.Spec]:
|
||||
"""Helper method that concretizes abstract specs from a list of abstract,concrete pairs.
|
||||
|
||||
Any spec with a concrete spec associated with it will concretize to that spec. Any spec
|
||||
with ``None`` for its concrete spec will be newly concretized. This method respects unification
|
||||
rules from config."""
|
||||
unify = spack.config.get("concretizer:unify", False)
|
||||
|
||||
# Special case for concretizing a single spec
|
||||
if len(to_concretize) == 1:
|
||||
abstract, concrete = to_concretize[0]
|
||||
return [concrete or abstract.concretized(tests=tests)]
|
||||
|
||||
# Special case if every spec is either concrete or has an abstract hash
|
||||
if all(
|
||||
concrete or abstract.concrete or abstract.abstract_hash
|
||||
for abstract, concrete in to_concretize
|
||||
):
|
||||
# Get all the concrete specs
|
||||
ret = [
|
||||
concrete or (abstract if abstract.concrete else abstract.lookup_hash())
|
||||
for abstract, concrete in to_concretize
|
||||
]
|
||||
|
||||
# If unify: true, check that specs don't conflict
|
||||
# Since all concrete, "when_possible" is not relevant
|
||||
if unify is True: # True, "when_possible", False are possible values
|
||||
runtimes = spack.repo.PATH.packages_with_tags("runtime")
|
||||
specs_per_name = Counter(
|
||||
spec.name
|
||||
for spec in traverse.traverse_nodes(
|
||||
ret, deptype=("link", "run"), key=traverse.by_dag_hash
|
||||
)
|
||||
if spec.name not in runtimes # runtimes are allowed multiple times
|
||||
)
|
||||
|
||||
conflicts = sorted(name for name, count in specs_per_name.items() if count > 1)
|
||||
if conflicts:
|
||||
raise spack.error.SpecError(
|
||||
"Specs conflict and `concretizer:unify` is configured true.",
|
||||
f" specs depend on multiple versions of {', '.join(conflicts)}",
|
||||
)
|
||||
return ret
|
||||
|
||||
# Standard case
|
||||
concretize_method = spack.concretize.concretize_separately # unify: false
|
||||
if unify is True:
|
||||
concretize_method = spack.concretize.concretize_together
|
||||
elif unify == "when_possible":
|
||||
concretize_method = spack.concretize.concretize_together_when_possible
|
||||
|
||||
concretized = concretize_method(to_concretize, tests=tests)
|
||||
return [concrete for _, concrete in concretized]
|
||||
for spec in specs:
|
||||
if concretize:
|
||||
spec.concretize(tests=tests)
|
||||
return specs
|
||||
|
||||
|
||||
def matching_spec_from_env(spec):
|
||||
@@ -255,22 +192,6 @@ def matching_spec_from_env(spec):
|
||||
return spec.concretized()
|
||||
|
||||
|
||||
def matching_specs_from_env(specs):
|
||||
"""
|
||||
Same as ``matching_spec_from_env`` but respects spec unification rules.
|
||||
|
||||
For each spec, if there is a matching spec in the environment it is used. If no
|
||||
matching spec is found, this will return the given spec but concretized in the
|
||||
context of the active environment and other given specs, with unification rules applied.
|
||||
"""
|
||||
env = ev.active_environment()
|
||||
spec_pairs = [(spec, env.matching_spec(spec) if env else None) for spec in specs]
|
||||
additional_concrete_specs = (
|
||||
[(concrete, concrete) for _, concrete in env.concretized_specs()] if env else []
|
||||
)
|
||||
return _concretize_spec_pairs(spec_pairs + additional_concrete_specs)[: len(spec_pairs)]
|
||||
|
||||
|
||||
def disambiguate_spec(spec, env, local=False, installed=True, first=False):
|
||||
"""Given a spec, figure out which installed package it refers to.
|
||||
|
||||
@@ -588,18 +509,6 @@ def __init__(self, name):
|
||||
super().__init__("{0} is not a permissible Spack command name.".format(name))
|
||||
|
||||
|
||||
class MultipleSpecsMatch(Exception):
|
||||
"""Raised when multiple specs match a constraint, in a context where
|
||||
this is not allowed.
|
||||
"""
|
||||
|
||||
|
||||
class NoSpecMatches(Exception):
|
||||
"""Raised when no spec matches a constraint, in a context where
|
||||
this is not allowed.
|
||||
"""
|
||||
|
||||
|
||||
########################################
|
||||
# argparse types for argument validation
|
||||
########################################
|
||||
|
@@ -29,7 +29,7 @@
|
||||
|
||||
|
||||
# Tarball to be downloaded if binary packages are requested in a local mirror
|
||||
BINARY_TARBALL = "https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.6/bootstrap-buildcache.tar.gz"
|
||||
BINARY_TARBALL = "https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.4/bootstrap-buildcache.tar.gz"
|
||||
|
||||
#: Subdirectory where to create the mirror
|
||||
LOCAL_MIRROR_DIR = "bootstrap_cache"
|
||||
@@ -51,9 +51,9 @@
|
||||
},
|
||||
}
|
||||
|
||||
CLINGO_JSON = "$spack/share/spack/bootstrap/github-actions-v0.6/clingo.json"
|
||||
GNUPG_JSON = "$spack/share/spack/bootstrap/github-actions-v0.6/gnupg.json"
|
||||
PATCHELF_JSON = "$spack/share/spack/bootstrap/github-actions-v0.6/patchelf.json"
|
||||
CLINGO_JSON = "$spack/share/spack/bootstrap/github-actions-v0.4/clingo.json"
|
||||
GNUPG_JSON = "$spack/share/spack/bootstrap/github-actions-v0.4/gnupg.json"
|
||||
PATCHELF_JSON = "$spack/share/spack/bootstrap/github-actions-v0.4/patchelf.json"
|
||||
|
||||
# Metadata for a generated source mirror
|
||||
SOURCE_METADATA = {
|
||||
|
@@ -105,8 +105,7 @@ def clean(parser, args):
|
||||
# Then do the cleaning falling through the cases
|
||||
if args.specs:
|
||||
specs = spack.cmd.parse_specs(args.specs, concretize=False)
|
||||
specs = spack.cmd.matching_specs_from_env(specs)
|
||||
|
||||
specs = list(spack.cmd.matching_spec_from_env(x) for x in specs)
|
||||
for spec in specs:
|
||||
msg = "Cleaning build stage [{0}]"
|
||||
tty.msg(msg.format(spec.short_spec))
|
||||
|
@@ -528,7 +528,6 @@ def __call__(self, parser, namespace, values, option_string):
|
||||
# the const from the constructor or a value from the CLI.
|
||||
# Note that this is only called if the argument is actually
|
||||
# specified on the command line.
|
||||
spack.config.CONFIG.ensure_scope_ordering()
|
||||
spack.config.set(self.config_path, self.const, scope="command_line")
|
||||
|
||||
|
||||
@@ -582,51 +581,23 @@ def add_concretizer_args(subparser):
|
||||
|
||||
|
||||
def add_connection_args(subparser, add_help):
|
||||
def add_argument_string_or_variable(parser, arg: str, *, deprecate_str: bool = True, **kwargs):
|
||||
group = parser.add_mutually_exclusive_group()
|
||||
group.add_argument(arg, **kwargs)
|
||||
# Update help string
|
||||
if "help" in kwargs:
|
||||
kwargs["help"] = "environment variable containing " + kwargs["help"]
|
||||
group.add_argument(arg + "-variable", **kwargs)
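The effect of the helper above, sketched on a bare argparse parser with one of the options from this diff: each option gains a mutually exclusive ``--<opt>-variable`` twin whose help text points at an environment variable instead of a literal value. The environment variable name in the example is made up:

    import argparse

    parser = argparse.ArgumentParser()
    group = parser.add_mutually_exclusive_group()
    group.add_argument("--s3-access-token", help="access token to use to connect to this S3 mirror")
    group.add_argument(
        "--s3-access-token-variable",
        help="environment variable containing access token to use to connect to this S3 mirror",
    )

    # passing both flags at once is rejected by argparse with
    #   "argument --s3-access-token-variable: not allowed with argument --s3-access-token"
    args = parser.parse_args(["--s3-access-token-variable", "MY_TOKEN_VAR"])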
|
||||
|
||||
s3_connection_parser = subparser.add_argument_group("S3 Connection")
|
||||
|
||||
add_argument_string_or_variable(
|
||||
s3_connection_parser,
|
||||
"--s3-access-key-id",
|
||||
help="ID string to use to connect to this S3 mirror",
|
||||
subparser.add_argument(
|
||||
"--s3-access-key-id", help="ID string to use to connect to this S3 mirror"
|
||||
)
|
||||
add_argument_string_or_variable(
|
||||
s3_connection_parser,
|
||||
"--s3-access-key-secret",
|
||||
help="secret string to use to connect to this S3 mirror",
|
||||
subparser.add_argument(
|
||||
"--s3-access-key-secret", help="secret string to use to connect to this S3 mirror"
|
||||
)
|
||||
add_argument_string_or_variable(
|
||||
s3_connection_parser,
|
||||
"--s3-access-token",
|
||||
help="access token to use to connect to this S3 mirror",
|
||||
subparser.add_argument(
|
||||
"--s3-access-token", help="access token to use to connect to this S3 mirror"
|
||||
)
|
||||
s3_connection_parser.add_argument(
|
||||
subparser.add_argument(
|
||||
"--s3-profile", help="S3 profile name to use to connect to this S3 mirror", default=None
|
||||
)
|
||||
s3_connection_parser.add_argument(
|
||||
subparser.add_argument(
|
||||
"--s3-endpoint-url", help="endpoint URL to use to connect to this S3 mirror"
|
||||
)
|
||||
|
||||
oci_connection_parser = subparser.add_argument_group("OCI Connection")
|
||||
|
||||
add_argument_string_or_variable(
|
||||
oci_connection_parser,
|
||||
"--oci-username",
|
||||
deprecate_str=False,
|
||||
help="username to use to connect to this OCI mirror",
|
||||
)
|
||||
add_argument_string_or_variable(
|
||||
oci_connection_parser,
|
||||
"--oci-password",
|
||||
help="password to use to connect to this OCI mirror",
|
||||
)
|
||||
subparser.add_argument("--oci-username", help="username to use to connect to this OCI mirror")
|
||||
subparser.add_argument("--oci-password", help="password to use to connect to this OCI mirror")
|
||||
|
||||
|
||||
def use_buildcache(cli_arg_value):
|
||||
|
@@ -10,12 +10,11 @@
|
||||
import sys
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
from typing import List, Optional, Set
|
||||
from typing import List, Optional
|
||||
|
||||
import llnl.string as string
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.symlink import islink, symlink
|
||||
from llnl.util.tty.colify import colify
|
||||
from llnl.util.tty.color import cescape, colorize
|
||||
|
||||
@@ -51,8 +50,6 @@
|
||||
"update",
|
||||
"revert",
|
||||
"depfile",
|
||||
"track",
|
||||
"untrack",
|
||||
]
|
||||
|
||||
|
||||
@@ -449,193 +446,6 @@ def env_deactivate(args):
|
||||
sys.stdout.write(cmds)
|
||||
|
||||
|
||||
#
|
||||
# env track
|
||||
#
|
||||
def env_track_setup_parser(subparser):
|
||||
"""track an environment from a directory in Spack"""
|
||||
subparser.add_argument("-n", "--name", help="custom environment name")
|
||||
subparser.add_argument("dir", help="path to environment")
|
||||
arguments.add_common_arguments(subparser, ["yes_to_all"])
|
||||
|
||||
|
||||
def env_track(args):
|
||||
src_path = os.path.abspath(args.dir)
|
||||
if not ev.is_env_dir(src_path):
|
||||
tty.die("Cannot track environment. Path doesn't contain an environment")
|
||||
|
||||
if args.name:
|
||||
name = args.name
|
||||
else:
|
||||
name = os.path.basename(src_path)
|
||||
|
||||
try:
|
||||
dst_path = ev.environment_dir_from_name(name, exists_ok=False)
|
||||
except ev.SpackEnvironmentError:
|
||||
tty.die(
|
||||
f"An environment named {name} already exists. Set a name with:"
|
||||
"\n\n"
|
||||
f" spack env track --name NAME {src_path}\n"
|
||||
)
|
||||
|
||||
symlink(src_path, dst_path)
|
||||
|
||||
tty.msg(f"Tracking environment in {src_path}")
|
||||
tty.msg(
|
||||
"You can now activate this environment with the following command:\n\n"
|
||||
f" spack env activate {name}\n"
|
||||
)
|
||||
|
||||
|
||||
#
|
||||
# env remove & untrack helpers
|
||||
#
|
||||
def filter_managed_env_names(env_names: Set[str]) -> Set[str]:
|
||||
tracked_env_names = {e for e in env_names if islink(ev.environment_dir_from_name(e))}
|
||||
managed_env_names = env_names - set(tracked_env_names)
|
||||
|
||||
num_managed_envs = len(managed_env_names)
|
||||
managed_envs_str = " ".join(managed_env_names)
|
||||
if num_managed_envs >= 2:
|
||||
tty.error(
|
||||
f"The following are not tracked environments. "
|
||||
"To remove them completely run,"
|
||||
"\n\n"
|
||||
f" spack env rm {managed_envs_str}\n"
|
||||
)
|
||||
|
||||
elif num_managed_envs > 0:
|
||||
tty.error(
|
||||
f"'{managed_envs_str}' is not a tracked env. "
|
||||
"To remove it completely run,"
|
||||
"\n\n"
|
||||
f" spack env rm {managed_envs_str}\n"
|
||||
)
|
||||
|
||||
return tracked_env_names
|
||||
|
||||
|
||||
def get_valid_envs(env_names: Set[str]) -> Set[ev.Environment]:
|
||||
valid_envs = set()
|
||||
for env_name in env_names:
|
||||
try:
|
||||
env = ev.read(env_name)
|
||||
valid_envs.add(env)
|
||||
|
||||
except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
|
||||
pass
|
||||
|
||||
return valid_envs
|
||||
|
||||
|
||||
def _env_untrack_or_remove(
|
||||
env_names: List[str], remove: bool = False, force: bool = False, yes_to_all: bool = False
|
||||
):
|
||||
all_env_names = set(ev.all_environment_names())
|
||||
known_env_names = set(env_names).intersection(all_env_names)
|
||||
unknown_env_names = set(env_names) - known_env_names
|
||||
|
||||
# print error for unknown environments
|
||||
for env_name in unknown_env_names:
|
||||
tty.error(f"Environment '{env_name}' does not exist")
|
||||
|
||||
# if only unlinking is allowed, remove all environments
|
||||
# which do not point internally at symlinks
|
||||
if not remove:
|
||||
env_names_to_remove = filter_managed_env_names(known_env_names)
|
||||
else:
|
||||
env_names_to_remove = known_env_names
|
||||
|
||||
# initialize all environments with valid spack.yaml configs
|
||||
all_valid_envs = get_valid_envs(all_env_names)
|
||||
|
||||
# build a task list of environments and bad env names to remove
|
||||
envs_to_remove = [e for e in all_valid_envs if e.name in env_names_to_remove]
|
||||
bad_env_names_to_remove = env_names_to_remove - {e.name for e in envs_to_remove}
|
||||
for remove_env in envs_to_remove:
|
||||
for env in all_valid_envs:
|
||||
# don't check if an environment is included in itself
|
||||
if env.name == remove_env.name:
|
||||
continue
|
||||
|
||||
# check if an environment is included in another
|
||||
if remove_env.path in env.included_concrete_envs:
|
||||
msg = f"Environment '{remove_env.name}' is used by environment '{env.name}'"
|
||||
if force:
|
||||
tty.warn(msg)
|
||||
else:
|
||||
tty.error(msg)
|
||||
envs_to_remove.remove(remove_env)
|
||||
|
||||
# ask the user if they really want to remove the known environments
|
||||
# force should do the same as yes to all here following the semantics of rm
|
||||
if not (yes_to_all or force) and (envs_to_remove or bad_env_names_to_remove):
|
||||
environments = string.plural(len(env_names_to_remove), "environment", show_n=False)
|
||||
envs = string.comma_and(list(env_names_to_remove))
|
||||
answer = tty.get_yes_or_no(
|
||||
f"Really {'remove' if remove else 'untrack'} {environments} {envs}?", default=False
|
||||
)
|
||||
if not answer:
|
||||
tty.die("Will not remove any environments")
|
||||
|
||||
# keep track of the environments we remove for later printing the exit code
|
||||
removed_env_names = []
|
||||
for env in envs_to_remove:
|
||||
name = env.name
|
||||
if not force and env.active:
|
||||
tty.error(
|
||||
f"Environment '{name}' can't be "
|
||||
f"{'removed' if remove else 'untracked'} while activated."
|
||||
)
|
||||
continue
|
||||
# Get path to check if environment is a tracked / symlinked environment
|
||||
if islink(env.path):
|
||||
real_env_path = os.path.realpath(env.path)
|
||||
os.unlink(env.path)
|
||||
tty.msg(
|
||||
f"Sucessfully untracked environment '{name}', "
|
||||
"but it can still be found at:\n\n"
|
||||
f" {real_env_path}\n"
|
||||
)
|
||||
else:
|
||||
env.destroy()
|
||||
tty.msg(f"Successfully removed environment '{name}'")
|
||||
|
||||
removed_env_names.append(env.name)
|
||||
|
||||
for bad_env_name in bad_env_names_to_remove:
|
||||
shutil.rmtree(
|
||||
spack.environment.environment.environment_dir_from_name(bad_env_name, exists_ok=True)
|
||||
)
|
||||
tty.msg(f"Successfully removed environment '{bad_env_name}'")
|
||||
removed_env_names.append(env.name)
|
||||
|
||||
# Following the design of linux rm we should exit with a status of 1
|
||||
# anytime we cannot delete every environment the user asks for.
|
||||
# However, we should still process all the environments we know about
|
||||
# and delete them instead of failing on the first unknown enviornment.
|
||||
if len(removed_env_names) < len(known_env_names):
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
#
|
||||
# env untrack
|
||||
#
|
||||
def env_untrack_setup_parser(subparser):
|
||||
"""track an environment from a directory in Spack"""
|
||||
subparser.add_argument("env", nargs="+", help="tracked environment name")
|
||||
subparser.add_argument(
|
||||
"-f", "--force", action="store_true", help="force unlink even when environment is active"
|
||||
)
|
||||
arguments.add_common_arguments(subparser, ["yes_to_all"])
|
||||
|
||||
|
||||
def env_untrack(args):
|
||||
_env_untrack_or_remove(
|
||||
env_names=args.env, force=args.force, yes_to_all=args.yes_to_all, remove=False
|
||||
)
|
||||
|
||||
|
||||
#
|
||||
# env remove
|
||||
#
|
||||
@@ -661,9 +471,54 @@ def env_remove_setup_parser(subparser):
|
||||
|
||||
def env_remove(args):
|
||||
"""remove existing environment(s)"""
|
||||
_env_untrack_or_remove(
|
||||
env_names=args.rm_env, remove=True, force=args.force, yes_to_all=args.yes_to_all
|
||||
)
|
||||
remove_envs = []
|
||||
valid_envs = []
|
||||
bad_envs = []
|
||||
|
||||
for env_name in ev.all_environment_names():
|
||||
try:
|
||||
env = ev.read(env_name)
|
||||
valid_envs.append(env)
|
||||
|
||||
if env_name in args.rm_env:
|
||||
remove_envs.append(env)
|
||||
except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
|
||||
if env_name in args.rm_env:
|
||||
bad_envs.append(env_name)
|
||||
|
||||
# Check if remove_env is included from another env before trying to remove
|
||||
for env in valid_envs:
|
||||
for remove_env in remove_envs:
|
||||
# don't check whether an environment is included in itself
|
||||
if env.name == remove_env.name:
|
||||
continue
|
||||
|
||||
if remove_env.path in env.included_concrete_envs:
|
||||
msg = f'Environment "{remove_env.name}" is being used by environment "{env.name}"'
|
||||
if args.force:
|
||||
tty.warn(msg)
|
||||
else:
|
||||
tty.die(msg)
|
||||
|
||||
if not args.yes_to_all:
|
||||
environments = string.plural(len(args.rm_env), "environment", show_n=False)
|
||||
envs = string.comma_and(args.rm_env)
|
||||
answer = tty.get_yes_or_no(f"Really remove {environments} {envs}?", default=False)
|
||||
if not answer:
|
||||
tty.die("Will not remove any environments")
|
||||
|
||||
for env in remove_envs:
|
||||
name = env.name
|
||||
if env.active:
|
||||
tty.die(f"Environment {name} can't be removed while activated.")
|
||||
env.destroy()
|
||||
tty.msg(f"Successfully removed environment '{name}'")
|
||||
|
||||
for bad_env_name in bad_envs:
|
||||
shutil.rmtree(
|
||||
spack.environment.environment.environment_dir_from_name(bad_env_name, exists_ok=True)
|
||||
)
|
||||
tty.msg(f"Successfully removed environment '{bad_env_name}'")
|
||||
|
||||
|
||||
#
|
||||
|
@@ -222,9 +222,11 @@ def decorator(spec, fmt):
|
||||
def display_env(env, args, decorator, results):
|
||||
"""Display extra find output when running in an environment.
|
||||
|
||||
In an environment, `spack find` outputs a preliminary section
|
||||
showing the root specs of the environment (this is in addition
|
||||
to the section listing out specs matching the query parameters).
|
||||
Find in an environment outputs 2 or 3 sections:
|
||||
|
||||
1. Root specs
|
||||
2. Concretized roots (if asked for with -c)
|
||||
3. Installed specs
|
||||
|
||||
"""
|
||||
tty.msg("In environment %s" % env.name)
|
||||
@@ -297,56 +299,6 @@ def root_decorator(spec, string):
|
||||
print()
|
||||
|
||||
|
||||
def _find_query(args, env):
|
||||
q_args = query_arguments(args)
|
||||
concretized_but_not_installed = list()
|
||||
if env:
|
||||
all_env_specs = env.all_specs()
|
||||
if args.constraint:
|
||||
init_specs = cmd.parse_specs(args.constraint)
|
||||
env_specs = env.all_matching_specs(*init_specs)
|
||||
else:
|
||||
env_specs = all_env_specs
|
||||
|
||||
spec_hashes = set(x.dag_hash() for x in env_specs)
|
||||
specs_meeting_q_args = set(spack.store.STORE.db.query(hashes=spec_hashes, **q_args))
|
||||
|
||||
results = list()
|
||||
with spack.store.STORE.db.read_transaction():
|
||||
for spec in env_specs:
|
||||
if not spec.installed:
|
||||
concretized_but_not_installed.append(spec)
|
||||
if spec in specs_meeting_q_args:
|
||||
results.append(spec)
|
||||
else:
|
||||
results = args.specs(**q_args)
|
||||
|
||||
# use groups by default except with format.
|
||||
if args.groups is None:
|
||||
args.groups = not args.format
|
||||
|
||||
# Exit early with an error code if no package matches the constraint
|
||||
if concretized_but_not_installed and args.show_concretized:
|
||||
pass
|
||||
elif results:
|
||||
pass
|
||||
elif args.constraint:
|
||||
raise cmd.NoSpecMatches()
|
||||
|
||||
# If tags have been specified on the command line, filter by tags
|
||||
if args.tags:
|
||||
packages_with_tags = spack.repo.PATH.packages_with_tags(*args.tags)
|
||||
results = [x for x in results if x.name in packages_with_tags]
|
||||
concretized_but_not_installed = [
|
||||
x for x in concretized_but_not_installed if x.name in packages_with_tags
|
||||
]
|
||||
|
||||
if args.loaded:
|
||||
results = cmd.filter_loaded_specs(results)
|
||||
|
||||
return results, concretized_but_not_installed
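`_find_query` effectively partitions an environment's specs into installed results and concretized-but-not-installed leftovers. A toy illustration of that partition, using a small stand-in dataclass rather than real `spack.spec.Spec` objects:

```python
from dataclasses import dataclass
from typing import List, Tuple


@dataclass(frozen=True)
class FakeSpec:  # stand-in for spack.spec.Spec, illustration only
    name: str
    installed: bool


def partition(env_specs: List[FakeSpec]) -> Tuple[List[FakeSpec], List[FakeSpec]]:
    """Split specs into installed results and concretized-but-not-installed
    leftovers, the same way _find_query does."""
    installed, pending = [], []
    for spec in env_specs:
        (installed if spec.installed else pending).append(spec)
    return installed, pending


specs = [FakeSpec("zlib", True), FakeSpec("hdf5", False), FakeSpec("cmake", True)]
results, concretized_but_not_installed = partition(specs)
print([s.name for s in results])                        # ['zlib', 'cmake']
print([s.name for s in concretized_but_not_installed])  # ['hdf5']
```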
def find(parser, args):
|
||||
env = ev.active_environment()
|
||||
|
||||
@@ -355,12 +307,34 @@ def find(parser, args):
|
||||
if not env and args.show_concretized:
|
||||
tty.die("-c / --show-concretized requires an active environment")
|
||||
|
||||
try:
|
||||
results, concretized_but_not_installed = _find_query(args, env)
|
||||
except cmd.NoSpecMatches:
|
||||
# Note: this uses args.constraint vs. args.constraint_specs because
|
||||
# the latter only exists if you call args.specs()
|
||||
tty.die(f"No package matches the query: {' '.join(args.constraint)}")
|
||||
if env:
|
||||
if args.constraint:
|
||||
init_specs = spack.cmd.parse_specs(args.constraint)
|
||||
results = env.all_matching_specs(*init_specs)
|
||||
else:
|
||||
results = env.all_specs()
|
||||
else:
|
||||
q_args = query_arguments(args)
|
||||
results = args.specs(**q_args)
|
||||
|
||||
decorator = make_env_decorator(env) if env else lambda s, f: f
|
||||
|
||||
# use groups by default except with format.
|
||||
if args.groups is None:
|
||||
args.groups = not args.format
|
||||
|
||||
# Exit early with an error code if no package matches the constraint
|
||||
if not results and args.constraint:
|
||||
constraint_str = " ".join(str(s) for s in args.constraint_specs)
|
||||
tty.die(f"No package matches the query: {constraint_str}")
|
||||
|
||||
# If tags have been specified on the command line, filter by tags
|
||||
if args.tags:
|
||||
packages_with_tags = spack.repo.PATH.packages_with_tags(*args.tags)
|
||||
results = [x for x in results if x.name in packages_with_tags]
|
||||
|
||||
if args.loaded:
|
||||
results = spack.cmd.filter_loaded_specs(results)
|
||||
|
||||
if args.install_status or args.show_concretized:
|
||||
status_fn = spack.spec.Spec.install_status
|
||||
@@ -371,16 +345,14 @@ def find(parser, args):
|
||||
if args.json:
|
||||
cmd.display_specs_as_json(results, deps=args.deps)
|
||||
else:
|
||||
decorator = make_env_decorator(env) if env else lambda s, f: f
|
||||
|
||||
if not args.format:
|
||||
if env:
|
||||
display_env(env, args, decorator, results)
|
||||
|
||||
if not args.only_roots:
|
||||
display_results = list(results)
|
||||
if args.show_concretized:
|
||||
display_results += concretized_but_not_installed
|
||||
display_results = results
|
||||
if not args.show_concretized:
|
||||
display_results = list(x for x in results if x.installed)
|
||||
cmd.display_specs(
|
||||
display_results, args, decorator=decorator, all_headers=True, status_fn=status_fn
|
||||
)
|
||||
@@ -398,9 +370,13 @@ def find(parser, args):
|
||||
concretized_suffix += " (show with `spack find -c`)"
|
||||
|
||||
pkg_type = "loaded" if args.loaded else "installed"
|
||||
cmd.print_how_many_pkgs(results, pkg_type, suffix=installed_suffix)
|
||||
spack.cmd.print_how_many_pkgs(
|
||||
list(x for x in results if x.installed), pkg_type, suffix=installed_suffix
|
||||
)
|
||||
|
||||
if env:
|
||||
cmd.print_how_many_pkgs(
|
||||
concretized_but_not_installed, "concretized", suffix=concretized_suffix
|
||||
spack.cmd.print_how_many_pkgs(
|
||||
list(x for x in results if not x.installed),
|
||||
"concretized",
|
||||
suffix=concretized_suffix,
|
||||
)
|
||||
|
@@ -3,6 +3,7 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import datetime
|
||||
import os
|
||||
import re
|
||||
from collections import defaultdict
|
||||
@@ -96,7 +97,7 @@ def list_files(args):
|
||||
OLD_LICENSE, SPDX_MISMATCH, GENERAL_MISMATCH = range(1, 4)
|
||||
|
||||
#: Latest year that copyright applies. UPDATE THIS when bumping copyright.
|
||||
latest_year = 2024 # year of 0.23 release
|
||||
latest_year = datetime.date.today().year
|
||||
strict_date = r"Copyright 2013-%s" % latest_year
|
||||
|
||||
#: regexes for valid license lines at tops of files
|
||||
|
@@ -98,9 +98,8 @@ def do_mark(specs, explicit):
|
||||
specs (list): list of specs to be marked
|
||||
explicit (bool): whether to mark specs as explicitly installed
|
||||
"""
|
||||
with spack.store.STORE.db.write_transaction():
|
||||
for spec in specs:
|
||||
spack.store.STORE.db.mark(spec, "explicit", explicit)
|
||||
for spec in specs:
|
||||
spack.store.STORE.db.update_explicit(spec, explicit)
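The newer `do_mark` wraps the whole loop in a single `db.write_transaction()`, so the database is locked and written once rather than per spec. A rough analogy using sqlite3 from the standard library (sqlite stands in for Spack's install database; the table and column names are made up):

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE installs (name TEXT PRIMARY KEY, explicit INTEGER)")
conn.executemany("INSERT INTO installs VALUES (?, 0)", [("zlib",), ("hdf5",)])

# One write transaction covering every update, analogous to wrapping the
# marking loop in a single db.write_transaction().
with conn:
    for name in ("zlib", "hdf5"):
        conn.execute("UPDATE installs SET explicit = 1 WHERE name = ?", (name,))

print(conn.execute("SELECT * FROM installs").fetchall())  # [('zlib', 1), ('hdf5', 1)]
```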
def mark_specs(args, specs):
|
||||
|
@@ -231,133 +231,31 @@ def setup_parser(subparser):
|
||||
)
|
||||
|
||||
|
||||
def _configure_access_pair(
|
||||
args, id_tok, id_variable_tok, secret_tok, secret_variable_tok, default=None
|
||||
):
|
||||
"""Configure the access_pair options"""
|
||||
|
||||
# Check if any of the arguments are set to update this access_pair.
|
||||
# If none are set, then skip computing the new access pair
|
||||
args_id = getattr(args, id_tok)
|
||||
args_id_variable = getattr(args, id_variable_tok)
|
||||
args_secret = getattr(args, secret_tok)
|
||||
args_secret_variable = getattr(args, secret_variable_tok)
|
||||
if not any([args_id, args_id_variable, args_secret, args_secret_variable]):
|
||||
return None
|
||||
|
||||
def _default_value(id_):
|
||||
if isinstance(default, list):
|
||||
return default[0] if id_ == "id" else default[1]
|
||||
elif isinstance(default, dict):
|
||||
return default.get(id_)
|
||||
else:
|
||||
return None
|
||||
|
||||
def _default_variable(id_):
|
||||
if isinstance(default, dict):
|
||||
return default.get(id_ + "_variable")
|
||||
else:
|
||||
return None
|
||||
|
||||
id_ = None
|
||||
id_variable = None
|
||||
secret = None
|
||||
secret_variable = None
|
||||
|
||||
# Get the value/default value if the argument of the inverse
|
||||
if not args_id_variable:
|
||||
id_ = getattr(args, id_tok) or _default_value("id")
|
||||
if not args_id:
|
||||
id_variable = getattr(args, id_variable_tok) or _default_variable("id")
|
||||
if not args_secret_variable:
|
||||
secret = getattr(args, secret_tok) or _default_value("secret")
|
||||
if not args_secret:
|
||||
secret_variable = getattr(args, secret_variable_tok) or _default_variable("secret")
|
||||
|
||||
if (id_ or id_variable) and (secret or secret_variable):
|
||||
if secret:
|
||||
if not id_:
|
||||
raise SpackError("Cannot add mirror with a variable id and text secret")
|
||||
|
||||
return [id_, secret]
|
||||
else:
|
||||
return dict(
|
||||
[
|
||||
(("id", id_) if id_ else ("id_variable", id_variable)),
|
||||
("secret_variable", secret_variable),
|
||||
]
|
||||
)
|
||||
else:
|
||||
if id_ or id_variable or secret or secret_variable is not None:
|
||||
id_arg_tok = id_tok.replace("_", "-")
|
||||
secret_arg_tok = secret_tok.replace("_", "-")
|
||||
tty.warn(
|
||||
"Expected both parts of the access pair to be specified. "
|
||||
f"(i.e. --{id_arg_tok} and --{secret_arg_tok})"
|
||||
)
|
||||
|
||||
return None
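For reference, a compressed, hypothetical restatement of the two shapes `_configure_access_pair` can hand back: the legacy `[id, secret]` list when both halves are plain text, or a dict recording which half comes from an environment variable. The argument values below are placeholders:

```python
def access_pair_shape(id_=None, id_variable=None, secret=None, secret_variable=None):
    """Compressed restatement of the two return shapes (illustration only)."""
    if not (id_ or id_variable) or not (secret or secret_variable):
        return None  # incomplete pair: nothing to configure
    if secret:  # plain-text secret: the id must be literal too
        if not id_:
            raise ValueError("cannot pair a variable id with a plain-text secret")
        return [id_, secret]  # legacy list form: [id, secret]
    return {  # secret supplied via an environment variable
        ("id" if id_ else "id_variable"): id_ or id_variable,
        "secret_variable": secret_variable,
    }


print(access_pair_shape(id_="my-key-id", secret="my-secret"))
print(access_pair_shape(id_="my-key-id", secret_variable="S3_SECRET"))
```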
def mirror_add(args):
|
||||
"""add a mirror to Spack"""
|
||||
if (
|
||||
args.s3_access_key_id
|
||||
or args.s3_access_key_secret
|
||||
or args.s3_access_token
|
||||
or args.s3_access_key_id_variable
|
||||
or args.s3_access_key_secret_variable
|
||||
or args.s3_access_token_variable
|
||||
or args.s3_profile
|
||||
or args.s3_endpoint_url
|
||||
or args.type
|
||||
or args.oci_username
|
||||
or args.oci_password
|
||||
or args.oci_username_variable
|
||||
or args.oci_password_variable
|
||||
or args.autopush
|
||||
or args.signed is not None
|
||||
):
|
||||
connection = {"url": args.url}
|
||||
# S3 Connection
|
||||
if args.s3_access_key_secret:
|
||||
tty.warn(
|
||||
"Configuring mirror secrets as plain text with --s3-access-key-secret is "
|
||||
"deprecated. Use --s3-access-key-secret-variable instead"
|
||||
)
|
||||
if args.oci_password:
|
||||
tty.warn(
|
||||
"Configuring mirror secrets as plain text with --oci-password is deprecated. "
|
||||
"Use --oci-password-variable instead"
|
||||
)
|
||||
access_pair = _configure_access_pair(
|
||||
args,
|
||||
"s3_access_key_id",
|
||||
"s3_access_key_id_variable",
|
||||
"s3_access_key_secret",
|
||||
"s3_access_key_secret_variable",
|
||||
)
|
||||
if access_pair:
|
||||
connection["access_pair"] = access_pair
|
||||
|
||||
if args.s3_access_key_id and args.s3_access_key_secret:
|
||||
connection["access_pair"] = [args.s3_access_key_id, args.s3_access_key_secret]
|
||||
if args.s3_access_token:
|
||||
connection["access_token"] = args.s3_access_token
|
||||
elif args.s3_access_token_variable:
|
||||
connection["access_token_variable"] = args.s3_access_token_variable
|
||||
|
||||
if args.s3_profile:
|
||||
connection["profile"] = args.s3_profile
|
||||
|
||||
if args.s3_endpoint_url:
|
||||
connection["endpoint_url"] = args.s3_endpoint_url
|
||||
|
||||
# OCI Connection
|
||||
access_pair = _configure_access_pair(
|
||||
args, "oci_username", "oci_username_variable", "oci_password", "oci_password_variable"
|
||||
)
|
||||
if access_pair:
|
||||
connection["access_pair"] = access_pair
|
||||
|
||||
if args.oci_username and args.oci_password:
|
||||
connection["access_pair"] = [args.oci_username, args.oci_password]
|
||||
if args.type:
|
||||
connection["binary"] = "binary" in args.type
|
||||
connection["source"] = "source" in args.type
|
||||
@@ -387,35 +285,16 @@ def _configure_mirror(args):
|
||||
changes = {}
|
||||
if args.url:
|
||||
changes["url"] = args.url
|
||||
|
||||
default_access_pair = entry._get_value("access_pair", direction or "fetch")
|
||||
# TODO: Init access_pair args with the fetch/push/base values in the current mirror state
|
||||
access_pair = _configure_access_pair(
|
||||
args,
|
||||
"s3_access_key_id",
|
||||
"s3_access_key_id_variable",
|
||||
"s3_access_key_secret",
|
||||
"s3_access_key_secret_variable",
|
||||
default=default_access_pair,
|
||||
)
|
||||
if access_pair:
|
||||
changes["access_pair"] = access_pair
|
||||
if args.s3_access_key_id and args.s3_access_key_secret:
|
||||
changes["access_pair"] = [args.s3_access_key_id, args.s3_access_key_secret]
|
||||
if args.s3_access_token:
|
||||
changes["access_token"] = args.s3_access_token
|
||||
if args.s3_profile:
|
||||
changes["profile"] = args.s3_profile
|
||||
if args.s3_endpoint_url:
|
||||
changes["endpoint_url"] = args.s3_endpoint_url
|
||||
access_pair = _configure_access_pair(
|
||||
args,
|
||||
"oci_username",
|
||||
"oci_username_variable",
|
||||
"oci_password",
|
||||
"oci_password_variable",
|
||||
default=default_access_pair,
|
||||
)
|
||||
if access_pair:
|
||||
changes["access_pair"] = access_pair
|
||||
if args.oci_username and args.oci_password:
|
||||
changes["access_pair"] = [args.oci_username, args.oci_password]
|
||||
if getattr(args, "signed", None) is not None:
|
||||
changes["signed"] = args.signed
|
||||
if getattr(args, "autopush", None) is not None:
|
||||
|
@@ -19,7 +19,6 @@
|
||||
import spack.modules
|
||||
import spack.modules.common
|
||||
import spack.repo
|
||||
from spack.cmd import MultipleSpecsMatch, NoSpecMatches
|
||||
from spack.cmd.common import arguments
|
||||
|
||||
description = "manipulate module files"
|
||||
@@ -92,6 +91,18 @@ def add_loads_arguments(subparser):
|
||||
arguments.add_common_arguments(subparser, ["recurse_dependencies"])
|
||||
|
||||
|
||||
class MultipleSpecsMatch(Exception):
|
||||
"""Raised when multiple specs match a constraint, in a context where
|
||||
this is not allowed.
|
||||
"""
|
||||
|
||||
|
||||
class NoSpecMatches(Exception):
|
||||
"""Raised when no spec matches a constraint, in a context where
|
||||
this is not allowed.
|
||||
"""
|
||||
|
||||
|
||||
def one_spec_or_raise(specs):
|
||||
"""Ensures exactly one spec has been selected, or raises the appropriate
|
||||
exception.
|
||||
|
@@ -33,9 +33,8 @@ def patch(parser, args):
|
||||
spack.config.set("config:checksum", False, scope="command_line")
|
||||
|
||||
specs = spack.cmd.parse_specs(args.specs, concretize=False)
|
||||
specs = spack.cmd.matching_specs_from_env(specs)
|
||||
for spec in specs:
|
||||
_patch(spec.package)
|
||||
_patch(spack.cmd.matching_spec_from_env(spec).package)
|
||||
|
||||
|
||||
def _patch_env(env: ev.Environment):
|
||||
|
@@ -82,6 +82,14 @@ def spec(parser, args):
|
||||
if args.namespaces:
|
||||
fmt = "{namespace}." + fmt
|
||||
|
||||
tree_kwargs = {
|
||||
"cover": args.cover,
|
||||
"format": fmt,
|
||||
"hashlen": None if args.very_long else 7,
|
||||
"show_types": args.types,
|
||||
"status_fn": install_status_fn if args.install_status else None,
|
||||
}
|
||||
|
||||
# use a read transaction if we are getting install status for every
|
||||
# spec in the DAG. This avoids repeatedly querying the DB.
|
||||
tree_context = lang.nullcontext
|
||||
@@ -91,35 +99,46 @@ def spec(parser, args):
|
||||
env = ev.active_environment()
|
||||
|
||||
if args.specs:
|
||||
concrete_specs = spack.cmd.parse_specs(args.specs, concretize=True)
|
||||
input_specs = spack.cmd.parse_specs(args.specs)
|
||||
concretized_specs = spack.cmd.parse_specs(args.specs, concretize=True)
|
||||
specs = list(zip(input_specs, concretized_specs))
|
||||
elif env:
|
||||
env.concretize()
|
||||
concrete_specs = env.concrete_roots()
|
||||
specs = env.concretized_specs()
|
||||
|
||||
if not args.format:
|
||||
# environments are printed together in a combined tree() invocation,
|
||||
# except when using --yaml or --json, which we print spec by spec below.
|
||||
tree_kwargs["key"] = spack.traverse.by_dag_hash
|
||||
tree_kwargs["hashes"] = args.long or args.very_long
|
||||
print(spack.spec.tree([concrete for _, concrete in specs], **tree_kwargs))
|
||||
return
|
||||
else:
|
||||
tty.die("spack spec requires at least one spec or an active environment")
|
||||
|
||||
# With --yaml, --json, or --format, just print the raw specs to output
|
||||
if args.format:
|
||||
for spec in concrete_specs:
|
||||
for input, output in specs:
|
||||
# With --yaml or --json, just print the raw specs to output
|
||||
if args.format:
|
||||
if args.format == "yaml":
|
||||
# use write because to_yaml already has a newline.
|
||||
sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
|
||||
sys.stdout.write(output.to_yaml(hash=ht.dag_hash))
|
||||
elif args.format == "json":
|
||||
print(spec.to_json(hash=ht.dag_hash))
|
||||
print(output.to_json(hash=ht.dag_hash))
|
||||
else:
|
||||
print(spec.format(args.format))
|
||||
return
|
||||
print(output.format(args.format))
|
||||
continue
|
||||
|
||||
with tree_context():
|
||||
print(
|
||||
spack.spec.tree(
|
||||
concrete_specs,
|
||||
cover=args.cover,
|
||||
format=fmt,
|
||||
hashlen=None if args.very_long else 7,
|
||||
show_types=args.types,
|
||||
status_fn=install_status_fn if args.install_status else None,
|
||||
hashes=args.long or args.very_long,
|
||||
key=spack.traverse.by_dag_hash,
|
||||
)
|
||||
)
|
||||
with tree_context():
|
||||
# Only show the headers for input specs that are not concrete to avoid
|
||||
# repeated output. This happens because parse_specs outputs concrete
|
||||
# specs for `/hash` inputs.
|
||||
if not input.concrete:
|
||||
tree_kwargs["hashes"] = False # Always False for input spec
|
||||
print("Input spec")
|
||||
print("--------------------------------")
|
||||
print(input.tree(**tree_kwargs))
|
||||
print("Concretized")
|
||||
print("--------------------------------")
|
||||
|
||||
tree_kwargs["hashes"] = args.long or args.very_long
|
||||
print(output.tree(**tree_kwargs))
|
||||
|
@@ -47,8 +47,8 @@ def stage(parser, args):
|
||||
if len(specs) > 1 and custom_path:
|
||||
tty.die("`--path` requires a single spec, but multiple were provided")
|
||||
|
||||
specs = spack.cmd.matching_specs_from_env(specs)
|
||||
for spec in specs:
|
||||
spec = spack.cmd.matching_spec_from_env(spec)
|
||||
pkg = spec.package
|
||||
|
||||
if custom_path:
|
||||
|
@@ -24,7 +24,7 @@
|
||||
|
||||
|
||||
# tutorial configuration parameters
|
||||
tutorial_branch = "releases/v0.23"
|
||||
tutorial_branch = "releases/v0.22"
|
||||
tutorial_mirror = "file:///mirror"
|
||||
tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")
|
||||
|
||||
|
@@ -4,23 +4,20 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import contextlib
|
||||
import hashlib
|
||||
import itertools
|
||||
import json
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
from typing import Dict, List, Optional, Sequence
|
||||
from typing import List, Optional, Sequence
|
||||
|
||||
import llnl.path
|
||||
import llnl.util.lang
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import path_contains_subdirectory, paths_containing_libs
|
||||
|
||||
import spack.caches
|
||||
import spack.error
|
||||
import spack.schema.environment
|
||||
import spack.spec
|
||||
@@ -29,7 +26,6 @@
|
||||
import spack.util.module_cmd
|
||||
import spack.version
|
||||
from spack.util.environment import filter_system_paths
|
||||
from spack.util.file_cache import FileCache
|
||||
|
||||
__all__ = ["Compiler"]
|
||||
|
||||
@@ -38,7 +34,7 @@
|
||||
|
||||
|
||||
@llnl.util.lang.memoized
|
||||
def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()) -> str:
|
||||
def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
|
||||
"""Invokes the compiler at a given path passing a single
|
||||
version argument and returns the output.
|
||||
|
||||
@@ -61,7 +57,7 @@ def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()) -
|
||||
return output
|
||||
|
||||
|
||||
def get_compiler_version_output(compiler_path, *args, **kwargs) -> str:
|
||||
def get_compiler_version_output(compiler_path, *args, **kwargs):
|
||||
"""Wrapper for _get_compiler_version_output()."""
|
||||
# This ensures that we memoize compiler output by *absolute path*,
|
||||
# not just executable name. If we don't do this, and the path changes
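A minimal sketch of the keying idea described in the comment above, with `functools.lru_cache` standing in for `llnl.util.lang.memoized` and the actual compiler invocation omitted:

```python
import functools
import os


@functools.lru_cache(maxsize=None)
def _version_output(absolute_path: str, version_arg: str) -> str:
    # The real helper runs the compiler here; this only shows the cache key.
    return f"output of: {absolute_path} {version_arg}"


def version_output(compiler_path: str, version_arg: str = "--version") -> str:
    """Memoize by absolute path so 'gcc' resolved at a new location is not
    served a result cached for a different executable."""
    return _version_output(os.path.abspath(compiler_path), version_arg)


print(version_output("gcc"))    # cached under the absolute path of ./gcc
print(version_output("./gcc"))  # same absolute path -> cache hit
```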
@@ -294,7 +290,6 @@ def __init__(
|
||||
self.environment = environment or {}
|
||||
self.extra_rpaths = extra_rpaths or []
|
||||
self.enable_implicit_rpaths = enable_implicit_rpaths
|
||||
self.cache = COMPILER_CACHE
|
||||
|
||||
self.cc = paths[0]
|
||||
self.cxx = paths[1]
|
||||
@@ -395,11 +390,15 @@ def real_version(self):
|
||||
|
||||
E.g. C++11 flag checks.
|
||||
"""
|
||||
real_version_str = self.cache.get(self).real_version
|
||||
if not real_version_str or real_version_str == "unknown":
|
||||
return self.version
|
||||
|
||||
return spack.version.StandardVersion.from_string(real_version_str)
|
||||
if not self._real_version:
|
||||
try:
|
||||
real_version = spack.version.Version(self.get_real_version())
|
||||
if real_version == spack.version.Version("unknown"):
|
||||
return self.version
|
||||
self._real_version = real_version
|
||||
except spack.util.executable.ProcessError:
|
||||
self._real_version = self.version
|
||||
return self._real_version
|
||||
|
||||
def implicit_rpaths(self) -> List[str]:
|
||||
if self.enable_implicit_rpaths is False:
|
||||
@@ -428,11 +427,6 @@ def default_dynamic_linker(self) -> Optional[str]:
|
||||
@property
|
||||
def default_libc(self) -> Optional["spack.spec.Spec"]:
|
||||
"""Determine libc targeted by the compiler from link line"""
|
||||
# technically this should be testing the target platform of the compiler, but we don't have
|
||||
# that, so stick to host platform for now.
|
||||
if sys.platform in ("darwin", "win32"):
|
||||
return None
|
||||
|
||||
dynamic_linker = self.default_dynamic_linker
|
||||
|
||||
if not dynamic_linker:
|
||||
@@ -451,23 +445,19 @@ def required_libs(self):
|
||||
@property
|
||||
def compiler_verbose_output(self) -> Optional[str]:
|
||||
"""Verbose output from compiling a dummy C source file. Output is cached."""
|
||||
return self.cache.get(self).c_compiler_output
|
||||
if not hasattr(self, "_compile_c_source_output"):
|
||||
self._compile_c_source_output = self._compile_dummy_c_source()
|
||||
return self._compile_c_source_output
|
||||
|
||||
def _compile_dummy_c_source(self) -> Optional[str]:
|
||||
if self.cc:
|
||||
cc = self.cc
|
||||
ext = "c"
|
||||
else:
|
||||
cc = self.cxx
|
||||
ext = "cc"
|
||||
|
||||
cc = self.cc if self.cc else self.cxx
|
||||
if not cc or not self.verbose_flag:
|
||||
return None
|
||||
|
||||
try:
|
||||
tmpdir = tempfile.mkdtemp(prefix="spack-implicit-link-info")
|
||||
fout = os.path.join(tmpdir, "output")
|
||||
fin = os.path.join(tmpdir, f"main.{ext}")
|
||||
fin = os.path.join(tmpdir, "main.c")
|
||||
|
||||
with open(fin, "w") as csource:
|
||||
csource.write(
|
||||
@@ -569,7 +559,7 @@ def fc_pic_flag(self):
|
||||
# Note: This is not a class method. The class methods are used to detect
|
||||
# compilers on PATH based systems, and do not set up the run environment of
|
||||
# the compiler. This method can be called on `module` based systems as well
|
||||
def get_real_version(self) -> str:
|
||||
def get_real_version(self):
|
||||
"""Query the compiler for its version.
|
||||
|
||||
This is the "real" compiler version, regardless of what is in the
|
||||
@@ -579,17 +569,14 @@ def get_real_version(self) -> str:
|
||||
modifications) to enable the compiler to run properly on any platform.
|
||||
"""
|
||||
cc = spack.util.executable.Executable(self.cc)
|
||||
try:
|
||||
with self.compiler_environment():
|
||||
output = cc(
|
||||
self.version_argument,
|
||||
output=str,
|
||||
error=str,
|
||||
ignore_errors=tuple(self.ignore_version_errors),
|
||||
)
|
||||
return self.extract_version_from_output(output)
|
||||
except spack.util.executable.ProcessError:
|
||||
return "unknown"
|
||||
with self.compiler_environment():
|
||||
output = cc(
|
||||
self.version_argument,
|
||||
output=str,
|
||||
error=str,
|
||||
ignore_errors=tuple(self.ignore_version_errors),
|
||||
)
|
||||
return self.extract_version_from_output(output)
|
||||
|
||||
@property
|
||||
def prefix(self):
|
||||
@@ -616,7 +603,7 @@ def default_version(cls, cc):
|
||||
|
||||
@classmethod
|
||||
@llnl.util.lang.memoized
|
||||
def extract_version_from_output(cls, output: str) -> str:
|
||||
def extract_version_from_output(cls, output):
|
||||
"""Extracts the version from compiler's output."""
|
||||
match = re.search(cls.version_regex, output)
|
||||
return match.group(1) if match else "unknown"
|
||||
@@ -745,106 +732,3 @@ def __init__(self, compiler, feature, flag_name, ver_string=None):
|
||||
)
|
||||
+ " implement the {0} property and submit a pull request or issue.".format(flag_name),
|
||||
)
|
||||
|
||||
|
||||
class CompilerCacheEntry:
|
||||
"""Deserialized cache entry for a compiler"""
|
||||
|
||||
__slots__ = ["c_compiler_output", "real_version"]
|
||||
|
||||
def __init__(self, c_compiler_output: Optional[str], real_version: str):
|
||||
self.c_compiler_output = c_compiler_output
|
||||
self.real_version = real_version
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, data: Dict[str, Optional[str]]):
|
||||
if not isinstance(data, dict):
|
||||
raise ValueError(f"Invalid {cls.__name__} data")
|
||||
c_compiler_output = data.get("c_compiler_output")
|
||||
real_version = data.get("real_version")
|
||||
if not isinstance(real_version, str) or not isinstance(
|
||||
c_compiler_output, (str, type(None))
|
||||
):
|
||||
raise ValueError(f"Invalid {cls.__name__} data")
|
||||
return cls(c_compiler_output, real_version)
|
||||
|
||||
|
||||
class CompilerCache:
|
||||
"""Base class for compiler output cache. Default implementation does not cache anything."""
|
||||
|
||||
def value(self, compiler: Compiler) -> Dict[str, Optional[str]]:
|
||||
return {
|
||||
"c_compiler_output": compiler._compile_dummy_c_source(),
|
||||
"real_version": compiler.get_real_version(),
|
||||
}
|
||||
|
||||
def get(self, compiler: Compiler) -> CompilerCacheEntry:
|
||||
return CompilerCacheEntry.from_dict(self.value(compiler))
|
||||
|
||||
|
||||
class FileCompilerCache(CompilerCache):
|
||||
"""Cache for compiler output, which is used to determine implicit link paths, the default libc
|
||||
version, and the compiler version."""
|
||||
|
||||
name = os.path.join("compilers", "compilers.json")
|
||||
|
||||
def __init__(self, cache: "FileCache") -> None:
|
||||
self.cache = cache
|
||||
self.cache.init_entry(self.name)
|
||||
self._data: Dict[str, Dict[str, Optional[str]]] = {}
|
||||
|
||||
def _get_entry(self, key: str) -> Optional[CompilerCacheEntry]:
|
||||
try:
|
||||
return CompilerCacheEntry.from_dict(self._data[key])
|
||||
except ValueError:
|
||||
del self._data[key]
|
||||
except KeyError:
|
||||
pass
|
||||
return None
|
||||
|
||||
def get(self, compiler: Compiler) -> CompilerCacheEntry:
|
||||
# Cache hit
|
||||
try:
|
||||
with self.cache.read_transaction(self.name) as f:
|
||||
assert f is not None
|
||||
self._data = json.loads(f.read())
|
||||
assert isinstance(self._data, dict)
|
||||
except (json.JSONDecodeError, AssertionError):
|
||||
self._data = {}
|
||||
|
||||
key = self._key(compiler)
|
||||
value = self._get_entry(key)
|
||||
if value is not None:
|
||||
return value
|
||||
|
||||
# Cache miss
|
||||
with self.cache.write_transaction(self.name) as (old, new):
|
||||
try:
|
||||
assert old is not None
|
||||
self._data = json.loads(old.read())
|
||||
assert isinstance(self._data, dict)
|
||||
except (json.JSONDecodeError, AssertionError):
|
||||
self._data = {}
|
||||
|
||||
# Use cache entry that may have been created by another process in the meantime.
|
||||
entry = self._get_entry(key)
|
||||
|
||||
# Finally compute the cache entry
|
||||
if entry is None:
|
||||
self._data[key] = self.value(compiler)
|
||||
entry = CompilerCacheEntry.from_dict(self._data[key])
|
||||
|
||||
new.write(json.dumps(self._data, separators=(",", ":")))
|
||||
|
||||
return entry
|
||||
|
||||
def _key(self, compiler: Compiler) -> str:
|
||||
as_bytes = json.dumps(compiler.to_dict(), separators=(",", ":")).encode("utf-8")
|
||||
return hashlib.sha256(as_bytes).hexdigest()
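The cache key is just a SHA-256 digest of the compiler's serialized description. A self-contained sketch of the same hashing recipe (`sort_keys=True` is added here for illustration; the real code relies on `to_dict()` producing a stable ordering):

```python
import hashlib
import json


def cache_key(record: dict) -> str:
    """Compact, canonical JSON hashed with SHA-256, mirroring FileCompilerCache._key."""
    as_bytes = json.dumps(record, separators=(",", ":"), sort_keys=True).encode("utf-8")
    return hashlib.sha256(as_bytes).hexdigest()


print(cache_key({"spec": "gcc@12.3.0", "paths": ["/usr/bin/gcc", "/usr/bin/g++"]}))
```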
def _make_compiler_cache():
|
||||
return FileCompilerCache(spack.caches.MISC_CACHE)
|
||||
|
||||
|
||||
COMPILER_CACHE: CompilerCache = llnl.util.lang.Singleton(_make_compiler_cache) # type: ignore
|
||||
|
@@ -116,5 +116,5 @@ def fflags(self):
|
||||
def _handle_default_flag_addtions(self):
|
||||
# This is a known issue for AOCC 3.0 see:
|
||||
# https://developer.amd.com/wp-content/resources/AOCC-3.0-Install-Guide.pdf
|
||||
if self.version.satisfies(ver("3.0.0")):
|
||||
if self.real_version.satisfies(ver("3.0.0")):
|
||||
return "-Wno-unused-command-line-argument " "-mllvm -eliminate-similar-expr=false"
|
||||
|
@@ -2,20 +2,14 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""High-level functions to concretize list of specs"""
|
||||
import sys
|
||||
import time
|
||||
"""
|
||||
(DEPRECATED) Used to contain the code for the original concretizer
|
||||
"""
|
||||
from contextlib import contextmanager
|
||||
from typing import Iterable, List, Optional, Sequence, Tuple, Union
|
||||
from itertools import chain
|
||||
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.compilers
|
||||
import spack.config
|
||||
import spack.error
|
||||
import spack.repo
|
||||
import spack.util.parallel
|
||||
from spack.spec import ArchSpec, CompilerSpec, Spec
|
||||
|
||||
CHECK_COMPILER_EXISTENCE = True
|
||||
|
||||
@@ -36,168 +30,67 @@ def enable_compiler_existence_check():
|
||||
CHECK_COMPILER_EXISTENCE = saved
|
||||
|
||||
|
||||
SpecPairInput = Tuple[Spec, Optional[Spec]]
|
||||
SpecPair = Tuple[Spec, Spec]
|
||||
SpecLike = Union[Spec, str]
|
||||
TestsType = Union[bool, Iterable[str]]
|
||||
def find_spec(spec, condition, default=None):
|
||||
"""Searches the dag from spec in an intelligent order and looks
|
||||
for a spec that matches a condition"""
|
||||
# First search parents, then search children
|
||||
deptype = ("build", "link")
|
||||
dagiter = chain(
|
||||
spec.traverse(direction="parents", deptype=deptype, root=False),
|
||||
spec.traverse(direction="children", deptype=deptype, root=False),
|
||||
)
|
||||
visited = set()
|
||||
for relative in dagiter:
|
||||
if condition(relative):
|
||||
return relative
|
||||
visited.add(id(relative))
|
||||
|
||||
# Then search all other relatives in the DAG *except* spec
|
||||
for relative in spec.root.traverse(deptype="all"):
|
||||
if relative is spec:
|
||||
continue
|
||||
if id(relative) in visited:
|
||||
continue
|
||||
if condition(relative):
|
||||
return relative
|
||||
|
||||
# Finally search spec itself.
|
||||
if condition(spec):
|
||||
return spec
|
||||
|
||||
return default # Nothing matched the condition; return default.
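The interesting part of `find_spec` is its search order: parents first, then children, then the rest of the DAG, and the starting spec only as a last resort. A toy reproduction of that ordering over a hand-written DAG (plain dicts stand in for `Spec` objects):

```python
from itertools import chain

# Toy DAG: node -> (parents, children); stands in for a Spec's build/link DAG.
DAG = {
    "root": ([], ["mpi", "zlib"]),
    "mpi": (["root"], ["hwloc"]),
    "zlib": (["root"], []),
    "hwloc": (["mpi"], []),
}


def ancestors(node):
    for parent in DAG[node][0]:
        yield parent
        yield from ancestors(parent)


def descendants(node):
    for child in DAG[node][1]:
        yield child
        yield from descendants(child)


def find_node(start, condition, default=None):
    """Same order as find_spec: parents, children, the rest of the DAG, then start."""
    visited = set()
    for relative in chain(ancestors(start), descendants(start)):
        if condition(relative):
            return relative
        visited.add(relative)
    for relative in DAG:  # everything else in the DAG, except start itself
        if relative == start or relative in visited:
            continue
        if condition(relative):
            return relative
    if condition(start):
        return start
    return default


print(find_node("mpi", lambda n: n.startswith("z")))  # -> 'zlib', found via the root
```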
def concretize_specs_together(
|
||||
abstract_specs: Sequence[SpecLike], tests: TestsType = False
|
||||
) -> Sequence[Spec]:
|
||||
def concretize_specs_together(*abstract_specs, **kwargs):
|
||||
"""Given a number of specs as input, tries to concretize them together.
|
||||
|
||||
Args:
|
||||
abstract_specs: abstract specs to be concretized
|
||||
tests: list of package names for which to consider tests dependencies. If True, all nodes
|
||||
will have test dependencies. If False, test dependencies will be disregarded.
|
||||
tests (bool or list or set): False to run no tests, True to test
|
||||
all packages, or a list of package names to run tests for some
|
||||
*abstract_specs: abstract specs to be concretized, given either
|
||||
as Specs or strings
|
||||
|
||||
Returns:
|
||||
List of concretized specs
|
||||
"""
|
||||
import spack.solver.asp
|
||||
|
||||
allow_deprecated = spack.config.get("config:deprecated", False)
|
||||
solver = spack.solver.asp.Solver()
|
||||
result = solver.solve(abstract_specs, tests=tests, allow_deprecated=allow_deprecated)
|
||||
result = solver.solve(
|
||||
abstract_specs, tests=kwargs.get("tests", False), allow_deprecated=allow_deprecated
|
||||
)
|
||||
return [s.copy() for s in result.specs]
|
||||
|
||||
|
||||
def concretize_together(
|
||||
spec_list: Sequence[SpecPairInput], tests: TestsType = False
|
||||
) -> List[SpecPair]:
|
||||
"""Given a number of specs as input, tries to concretize them together.
|
||||
|
||||
Args:
|
||||
spec_list: list of tuples to concretize. First entry is abstract spec, second entry is
|
||||
already concrete spec or None if not yet concretized
|
||||
tests: list of package names for which to consider tests dependencies. If True, all nodes
|
||||
will have test dependencies. If False, test dependencies will be disregarded.
|
||||
"""
|
||||
to_concretize = [concrete if concrete else abstract for abstract, concrete in spec_list]
|
||||
abstract_specs = [abstract for abstract, _ in spec_list]
|
||||
concrete_specs = concretize_specs_together(to_concretize, tests=tests)
|
||||
return list(zip(abstract_specs, concrete_specs))
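A stand-alone sketch of the bookkeeping in `concretize_together`: reuse any spec that is already concrete, solve the rest together, and zip the solver's output back against the original abstract specs. Strings stand in for `Spec` objects and `solve` stands in for `concretize_specs_together`:

```python
from typing import Callable, List, Optional, Tuple


def pair_up(
    spec_list: List[Tuple[str, Optional[str]]], solve: Callable[[List[str]], List[str]]
) -> List[Tuple[str, str]]:
    """Reuse existing concrete specs, solve everything together, and pair the
    results back with the original abstract specs."""
    to_concretize = [concrete if concrete else abstract for abstract, concrete in spec_list]
    abstract_specs = [abstract for abstract, _ in spec_list]
    concrete_specs = solve(to_concretize)
    return list(zip(abstract_specs, concrete_specs))


# 'solve' just tags its inputs here, standing in for the real solver call.
print(pair_up([("zlib", None), ("hdf5", "hdf5@1.14.3")], lambda xs: [f"{x} [solved]" for x in xs]))
# [('zlib', 'zlib [solved]'), ('hdf5', 'hdf5@1.14.3 [solved]')]
```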
def concretize_together_when_possible(
|
||||
spec_list: Sequence[SpecPairInput], tests: TestsType = False
|
||||
) -> List[SpecPair]:
|
||||
"""Given a number of specs as input, tries to concretize them together to the extent possible.
|
||||
|
||||
See documentation for ``unify: when_possible`` concretization for the precise definition of
|
||||
"to the extent possible".
|
||||
|
||||
Args:
|
||||
spec_list: list of tuples to concretize. First entry is abstract spec, second entry is
|
||||
already concrete spec or None if not yet concretized
|
||||
tests: list of package names for which to consider tests dependencies. If True, all nodes
|
||||
will have test dependencies. If False, test dependencies will be disregarded.
|
||||
"""
|
||||
import spack.solver.asp
|
||||
|
||||
to_concretize = [concrete if concrete else abstract for abstract, concrete in spec_list]
|
||||
old_concrete_to_abstract = {
|
||||
concrete: abstract for (abstract, concrete) in spec_list if concrete
|
||||
}
|
||||
|
||||
result_by_user_spec = {}
|
||||
solver = spack.solver.asp.Solver()
|
||||
allow_deprecated = spack.config.get("config:deprecated", False)
|
||||
for result in solver.solve_in_rounds(
|
||||
to_concretize, tests=tests, allow_deprecated=allow_deprecated
|
||||
):
|
||||
result_by_user_spec.update(result.specs_by_input)
|
||||
|
||||
# If the "abstract" spec is a concrete spec from the previous concretization
|
||||
# translate it back to an abstract spec. Otherwise, keep the abstract spec
|
||||
return [
|
||||
(old_concrete_to_abstract.get(abstract, abstract), concrete)
|
||||
for abstract, concrete in sorted(result_by_user_spec.items())
|
||||
]
|
||||
|
||||
|
||||
def concretize_separately(
|
||||
spec_list: Sequence[SpecPairInput], tests: TestsType = False
|
||||
) -> List[SpecPair]:
|
||||
"""Concretizes the input specs separately from each other.
|
||||
|
||||
Args:
|
||||
spec_list: list of tuples to concretize. First entry is abstract spec, second entry is
|
||||
already concrete spec or None if not yet concretized
|
||||
tests: list of package names for which to consider tests dependencies. If True, all nodes
|
||||
will have test dependencies. If False, test dependencies will be disregarded.
|
||||
"""
|
||||
import spack.bootstrap
|
||||
|
||||
to_concretize = [abstract for abstract, concrete in spec_list if not concrete]
|
||||
args = [
|
||||
(i, str(abstract), tests)
|
||||
for i, abstract in enumerate(to_concretize)
|
||||
if not abstract.concrete
|
||||
]
|
||||
ret = [(i, abstract) for i, abstract in enumerate(to_concretize) if abstract.concrete]
|
||||
# Ensure we don't try to bootstrap clingo in parallel
|
||||
with spack.bootstrap.ensure_bootstrap_configuration():
|
||||
spack.bootstrap.ensure_clingo_importable_or_raise()
|
||||
|
||||
# Ensure all the indexes have been built or updated, since
|
||||
# otherwise the processes in the pool may timeout on waiting
|
||||
# for a write lock. We do this indirectly by retrieving the
|
||||
# provider index, which should in turn trigger the update of
|
||||
# all the indexes if there's any need for that.
|
||||
_ = spack.repo.PATH.provider_index
|
||||
|
||||
# Ensure we have compilers in compilers.yaml to avoid that
|
||||
# processes try to write the config file in parallel
|
||||
_ = spack.compilers.all_compilers_config(spack.config.CONFIG)
|
||||
|
||||
# Early return if there is nothing to do
|
||||
if len(args) == 0:
|
||||
# Still have to combine the things that were passed in as abstract with the things
|
||||
# that were passed in as pairs
|
||||
return [(abstract, concrete) for abstract, (_, concrete) in zip(to_concretize, ret)] + [
|
||||
(abstract, concrete) for abstract, concrete in spec_list if concrete
|
||||
]
|
||||
|
||||
# Solve the environment in parallel on Linux
|
||||
# TODO: support parallel concretization on macOS and Windows
|
||||
num_procs = min(len(args), spack.config.determine_number_of_jobs(parallel=True))
|
||||
|
||||
for j, (i, concrete, duration) in enumerate(
|
||||
spack.util.parallel.imap_unordered(
|
||||
_concretize_task, args, processes=num_procs, debug=tty.is_debug(), maxtaskperchild=1
|
||||
)
|
||||
):
|
||||
ret.append((i, concrete))
|
||||
percentage = (j + 1) / len(args) * 100
|
||||
tty.verbose(
|
||||
f"{duration:6.1f}s [{percentage:3.0f}%] {concrete.cformat('{hash:7}')} "
|
||||
f"{to_concretize[i].colored_str}"
|
||||
)
|
||||
sys.stdout.flush()
|
||||
|
||||
# Add specs in original order
|
||||
ret.sort(key=lambda x: x[0])
|
||||
|
||||
return [(abstract, concrete) for abstract, (_, concrete) in zip(to_concretize, ret)] + [
|
||||
(abstract, concrete) for abstract, concrete in spec_list if concrete
|
||||
]
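Because `imap_unordered` yields results in completion order, each task carries its index and the collected results are sorted at the end to restore the user's ordering. A tiny illustration of that tag-and-sort pattern (the shuffle stands in for out-of-order completion):

```python
import random


def solve(task):
    index, spec = task
    return index, f"{spec} [concrete]"  # pretend concretization


tasks = list(enumerate(["zlib", "hdf5", "cmake"]))
random.shuffle(tasks)                   # results may arrive in any order
results = [solve(t) for t in tasks]

results.sort(key=lambda item: item[0])  # restore the original input order
print([spec for _, spec in results])
# ['zlib [concrete]', 'hdf5 [concrete]', 'cmake [concrete]']
```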
def _concretize_task(packed_arguments: Tuple[int, str, TestsType]) -> Tuple[int, Spec, float]:
|
||||
index, spec_str, tests = packed_arguments
|
||||
with tty.SuppressOutput(msg_enabled=False):
|
||||
start = time.time()
|
||||
spec = Spec(spec_str).concretized(tests=tests)
|
||||
return index, spec, time.time() - start
|
||||
|
||||
|
||||
class UnavailableCompilerVersionError(spack.error.SpackError):
|
||||
"""Raised when there is no available compiler that satisfies a
|
||||
compiler spec."""
|
||||
|
||||
def __init__(self, compiler_spec: CompilerSpec, arch: Optional[ArchSpec] = None) -> None:
|
||||
err_msg = f"No compilers with spec {compiler_spec} found"
|
||||
def __init__(self, compiler_spec, arch=None):
|
||||
err_msg = "No compilers with spec {0} found".format(compiler_spec)
|
||||
if arch:
|
||||
err_msg += f" for operating system {arch.os} and target {arch.target}."
|
||||
err_msg += " for operating system {0} and target {1}.".format(arch.os, arch.target)
|
||||
|
||||
super().__init__(
|
||||
err_msg,
|
||||
|
@@ -427,23 +427,6 @@ def __init__(self, *scopes: ConfigScope) -> None:
|
||||
self.push_scope(scope)
|
||||
self.format_updates: Dict[str, List[ConfigScope]] = collections.defaultdict(list)
|
||||
|
||||
def ensure_unwrapped(self) -> "Configuration":
|
||||
"""Ensure we unwrap this object from any dynamic wrapper (like Singleton)"""
|
||||
return self
|
||||
|
||||
def highest(self) -> ConfigScope:
|
||||
"""Scope with highest precedence"""
|
||||
return next(reversed(self.scopes.values())) # type: ignore
|
||||
|
||||
@_config_mutator
|
||||
def ensure_scope_ordering(self):
|
||||
"""Ensure that scope order matches documented precedent"""
|
||||
# FIXME: We also need to consider that custom configurations and other orderings
|
||||
# may not be preserved correctly
|
||||
if "command_line" in self.scopes:
|
||||
# TODO (when dropping python 3.6): self.scopes.move_to_end
|
||||
self.scopes["command_line"] = self.remove_scope("command_line")
|
||||
|
||||
@_config_mutator
|
||||
def push_scope(self, scope: ConfigScope) -> None:
|
||||
"""Add a higher precedence scope to the Configuration."""
|
||||
@@ -731,7 +714,7 @@ def print_section(self, section: str, blame: bool = False, *, scope=None) -> Non
|
||||
@contextlib.contextmanager
|
||||
def override(
|
||||
path_or_scope: Union[ConfigScope, str], value: Optional[Any] = None
|
||||
) -> Generator[Configuration, None, None]:
|
||||
) -> Generator[Union[lang.Singleton, Configuration], None, None]:
|
||||
"""Simple way to override config settings within a context.
|
||||
|
||||
Arguments:
|
||||
@@ -769,7 +752,13 @@ def override(
|
||||
assert scope is overrides
|
||||
|
||||
|
||||
def _add_platform_scope(cfg: Configuration, name: str, path: str, writable: bool = True) -> None:
|
||||
#: configuration scopes added on the command line set by ``spack.main.main()``
|
||||
COMMAND_LINE_SCOPES: List[str] = []
|
||||
|
||||
|
||||
def _add_platform_scope(
|
||||
cfg: Union[Configuration, lang.Singleton], name: str, path: str, writable: bool = True
|
||||
) -> None:
|
||||
"""Add a platform-specific subdirectory for the current platform."""
|
||||
platform = spack.platforms.host().name
|
||||
scope = DirectoryConfigScope(
|
||||
@@ -803,7 +792,9 @@ def config_paths_from_entry_points() -> List[Tuple[str, str]]:
|
||||
return config_paths
|
||||
|
||||
|
||||
def _add_command_line_scopes(cfg: Configuration, command_line_scopes: List[str]) -> None:
|
||||
def _add_command_line_scopes(
|
||||
cfg: Union[Configuration, lang.Singleton], command_line_scopes: List[str]
|
||||
) -> None:
|
||||
"""Add additional scopes from the --config-scope argument, either envs or dirs."""
|
||||
import spack.environment.environment as env # circular import
|
||||
|
||||
@@ -873,11 +864,18 @@ def create() -> Configuration:
|
||||
# Each scope can have per-platform overrides in subdirectories
|
||||
_add_platform_scope(cfg, name, path)
|
||||
|
||||
# add command-line scopes
|
||||
_add_command_line_scopes(cfg, COMMAND_LINE_SCOPES)
|
||||
|
||||
# we make a special scope for spack commands so that they can
|
||||
# override configuration options.
|
||||
cfg.push_scope(InternalConfigScope("command_line"))
|
||||
|
||||
return cfg
|
||||
|
||||
|
||||
#: This is the singleton configuration instance for Spack.
|
||||
CONFIG: Configuration = lang.Singleton(create) # type: ignore
|
||||
CONFIG: Union[Configuration, lang.Singleton] = lang.Singleton(create)
|
||||
|
||||
|
||||
def add_from_file(filename: str, scope: Optional[str] = None) -> None:
|
||||
|
@@ -1336,7 +1336,7 @@ def _deprecate(self, spec: "spack.spec.Spec", deprecator: "spack.spec.Spec") ->
|
||||
self._data[spec_key] = spec_rec
|
||||
|
||||
@_autospec
|
||||
def mark(self, spec: "spack.spec.Spec", key: str, value: Any) -> None:
|
||||
def mark(self, spec: "spack.spec.Spec", key, value) -> None:
|
||||
"""Mark an arbitrary record on a spec."""
|
||||
with self.write_transaction():
|
||||
return self._mark(spec, key, value)
|
||||
@@ -1771,6 +1771,24 @@ def root(key, record):
|
||||
if id(rec.spec) not in needed and rec.installed
|
||||
]
|
||||
|
||||
def update_explicit(self, spec, explicit):
|
||||
"""
|
||||
Update the spec's explicit state in the database.
|
||||
|
||||
Args:
|
||||
spec (spack.spec.Spec): the spec whose install record is being updated
|
||||
explicit (bool): ``True`` if the package was requested explicitly
|
||||
by the user, ``False`` if it was pulled in as a dependency of
|
||||
an explicit package.
|
||||
"""
|
||||
rec = self.get_record(spec)
|
||||
if explicit != rec.explicit:
|
||||
with self.write_transaction():
|
||||
message = "{s.name}@{s.version} : marking the package {0}"
|
||||
status = "explicit" if explicit else "implicit"
|
||||
tty.debug(message.format(status, s=spec))
|
||||
rec.explicit = explicit
|
||||
|
||||
|
||||
class NoUpstreamVisitor:
|
||||
"""Gives edges to upstream specs, but does follow edges from upstream specs."""
|
||||
|
@@ -64,7 +64,6 @@ class OpenMpi(Package):
|
||||
"DirectiveMeta",
|
||||
"DisableRedistribute",
|
||||
"version",
|
||||
"conditional",
|
||||
"conflicts",
|
||||
"depends_on",
|
||||
"extends",
|
||||
@@ -77,7 +76,6 @@ class OpenMpi(Package):
|
||||
"build_system",
|
||||
"requires",
|
||||
"redistribute",
|
||||
"can_splice",
|
||||
]
|
||||
|
||||
_patch_order_index = 0
|
||||
@@ -506,43 +504,6 @@ def _execute_provides(pkg: "spack.package_base.PackageBase"):
|
||||
return _execute_provides
|
||||
|
||||
|
||||
@directive("splice_specs")
|
||||
def can_splice(
|
||||
target: SpecType, *, when: SpecType, match_variants: Union[None, str, List[str]] = None
|
||||
):
|
||||
"""Packages can declare whether they are ABI-compatible with another package
|
||||
and thus can be spliced into concrete versions of that package.
|
||||
|
||||
Args:
|
||||
target: The spec that the current package is ABI-compatible with.
|
||||
|
||||
when: An anonymous spec constraining current package for when it is
|
||||
ABI-compatible with target.
|
||||
|
||||
match_variants: A list of variants that must match
|
||||
between target spec and current package, with special value '*'
|
||||
which matches all variants. Example: a variant is defined on both
|
||||
packages called json, and they are ABI-compatible whenever they agree on
|
||||
the json variant (regardless of whether it is turned on or off). Note
|
||||
that this cannot be applied to multi-valued variants and multi-valued
|
||||
variants will be skipped by '*'.
|
||||
"""
|
||||
|
||||
def _execute_can_splice(pkg: "spack.package_base.PackageBase"):
|
||||
when_spec = _make_when_spec(when)
|
||||
if isinstance(match_variants, str) and match_variants != "*":
|
||||
raise ValueError(
|
||||
"* is the only valid string for match_variants "
|
||||
"if looking to provide a single variant, use "
|
||||
f"[{match_variants}] instead"
|
||||
)
|
||||
if when_spec is None:
|
||||
return
|
||||
pkg.splice_specs[when_spec] = (spack.spec.Spec(target), match_variants)
|
||||
|
||||
return _execute_can_splice
|
||||
|
||||
|
||||
@directive("patches")
|
||||
def patch(
|
||||
url_or_filename: str,
|
||||
@@ -616,15 +577,6 @@ def _execute_patch(pkg_or_dep: Union["spack.package_base.PackageBase", Dependenc
|
||||
return _execute_patch
|
||||
|
||||
|
||||
def conditional(*values: List[Any], when: Optional[WhenType] = None):
|
||||
"""Conditional values that can be used in variant declarations."""
|
||||
# _make_when_spec returns None when the condition is statically false.
|
||||
when = _make_when_spec(when)
|
||||
return spack.variant.ConditionalVariantValues(
|
||||
spack.variant.ConditionalValue(x, when=when) for x in values
|
||||
)
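A hypothetical `package.py` excerpt showing how `conditional()` is typically used inside a `variant()` declaration, modeled on the pattern from the packaging documentation (the package name, versions, checksums, and values are invented):

```python
from spack.package import *


class Example(Package):
    """Illustrative only: not a real Spack package."""

    homepage = "https://example.com"
    url = "https://example.com/example-1.0.tar.gz"

    version("2.0", sha256="0" * 64)  # placeholder checksum
    version("1.0", sha256="1" * 64)  # placeholder checksum

    variant(
        "cxxstd",
        default="14",
        values=("14", conditional("17", when="@2.0:")),  # "17" allowed only for @2.0 and newer
        multi=False,
        description="C++ standard to compile with",
    )
```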
@directive("variants")
|
||||
def variant(
|
||||
name: str,
|
||||
|
@@ -473,7 +473,6 @@
|
||||
active_environment,
|
||||
all_environment_names,
|
||||
all_environments,
|
||||
as_env_dir,
|
||||
create,
|
||||
create_in_dir,
|
||||
deactivate,
|
||||
@@ -481,7 +480,6 @@
|
||||
default_view_name,
|
||||
display_specs,
|
||||
environment_dir_from_name,
|
||||
environment_from_name_or_dir,
|
||||
exists,
|
||||
initialize_environment_dir,
|
||||
installed_specs,
|
||||
@@ -509,7 +507,6 @@
|
||||
"active_environment",
|
||||
"all_environment_names",
|
||||
"all_environments",
|
||||
"as_env_dir",
|
||||
"create",
|
||||
"create_in_dir",
|
||||
"deactivate",
|
||||
@@ -517,7 +514,6 @@
|
||||
"default_view_name",
|
||||
"display_specs",
|
||||
"environment_dir_from_name",
|
||||
"environment_from_name_or_dir",
|
||||
"exists",
|
||||
"initialize_environment_dir",
|
||||
"installed_specs",
|
||||
|
@@ -11,19 +11,22 @@
|
||||
import re
|
||||
import shutil
|
||||
import stat
|
||||
import sys
|
||||
import time
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
import warnings
|
||||
from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple, Union
|
||||
from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Union
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
import llnl.util.tty.color as clr
|
||||
from llnl.util.link_tree import ConflictingSpecsError
|
||||
from llnl.util.symlink import islink, readlink, symlink
|
||||
from llnl.util.symlink import readlink, symlink
|
||||
|
||||
import spack
|
||||
import spack.caches
|
||||
import spack.compilers
|
||||
import spack.concretize
|
||||
import spack.config
|
||||
import spack.deptypes as dt
|
||||
@@ -42,6 +45,7 @@
|
||||
import spack.util.environment
|
||||
import spack.util.hash
|
||||
import spack.util.lock as lk
|
||||
import spack.util.parallel
|
||||
import spack.util.path
|
||||
import spack.util.spack_json as sjson
|
||||
import spack.util.spack_yaml as syaml
|
||||
@@ -53,8 +57,6 @@
|
||||
from spack.spec_list import SpecList
|
||||
from spack.util.path import substitute_path_variables
|
||||
|
||||
SpecPair = spack.concretize.SpecPair
|
||||
|
||||
#: environment variable used to indicate the active environment
|
||||
spack_env_var = "SPACK_ENV"
|
||||
|
||||
@@ -275,22 +277,6 @@ def is_env_dir(path):
|
||||
return os.path.isdir(path) and os.path.exists(os.path.join(path, manifest_name))
|
||||
|
||||
|
||||
def as_env_dir(name_or_dir):
|
||||
"""Translate an environment name or directory to the environment directory"""
|
||||
if is_env_dir(name_or_dir):
|
||||
return name_or_dir
|
||||
else:
|
||||
validate_env_name(name_or_dir)
|
||||
if not exists(name_or_dir):
|
||||
raise SpackEnvironmentError("no such environment '%s'" % name_or_dir)
|
||||
return root(name_or_dir)
|
||||
|
||||
|
||||
def environment_from_name_or_dir(name_or_dir):
|
||||
"""Get an environment with the supplied name."""
|
||||
return Environment(as_env_dir(name_or_dir))
|
||||
|
||||
|
||||
def read(name):
|
||||
"""Get an environment with the supplied name."""
|
||||
validate_env_name(name)
|
||||
@@ -668,7 +654,7 @@ def from_dict(base_path, d):
|
||||
|
||||
@property
|
||||
def _current_root(self):
|
||||
if not islink(self.root):
|
||||
if not os.path.islink(self.root):
|
||||
return None
|
||||
|
||||
root = readlink(self.root)
|
||||
@@ -1508,7 +1494,7 @@ def deconcretize(self, spec: spack.spec.Spec, concrete: bool = True):
|
||||
|
||||
def _get_specs_to_concretize(
|
||||
self,
|
||||
) -> Tuple[List[spack.spec.Spec], List[spack.spec.Spec], List[SpecPair]]:
|
||||
) -> Tuple[Set[spack.spec.Spec], Set[spack.spec.Spec], List[spack.spec.Spec]]:
|
||||
"""Compute specs to concretize for unify:true and unify:when_possible.
|
||||
|
||||
This includes new user specs and any already concretized specs.
|
||||
@@ -1518,20 +1504,23 @@ def _get_specs_to_concretize(
|
||||
|
||||
"""
|
||||
# Exit early if the set of concretized specs is the set of user specs
|
||||
new_user_specs = list(set(self.user_specs) - set(self.concretized_user_specs))
|
||||
kept_user_specs = list(set(self.user_specs) & set(self.concretized_user_specs))
|
||||
kept_user_specs += self.included_user_specs
|
||||
new_user_specs = set(self.user_specs) - set(self.concretized_user_specs)
|
||||
kept_user_specs = set(self.user_specs) & set(self.concretized_user_specs)
|
||||
if not new_user_specs:
|
||||
return new_user_specs, kept_user_specs, []
|
||||
|
||||
specs_to_concretize = [(s, None) for s in new_user_specs] + [
|
||||
(abstract, concrete)
|
||||
concrete_specs_to_keep = [
|
||||
concrete
|
||||
for abstract, concrete in self.concretized_specs()
|
||||
if abstract in kept_user_specs
|
||||
]
|
||||
|
||||
specs_to_concretize = list(new_user_specs) + concrete_specs_to_keep
|
||||
return new_user_specs, kept_user_specs, specs_to_concretize
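The set arithmetic in `_get_specs_to_concretize` is easy to see with plain strings standing in for user specs:

```python
user_specs = ["zlib", "hdf5", "cmake"]        # roots requested in spack.yaml
concretized_user_specs = ["zlib", "openssl"]  # roots from the previous solve

new_user_specs = set(user_specs) - set(concretized_user_specs)   # still need solving
kept_user_specs = set(user_specs) & set(concretized_user_specs)  # reuse the old result

print(sorted(new_user_specs))   # ['cmake', 'hdf5']
print(sorted(kept_user_specs))  # ['zlib']
```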
def _concretize_together_where_possible(self, tests: bool = False) -> Sequence[SpecPair]:
|
||||
def _concretize_together_where_possible(
|
||||
self, tests: bool = False
|
||||
) -> List[Tuple[spack.spec.Spec, spack.spec.Spec]]:
|
||||
# Avoid cyclic dependency
|
||||
import spack.solver.asp
|
||||
|
||||
@@ -1540,26 +1529,36 @@ def _concretize_together_where_possible(self, tests: bool = False) -> Sequence[S
|
||||
if not new_user_specs:
|
||||
return []
|
||||
|
||||
old_concrete_to_abstract = {
|
||||
concrete: abstract for (abstract, concrete) in self.concretized_specs()
|
||||
}
|
||||
|
||||
self.concretized_user_specs = []
|
||||
self.concretized_order = []
|
||||
self.specs_by_hash = {}
|
||||
|
||||
ret = []
|
||||
result = spack.concretize.concretize_together_when_possible(
|
||||
specs_to_concretize, tests=tests
|
||||
)
|
||||
for abstract, concrete in result:
|
||||
# Only add to the environment if it's from this environment (not included in)
|
||||
if abstract in self.user_specs:
|
||||
self._add_concrete_spec(abstract, concrete)
|
||||
result_by_user_spec = {}
|
||||
solver = spack.solver.asp.Solver()
|
||||
allow_deprecated = spack.config.get("config:deprecated", False)
|
||||
for result in solver.solve_in_rounds(
|
||||
specs_to_concretize, tests=tests, allow_deprecated=allow_deprecated
|
||||
):
|
||||
result_by_user_spec.update(result.specs_by_input)
|
||||
|
||||
# Return only the new specs
|
||||
result = []
|
||||
for abstract, concrete in sorted(result_by_user_spec.items()):
|
||||
# If the "abstract" spec is a concrete spec from the previous concretization
|
||||
# translate it back to an abstract spec. Otherwise, keep the abstract spec
|
||||
abstract = old_concrete_to_abstract.get(abstract, abstract)
|
||||
if abstract in new_user_specs:
|
||||
ret.append((abstract, concrete))
|
||||
result.append((abstract, concrete))
|
||||
self._add_concrete_spec(abstract, concrete)
|
||||
|
||||
return ret
|
||||
return result
|
||||
|
||||
def _concretize_together(self, tests: bool = False) -> Sequence[SpecPair]:
|
||||
def _concretize_together(
|
||||
self, tests: bool = False
|
||||
) -> List[Tuple[spack.spec.Spec, spack.spec.Spec]]:
|
||||
"""Concretization strategy that concretizes all the specs
|
||||
in the same DAG.
|
||||
"""
|
||||
@@ -1573,8 +1572,8 @@ def _concretize_together(self, tests: bool = False) -> Sequence[SpecPair]:
self.specs_by_hash = {}

try:
concretized_specs = spack.concretize.concretize_together(
specs_to_concretize, tests=tests
concrete_specs: List[spack.spec.Spec] = spack.concretize.concretize_specs_together(
*specs_to_concretize, tests=tests
)
except spack.error.UnsatisfiableSpecError as e:
# "Enhance" the error message for multiple root specs, suggest a less strict
@@ -1592,13 +1591,14 @@ def _concretize_together(self, tests: bool = False) -> Sequence[SpecPair]:
)
raise

# set() | set() does not preserve ordering, even though sets are ordered
ordered_user_specs = list(new_user_specs) + list(kept_user_specs)
concretized_specs = [x for x in zip(ordered_user_specs, concrete_specs)]
for abstract, concrete in concretized_specs:
# Don't add if it's just included
if abstract in self.user_specs:
self._add_concrete_spec(abstract, concrete)
self._add_concrete_spec(abstract, concrete)

# Return the portion of the return value that is new
return concretized_specs[: len(new_user_specs)]
# zip truncates the longer list, which is exactly what we want here
return list(zip(new_user_specs, concrete_specs))
def _concretize_separately(self, tests=False):
"""Concretization strategy that concretizes separately one
@@ -1620,16 +1620,71 @@ def _concretize_separately(self, tests=False):
concrete = old_specs_by_hash[h]
self._add_concrete_spec(s, concrete, new=False)

to_concretize = [
(root, None) for root in self.user_specs if root not in old_concretized_user_specs
]
concretized_specs = spack.concretize.concretize_separately(to_concretize, tests=tests)
# Concretize any new user specs that we haven't concretized yet
args, root_specs, i = [], [], 0
for uspec in self.user_specs:
if uspec not in old_concretized_user_specs:
root_specs.append(uspec)
args.append((i, str(uspec), tests))
i += 1

by_hash = {}
for abstract, concrete in concretized_specs:
self._add_concrete_spec(abstract, concrete)
# Ensure we don't try to bootstrap clingo in parallel
with spack.bootstrap.ensure_bootstrap_configuration():
spack.bootstrap.ensure_clingo_importable_or_raise()

# Ensure all the indexes have been built or updated, since
# otherwise the processes in the pool may timeout on waiting
# for a write lock. We do this indirectly by retrieving the
# provider index, which should in turn trigger the update of
# all the indexes if there's any need for that.
_ = spack.repo.PATH.provider_index

# Ensure we have compilers in compilers.yaml to avoid that
# processes try to write the config file in parallel
_ = spack.compilers.all_compilers_config(spack.config.CONFIG)

# Early return if there is nothing to do
if len(args) == 0:
return []

# Solve the environment in parallel on Linux
start = time.time()
num_procs = min(len(args), spack.config.determine_number_of_jobs(parallel=True))

# TODO: support parallel concretization on macOS and Windows
msg = "Starting concretization"
if sys.platform not in ("darwin", "win32") and num_procs > 1:
msg += f" pool with {num_procs} processes"
tty.msg(msg)

batch = []
for j, (i, concrete, duration) in enumerate(
spack.util.parallel.imap_unordered(
_concretize_task,
args,
processes=num_procs,
debug=tty.is_debug(),
maxtaskperchild=1,
)
):
batch.append((i, concrete))
percentage = (j + 1) / len(args) * 100
tty.verbose(
f"{duration:6.1f}s [{percentage:3.0f}%] {concrete.cformat('{hash:7}')} "
f"{root_specs[i].colored_str}"
)
sys.stdout.flush()

# Add specs in original order
batch.sort(key=lambda x: x[0])
by_hash = {}  # for attaching information on test dependencies
for root, (_, concrete) in zip(root_specs, batch):
self._add_concrete_spec(root, concrete)
by_hash[concrete.dag_hash()] = concrete

finish = time.time()
tty.msg(f"Environment concretized in {finish - start:.2f} seconds")

# Unify the specs objects, so we get correct references to all parents
self._read_lockfile_dict(self._to_lockfile_dict())
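The loop above relies on results arriving out of order from the worker pool and being re-sorted by their submission index. A minimal standalone sketch of that pattern, using plain multiprocessing and placeholder work instead of Spack's own utilities, looks like this (all names here are illustrative, not Spack's API):

    # Sketch of the unordered-map-then-reorder pattern used above.
    import multiprocessing
    import time

    def concretize_one(packed):
        index, spec_str = packed
        start = time.time()
        result = spec_str.upper()  # stand-in for Spec(spec_str).concretized()
        return index, result, time.time() - start

    def concretize_all(spec_strs, processes=4):
        args = list(enumerate(spec_strs))
        batch = []
        with multiprocessing.Pool(processes=processes, maxtasksperchild=1) as pool:
            for index, result, _duration in pool.imap_unordered(concretize_one, args):
                batch.append((index, result))
        batch.sort(key=lambda x: x[0])  # restore the original submission order
        return [result for _, result in batch]

    if __name__ == "__main__":
        print(concretize_all(["zlib", "hdf5+mpi", "openmpi"], processes=2))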
@@ -1649,7 +1704,11 @@ def _concretize_separately(self, tests=False):
test_dependency.copy(), depflag=dt.TEST, virtuals=current_edge.virtuals
)

return concretized_specs
results = [
(abstract, self.specs_by_hash[h])
for abstract, h in zip(self.concretized_user_specs, self.concretized_order)
]
return results

@property
def default_view(self):
@@ -2456,6 +2515,14 @@ def display_specs(specs):
print(tree_string)


def _concretize_task(packed_arguments) -> Tuple[int, Spec, float]:
index, spec_str, tests = packed_arguments
with tty.SuppressOutput(msg_enabled=False):
start = time.time()
spec = Spec(spec_str).concretized(tests=tests)
return index, spec, time.time() - start


def make_repo_path(root):
"""Make a RepoPath from the repo subdirectories in an environment."""
path = spack.repo.RepoPath(cache=spack.caches.MISC_CACHE)
@@ -3044,13 +3111,11 @@ def prepare_config_scope(self) -> None:
"""Add the manifest's scopes to the global configuration search path."""
for scope in self.env_config_scopes:
spack.config.CONFIG.push_scope(scope)
spack.config.CONFIG.ensure_scope_ordering()

def deactivate_config_scope(self) -> None:
"""Remove any of the manifest's scopes from the global config path."""
for scope in self.env_config_scopes:
spack.config.CONFIG.remove_scope(scope.name)
spack.config.CONFIG.ensure_scope_ordering()

@contextlib.contextmanager
def use_config(self):
@@ -21,40 +21,44 @@
features.
"""
import importlib
import types
from typing import List, Optional

from llnl.util.lang import ensure_last, list_modules

import spack.paths


class _HookRunner:
#: Order in which hooks are executed
HOOK_ORDER = [
"spack.hooks.module_file_generation",
"spack.hooks.licensing",
"spack.hooks.sbang",
"spack.hooks.windows_runtime_linkage",
"spack.hooks.drop_redundant_rpaths",
"spack.hooks.absolutify_elf_sonames",
"spack.hooks.permissions_setters",
# after all mutations to the install prefix, write metadata
"spack.hooks.write_install_manifest",
# after all metadata is written
"spack.hooks.autopush",
]

#: Contains all hook modules after first call, shared among all HookRunner objects
_hooks: Optional[List[types.ModuleType]] = None
#: Stores all hooks on first call, shared among
#: all HookRunner objects
_hooks = None

def __init__(self, hook_name):
self.hook_name = hook_name

@classmethod
def _populate_hooks(cls):
# Lazily populate the list of hooks
cls._hooks = []

relative_names = list(list_modules(spack.paths.hooks_path))

# write_install_manifest should come after any mutation of the install prefix, and
# autopush should include the install manifest.
ensure_last(relative_names, "absolutify_elf_sonames", "write_install_manifest", "autopush")

for name in relative_names:
module_name = __name__ + "." + name
module_obj = importlib.import_module(module_name)
cls._hooks.append((module_name, module_obj))

@property
def hooks(self) -> List[types.ModuleType]:
def hooks(self):
if not self._hooks:
self._hooks = [importlib.import_module(module_name) for module_name in self.HOOK_ORDER]
self._populate_hooks()
return self._hooks

def __call__(self, *args, **kwargs):
for module in self.hooks:
for _, module in self.hooks:
if hasattr(module, self.hook_name):
hook = getattr(module, self.hook_name)
if hasattr(hook, "__call__"):
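Both sides of this hunk dispatch the same way: every hook module that defines a function with the requested name gets called, in a fixed order. A toy, self-contained sketch of that dispatch (fake modules standing in for the real spack.hooks entries) is:

    # Toy illustration of the _HookRunner dispatch shown above.
    import types

    def _make_hook_module(name, **functions):
        module = types.ModuleType(name)
        for fn_name, fn in functions.items():
            setattr(module, fn_name, fn)
        return module

    hook_modules = [
        _make_hook_module("write_install_manifest", post_install=lambda spec: print("manifest", spec)),
        _make_hook_module("autopush", post_install=lambda spec: print("autopush", spec)),
    ]

    def run_hook(hook_name, *args):
        for module in hook_modules:
            hook = getattr(module, hook_name, None)
            if callable(hook):
                hook(*args)

    run_hook("post_install", "zlib@1.3")  # calls both toy hooks, in list order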
@@ -412,7 +412,7 @@ def _process_external_package(pkg: "spack.package_base.PackageBase", explicit: b
tty.debug(f"{pre} already registered in DB")
record = spack.store.STORE.db.get_record(spec)
if explicit and not record.explicit:
spack.store.STORE.db.mark(spec, "explicit", True)
spack.store.STORE.db.update_explicit(spec, explicit)

except KeyError:
# If not, register it and generate the module file.
@@ -1507,8 +1507,8 @@ def _prepare_for_install(self, task: Task) -> None:
self._update_installed(task)

# Only update the explicit entry once for the explicit package
if task.explicit and not rec.explicit:
spack.store.STORE.db.mark(task.pkg.spec, "explicit", True)
if task.explicit:
spack.store.STORE.db.update_explicit(task.pkg.spec, True)

def _cleanup_all_tasks(self) -> None:
"""Cleanup all tasks to include releasing their locks."""
@@ -911,6 +911,13 @@ def _main(argv=None):
# Make spack load / env activate work on macOS
restore_macos_dyld_vars()

# make spack.config aware of any command line configuration scopes
if args.config_scopes:
spack.config.COMMAND_LINE_SCOPES = args.config_scopes

# ensure options on spack command come before everything
setup_main_options(args)

# activate an environment if one was specified on the command line
env_format_error = None
if not args.no_env:
@@ -924,12 +931,6 @@ def _main(argv=None):
e.print_context()
env_format_error = e

# Push scopes from the command line last
if args.config_scopes:
spack.config._add_command_line_scopes(spack.config.CONFIG, args.config_scopes)
spack.config.CONFIG.push_scope(spack.config.InternalConfigScope("command_line"))
setup_main_options(args)

# ------------------------------------------------------------------------
# Things that require configuration should go below here
# ------------------------------------------------------------------------
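The point of pushing command-line scopes after the environment is activated is precedence: in a scope stack, the scope pushed last wins lookups. A toy model of that behavior (not Spack's actual configuration API) is:

    # Toy scope stack: later pushes take precedence on lookup.
    scopes = []  # ordered low -> high precedence

    def push_scope(name, data):
        scopes.append((name, data))

    def get(key):
        for _, data in reversed(scopes):
            if key in data:
                return data[key]
        return None

    push_scope("env", {"config:install_tree": "/env/tree"})
    push_scope("command_line", {"config:install_tree": "/cli/tree"})
    assert get("config:install_tree") == "/cli/tree"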
@@ -18,7 +18,7 @@
|
||||
import sys
|
||||
import traceback
|
||||
import urllib.parse
|
||||
from typing import Any, Dict, Optional, Tuple, Union
|
||||
from typing import List, Optional, Union
|
||||
|
||||
import llnl.url
|
||||
import llnl.util.symlink
|
||||
@@ -153,66 +153,8 @@ def push_url(self):
|
||||
"""Get the valid, canonicalized fetch URL"""
|
||||
return self.get_url("push")
|
||||
|
||||
def ensure_mirror_usable(self, direction: str = "push"):
|
||||
access_pair = self._get_value("access_pair", direction)
|
||||
access_token_variable = self._get_value("access_token_variable", direction)
|
||||
|
||||
errors = []
|
||||
|
||||
# Verify that the credentials that are variables expand
|
||||
if access_pair and isinstance(access_pair, dict):
|
||||
if "id_variable" in access_pair and access_pair["id_variable"] not in os.environ:
|
||||
errors.append(f"id_variable {access_pair['id_variable']} not set in environment")
|
||||
if "secret_variable" in access_pair:
|
||||
if access_pair["secret_variable"] not in os.environ:
|
||||
errors.append(
|
||||
f"environment variable `{access_pair['secret_variable']}` "
|
||||
"(secret_variable) not set"
|
||||
)
|
||||
|
||||
if access_token_variable:
|
||||
if access_token_variable not in os.environ:
|
||||
errors.append(
|
||||
f"environment variable `{access_pair['access_token_variable']}` "
|
||||
"(access_token_variable) not set"
|
||||
)
|
||||
|
||||
if errors:
|
||||
msg = f"invalid {direction} configuration for mirror {self.name}: "
|
||||
msg += "\n ".join(errors)
|
||||
raise MirrorError(msg)
|
||||
|
||||
def _update_connection_dict(self, current_data: dict, new_data: dict, top_level: bool):
|
||||
# Only allow one to exist in the config
|
||||
if "access_token" in current_data and "access_token_variable" in new_data:
|
||||
current_data.pop("access_token")
|
||||
elif "access_token_variable" in current_data and "access_token" in new_data:
|
||||
current_data.pop("access_token_variable")
|
||||
|
||||
# If updating to a new access_pair that is the deprecated list, warn
|
||||
warn_deprecated_access_pair = False
|
||||
if "access_pair" in new_data:
|
||||
warn_deprecated_access_pair = isinstance(new_data["access_pair"], list)
|
||||
# If not updating the current access_pair, and it is the deprecated list, warn
|
||||
elif "access_pair" in current_data:
|
||||
warn_deprecated_access_pair = isinstance(current_data["access_pair"], list)
|
||||
|
||||
if warn_deprecated_access_pair:
|
||||
tty.warn(
|
||||
f"in mirror {self.name}: support for plain text secrets in config files "
|
||||
"(access_pair: [id, secret]) is deprecated and will be removed in a future Spack "
|
||||
"version. Use environment variables instead (access_pair: "
|
||||
"{id: ..., secret_variable: ...})"
|
||||
)
|
||||
|
||||
keys = [
|
||||
"url",
|
||||
"access_pair",
|
||||
"access_token",
|
||||
"access_token_variable",
|
||||
"profile",
|
||||
"endpoint_url",
|
||||
]
|
||||
keys = ["url", "access_pair", "access_token", "profile", "endpoint_url"]
|
||||
if top_level:
|
||||
keys += ["binary", "source", "signed", "autopush"]
|
||||
changed = False
|
||||
@@ -328,53 +270,11 @@ def get_url(self, direction: str) -> str:
|
||||
|
||||
return _url_or_path_to_url(url)
|
||||
|
||||
def get_credentials(self, direction: str) -> Dict[str, Any]:
|
||||
"""Get the mirror credentials from the mirror config
|
||||
|
||||
Args:
|
||||
direction: fetch or push mirror config
|
||||
|
||||
Returns:
|
||||
Dictionary from credential type string to value
|
||||
|
||||
Credential Type Map:
|
||||
access_token -> str
|
||||
access_pair -> tuple(str,str)
|
||||
profile -> str
|
||||
"""
|
||||
creddict: Dict[str, Any] = {}
|
||||
access_token = self.get_access_token(direction)
|
||||
if access_token:
|
||||
creddict["access_token"] = access_token
|
||||
|
||||
access_pair = self.get_access_pair(direction)
|
||||
if access_pair:
|
||||
creddict.update({"access_pair": access_pair})
|
||||
|
||||
profile = self.get_profile(direction)
|
||||
if profile:
|
||||
creddict["profile"] = profile
|
||||
|
||||
return creddict
|
||||
|
||||
def get_access_token(self, direction: str) -> Optional[str]:
|
||||
tok = self._get_value("access_token_variable", direction)
|
||||
if tok:
|
||||
return os.environ.get(tok)
|
||||
else:
|
||||
return self._get_value("access_token", direction)
|
||||
return None
|
||||
return self._get_value("access_token", direction)
|
||||
|
||||
def get_access_pair(self, direction: str) -> Optional[Tuple[str, str]]:
|
||||
pair = self._get_value("access_pair", direction)
|
||||
if isinstance(pair, (tuple, list)) and len(pair) == 2:
|
||||
return (pair[0], pair[1]) if all(pair) else None
|
||||
elif isinstance(pair, dict):
|
||||
id_ = os.environ.get(pair["id_variable"]) if "id_variable" in pair else pair["id"]
|
||||
secret = os.environ.get(pair["secret_variable"])
|
||||
return (id_, secret) if id_ and secret else None
|
||||
else:
|
||||
return None
|
||||
def get_access_pair(self, direction: str) -> Optional[List]:
|
||||
return self._get_value("access_pair", direction)
|
||||
|
||||
def get_profile(self, direction: str) -> Optional[str]:
|
||||
return self._get_value("profile", direction)
|
||||
|
@@ -377,10 +377,9 @@ def credentials_from_mirrors(
# Prefer push credentials over fetch. Unlikely that those are different
# but our config format allows it.
for direction in ("push", "fetch"):
pair = mirror.get_credentials(direction).get("access_pair")
if not pair:
pair = mirror.get_access_pair(direction)
if pair is None:
continue

url = mirror.get_url(direction)
if not url.startswith("oci://"):
continue
@@ -397,7 +396,6 @@ def create_opener():
"""Create an opener that can handle OCI authentication."""
opener = urllib.request.OpenerDirector()
for handler in [
urllib.request.ProxyHandler(),
urllib.request.UnknownHandler(),
urllib.request.HTTPSHandler(context=spack.util.web.ssl_create_default_context()),
spack.util.web.SpackHTTPDefaultErrorHandler(),
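The access_pair handling removed/added in this file supports both the deprecated plain-text list form and the newer dict form whose secret (and optionally id) are read from environment variables. A hedged standalone sketch of that resolution logic (a hypothetical helper, not the Mirror class itself) is:

    # Sketch of resolving the dict-style access_pair described above.
    import os
    from typing import Optional, Tuple

    def resolve_access_pair(pair) -> Optional[Tuple[str, str]]:
        if isinstance(pair, (tuple, list)) and len(pair) == 2:
            # deprecated plain-text form: [id, secret]
            return (pair[0], pair[1]) if all(pair) else None
        if isinstance(pair, dict):
            id_ = os.environ.get(pair["id_variable"]) if "id_variable" in pair else pair.get("id")
            secret_var = pair.get("secret_variable")
            secret = os.environ.get(secret_var) if secret_var else None
            return (id_, secret) if id_ and secret else None
        return None

    # e.g., with S3_SECRET exported in the environment:
    # resolve_access_pair({"id": "AKIA...", "secret_variable": "S3_SECRET"})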
@@ -103,7 +103,12 @@
from spack.spec import InvalidSpecDetected, Spec
from spack.util.executable import *
from spack.util.filesystem import file_command, fix_darwin_install_name, mime_type
from spack.variant import any_combination_of, auto_or_any_combination_of, disjoint_sets
from spack.variant import (
any_combination_of,
auto_or_any_combination_of,
conditional,
disjoint_sets,
)
from spack.version import Version, ver

# These are just here for editor support; they will be replaced when the build env
@@ -622,7 +622,6 @@ class PackageBase(WindowsRPath, PackageViewMixin, RedistributionMixin, metaclass
patches: Dict["spack.spec.Spec", List["spack.patch.Patch"]]
variants: Dict["spack.spec.Spec", Dict[str, "spack.variant.Variant"]]
languages: Dict["spack.spec.Spec", Set[str]]
splice_specs: Dict["spack.spec.Spec", Tuple["spack.spec.Spec", Union[None, str, List[str]]]]

#: By default, packages are not virtual
#: Virtual packages override this attribute
@@ -13,7 +13,6 @@
import macholib.mach_o
import macholib.MachO

import llnl.util.filesystem as fs
import llnl.util.lang
import llnl.util.tty as tty
from llnl.util.lang import memoized
@@ -276,10 +275,10 @@ def modify_macho_object(cur_path, rpaths, deps, idpath, paths_to_paths):

# Deduplicate and flatten
args = list(itertools.chain.from_iterable(llnl.util.lang.dedupe(args)))
install_name_tool = executable.Executable("install_name_tool")
if args:
with fs.edit_in_place_through_temporary_file(cur_path) as temp_path:
install_name_tool(*args, temp_path)
args.append(str(cur_path))
install_name_tool = executable.Executable("install_name_tool")
install_name_tool(*args)


def macholib_get_paths(cur_path):
@@ -718,8 +717,8 @@ def fixup_macos_rpath(root, filename):
# No fixes needed
return False

with fs.edit_in_place_through_temporary_file(abspath) as temp_path:
executable.Executable("install_name_tool")(*args, temp_path)
args.append(abspath)
executable.Executable("install_name_tool")(*args)
return True
@@ -209,7 +209,7 @@ def _apply_to_file(self, f):
# but it's nasty to deal with matches across boundaries, so let's stick to
# something simple.

modified = False
modified = True

for match in self.regex.finditer(f.read()):
# The matching prefix (old) and its replacement (new)
@@ -34,7 +34,6 @@
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import working_dir
|
||||
|
||||
import spack
|
||||
import spack.caches
|
||||
import spack.config
|
||||
import spack.error
|
||||
@@ -42,7 +41,6 @@
|
||||
import spack.provider_index
|
||||
import spack.spec
|
||||
import spack.tag
|
||||
import spack.util.file_cache
|
||||
import spack.util.git
|
||||
import spack.util.naming as nm
|
||||
import spack.util.path
|
||||
@@ -51,8 +49,6 @@
|
||||
#: Package modules are imported as spack.pkg.<repo-namespace>.<pkg-name>
|
||||
ROOT_PYTHON_NAMESPACE = "spack.pkg"
|
||||
|
||||
_API_REGEX = re.compile(r"^v(\d+)\.(\d+)$")
|
||||
|
||||
|
||||
def python_package_for_repo(namespace):
|
||||
"""Returns the full namespace of a repository, given its relative one
|
||||
@@ -593,7 +589,7 @@ def __init__(
|
||||
self,
|
||||
package_checker: FastPackageChecker,
|
||||
namespace: str,
|
||||
cache: spack.util.file_cache.FileCache,
|
||||
cache: "spack.caches.FileCacheType",
|
||||
):
|
||||
self.checker = package_checker
|
||||
self.packages_path = self.checker.packages_path
|
||||
@@ -686,7 +682,7 @@ class RepoPath:
|
||||
def __init__(
|
||||
self,
|
||||
*repos: Union[str, "Repo"],
|
||||
cache: Optional[spack.util.file_cache.FileCache],
|
||||
cache: Optional["spack.caches.FileCacheType"],
|
||||
overrides: Optional[Dict[str, Any]] = None,
|
||||
) -> None:
|
||||
self.repos: List[Repo] = []
|
||||
@@ -949,59 +945,26 @@ def __reduce__(self):
|
||||
return RepoPath.unmarshal, self.marshal()
|
||||
|
||||
|
||||
def _parse_package_api_version(
|
||||
config: Dict[str, Any],
|
||||
min_api: Tuple[int, int] = spack.min_package_api_version,
|
||||
max_api: Tuple[int, int] = spack.package_api_version,
|
||||
) -> Tuple[int, int]:
|
||||
api = config.get("api")
|
||||
if api is None:
|
||||
package_api = (1, 0)
|
||||
else:
|
||||
if not isinstance(api, str):
|
||||
raise BadRepoError(f"Invalid Package API version '{api}'. Must be of the form vX.Y")
|
||||
api_match = _API_REGEX.match(api)
|
||||
if api_match is None:
|
||||
raise BadRepoError(f"Invalid Package API version '{api}'. Must be of the form vX.Y")
|
||||
package_api = (int(api_match.group(1)), int(api_match.group(2)))
|
||||
|
||||
if min_api <= package_api <= max_api:
|
||||
return package_api
|
||||
|
||||
min_str = ".".join(str(i) for i in min_api)
|
||||
max_str = ".".join(str(i) for i in max_api)
|
||||
curr_str = ".".join(str(i) for i in package_api)
|
||||
raise BadRepoError(
|
||||
f"Package API v{curr_str} is not supported by this version of Spack ("
|
||||
f"must be between v{min_str} and v{max_str})"
|
||||
)
|
||||
|
||||
|
||||
class Repo:
|
||||
"""Class representing a package repository in the filesystem.
|
||||
|
||||
Each package repository must have a top-level configuration file called `repo.yaml`.
|
||||
Each package repository must have a top-level configuration file
|
||||
called `repo.yaml`.
|
||||
|
||||
It contains the following keys:
|
||||
Currently, `repo.yaml` must define:
|
||||
|
||||
`namespace`:
|
||||
A Python namespace where the repository's packages should live.
|
||||
|
||||
`subdirectory`:
|
||||
An optional subdirectory name where packages are placed
|
||||
|
||||
`api`:
|
||||
A string of the form vX.Y that indicates the Package API version. The default is "v1.0".
|
||||
For the repo to be compatible with the current version of Spack, the version must be
|
||||
greater than or equal to :py:data:`spack.min_package_api_version` and less than or equal to
|
||||
:py:data:`spack.package_api_version`.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
root: str,
|
||||
*,
|
||||
cache: spack.util.file_cache.FileCache,
|
||||
cache: "spack.caches.FileCacheType",
|
||||
overrides: Optional[Dict[str, Any]] = None,
|
||||
) -> None:
|
||||
"""Instantiate a package repository from a filesystem path.
|
||||
@@ -1031,7 +994,7 @@ def check(condition, msg):
|
||||
f"{os.path.join(root, repo_config_name)} must define a namespace.",
|
||||
)
|
||||
|
||||
self.namespace: str = config["namespace"]
|
||||
self.namespace = config["namespace"]
|
||||
check(
|
||||
re.match(r"[a-zA-Z][a-zA-Z0-9_.]+", self.namespace),
|
||||
f"Invalid namespace '{self.namespace}' in repo '{self.root}'. "
|
||||
@@ -1044,14 +1007,12 @@ def check(condition, msg):
|
||||
# Keep name components around for checking prefixes.
|
||||
self._names = self.full_namespace.split(".")
|
||||
|
||||
packages_dir: str = config.get("subdirectory", packages_dir_name)
|
||||
packages_dir = config.get("subdirectory", packages_dir_name)
|
||||
self.packages_path = os.path.join(self.root, packages_dir)
|
||||
check(
|
||||
os.path.isdir(self.packages_path), f"No directory '{packages_dir}' found in '{root}'"
|
||||
)
|
||||
|
||||
self.package_api = _parse_package_api_version(config)
|
||||
|
||||
# Class attribute overrides by package name
|
||||
self.overrides = overrides or {}
|
||||
|
||||
@@ -1101,7 +1062,7 @@ def is_prefix(self, fullname: str) -> bool:
|
||||
parts = fullname.split(".")
|
||||
return self._names[: len(parts)] == parts
|
||||
|
||||
def _read_config(self) -> Dict[str, Any]:
|
||||
def _read_config(self) -> Dict[str, str]:
|
||||
"""Check for a YAML config file in this db's root directory."""
|
||||
try:
|
||||
with open(self.config_file) as reponame_file:
|
||||
@@ -1478,7 +1439,9 @@ def _path(configuration=None):
|
||||
return create(configuration=configuration)
|
||||
|
||||
|
||||
def create(configuration: spack.config.Configuration) -> RepoPath:
|
||||
def create(
|
||||
configuration: Union["spack.config.Configuration", llnl.util.lang.Singleton]
|
||||
) -> RepoPath:
|
||||
"""Create a RepoPath from a configuration object.
|
||||
|
||||
Args:
|
||||
@@ -1501,7 +1464,7 @@ def create(configuration: spack.config.Configuration) -> RepoPath:
|
||||
|
||||
|
||||
#: Singleton repo path instance
|
||||
PATH: RepoPath = llnl.util.lang.Singleton(_path) # type: ignore
|
||||
PATH: Union[RepoPath, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(_path)
|
||||
|
||||
# Add the finder to sys.meta_path
|
||||
REPOS_FINDER = ReposFinder()
|
||||
|
@@ -33,14 +33,8 @@
"properties": {
"type": {
"type": "string",
"enum": [
"local",
"buildcache",
"external",
"environment",
],
"enum": ["local", "buildcache", "external"],
},
"path": {"type": "string"},
"include": LIST_OF_SPECS,
"exclude": LIST_OF_SPECS,
},
@@ -78,8 +72,7 @@
"transitive": {"type": "boolean", "default": False},
},
},
},
"automatic": {"type": "boolean"},
}
},
},
"duplicates": {
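For orientation, this is the kind of concretizer reuse data the schema above validates, written as the Python structure it would receive (an assumed example built only from the keys shown here; the "environment" source type exists only on the side of the diff that lists it in the enum):

    # Example concretizer:reuse data matching the schema keys above.
    reuse_config = {
        "reuse": {
            "roots": True,
            "from": [
                {"type": "local", "include": ["gcc"], "exclude": []},
                {"type": "buildcache"},
                {"type": "environment", "path": "../other-env"},
            ],
        }
    }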
@@ -106,8 +106,8 @@
{
"names": ["install_missing_compilers"],
"message": "The config:install_missing_compilers option has been deprecated in "
"Spack v0.23, and is currently ignored. It will be removed from config after "
"Spack v1.0.",
"Spack v0.23, and is currently ignored. It will be removed from config in "
"Spack v0.25.",
"error": False,
},
],
@@ -19,8 +19,6 @@
#: Top level key in a manifest file
TOP_LEVEL_KEY = "spack"

include_concrete = {"type": "array", "default": [], "items": {"type": "string"}}

properties: Dict[str, Any] = {
"spack": {
"type": "object",
@@ -33,7 +31,7 @@
{
"include": {"type": "array", "default": [], "items": {"type": "string"}},
"specs": spec_list_schema,
"include_concrete": include_concrete,
"include_concrete": {"type": "array", "default": [], "items": {"type": "string"}},
},
),
}
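Either form of the schema accepts the same manifest data; a minimal example of what the include_concrete key carries, expressed as the Python structure the env schema validates (paths are placeholders, not real environments), is:

    # Example manifest data for the include_concrete key shown above.
    manifest = {
        "spack": {
            "specs": ["zlib", "hdf5+mpi"],
            "include_concrete": ["/path/to/already/concretized/env"],
        }
    }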
@@ -15,42 +15,14 @@
|
||||
"url": {"type": "string"},
|
||||
# todo: replace this with named keys "username" / "password" or "id" / "secret"
|
||||
"access_pair": {
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "array",
|
||||
"items": {"minItems": 2, "maxItems": 2, "type": ["string", "null"]},
|
||||
}, # deprecated
|
||||
{
|
||||
"type": "object",
|
||||
"required": ["secret_variable"],
|
||||
# Only allow id or id_variable to be set, not both
|
||||
"oneOf": [{"required": ["id"]}, {"required": ["id_variable"]}],
|
||||
"properties": {
|
||||
"id": {"type": "string"},
|
||||
"id_variable": {"type": "string"},
|
||||
"secret_variable": {"type": "string"},
|
||||
},
|
||||
},
|
||||
]
|
||||
"type": "array",
|
||||
"items": {"type": ["string", "null"], "minItems": 2, "maxItems": 2},
|
||||
},
|
||||
"access_token": {"type": ["string", "null"]},
|
||||
"profile": {"type": ["string", "null"]},
|
||||
"endpoint_url": {"type": ["string", "null"]},
|
||||
"access_token": {"type": ["string", "null"]}, # deprecated
|
||||
"access_token_variable": {"type": ["string", "null"]},
|
||||
}
|
||||
|
||||
connection_ext = {
|
||||
"deprecatedProperties": [
|
||||
{
|
||||
"names": ["access_token"],
|
||||
"message": "Use of plain text `access_token` in mirror config is deprecated, use "
|
||||
"environment variables instead (access_token_variable)",
|
||||
"error": False,
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
#: Mirror connection inside pull/push keys
|
||||
fetch_and_push = {
|
||||
"anyOf": [
|
||||
@@ -59,7 +31,6 @@
|
||||
"type": "object",
|
||||
"additionalProperties": False,
|
||||
"properties": {**connection}, # type: ignore
|
||||
**connection_ext, # type: ignore
|
||||
},
|
||||
]
|
||||
}
|
||||
@@ -78,7 +49,6 @@
|
||||
"autopush": {"type": "boolean"},
|
||||
**connection, # type: ignore
|
||||
},
|
||||
**connection_ext, # type: ignore
|
||||
}
|
||||
|
||||
#: Properties for inclusion in other schemas
|
||||
@@ -100,28 +70,3 @@
|
||||
"additionalProperties": False,
|
||||
"properties": properties,
|
||||
}
|
||||
|
||||
|
||||
def update(data):
|
||||
import jsonschema
|
||||
|
||||
errors = []
|
||||
|
||||
def check_access_pair(name, section):
|
||||
if not section or not isinstance(section, dict):
|
||||
return
|
||||
|
||||
if "access_token" in section and "access_token_variable" in section:
|
||||
errors.append(
|
||||
f'{name}: mirror credential "access_token" conflicts with "access_token_variable"'
|
||||
)
|
||||
|
||||
# Check all of the sections
|
||||
for name, section in data.items():
|
||||
check_access_pair(name, section)
|
||||
if isinstance(section, dict):
|
||||
check_access_pair(name, section.get("fetch"))
|
||||
check_access_pair(name, section.get("push"))
|
||||
|
||||
if errors:
|
||||
raise jsonschema.ValidationError("\n".join(errors))
|
||||
|
@@ -27,6 +27,7 @@
|
||||
|
||||
import spack
|
||||
import spack.binary_distribution
|
||||
import spack.bootstrap.core
|
||||
import spack.compilers
|
||||
import spack.concretize
|
||||
import spack.config
|
||||
@@ -52,7 +53,6 @@
|
||||
|
||||
from .core import (
|
||||
AspFunction,
|
||||
AspVar,
|
||||
NodeArgument,
|
||||
ast_sym,
|
||||
ast_type,
|
||||
@@ -515,8 +515,6 @@ def _compute_specs_from_answer_set(self):
|
||||
best = min(self.answers)
|
||||
opt, _, answer = best
|
||||
for input_spec in self.abstract_specs:
|
||||
# The specs must be unified to get here, so it is safe to associate any satisfying spec
|
||||
# with the input. Multiple inputs may be matched to the same concrete spec
|
||||
node = SpecBuilder.make_node(pkg=input_spec.name)
|
||||
if input_spec.virtual:
|
||||
providers = [
|
||||
@@ -525,14 +523,12 @@ def _compute_specs_from_answer_set(self):
|
||||
node = SpecBuilder.make_node(pkg=providers[0])
|
||||
candidate = answer.get(node)
|
||||
|
||||
if candidate and candidate.satisfies(input_spec):
|
||||
self._concrete_specs.append(answer[node])
|
||||
self._concrete_specs_by_input[input_spec] = answer[node]
|
||||
elif candidate and candidate.build_spec.satisfies(input_spec):
|
||||
tty.warn(
|
||||
"explicit splice configuration has caused the concretized spec"
|
||||
f" {candidate} not to satisfy the input spec {input_spec}"
|
||||
)
|
||||
if candidate and candidate.build_spec.satisfies(input_spec):
|
||||
if not candidate.satisfies(input_spec):
|
||||
tty.warn(
|
||||
"explicit splice configuration has caused the concretized spec"
|
||||
f" {candidate} not to satisfy the input spec {input_spec}"
|
||||
)
|
||||
self._concrete_specs.append(answer[node])
|
||||
self._concrete_specs_by_input[input_spec] = answer[node]
|
||||
else:
|
||||
@@ -818,7 +814,7 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre
|
||||
solve, and the internal statistics from clingo.
|
||||
"""
|
||||
# avoid circular import
|
||||
import spack.bootstrap.core
|
||||
import spack.bootstrap
|
||||
|
||||
output = output or DEFAULT_OUTPUT_CONFIGURATION
|
||||
timer = spack.util.timer.Timer()
|
||||
@@ -857,8 +853,6 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre
|
||||
self.control.load(os.path.join(parent_dir, "libc_compatibility.lp"))
|
||||
else:
|
||||
self.control.load(os.path.join(parent_dir, "os_compatibility.lp"))
|
||||
if setup.enable_splicing:
|
||||
self.control.load(os.path.join(parent_dir, "splices.lp"))
|
||||
|
||||
timer.stop("load")
|
||||
|
||||
@@ -893,7 +887,6 @@ def on_model(model):
|
||||
result.satisfiable = solve_result.satisfiable
|
||||
|
||||
if result.satisfiable:
|
||||
timer.start("construct_specs")
|
||||
# get the best model
|
||||
builder = SpecBuilder(specs, hash_lookup=setup.reusable_and_possible)
|
||||
min_cost, best_model = min(models)
|
||||
@@ -918,8 +911,7 @@ def on_model(model):
|
||||
|
||||
# record the possible dependencies in the solve
|
||||
result.possible_dependencies = setup.pkgs
|
||||
timer.stop("construct_specs")
|
||||
timer.stop()
|
||||
|
||||
elif cores:
|
||||
result.control = self.control
|
||||
result.cores.extend(cores)
|
||||
@@ -1171,9 +1163,6 @@ def __init__(self, tests: bool = False):
|
||||
# list of unique libc specs targeted by compilers (or an educated guess if no compiler)
|
||||
self.libcs: List[spack.spec.Spec] = []
|
||||
|
||||
# If true, we have to load the code for synthesizing splices
|
||||
self.enable_splicing: bool = spack.config.CONFIG.get("concretizer:splice:automatic")
|
||||
|
||||
def pkg_version_rules(self, pkg):
|
||||
"""Output declared versions of a package.
|
||||
|
||||
@@ -1344,10 +1333,6 @@ def pkg_rules(self, pkg, tests):
|
||||
# dependencies
|
||||
self.package_dependencies_rules(pkg)
|
||||
|
||||
# splices
|
||||
if self.enable_splicing:
|
||||
self.package_splice_rules(pkg)
|
||||
|
||||
# virtual preferences
|
||||
self.virtual_preferences(
|
||||
pkg.name,
|
||||
@@ -1448,13 +1433,14 @@ def define_variant(
|
||||
for value in sorted(values):
|
||||
pkg_fact(fn.variant_possible_value(vid, value))
|
||||
|
||||
# we're done here for unconditional values
|
||||
if not isinstance(value, vt.ConditionalValue):
|
||||
# when=True means unconditional, so no need for conditional values
|
||||
if getattr(value, "when", True) is True:
|
||||
continue
|
||||
|
||||
# make a spec indicating whether the variant has this conditional value
|
||||
variant_has_value = spack.spec.Spec()
|
||||
variant_has_value.variants[name] = spack.variant.AbstractVariant(name, value.value)
|
||||
# now we have to handle conditional values
|
||||
quoted_value = spack.parser.quote_if_needed(str(value))
|
||||
vstring = f"{name}={quoted_value}"
|
||||
variant_has_value = spack.spec.Spec(vstring)
|
||||
|
||||
if value.when:
|
||||
# the conditional value is always "possible", but it imposes its when condition as
|
||||
@@ -1465,12 +1451,10 @@ def define_variant(
|
||||
imposed_spec=value.when,
|
||||
required_name=pkg.name,
|
||||
imposed_name=pkg.name,
|
||||
msg=f"{pkg.name} variant {name} has value '{value.value}' when {value.when}",
|
||||
msg=f"{pkg.name} variant {name} has value '{quoted_value}' when {value.when}",
|
||||
)
|
||||
else:
|
||||
vstring = f"{name}='{value.value}'"
|
||||
|
||||
# We know the value is never allowed statically (when was None), but we can't just
|
||||
# We know the value is never allowed statically (when was false), but we can't just
|
||||
# ignore it b/c it could come in as a possible value and we need a good error msg.
|
||||
# So, it's a conflict -- if the value is somehow used, it'll trigger an error.
|
||||
trigger_id = self.condition(
|
||||
@@ -1686,94 +1670,6 @@ def dependency_holds(input_spec, requirements):
|
||||
|
||||
self.gen.newline()
|
||||
|
||||
def _gen_match_variant_splice_constraints(
|
||||
self,
|
||||
pkg,
|
||||
cond_spec: "spack.spec.Spec",
|
||||
splice_spec: "spack.spec.Spec",
|
||||
hash_asp_var: "AspVar",
|
||||
splice_node,
|
||||
match_variants: List[str],
|
||||
):
|
||||
# If there are no variants to match, no constraints are needed
|
||||
variant_constraints = []
|
||||
for i, variant_name in enumerate(match_variants):
|
||||
vari_defs = pkg.variant_definitions(variant_name)
|
||||
# the spliceable config of the package always includes the variant
|
||||
if vari_defs != [] and any(cond_spec.satisfies(s) for (s, _) in vari_defs):
|
||||
variant = vari_defs[0][1]
|
||||
if variant.multi:
|
||||
continue # cannot automatically match multi-valued variants
|
||||
value_var = AspVar(f"VariValue{i}")
|
||||
attr_constraint = fn.attr("variant_value", splice_node, variant_name, value_var)
|
||||
hash_attr_constraint = fn.hash_attr(
|
||||
hash_asp_var, "variant_value", splice_spec.name, variant_name, value_var
|
||||
)
|
||||
variant_constraints.append(attr_constraint)
|
||||
variant_constraints.append(hash_attr_constraint)
|
||||
return variant_constraints
|
||||
|
||||
def package_splice_rules(self, pkg):
|
||||
self.gen.h2("Splice rules")
|
||||
for i, (cond, (spec_to_splice, match_variants)) in enumerate(
|
||||
sorted(pkg.splice_specs.items())
|
||||
):
|
||||
with named_spec(cond, pkg.name):
|
||||
self.version_constraints.add((cond.name, cond.versions))
|
||||
self.version_constraints.add((spec_to_splice.name, spec_to_splice.versions))
|
||||
hash_var = AspVar("Hash")
|
||||
splice_node = fn.node(AspVar("NID"), cond.name)
|
||||
when_spec_attrs = [
|
||||
fn.attr(c.args[0], splice_node, *(c.args[2:]))
|
||||
for c in self.spec_clauses(cond, body=True, required_from=None)
|
||||
if c.args[0] != "node"
|
||||
]
|
||||
splice_spec_hash_attrs = [
|
||||
fn.hash_attr(hash_var, *(c.args))
|
||||
for c in self.spec_clauses(spec_to_splice, body=True, required_from=None)
|
||||
if c.args[0] != "node"
|
||||
]
|
||||
if match_variants is None:
|
||||
variant_constraints = []
|
||||
elif match_variants == "*":
|
||||
filt_match_variants = set()
|
||||
for map in pkg.variants.values():
|
||||
for k in map:
|
||||
filt_match_variants.add(k)
|
||||
filt_match_variants = list(filt_match_variants)
|
||||
variant_constraints = self._gen_match_variant_splice_constraints(
|
||||
pkg, cond, spec_to_splice, hash_var, splice_node, filt_match_variants
|
||||
)
|
||||
else:
|
||||
if any(
|
||||
v in cond.variants or v in spec_to_splice.variants for v in match_variants
|
||||
):
|
||||
raise Exception(
|
||||
"Overlap between match_variants and explicitly set variants"
|
||||
)
|
||||
variant_constraints = self._gen_match_variant_splice_constraints(
|
||||
pkg, cond, spec_to_splice, hash_var, splice_node, match_variants
|
||||
)
|
||||
|
||||
rule_head = fn.abi_splice_conditions_hold(
|
||||
i, splice_node, spec_to_splice.name, hash_var
|
||||
)
|
||||
rule_body_components = (
|
||||
[
|
||||
# splice_set_fact,
|
||||
fn.attr("node", splice_node),
|
||||
fn.installed_hash(spec_to_splice.name, hash_var),
|
||||
]
|
||||
+ when_spec_attrs
|
||||
+ splice_spec_hash_attrs
|
||||
+ variant_constraints
|
||||
)
|
||||
rule_body = ",\n ".join(str(r) for r in rule_body_components)
|
||||
rule = f"{rule_head} :-\n {rule_body}."
|
||||
self.gen.append(rule)
|
||||
|
||||
self.gen.newline()
|
||||
|
||||
def virtual_preferences(self, pkg_name, func):
|
||||
"""Call func(vspec, provider, i) for each of pkg's provider prefs."""
|
||||
config = spack.config.get("packages")
|
||||
@@ -2132,12 +2028,9 @@ def _spec_clauses(
|
||||
for variant_def in variant_defs:
|
||||
self.variant_values_from_specs.add((spec.name, id(variant_def), value))
|
||||
|
||||
clauses.append(f.variant_value(spec.name, vname, value))
|
||||
if variant.propagate:
|
||||
clauses.append(f.propagate(spec.name, fn.variant_value(vname, value)))
|
||||
if self.pkg_class(spec.name).has_variant(vname):
|
||||
clauses.append(f.variant_value(spec.name, vname, value))
|
||||
else:
|
||||
clauses.append(f.variant_value(spec.name, vname, value))
|
||||
|
||||
# compiler and compiler version
|
||||
if spec.compiler:
|
||||
@@ -2636,9 +2529,8 @@ def concrete_specs(self):
|
||||
for h, spec in self.reusable_and_possible.explicit_items():
|
||||
# this indicates that there is a spec like this installed
|
||||
self.gen.fact(fn.installed_hash(spec.name, h))
|
||||
# indirection layer between hash constraints and imposition to allow for splicing
|
||||
for pred in self.spec_clauses(spec, body=True, required_from=None):
|
||||
self.gen.fact(fn.hash_attr(h, *pred.args))
|
||||
# this describes what constraints it imposes on the solve
|
||||
self.impose(h, spec, body=True)
|
||||
self.gen.newline()
|
||||
# Declare as possible parts of specs that are not in package.py
|
||||
# - Add versions to possible versions
|
||||
@@ -2724,7 +2616,6 @@ def setup(
|
||||
)
|
||||
for name, info in env.dev_specs.items()
|
||||
)
|
||||
|
||||
specs = tuple(specs) # ensure compatible types to add
|
||||
|
||||
self.gen.h1("Reusable concrete specs")
|
||||
@@ -3579,14 +3470,6 @@ def consume_facts(self):
|
||||
self._setup.effect_rules()
|
||||
|
||||
|
||||
# This should be a dataclass, but dataclasses don't work on Python 3.6
|
||||
class Splice:
|
||||
def __init__(self, splice_node: NodeArgument, child_name: str, child_hash: str):
|
||||
self.splice_node = splice_node
|
||||
self.child_name = child_name
|
||||
self.child_hash = child_hash
|
||||
|
||||
|
||||
class SpecBuilder:
|
||||
"""Class with actions to rebuild a spec from ASP results."""
|
||||
|
||||
@@ -3622,11 +3505,10 @@ def make_node(*, pkg: str) -> NodeArgument:
|
||||
"""
|
||||
return NodeArgument(id="0", pkg=pkg)
|
||||
|
||||
def __init__(self, specs, hash_lookup=None):
|
||||
def __init__(
|
||||
self, specs: List[spack.spec.Spec], *, hash_lookup: Optional[ConcreteSpecsByHash] = None
|
||||
):
|
||||
self._specs: Dict[NodeArgument, spack.spec.Spec] = {}
|
||||
|
||||
# Matches parent nodes to splice node
|
||||
self._splices: Dict[NodeArgument, List[Splice]] = {}
|
||||
self._result = None
|
||||
self._command_line_specs = specs
|
||||
self._flag_sources: Dict[Tuple[NodeArgument, str], Set[str]] = collections.defaultdict(
|
||||
@@ -3710,8 +3592,16 @@ def external_spec_selected(self, node, idx):
|
||||
|
||||
def depends_on(self, parent_node, dependency_node, type):
|
||||
dependency_spec = self._specs[dependency_node]
|
||||
edges = self._specs[parent_node].edges_to_dependencies(name=dependency_spec.name)
|
||||
edges = [x for x in edges if id(x.spec) == id(dependency_spec)]
|
||||
depflag = dt.flag_from_string(type)
|
||||
self._specs[parent_node].add_dependency_edge(dependency_spec, depflag=depflag, virtuals=())
|
||||
|
||||
if not edges:
|
||||
self._specs[parent_node].add_dependency_edge(
|
||||
self._specs[dependency_node], depflag=depflag, virtuals=()
|
||||
)
|
||||
else:
|
||||
edges[0].update_deptypes(depflag=depflag)
|
||||
|
||||
def virtual_on_edge(self, parent_node, provider_node, virtual):
|
||||
dependencies = self._specs[parent_node].edges_to_dependencies(name=(provider_node.pkg))
|
||||
@@ -3828,57 +3718,6 @@ def _order_index(flag_group):
|
||||
def deprecated(self, node: NodeArgument, version: str) -> None:
|
||||
tty.warn(f'using "{node.pkg}@{version}" which is a deprecated version')
|
||||
|
||||
def splice_at_hash(
|
||||
self,
|
||||
parent_node: NodeArgument,
|
||||
splice_node: NodeArgument,
|
||||
child_name: str,
|
||||
child_hash: str,
|
||||
):
|
||||
splice = Splice(splice_node, child_name=child_name, child_hash=child_hash)
|
||||
self._splices.setdefault(parent_node, []).append(splice)
|
||||
|
||||
def _resolve_automatic_splices(self):
|
||||
"""After all of the specs have been concretized, apply all immediate splices.
|
||||
|
||||
Use reverse topological order to ensure that all dependencies are resolved
|
||||
before their parents, allowing for maximal sharing and minimal copying.
|
||||
|
||||
"""
|
||||
fixed_specs = {}
|
||||
|
||||
# create a mapping from dag hash to an integer representing position in reverse topo order.
|
||||
specs = self._specs.values()
|
||||
topo_order = list(traverse.traverse_nodes(specs, order="topo", key=traverse.by_dag_hash))
|
||||
topo_lookup = {spec.dag_hash(): index for index, spec in enumerate(reversed(topo_order))}
|
||||
|
||||
# iterate over specs, children before parents
|
||||
for node, spec in sorted(self._specs.items(), key=lambda x: topo_lookup[x[1].dag_hash()]):
|
||||
immediate = self._splices.get(node, [])
|
||||
if not immediate and not any(
|
||||
edge.spec in fixed_specs for edge in spec.edges_to_dependencies()
|
||||
):
|
||||
continue
|
||||
new_spec = spec.copy(deps=False)
|
||||
new_spec.build_spec = spec
|
||||
for edge in spec.edges_to_dependencies():
|
||||
depflag = edge.depflag & ~dt.BUILD
|
||||
if any(edge.spec.dag_hash() == splice.child_hash for splice in immediate):
|
||||
splice = [s for s in immediate if s.child_hash == edge.spec.dag_hash()][0]
|
||||
new_spec.add_dependency_edge(
|
||||
self._specs[splice.splice_node], depflag=depflag, virtuals=edge.virtuals
|
||||
)
|
||||
elif edge.spec in fixed_specs:
|
||||
new_spec.add_dependency_edge(
|
||||
fixed_specs[edge.spec], depflag=depflag, virtuals=edge.virtuals
|
||||
)
|
||||
else:
|
||||
new_spec.add_dependency_edge(
|
||||
edge.spec, depflag=depflag, virtuals=edge.virtuals
|
||||
)
|
||||
self._specs[node] = new_spec
|
||||
fixed_specs[spec] = new_spec
|
||||
|
||||
@staticmethod
|
||||
def sort_fn(function_tuple) -> Tuple[int, int]:
|
||||
"""Ensure attributes are evaluated in the correct order.
|
||||
@@ -3908,6 +3747,7 @@ def build_specs(self, function_tuples):
|
||||
# them here so that directives that build objects (like node and
|
||||
# node_compiler) are called in the right order.
|
||||
self.function_tuples = sorted(set(function_tuples), key=self.sort_fn)
|
||||
|
||||
self._specs = {}
|
||||
for name, args in self.function_tuples:
|
||||
if SpecBuilder.ignored_attributes.match(name):
|
||||
@@ -3937,14 +3777,10 @@ def build_specs(self, function_tuples):
|
||||
continue
|
||||
|
||||
# if we've already gotten a concrete spec for this pkg,
|
||||
# do not bother calling actions on it except for node_flag_source,
|
||||
# since node_flag_source is tracking information not in the spec itself
|
||||
# we also need to keep track of splicing information.
|
||||
# do not bother calling actions on it
|
||||
spec = self._specs.get(args[0])
|
||||
if spec and spec.concrete:
|
||||
do_not_ignore_attrs = ["node_flag_source", "splice_at_hash"]
|
||||
if name not in do_not_ignore_attrs:
|
||||
continue
|
||||
continue
|
||||
|
||||
action(*args)
|
||||
|
||||
@@ -3954,7 +3790,7 @@ def build_specs(self, function_tuples):
|
||||
# inject patches -- note that we can't use set() to unique the
|
||||
# roots here, because the specs aren't complete, and the hash
|
||||
# function will loop forever.
|
||||
roots = [spec.root for spec in self._specs.values()]
|
||||
roots = [spec.root for spec in self._specs.values() if not spec.root.installed]
|
||||
roots = dict((id(r), r) for r in roots)
|
||||
for root in roots.values():
|
||||
spack.spec.Spec.inject_patches_variant(root)
|
||||
@@ -3970,8 +3806,6 @@ def build_specs(self, function_tuples):
|
||||
for root in roots.values():
|
||||
root._finalize_concretization()
|
||||
|
||||
self._resolve_automatic_splices()
|
||||
|
||||
for s in self._specs.values():
|
||||
spack.spec.Spec.ensure_no_deprecated(s)
|
||||
|
||||
@@ -3986,6 +3820,7 @@ def build_specs(self, function_tuples):
|
||||
)
|
||||
|
||||
specs = self.execute_explicit_splices()
|
||||
|
||||
return specs
|
||||
|
||||
def execute_explicit_splices(self):
|
||||
@@ -4143,7 +3978,7 @@ def selected_specs(self) -> List[spack.spec.Spec]:
|
||||
return [s for s in self.factory() if self.is_selected(s)]
|
||||
|
||||
@staticmethod
|
||||
def from_store(configuration, *, include, exclude) -> "SpecFilter":
|
||||
def from_store(configuration, include, exclude) -> "SpecFilter":
|
||||
"""Constructs a filter that takes the specs from the current store."""
|
||||
packages = _external_config_with_implicit_externals(configuration)
|
||||
is_reusable = functools.partial(_is_reusable, packages=packages, local=True)
|
||||
@@ -4151,7 +3986,7 @@ def from_store(configuration, *, include, exclude) -> "SpecFilter":
|
||||
return SpecFilter(factory=factory, is_usable=is_reusable, include=include, exclude=exclude)
|
||||
|
||||
@staticmethod
|
||||
def from_buildcache(configuration, *, include, exclude) -> "SpecFilter":
|
||||
def from_buildcache(configuration, include, exclude) -> "SpecFilter":
|
||||
"""Constructs a filter that takes the specs from the configured buildcaches."""
|
||||
packages = _external_config_with_implicit_externals(configuration)
|
||||
is_reusable = functools.partial(_is_reusable, packages=packages, local=False)
|
||||
@@ -4159,29 +3994,6 @@ def from_buildcache(configuration, *, include, exclude) -> "SpecFilter":
|
||||
factory=_specs_from_mirror, is_usable=is_reusable, include=include, exclude=exclude
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def from_environment(configuration, *, include, exclude, env) -> "SpecFilter":
|
||||
packages = _external_config_with_implicit_externals(configuration)
|
||||
is_reusable = functools.partial(_is_reusable, packages=packages, local=True)
|
||||
factory = functools.partial(_specs_from_environment, env=env)
|
||||
return SpecFilter(factory=factory, is_usable=is_reusable, include=include, exclude=exclude)
|
||||
|
||||
@staticmethod
|
||||
def from_environment_included_concrete(
|
||||
configuration,
|
||||
*,
|
||||
include: List[str],
|
||||
exclude: List[str],
|
||||
env: ev.Environment,
|
||||
included_concrete: str,
|
||||
) -> "SpecFilter":
|
||||
packages = _external_config_with_implicit_externals(configuration)
|
||||
is_reusable = functools.partial(_is_reusable, packages=packages, local=True)
|
||||
factory = functools.partial(
|
||||
_specs_from_environment_included_concrete, env=env, included_concrete=included_concrete
|
||||
)
|
||||
return SpecFilter(factory=factory, is_usable=is_reusable, include=include, exclude=exclude)
|
||||
|
||||
|
||||
def _specs_from_store(configuration):
|
||||
store = spack.store.create(configuration)
|
||||
@@ -4199,23 +4011,6 @@ def _specs_from_mirror():
|
||||
return []
|
||||
|
||||
|
||||
def _specs_from_environment(env):
|
||||
"""Return all concrete specs from the environment. This includes all included concrete"""
|
||||
if env:
|
||||
return [concrete for _, concrete in env.concretized_specs()]
|
||||
else:
|
||||
return []
|
||||
|
||||
|
||||
def _specs_from_environment_included_concrete(env, included_concrete):
|
||||
"""Return only concrete specs from the environment included from the included_concrete"""
|
||||
if env:
|
||||
assert included_concrete in env.included_concrete_envs
|
||||
return [concrete for concrete in env.included_specs_by_hash[included_concrete].values()]
|
||||
else:
|
||||
return []
|
||||
|
||||
|
||||
class ReuseStrategy(enum.Enum):
|
||||
ROOTS = enum.auto()
|
||||
DEPENDENCIES = enum.auto()
|
||||
@@ -4245,12 +4040,6 @@ def __init__(self, configuration: spack.config.Configuration) -> None:
|
||||
SpecFilter.from_buildcache(
|
||||
configuration=self.configuration, include=[], exclude=[]
|
||||
),
|
||||
SpecFilter.from_environment(
|
||||
configuration=self.configuration,
|
||||
include=[],
|
||||
exclude=[],
|
||||
env=ev.active_environment(), # includes all concrete includes
|
||||
),
|
||||
]
|
||||
)
|
||||
else:
|
||||
@@ -4265,46 +4054,7 @@ def __init__(self, configuration: spack.config.Configuration) -> None:
|
||||
for source in reuse_yaml.get("from", default_sources):
|
||||
include = source.get("include", default_include)
|
||||
exclude = source.get("exclude", default_exclude)
|
||||
if source["type"] == "environment" and "path" in source:
|
||||
env_dir = ev.as_env_dir(source["path"])
|
||||
active_env = ev.active_environment()
|
||||
if active_env and env_dir in active_env.included_concrete_envs:
|
||||
# If environment is included as a concrete environment, use the local copy
|
||||
# of specs in the active environment.
|
||||
# note: included concrete environments are only updated at concretization
|
||||
# time, and reuse needs to match the included specs.
|
||||
self.reuse_sources.append(
|
||||
SpecFilter.from_environment_included_concrete(
|
||||
self.configuration,
|
||||
include=include,
|
||||
exclude=exclude,
|
||||
env=active_env,
|
||||
included_concrete=env_dir,
|
||||
)
|
||||
)
|
||||
else:
|
||||
# If the environment is not included as a concrete environment, use the
|
||||
# current specs from its lockfile.
|
||||
self.reuse_sources.append(
|
||||
SpecFilter.from_environment(
|
||||
self.configuration,
|
||||
include=include,
|
||||
exclude=exclude,
|
||||
env=ev.environment_from_name_or_dir(env_dir),
|
||||
)
|
||||
)
|
||||
elif source["type"] == "environment":
|
||||
# reusing from the current environment implicitly reuses from all of the
|
||||
# included concrete environments
|
||||
self.reuse_sources.append(
|
||||
SpecFilter.from_environment(
|
||||
self.configuration,
|
||||
include=include,
|
||||
exclude=exclude,
|
||||
env=ev.active_environment(),
|
||||
)
|
||||
)
|
||||
elif source["type"] == "local":
|
||||
if source["type"] == "local":
|
||||
self.reuse_sources.append(
|
||||
SpecFilter.from_store(self.configuration, include=include, exclude=exclude)
|
||||
)
|
||||
@@ -4322,6 +4072,7 @@ def reusable_specs(self, specs: List[spack.spec.Spec]) -> List[spack.spec.Spec]:
|
||||
result = []
|
||||
for reuse_source in self.reuse_sources:
|
||||
result.extend(reuse_source.selected_specs())
|
||||
|
||||
# If we only want to reuse dependencies, remove the root specs
|
||||
if self.reuse_strategy == ReuseStrategy.DEPENDENCIES:
|
||||
result = [spec for spec in result if not any(root in spec for root in specs)]
|
||||
@@ -4352,7 +4103,7 @@ def _check_input_and_extract_concrete_specs(specs):
|
||||
spack.spec.Spec.ensure_valid_variants(s)
|
||||
return reusable
|
||||
|
||||
def solve_with_stats(
|
||||
def solve(
|
||||
self,
|
||||
specs,
|
||||
out=None,
|
||||
@@ -4363,8 +4114,6 @@ def solve_with_stats(
|
||||
allow_deprecated=False,
|
||||
):
|
||||
"""
|
||||
Concretize a set of specs and track the timing and statistics for the solve
|
||||
|
||||
Arguments:
|
||||
specs (list): List of ``Spec`` objects to solve for.
|
||||
out: Optionally write the generated ASP program to a file-like object.
|
||||
@@ -4376,22 +4125,15 @@ def solve_with_stats(
|
||||
setup_only (bool): if True, stop after setup and don't solve (default False).
|
||||
allow_deprecated (bool): allow deprecated version in the solve
|
||||
"""
|
||||
# Check upfront that the variants are admissible
|
||||
specs = [s.lookup_hash() for s in specs]
|
||||
reusable_specs = self._check_input_and_extract_concrete_specs(specs)
|
||||
reusable_specs.extend(self.selector.reusable_specs(specs))
|
||||
setup = SpackSolverSetup(tests=tests)
|
||||
output = OutputConfiguration(timers=timers, stats=stats, out=out, setup_only=setup_only)
|
||||
return self.driver.solve(
|
||||
result, _, _ = self.driver.solve(
|
||||
setup, specs, reuse=reusable_specs, output=output, allow_deprecated=allow_deprecated
|
||||
)
|
||||
|
||||
def solve(self, specs, **kwargs):
|
||||
"""
|
||||
Convenience function for concretizing a set of specs and ignoring timing
|
||||
and statistics. Uses the same kwargs as solve_with_stats.
|
||||
"""
|
||||
# Check upfront that the variants are admissible
|
||||
result, _, _ = self.solve_with_stats(specs, **kwargs)
|
||||
return result
|
||||
|
||||
def solve_in_rounds(
|
||||
@@ -4491,10 +4233,11 @@ def __init__(self, provided, conflicts):
|
||||
|
||||
super().__init__(msg)
|
||||
|
||||
self.provided = provided
|
||||
|
||||
# Add attribute expected of the superclass interface
|
||||
self.required = None
|
||||
self.constraint_type = None
|
||||
self.provided = provided
|
||||
|
||||
|
||||
class InvalidSpliceError(spack.error.SpackError):
|
||||
|
@@ -57,12 +57,6 @@
|
||||
internal_error("provider with no virtual node").
|
||||
:- provider(PackageNode, _), not attr("node", PackageNode),
|
||||
internal_error("provider with no real node").
|
||||
:- node_has_variant(PackageNode, _, _), not attr("node", PackageNode),
|
||||
internal_error("node has variant for a non-node").
|
||||
:- attr("variant_set", PackageNode, _, _), not attr("node", PackageNode),
|
||||
internal_error("variant_set for a non-node").
|
||||
:- variant_is_propagated(PackageNode, _), not attr("node", PackageNode),
|
||||
internal_error("variant_is_propagated for a non-node").
|
||||
|
||||
:- attr("root", node(ID, PackageNode)), ID > min_dupe_id,
|
||||
internal_error("root with a non-minimal duplicate ID").
|
||||
@@ -581,8 +575,7 @@ attr("virtual_on_edge", PackageNode, ProviderNode, Virtual)
|
||||
% or used somewhere
|
||||
:- attr("virtual_node", node(_, Virtual)),
|
||||
not attr("virtual_on_incoming_edges", _, Virtual),
|
||||
not attr("virtual_root", node(_, Virtual)),
|
||||
internal_error("virtual node does not match incoming edge").
|
||||
not attr("virtual_root", node(_, Virtual)).
|
||||
|
||||
attr("virtual_on_incoming_edges", ProviderNode, Virtual)
|
||||
:- attr("virtual_on_edge", _, ProviderNode, Virtual).
|
||||
@@ -636,8 +629,7 @@ do_not_impose(EffectID, node(X, Package))
|
||||
virtual_condition_holds(_, PossibleProvider, Virtual),
|
||||
PossibleProvider != ProviderNode,
|
||||
explicitly_requested_root(PossibleProvider),
|
||||
not explicitly_requested_root(ProviderNode),
|
||||
internal_error("If a root can provide a virtual, it must be the provider").
|
||||
not explicitly_requested_root(ProviderNode).
|
||||
|
||||
% A package cannot be the actual provider for a virtual if it does not
|
||||
% fulfill the conditions to provide that virtual
|
||||
@@ -780,8 +772,7 @@ required_provider(Provider, Virtual)
|
||||
pkg_fact(Virtual, condition_effect(ConditionID, EffectID)),
|
||||
imposed_constraint(EffectID, "node", Provider).
|
||||
|
||||
:- provider(node(Y, Package), node(X, Virtual)), required_provider(Provider, Virtual), Package != Provider,
|
||||
internal_error("If a provider is required the concretizer must use it").
|
||||
:- provider(node(Y, Package), node(X, Virtual)), required_provider(Provider, Virtual), Package != Provider.
|
||||
|
||||
% TODO: the following choice rule allows the solver to add compiler
|
||||
% flags if their only source is from a requirement. This is overly-specific
|
||||
@@ -861,8 +852,7 @@ variant_defined(PackageNode, Name) :- variant_definition(PackageNode, Name, _).
|
||||
% for two or more variant definitions, this prefers the last one defined.
|
||||
:- node_has_variant(node(NodeID, Package), Name, SelectedVariantID),
|
||||
variant_definition(node(NodeID, Package), Name, VariantID),
|
||||
VariantID > SelectedVariantID,
|
||||
internal_error("If the solver picks a variant descriptor it must use that variant descriptor").
|
||||
VariantID > SelectedVariantID.
|
||||
|
||||
% B: Associating applicable package rules with nodes
|
||||
|
||||
@@ -979,7 +969,6 @@ error(100, "{0} variant '{1}' cannot have values '{2}' and '{3}' as they come fr
|
||||
|
||||
:- attr("variant_set", node(ID, Package), Variant, Value),
|
||||
not attr("variant_value", node(ID, Package), Variant, Value).
|
||||
internal_error("If a variant is set to a value it must have that value").
|
||||
|
||||
% The rules below allow us to prefer default values for variants
|
||||
% whenever possible. If a variant is set in a spec, or if it is
|
||||
@@ -990,7 +979,7 @@ variant_not_default(node(ID, Package), Variant, Value)
|
||||
% variants set explicitly on the CLI don't count as non-default
|
||||
not attr("variant_set", node(ID, Package), Variant, Value),
|
||||
% variant values forced by propagation don't count as non-default
|
||||
not propagate(node(ID, Package), variant_value(Variant, Value, _)),
|
||||
not propagate(node(ID, Package), variant_value(Variant, Value)),
|
||||
% variants set on externals that we could use don't count as non-default
|
||||
% this makes spack prefer to use an external over rebuilding with the
|
||||
% default configuration
|
||||
@@ -1002,7 +991,7 @@ variant_default_not_used(node(ID, Package), Variant, Value)
|
||||
:- variant_default_value(node(ID, Package), Variant, Value),
|
||||
node_has_variant(node(ID, Package), Variant, _),
|
||||
not attr("variant_value", node(ID, Package), Variant, Value),
|
||||
not propagate(node(ID, Package), variant_value(Variant, _, _)),
|
||||
not propagate(node(ID, Package), variant_value(Variant, _)),
|
||||
attr("node", node(ID, Package)).
|
||||
|
||||
% The variant is set in an external spec
|
||||
@@ -1047,14 +1036,10 @@ variant_single_value(PackageNode, Variant)
|
||||
% Propagation semantics
|
||||
%-----------------------------------------------------------------------------
|
||||
|
||||
non_default_propagation(variant_value(Name, Value)) :- attr("propagate", RootNode, variant_value(Name, Value)).
|
||||
|
||||
% Propagation roots have a corresponding attr("propagate", ...)
|
||||
propagate(RootNode, PropagatedAttribute) :- attr("propagate", RootNode, PropagatedAttribute), not non_default_propagation(PropagatedAttribute).
|
||||
propagate(RootNode, PropagatedAttribute) :- attr("propagate", RootNode, PropagatedAttribute).
|
||||
propagate(RootNode, PropagatedAttribute, EdgeTypes) :- attr("propagate", RootNode, PropagatedAttribute, EdgeTypes).
|
||||
|
||||
% Special case variants, to inject the source node in the propagated attribute
|
||||
propagate(RootNode, variant_value(Name, Value, RootNode)) :- attr("propagate", RootNode, variant_value(Name, Value)).
|
||||
|
||||
% Propagate an attribute along edges to child nodes
|
||||
propagate(ChildNode, PropagatedAttribute) :-
|
||||
@@ -1076,53 +1061,21 @@ propagate(ChildNode, PropagatedAttribute, edge_types(DepType1, DepType2)) :-
|
||||
|
||||
% If a variant is propagated, and can be accepted, set its value
|
||||
attr("variant_selected", PackageNode, Variant, Value, VariantType, VariantID) :-
|
||||
propagate(PackageNode, variant_value(Variant, Value, _)),
|
||||
propagate(PackageNode, variant_value(Variant, Value)),
|
||||
node_has_variant(PackageNode, Variant, VariantID),
|
||||
variant_type(VariantID, VariantType),
|
||||
variant_possible_value(PackageNode, Variant, Value).
|
||||
variant_possible_value(PackageNode, Variant, Value),
|
||||
not attr("variant_set", PackageNode, Variant).
|
||||
|
||||
% If a variant is propagated, we cannot have extraneous values
|
||||
variant_is_propagated(PackageNode, Variant) :-
|
||||
attr("variant_value", PackageNode, Variant, Value),
|
||||
propagate(PackageNode, variant_value(Variant, Value, _)),
|
||||
propagate(PackageNode, variant_value(Variant, Value)),
|
||||
not attr("variant_set", PackageNode, Variant).
|
||||
|
||||
:- variant_is_propagated(PackageNode, Variant),
|
||||
attr("variant_selected", PackageNode, Variant, Value, _, _),
|
||||
not propagate(PackageNode, variant_value(Variant, Value, _)).
|
||||
|
||||
error(100, "{0} and {1} cannot both propagate variant '{2}' to the shared dependency: {3}",
|
||||
Package1, Package2, Variant, Dependency) :-
|
||||
% The variant is a single-valued variant
|
||||
variant_single_value(node(X, Package1), Variant),
|
||||
% Dependency is trying to propagate Variant with different values and is not the source package
|
||||
propagate(node(Z, Dependency), variant_value(Variant, Value1, node(X, Package1))),
|
||||
propagate(node(Z, Dependency), variant_value(Variant, Value2, node(Y, Package2))),
|
||||
% Package1 and Package2 and their values are different
|
||||
Package1 > Package2, Value1 != Value2,
|
||||
not propagate(node(Z, Dependency), variant_value(Variant, _, node(Z, Dependency))).
|
||||
|
||||
% Cannot propagate the same variant from two different packages if one is a dependency of the other
|
||||
error(100, "{0} and {1} cannot both propagate variant '{2}'", Package1, Package2, Variant) :-
|
||||
% The variant is a single-valued variant
|
||||
variant_single_value(node(X, Package1), Variant),
|
||||
% Package1 and Package2 and their values are different
|
||||
Package1 != Package2, Value1 != Value2,
|
||||
% Package2 is set to propagate the value from Package1
|
||||
propagate(node(Y, Package2), variant_value(Variant, Value2, node(X, Package2))),
|
||||
propagate(node(Y, Package2), variant_value(Variant, Value1, node(X, Package1))),
|
||||
variant_is_propagated(node(Y, Package2), Variant).
|
||||
|
||||
% Cannot propagate a variant if a different value was set for it in a dependency
|
||||
error(100, "Cannot propagate the variant '{0}' from the package: {1} because package: {2} is set to exclude it", Variant, Source, Package) :-
|
||||
% Package has a Variant and Source is propagating Variant
|
||||
attr("variant_set", node(X, Package), Variant, Value1),
|
||||
% The packages and values are different
|
||||
Source != Package, Value1 != Value2,
|
||||
% The variant is a single-valued variant
|
||||
variant_single_value(node(X, Package1), Variant),
|
||||
% A different value is being propagated from somewhere else
|
||||
propagate(node(X, Package), variant_value(Variant, Value2, node(Y, Source))).
|
||||
not propagate(PackageNode, variant_value(Variant, Value)).
|
||||
|
||||
%----
|
||||
% Flags
|
||||
@@ -1449,71 +1402,25 @@ attr("node_flag", PackageNode, NodeFlag) :- attr("node_flag_set", PackageNode, N
|
||||
|
||||
|
||||
%-----------------------------------------------------------------------------
|
||||
% Installed Packages
|
||||
% Installed packages
|
||||
%-----------------------------------------------------------------------------
|
||||
% the solver is free to choose at most one installed hash for each package
|
||||
{ attr("hash", node(ID, Package), Hash) : installed_hash(Package, Hash) } 1
|
||||
:- attr("node", node(ID, Package)), internal_error("Package must resolve to at most one hash").
|
||||
|
||||
#defined installed_hash/2.
|
||||
#defined abi_splice_conditions_hold/4.
|
||||
|
||||
% These are the previously concretized attributes of the installed package as
|
||||
% a hash. It has the general form:
|
||||
% hash_attr(Hash, Attribute, PackageName, Args*)
|
||||
#defined hash_attr/3.
|
||||
#defined hash_attr/4.
|
||||
#defined hash_attr/5.
|
||||
#defined hash_attr/6.
|
||||
#defined hash_attr/7.
|
||||
|
||||
{ attr("hash", node(ID, PackageName), Hash): installed_hash(PackageName, Hash) } 1 :-
|
||||
attr("node", node(ID, PackageName)),
|
||||
internal_error("Package must resolve to at most 1 hash").
|
||||
% you can't choose an installed hash for a dev spec
|
||||
:- attr("hash", PackageNode, Hash), attr("variant_value", PackageNode, "dev_path", _).
|
||||
|
||||
% You can't install a hash, if it is not installed
|
||||
:- attr("hash", node(ID, Package), Hash), not installed_hash(Package, Hash).
|
||||
% This should be redundant given the constraint above
|
||||
:- attr("node", PackageNode), 2 { attr("hash", PackageNode, Hash) }.
|
||||
|
||||
% hash_attrs are versions, but can_splice_attr are usually node_version_satisfies
|
||||
hash_attr(Hash, "node_version_satisfies", PackageName, Constraint) :-
|
||||
hash_attr(Hash, "version", PackageName, Version),
|
||||
pkg_fact(PackageName, version_satisfies(Constraint, Version)).
|
||||
|
||||
% This recovers the exact semantics for hash reuse; hash and depends_on are where
|
||||
% splices are decided, and virtual_on_edge can result in name-changes, which is
|
||||
% why they are all treated separately.
|
||||
imposed_constraint(Hash, Attr, PackageName) :-
|
||||
hash_attr(Hash, Attr, PackageName).
|
||||
imposed_constraint(Hash, Attr, PackageName, A1) :-
|
||||
hash_attr(Hash, Attr, PackageName, A1), Attr != "hash".
|
||||
imposed_constraint(Hash, Attr, PackageName, Arg1, Arg2) :-
|
||||
hash_attr(Hash, Attr, PackageName, Arg1, Arg2),
|
||||
Attr != "depends_on",
|
||||
Attr != "virtual_on_edge".
|
||||
imposed_constraint(Hash, Attr, PackageName, A1, A2, A3) :-
|
||||
hash_attr(Hash, Attr, PackageName, A1, A2, A3).
|
||||
imposed_constraint(Hash, "hash", PackageName, Hash) :- installed_hash(PackageName, Hash).
|
||||
% Without splicing, we simply recover the exact semantics
|
||||
imposed_constraint(ParentHash, "hash", ChildName, ChildHash) :-
|
||||
hash_attr(ParentHash, "hash", ChildName, ChildHash),
|
||||
ChildHash != ParentHash,
|
||||
not abi_splice_conditions_hold(_, _, ChildName, ChildHash).
|
||||
|
||||
imposed_constraint(Hash, "depends_on", PackageName, DepName, Type) :-
|
||||
hash_attr(Hash, "depends_on", PackageName, DepName, Type),
|
||||
hash_attr(Hash, "hash", DepName, DepHash),
|
||||
not attr("splice_at_hash", _, _, DepName, DepHash).
|
||||
|
||||
imposed_constraint(Hash, "virtual_on_edge", PackageName, DepName, VirtName) :-
|
||||
hash_attr(Hash, "virtual_on_edge", PackageName, DepName, VirtName),
|
||||
not attr("splice_at_hash", _, _, DepName,_).
|
||||
|
||||
% Rules pertaining to attr("splice_at_hash") and abi_splice_conditions_hold will
|
||||
% be conditionally loaded from splices.lp
|
||||
|
||||
impose(Hash, PackageNode) :- attr("hash", PackageNode, Hash), attr("node", PackageNode).
|
||||
|
||||
% If there is not a hash for a package, we build it.
|
||||
build(PackageNode) :- attr("node", PackageNode), not concrete(PackageNode).
|
||||
% if a hash is selected, we impose all the constraints that implies
|
||||
impose(Hash, PackageNode) :- attr("hash", PackageNode, Hash).
|
||||
|
||||
% if we haven't selected a hash for a package, we'll be building it
|
||||
build(PackageNode) :- not attr("hash", PackageNode, _), attr("node", PackageNode).
|
||||
|
||||
% Minimizing builds is tricky. We want a minimizing criterion
|
||||
|
||||
@@ -1526,7 +1433,6 @@ build(PackageNode) :- attr("node", PackageNode), not concrete(PackageNode).
|
||||
% criteria for built specs -- so that they take precedence over the otherwise
|
||||
% topmost-priority criterion to reuse what is installed.
|
||||
%
|
||||
|
||||
% The priority ranges are:
|
||||
% 1000+ Optimizations for concretization errors
|
||||
% 300 - 1000 Highest priority optimizations for valid solutions
|
||||
@@ -1552,10 +1458,12 @@ build_priority(PackageNode, 0) :- not build(PackageNode), attr("node", Package
|
||||
pkg_fact(Package, version_declared(Version, Weight, "installed")),
|
||||
not optimize_for_reuse().
|
||||
|
||||
#defined installed_hash/2.
|
||||
|
||||
% This statement, which is a hidden feature of clingo, lets us avoid cycles in the DAG
|
||||
#edge (A, B) : depends_on(A, B).
|
||||
|
||||
|
||||
%-----------------------------------------------------------------
|
||||
% Optimization to avoid errors
|
||||
%-----------------------------------------------------------------
|
||||
|
@@ -44,17 +44,6 @@ def _id(thing: Any) -> Union[str, AspObject]:
|
||||
return f'"{str(thing)}"'
|
||||
|
||||
|
||||
class AspVar(AspObject):
|
||||
"""Represents a variable in an ASP rule, allows for conditionally generating
|
||||
rules"""
|
||||
|
||||
def __init__(self, name: str):
|
||||
self.name = name
|
||||
|
||||
def __str__(self) -> str:
|
||||
return str(self.name)
|
||||
|
||||
|
||||
@lang.key_ordering
|
||||
class AspFunction(AspObject):
|
||||
"""A term in the ASP logic program"""
|
||||
@@ -99,8 +88,6 @@ def _argify(self, arg: Any) -> Any:
|
||||
return clingo().Number(arg)
|
||||
elif isinstance(arg, AspFunction):
|
||||
return clingo().Function(arg.name, [self._argify(x) for x in arg.args], positive=True)
|
||||
elif isinstance(arg, AspVar):
|
||||
return clingo().Variable(arg.name)
|
||||
return clingo().String(str(arg))
|
||||
|
||||
def symbol(self):
|
||||
|
@@ -15,6 +15,7 @@
#show attr/4.
#show attr/5.
#show attr/6.

% names of optimization criteria
#show opt_criterion/2.

@@ -1,56 +0,0 @@
|
||||
% Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
% Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
%
|
||||
% SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
%=============================================================================
|
||||
% These rules are conditionally loaded to handle the synthesis of spliced
|
||||
% packages.
|
||||
% =============================================================================
|
||||
% Consider the concrete spec:
|
||||
% foo@2.72%gcc@11.4 arch=linux-ubuntu22.04-icelake build_system=autotools ^bar ...
|
||||
% It will emit the following facts for reuse (below is a subset)
|
||||
% installed_hash("foo", "xxxyyy")
|
||||
% hash_attr("xxxyyy", "hash", "foo", "xxxyyy")
|
||||
% hash_attr("xxxyyy", "version", "foo", "2.72")
|
||||
% hash_attr("xxxyyy", "node_os", "ubuntu22.04")
|
||||
% hash_attr("xxxyyy", "hash", "bar", "zzzqqq")
|
||||
% hash_attr("xxxyyy", "depends_on", "foo", "bar", "link")
|
||||
% Rules that derive abi_splice_conditions_hold will be generated from
|
||||
% use of the `can_splice` directive. They will have the following form:
|
||||
% can_splice("foo@1.0.0+a", when="@1.0.1+a", match_variants=["b"]) --->
|
||||
% abi_splice_conditions_hold(0, node(SID, "foo"), "foo", BaseHash) :-
|
||||
% installed_hash("foo", BaseHash),
|
||||
% attr("node", node(SID, SpliceName)),
|
||||
% attr("node_version_satisfies", node(SID, "foo"), "1.0.1"),
|
||||
% hash_attr("hash", "node_version_satisfies", "foo", "1.0.1"),
|
||||
% attr("variant_value", node(SID, "foo"), "a", "True"),
|
||||
% hash_attr("hash", "variant_value", "foo", "a", "True"),
|
||||
% attr("variant_value", node(SID, "foo"), "b", VariVar0),
|
||||
% hash_attr("hash", "variant_value", "foo", "b", VariVar0),
|
||||
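The `can_splice` call quoted in the comment above lives in a package recipe; below is a minimal hypothetical sketch in Python (package name, versions and variants are illustrative, only the directive call itself is taken from the comment):

from spack.package import *


class Foo(Package):
    """Illustrative package carrying the can_splice directive."""

    version("1.0.1")
    version("1.0.0")

    variant("a", default=True, description="illustrative variant")
    variant("b", default=True, description="illustrative variant")

    # An installed foo@1.0.0+a may be spliced in where foo@1.0.1+a was requested,
    # provided the value of variant "b" matches between the two specs
    can_splice("foo@1.0.0+a", when="@1.0.1+a", match_variants=["b"])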
|
||||
% If the splice is valid (i.e. abi_splice_conditions_hold is derived) in the
|
||||
% dependency of a concrete spec, the solver is free to choose whether to continue
|
||||
% with the exact hash semantics by simply imposing the child hash, or introducing
|
||||
% a spliced node as the dependency instead
|
||||
{ imposed_constraint(ParentHash, "hash", ChildName, ChildHash) } :-
|
||||
hash_attr(ParentHash, "hash", ChildName, ChildHash),
|
||||
abi_splice_conditions_hold(_, node(SID, SpliceName), ChildName, ChildHash).
|
||||
|
||||
attr("splice_at_hash", ParentNode, node(SID, SpliceName), ChildName, ChildHash) :-
|
||||
attr("hash", ParentNode, ParentHash),
|
||||
hash_attr(ParentHash, "hash", ChildName, ChildHash),
|
||||
abi_splice_conditions_hold(_, node(SID, SpliceName), ChildName, ChildHash),
|
||||
ParentHash != ChildHash,
|
||||
not imposed_constraint(ParentHash, "hash", ChildName, ChildHash).
|
||||
|
||||
% Names and virtual providers may change when a dependency is spliced in
|
||||
imposed_constraint(Hash, "dependency_holds", ParentName, SpliceName, Type) :-
|
||||
hash_attr(Hash, "depends_on", ParentName, DepName, Type),
|
||||
hash_attr(Hash, "hash", DepName, DepHash),
|
||||
attr("splice_at_hash", node(ID, ParentName), node(SID, SpliceName), DepName, DepHash).
|
||||
|
||||
imposed_constraint(Hash, "virtual_on_edge", ParentName, SpliceName, VirtName) :-
|
||||
hash_attr(Hash, "virtual_on_edge", ParentName, DepName, VirtName),
|
||||
attr("splice_at_hash", node(ID, ParentName), node(SID, SpliceName), DepName, DepHash).
|
||||
|
@@ -59,7 +59,7 @@
|
||||
import re
|
||||
import socket
|
||||
import warnings
|
||||
from typing import Any, Callable, Dict, Iterable, List, Match, Optional, Set, Tuple, Union
|
||||
from typing import Any, Callable, Dict, List, Match, Optional, Set, Tuple, Union
|
||||
|
||||
import archspec.cpu
|
||||
|
||||
@@ -448,9 +448,6 @@ def _target_satisfies(self, other: "ArchSpec", strict: bool) -> bool:
|
||||
return bool(self._target_intersection(other))
|
||||
|
||||
def _target_constrain(self, other: "ArchSpec") -> bool:
|
||||
if self.target is None and other.target is None:
|
||||
return False
|
||||
|
||||
if not other._target_satisfies(self, strict=False):
|
||||
raise UnsatisfiableArchitectureSpecError(self, other)
|
||||
|
||||
@@ -499,56 +496,21 @@ def _target_intersection(self, other):
|
||||
if (not s_min or o_comp >= s_min) and (not s_max or o_comp <= s_max):
|
||||
results.append(o_min)
|
||||
else:
|
||||
# Take the "min" of the two max, if there is a partial ordering.
|
||||
n_max = ""
|
||||
if s_max and o_max:
|
||||
_s_max = _make_microarchitecture(s_max)
|
||||
_o_max = _make_microarchitecture(o_max)
|
||||
if _s_max.family != _o_max.family:
|
||||
continue
|
||||
if _s_max <= _o_max:
|
||||
n_max = s_max
|
||||
elif _o_max < _s_max:
|
||||
n_max = o_max
|
||||
else:
|
||||
continue
|
||||
elif s_max:
|
||||
n_max = s_max
|
||||
elif o_max:
|
||||
n_max = o_max
|
||||
|
||||
# Take the "max" of the two min.
|
||||
n_min = ""
|
||||
if s_min and o_min:
|
||||
_s_min = _make_microarchitecture(s_min)
|
||||
_o_min = _make_microarchitecture(o_min)
|
||||
if _s_min.family != _o_min.family:
|
||||
continue
|
||||
if _s_min >= _o_min:
|
||||
n_min = s_min
|
||||
elif _o_min > _s_min:
|
||||
n_min = o_min
|
||||
else:
|
||||
continue
|
||||
elif s_min:
|
||||
n_min = s_min
|
||||
elif o_min:
|
||||
n_min = o_min
|
||||
|
||||
if n_min and n_max:
|
||||
_n_min = _make_microarchitecture(n_min)
|
||||
_n_max = _make_microarchitecture(n_max)
|
||||
if _n_min.family != _n_max.family or not _n_min <= _n_max:
|
||||
continue
|
||||
if n_min == n_max:
|
||||
results.append(n_min)
|
||||
else:
|
||||
results.append(f"{n_min}:{n_max}")
|
||||
elif n_min:
|
||||
results.append(f"{n_min}:")
|
||||
elif n_max:
|
||||
results.append(f":{n_max}")
|
||||
# Take intersection of two ranges
|
||||
# Lots of comparisons needed
|
||||
_s_min = _make_microarchitecture(s_min)
|
||||
_s_max = _make_microarchitecture(s_max)
|
||||
_o_min = _make_microarchitecture(o_min)
|
||||
_o_max = _make_microarchitecture(o_max)
|
||||
|
||||
n_min = s_min if _s_min >= _o_min else o_min
|
||||
n_max = s_max if _s_max <= _o_max else o_max
|
||||
_n_min = _make_microarchitecture(n_min)
|
||||
_n_max = _make_microarchitecture(n_max)
|
||||
if _n_min == _n_max:
|
||||
results.append(n_min)
|
||||
elif not n_min or not n_max or _n_min < _n_max:
|
||||
results.append("%s:%s" % (n_min, n_max))
|
||||
return results
|
||||
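A small illustration of the range intersection computed above, using hypothetical target ranges (the microarchitecture names are examples, not taken from this diff):

from spack.spec import Spec

# Two abstract specs with overlapping target ranges; the intersection is the
# tighter range (roughly haswell:icelake here), so intersects() returns True
a = Spec("zlib target=x86_64:icelake")
b = Spec("zlib target=haswell:")
print(a.intersects(b))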
|
||||
def constrain(self, other: "ArchSpec") -> bool:
|
||||
@@ -915,9 +877,8 @@ def constrain(self, other):
|
||||
# Next, if any flags in other propagate, we force them to propagate in our case
|
||||
shared = list(sorted(set(other[flag_type]) - extra_other))
|
||||
for x, y in _shared_subset_pair_iterate(shared, sorted(self[flag_type])):
|
||||
if y.propagate is True and x.propagate is False:
|
||||
changed = True
|
||||
y.propagate = False
|
||||
if x.propagate:
|
||||
y.propagate = True
|
||||
|
||||
# TODO: what happens if flag groups with a partial (but not complete)
|
||||
# intersection specify different behaviors for flag propagation?
|
||||
@@ -972,7 +933,6 @@ def _cmp_iter(self):
|
||||
def flags():
|
||||
for flag in v:
|
||||
yield flag
|
||||
yield flag.propagate
|
||||
|
||||
yield flags
|
||||
|
||||
@@ -1003,6 +963,10 @@ def _sort_by_dep_types(dspec: DependencySpec):
|
||||
return dspec.depflag
|
||||
|
||||
|
||||
#: Enum for edge directions
|
||||
EdgeDirection = lang.enum(parent=0, child=1)
|
||||
|
||||
|
||||
@lang.lazy_lexicographic_ordering
|
||||
class _EdgeMap(collections.abc.Mapping):
|
||||
"""Represent a collection of edges (DependencySpec objects) in the DAG.
|
||||
@@ -1016,20 +980,26 @@ class _EdgeMap(collections.abc.Mapping):
|
||||
|
||||
__slots__ = "edges", "store_by_child"
|
||||
|
||||
def __init__(self, store_by_child: bool = True) -> None:
|
||||
self.edges: Dict[str, List[DependencySpec]] = {}
|
||||
self.store_by_child = store_by_child
|
||||
def __init__(self, store_by=EdgeDirection.child):
|
||||
# Sanitize input arguments
|
||||
msg = 'unexpected value for "store_by" argument'
|
||||
assert store_by in (EdgeDirection.child, EdgeDirection.parent), msg
|
||||
|
||||
def __getitem__(self, key: str) -> List[DependencySpec]:
|
||||
#: This dictionary maps a package name to a list of edges
|
||||
#: i.e. to a list of DependencySpec objects
|
||||
self.edges = {}
|
||||
self.store_by_child = store_by == EdgeDirection.child
|
||||
|
||||
def __getitem__(self, key):
|
||||
return self.edges[key]
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.edges)
|
||||
|
||||
def __len__(self) -> int:
|
||||
def __len__(self):
|
||||
return len(self.edges)
|
||||
|
||||
def add(self, edge: DependencySpec) -> None:
|
||||
def add(self, edge: DependencySpec):
|
||||
key = edge.spec.name if self.store_by_child else edge.parent.name
|
||||
if key in self.edges:
|
||||
lst = self.edges[key]
|
||||
@@ -1038,8 +1008,8 @@ def add(self, edge: DependencySpec) -> None:
|
||||
else:
|
||||
self.edges[key] = [edge]
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"{{deps: {', '.join(str(d) for d in sorted(self.values()))}}}"
|
||||
def __str__(self):
|
||||
return "{deps: %s}" % ", ".join(str(d) for d in sorted(self.values()))
|
||||
|
||||
def _cmp_iter(self):
|
||||
for item in sorted(itertools.chain.from_iterable(self.edges.values())):
|
||||
@@ -1056,32 +1026,24 @@ def copy(self):
|
||||
|
||||
return clone
|
||||
|
||||
def select(
|
||||
self,
|
||||
*,
|
||||
parent: Optional[str] = None,
|
||||
child: Optional[str] = None,
|
||||
depflag: dt.DepFlag = dt.ALL,
|
||||
virtuals: Optional[List[str]] = None,
|
||||
) -> List[DependencySpec]:
|
||||
"""Selects a list of edges and returns them.
|
||||
def select(self, parent=None, child=None, depflag: dt.DepFlag = dt.ALL):
|
||||
"""Select a list of edges and return them.
|
||||
|
||||
If an edge:
|
||||
|
||||
- Has *any* of the dependency types passed as argument,
|
||||
- Matches the parent and/or child name
|
||||
- Provides *any* of the virtuals passed as argument
|
||||
|
||||
- Matches the parent and/or child name, if passed
|
||||
then it is selected.
|
||||
|
||||
The deptypes argument needs to be a flag, since the method won't
|
||||
convert it for performance reasons.
|
||||
|
||||
Args:
|
||||
parent: name of the parent package
|
||||
child: name of the child package
|
||||
parent (str): name of the parent package
|
||||
child (str): name of the child package
|
||||
depflag: allowed dependency types in flag form
|
||||
virtuals: list of virtuals on the edge
|
||||
|
||||
Returns:
|
||||
List of DependencySpec objects
|
||||
"""
|
||||
if not depflag:
|
||||
return []
|
||||
@@ -1100,10 +1062,6 @@ def select(
|
||||
# Filter by allowed dependency types
|
||||
selected = (dep for dep in selected if not dep.depflag or (depflag & dep.depflag))
|
||||
|
||||
# Filter by virtuals
|
||||
if virtuals is not None:
|
||||
selected = (dep for dep in selected if any(v in dep.virtuals for v in virtuals))
|
||||
|
||||
return list(selected)
|
||||
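A sketch of how the keyword-only select() shown above can be called; the child name and virtual are hypothetical, and _dependencies is the private _EdgeMap held by an existing Spec:

import spack.deptypes as dt

# `spec` is an existing Spec; pick the link/run edges to a dependency named
# "openmpi" that carry the "mpi" virtual on the edge
edges = spec._dependencies.select(child="openmpi", depflag=dt.LINK | dt.RUN, virtuals=["mpi"])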
|
||||
def clear(self):
|
||||
@@ -1512,8 +1470,8 @@ def __init__(
|
||||
self.architecture = None
|
||||
self.compiler = None
|
||||
self.compiler_flags = FlagMap(self)
|
||||
self._dependents = _EdgeMap(store_by_child=False)
|
||||
self._dependencies = _EdgeMap(store_by_child=True)
|
||||
self._dependents = _EdgeMap(store_by=EdgeDirection.parent)
|
||||
self._dependencies = _EdgeMap(store_by=EdgeDirection.child)
|
||||
self.namespace = None
|
||||
|
||||
# initial values for all spec hash types
|
||||
@@ -1538,8 +1496,9 @@ def __init__(
|
||||
self._external_path = external_path
|
||||
self.external_modules = Spec._format_module_list(external_modules)
|
||||
|
||||
# This attribute is used to store custom information for external specs.
|
||||
self.extra_attributes: dict = {}
|
||||
# This attribute is used to store custom information for
|
||||
# external specs. None signal that it was not set yet.
|
||||
self.extra_attributes = None
|
||||
|
||||
# This attribute holds the original build copy of the spec if it is
|
||||
# deployed differently than it was built. None signals that the spec
|
||||
@@ -1632,7 +1591,7 @@ def _get_dependency(self, name):
|
||||
return deps[0]
|
||||
|
||||
def edges_from_dependents(
|
||||
self, name=None, depflag: dt.DepFlag = dt.ALL, *, virtuals: Optional[List[str]] = None
|
||||
self, name=None, depflag: dt.DepFlag = dt.ALL
|
||||
) -> List[DependencySpec]:
|
||||
"""Return a list of edges connecting this node in the DAG
|
||||
to parents.
|
||||
@@ -1640,25 +1599,20 @@ def edges_from_dependents(
|
||||
Args:
|
||||
name (str): filter dependents by package name
|
||||
depflag: allowed dependency types
|
||||
virtuals: allowed virtuals
|
||||
"""
|
||||
return [
|
||||
d for d in self._dependents.select(parent=name, depflag=depflag, virtuals=virtuals)
|
||||
]
|
||||
return [d for d in self._dependents.select(parent=name, depflag=depflag)]
|
||||
|
||||
def edges_to_dependencies(
|
||||
self, name=None, depflag: dt.DepFlag = dt.ALL, *, virtuals: Optional[List[str]] = None
|
||||
self, name=None, depflag: dt.DepFlag = dt.ALL
|
||||
) -> List[DependencySpec]:
|
||||
"""Returns a list of edges connecting this node in the DAG to children.
|
||||
"""Return a list of edges connecting this node in the DAG
|
||||
to children.
|
||||
|
||||
Args:
|
||||
name (str): filter dependencies by package name
|
||||
depflag: allowed dependency types
|
||||
virtuals: allowed virtuals
|
||||
"""
|
||||
return [
|
||||
d for d in self._dependencies.select(child=name, depflag=depflag, virtuals=virtuals)
|
||||
]
|
||||
return [d for d in self._dependencies.select(child=name, depflag=depflag)]
|
||||
|
||||
@property
|
||||
def edge_attributes(self) -> str:
|
||||
@@ -1681,24 +1635,17 @@ def edge_attributes(self) -> str:
|
||||
return f"[{result}]"
|
||||
|
||||
def dependencies(
|
||||
self,
|
||||
name=None,
|
||||
deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL,
|
||||
*,
|
||||
virtuals: Optional[List[str]] = None,
|
||||
self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL
|
||||
) -> List["Spec"]:
|
||||
"""Returns a list of direct dependencies (nodes in the DAG)
|
||||
"""Return a list of direct dependencies (nodes in the DAG).
|
||||
|
||||
Args:
|
||||
name: filter dependencies by package name
|
||||
name (str): filter dependencies by package name
|
||||
deptype: allowed dependency types
|
||||
virtuals: allowed virtuals
|
||||
"""
|
||||
if not isinstance(deptype, dt.DepFlag):
|
||||
deptype = dt.canonicalize(deptype)
|
||||
return [
|
||||
d.spec for d in self.edges_to_dependencies(name, depflag=deptype, virtuals=virtuals)
|
||||
]
|
||||
return [d.spec for d in self.edges_to_dependencies(name, depflag=deptype)]
|
||||
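Correspondingly, at the Spec level the virtuals filter shown in the signature above can be used as follows (package and virtual names are hypothetical):

# `spec` is an existing Spec; direct link dependencies that provide the "mpi" virtual
mpi_providers = spec.dependencies(deptype="link", virtuals=["mpi"])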
|
||||
def dependents(
|
||||
self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL
|
||||
@@ -2238,24 +2185,12 @@ def to_node_dict(self, hash=ht.dag_hash):
|
||||
if params:
|
||||
d["parameters"] = params
|
||||
|
||||
if params and not self.concrete:
|
||||
flag_names = [
|
||||
name
|
||||
for name, flags in self.compiler_flags.items()
|
||||
if any(x.propagate for x in flags)
|
||||
]
|
||||
d["propagate"] = sorted(
|
||||
itertools.chain(
|
||||
[v.name for v in self.variants.values() if v.propagate], flag_names
|
||||
)
|
||||
)
|
||||
|
||||
if self.external:
|
||||
d["external"] = syaml.syaml_dict(
|
||||
[
|
||||
("path", self.external_path),
|
||||
("module", self.external_modules or None),
|
||||
("extra_attributes", syaml.sorted_dict(self.extra_attributes)),
|
||||
("module", self.external_modules),
|
||||
("extra_attributes", self.extra_attributes),
|
||||
]
|
||||
)
|
||||
|
||||
@@ -2422,10 +2357,16 @@ def node_dict_with_hashes(self, hash=ht.dag_hash):
|
||||
spec is concrete, the full hash is added as well. If 'build' is in
|
||||
the hash_type, the build hash is also added."""
|
||||
node = self.to_node_dict(hash)
|
||||
# All specs have at least a DAG hash
|
||||
node[ht.dag_hash.name] = self.dag_hash()
|
||||
|
||||
if not self.concrete:
|
||||
# dag_hash is lazily computed -- but if we write a spec out, we want it
|
||||
# to be included. This is effectively the last chance we get to compute
|
||||
# it accurately.
|
||||
if self.concrete:
|
||||
# all specs have at least a DAG hash
|
||||
node[ht.dag_hash.name] = self.dag_hash()
|
||||
|
||||
else:
|
||||
node["concrete"] = False
|
||||
|
||||
# we can also give them other hash types if we want
|
||||
@@ -2865,7 +2806,7 @@ def ensure_no_deprecated(root):
|
||||
msg += " For each package listed, choose another spec\n"
|
||||
raise SpecDeprecatedError(msg)
|
||||
|
||||
def concretize(self, tests: Union[bool, Iterable[str]] = False) -> None:
|
||||
def concretize(self, tests: Union[bool, List[str]] = False) -> None:
|
||||
"""Concretize the current spec.
|
||||
|
||||
Args:
|
||||
@@ -2944,7 +2885,7 @@ def _mark_concrete(self, value=True):
|
||||
if (not value) and s.concrete and s.installed:
|
||||
continue
|
||||
elif not value:
|
||||
s.clear_caches()
|
||||
s.clear_cached_hashes()
|
||||
s._mark_root_concrete(value)
|
||||
|
||||
def _finalize_concretization(self):
|
||||
@@ -2993,7 +2934,7 @@ def _finalize_concretization(self):
|
||||
for spec in self.traverse():
|
||||
spec._cached_hash(ht.dag_hash)
|
||||
|
||||
def concretized(self, tests: Union[bool, Iterable[str]] = False) -> "Spec":
|
||||
def concretized(self, tests=False):
|
||||
"""This is a non-destructive version of concretize().
|
||||
|
||||
First clones, then returns a concrete version of this package
|
||||
@@ -3057,12 +2998,7 @@ def ensure_valid_variants(spec):
|
||||
pkg_variants = pkg_cls.variant_names()
|
||||
# reserved names are variants that may be set on any package
|
||||
# but are not necessarily recorded by the package's class
|
||||
propagate_variants = [name for name, variant in spec.variants.items() if variant.propagate]
|
||||
|
||||
not_existing = set(spec.variants) - (
|
||||
set(pkg_variants) | set(vt.reserved_names) | set(propagate_variants)
|
||||
)
|
||||
|
||||
not_existing = set(spec.variants) - (set(pkg_variants) | set(vt.reserved_names))
|
||||
if not_existing:
|
||||
raise vt.UnknownVariantError(
|
||||
f"No such variant {not_existing} for spec: '{spec}'", list(not_existing)
|
||||
@@ -3089,10 +3025,6 @@ def constrain(self, other, deps=True):
|
||||
raise spack.error.UnsatisfiableSpecError(self, other, "constrain a concrete spec")
|
||||
|
||||
other = self._autospec(other)
|
||||
if other.concrete and other.satisfies(self):
|
||||
self._dup(other)
|
||||
return True
|
||||
|
||||
if other.abstract_hash:
|
||||
if not self.abstract_hash or other.abstract_hash.startswith(self.abstract_hash):
|
||||
self.abstract_hash = other.abstract_hash
|
||||
@@ -3116,13 +3048,18 @@ def constrain(self, other, deps=True):
|
||||
if not self.variants[v].compatible(other.variants[v]):
|
||||
raise vt.UnsatisfiableVariantSpecError(self.variants[v], other.variants[v])
|
||||
|
||||
# TODO: Check out the logic here
|
||||
sarch, oarch = self.architecture, other.architecture
|
||||
if (
|
||||
sarch is not None
|
||||
and oarch is not None
|
||||
and not self.architecture.intersects(other.architecture)
|
||||
):
|
||||
raise UnsatisfiableArchitectureSpecError(sarch, oarch)
|
||||
if sarch is not None and oarch is not None:
|
||||
if sarch.platform is not None and oarch.platform is not None:
|
||||
if sarch.platform != oarch.platform:
|
||||
raise UnsatisfiableArchitectureSpecError(sarch, oarch)
|
||||
if sarch.os is not None and oarch.os is not None:
|
||||
if sarch.os != oarch.os:
|
||||
raise UnsatisfiableArchitectureSpecError(sarch, oarch)
|
||||
if sarch.target is not None and oarch.target is not None:
|
||||
if sarch.target != oarch.target:
|
||||
raise UnsatisfiableArchitectureSpecError(sarch, oarch)
|
||||
|
||||
changed = False
|
||||
|
||||
@@ -3145,12 +3082,18 @@ def constrain(self, other, deps=True):
|
||||
|
||||
changed |= self.compiler_flags.constrain(other.compiler_flags)
|
||||
|
||||
old = str(self.architecture)
|
||||
sarch, oarch = self.architecture, other.architecture
|
||||
if sarch is not None and oarch is not None:
|
||||
changed |= self.architecture.constrain(other.architecture)
|
||||
elif oarch is not None:
|
||||
self.architecture = oarch
|
||||
changed = True
|
||||
if sarch is None or other.architecture is None:
|
||||
self.architecture = sarch or oarch
|
||||
else:
|
||||
if sarch.platform is None or oarch.platform is None:
|
||||
self.architecture.platform = sarch.platform or oarch.platform
|
||||
if sarch.os is None or oarch.os is None:
|
||||
sarch.os = sarch.os or oarch.os
|
||||
if sarch.target is None or oarch.target is None:
|
||||
sarch.target = sarch.target or oarch.target
|
||||
changed |= str(self.architecture) != old
|
||||
|
||||
if deps:
|
||||
changed |= self._constrain_dependencies(other)
|
||||
@@ -3576,8 +3519,8 @@ def _dup(self, other, deps: Union[bool, dt.DepTypes, dt.DepFlag] = True, clearde
|
||||
self.architecture = other.architecture.copy() if other.architecture else None
|
||||
self.compiler = other.compiler.copy() if other.compiler else None
|
||||
if cleardeps:
|
||||
self._dependents = _EdgeMap(store_by_child=False)
|
||||
self._dependencies = _EdgeMap(store_by_child=True)
|
||||
self._dependents = _EdgeMap(store_by=EdgeDirection.parent)
|
||||
self._dependencies = _EdgeMap(store_by=EdgeDirection.child)
|
||||
self.compiler_flags = other.compiler_flags.copy()
|
||||
self.compiler_flags.spec = self
|
||||
self.variants = other.variants.copy()
|
||||
@@ -4085,7 +4028,7 @@ def format_path(
|
||||
|
||||
def __str__(self):
|
||||
if self._concrete:
|
||||
return self.format("{name}{@version}{/hash}")
|
||||
return self.format("{name}{@version}{/hash:7}")
|
||||
|
||||
if not self._dependencies:
|
||||
return self.format()
|
||||
@@ -4282,7 +4225,7 @@ def _splice_detach_and_add_dependents(self, replacement, context):
|
||||
for ancestor in ancestors_in_context:
|
||||
# Only set it if it hasn't been spliced before
|
||||
ancestor._build_spec = ancestor._build_spec or ancestor.copy()
|
||||
ancestor.clear_caches(ignore=(ht.package_hash.attr,))
|
||||
ancestor.clear_cached_hashes(ignore=(ht.package_hash.attr,))
|
||||
for edge in ancestor.edges_to_dependencies(depflag=dt.BUILD):
|
||||
if edge.depflag & ~dt.BUILD:
|
||||
edge.depflag &= ~dt.BUILD
|
||||
@@ -4476,7 +4419,7 @@ def mask_build_deps(in_spec):
|
||||
|
||||
return spec
|
||||
|
||||
def clear_caches(self, ignore=()):
|
||||
def clear_cached_hashes(self, ignore=()):
|
||||
"""
|
||||
Clears all cached hashes in a Spec, while preserving other properties.
|
||||
"""
|
||||
@@ -4484,9 +4427,7 @@ def clear_caches(self, ignore=()):
|
||||
if h.attr not in ignore:
|
||||
if hasattr(self, h.attr):
|
||||
setattr(self, h.attr, None)
|
||||
for attr in ("_dunder_hash", "_prefix"):
|
||||
if attr not in ignore:
|
||||
setattr(self, attr, None)
|
||||
self._dunder_hash = None
|
||||
|
||||
def __hash__(self):
|
||||
# If the spec is concrete, we leverage the process hash and just use
|
||||
@@ -4562,69 +4503,8 @@ def substitute(self, vspec):
|
||||
# Set the item
|
||||
super().__setitem__(vspec.name, vspec)
|
||||
|
||||
def partition_variants(self):
|
||||
non_prop, prop = lang.stable_partition(self.values(), lambda x: not x.propagate)
|
||||
# Just return the names
|
||||
non_prop = [x.name for x in non_prop]
|
||||
prop = [x.name for x in prop]
|
||||
return non_prop, prop
|
||||
|
||||
def satisfies(self, other: "VariantMap") -> bool:
|
||||
if self.spec.concrete:
|
||||
return self._satisfies_when_self_concrete(other)
|
||||
return self._satisfies_when_self_abstract(other)
|
||||
|
||||
def _satisfies_when_self_concrete(self, other: "VariantMap") -> bool:
|
||||
non_propagating, propagating = other.partition_variants()
|
||||
result = all(
|
||||
name in self and self[name].satisfies(other[name]) for name in non_propagating
|
||||
)
|
||||
if not propagating:
|
||||
return result
|
||||
|
||||
for node in self.spec.traverse():
|
||||
if not all(
|
||||
node.variants[name].satisfies(other[name])
|
||||
for name in propagating
|
||||
if name in node.variants
|
||||
):
|
||||
return False
|
||||
return result
|
||||
|
||||
def _satisfies_when_self_abstract(self, other: "VariantMap") -> bool:
|
||||
other_non_propagating, other_propagating = other.partition_variants()
|
||||
self_non_propagating, self_propagating = self.partition_variants()
|
||||
|
||||
# First check variants without propagation set
|
||||
result = all(
|
||||
name in self_non_propagating
|
||||
and (self[name].propagate or self[name].satisfies(other[name]))
|
||||
for name in other_non_propagating
|
||||
)
|
||||
if result is False or (not other_propagating and not self_propagating):
|
||||
return result
|
||||
|
||||
# Check that self doesn't contradict variants propagated by other
|
||||
if other_propagating:
|
||||
for node in self.spec.traverse():
|
||||
if not all(
|
||||
node.variants[name].satisfies(other[name])
|
||||
for name in other_propagating
|
||||
if name in node.variants
|
||||
):
|
||||
return False
|
||||
|
||||
# Check that other doesn't contradict variants propagated by self
|
||||
if self_propagating:
|
||||
for node in other.spec.traverse():
|
||||
if not all(
|
||||
node.variants[name].satisfies(self[name])
|
||||
for name in self_propagating
|
||||
if name in node.variants
|
||||
):
|
||||
return False
|
||||
|
||||
return result
|
||||
def satisfies(self, other):
|
||||
return all(k in self and self[k].satisfies(other[k]) for k in other)
|
||||
|
||||
def intersects(self, other):
|
||||
return all(self[k].intersects(other[k]) for k in other if k in self)
|
||||
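A short sketch of the distinction the propagation-aware satisfies() above draws; the package and variant names are hypothetical, only the partition_variants() behavior comes from this hunk:

from spack.spec import Spec

# "+debug" constrains only the named node, while "++debug" also propagates to
# descendants, so the propagation-aware check has to walk the whole DAG
non_propagated = Spec("mpileaks +debug")
propagated = Spec("mpileaks ++debug")
non_prop_names, prop_names = propagated.variants.partition_variants()  # ([], ["debug"])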
@@ -4837,17 +4717,13 @@ def from_node_dict(cls, node):
|
||||
else:
|
||||
spec.compiler = None
|
||||
|
||||
propagated_names = node.get("propagate", [])
|
||||
for name, values in node.get("parameters", {}).items():
|
||||
propagate = name in propagated_names
|
||||
if name in _valid_compiler_flags:
|
||||
spec.compiler_flags[name] = []
|
||||
for val in values:
|
||||
spec.compiler_flags.add_flag(name, val, propagate)
|
||||
spec.compiler_flags.add_flag(name, val, False)
|
||||
else:
|
||||
spec.variants[name] = vt.MultiValuedVariant.from_node_dict(
|
||||
name, values, propagate=propagate
|
||||
)
|
||||
spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, values)
|
||||
|
||||
spec.external_path = None
|
||||
spec.external_modules = None
|
||||
@@ -4861,8 +4737,8 @@ def from_node_dict(cls, node):
|
||||
spec.external_modules = node["external"]["module"]
|
||||
if spec.external_modules is False:
|
||||
spec.external_modules = None
|
||||
spec.extra_attributes = (
|
||||
node["external"].get("extra_attributes") or syaml.syaml_dict()
|
||||
spec.extra_attributes = node["external"].get(
|
||||
"extra_attributes", syaml.syaml_dict()
|
||||
)
|
||||
|
||||
# specs read in are concrete unless marked abstract
|
||||
|
@@ -487,7 +487,7 @@ def _generate_fetchers(self, mirror_only=False) -> Generator["fs.FetchStrategy",
|
||||
# Insert fetchers in the order that the URLs are provided.
|
||||
fetchers[:0] = (
|
||||
fs.from_url_scheme(
|
||||
url_util.join(mirror.fetch_url, *self.mirror_layout.path.split(os.sep)),
|
||||
url_util.join(mirror.fetch_url, self.mirror_layout.path),
|
||||
checksum=digest,
|
||||
expand=expand,
|
||||
extension=extension,
|
||||
|
@@ -39,6 +39,9 @@
|
||||
DEFAULT_INSTALL_TREE_ROOT = os.path.join(spack.paths.opt_path, "spack")
|
||||
|
||||
|
||||
ConfigurationType = Union["spack.config.Configuration", "llnl.util.lang.Singleton"]
|
||||
|
||||
|
||||
def parse_install_tree(config_dict):
|
||||
"""Parse config settings and return values relevant to the store object.
|
||||
|
||||
@@ -204,7 +207,7 @@ def __reduce__(self):
|
||||
)
|
||||
|
||||
|
||||
def create(configuration: spack.config.Configuration) -> Store:
|
||||
def create(configuration: ConfigurationType) -> Store:
|
||||
"""Create a store from the configuration passed as input.
|
||||
|
||||
Args:
|
||||
@@ -237,7 +240,7 @@ def _create_global() -> Store:
|
||||
|
||||
|
||||
#: Singleton store instance
|
||||
STORE: Store = llnl.util.lang.Singleton(_create_global) # type: ignore
|
||||
STORE: Union[Store, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(_create_global)
|
||||
|
||||
|
||||
def reinitialize():
|
||||
|
@@ -17,6 +17,7 @@
|
||||
import multiprocessing
|
||||
import pickle
|
||||
import pydoc
|
||||
import sys
|
||||
from types import ModuleType
|
||||
|
||||
import spack.config
|
||||
@@ -26,6 +27,9 @@
|
||||
import spack.repo
|
||||
import spack.store
|
||||
|
||||
_SERIALIZE = sys.platform == "win32" or (sys.version_info >= (3, 8) and sys.platform == "darwin")
|
||||
|
||||
|
||||
patches = None
|
||||
|
||||
|
||||
@@ -52,7 +56,7 @@ def _restore_and_run(self, fn, test_state):
|
||||
fn()
|
||||
|
||||
def create(self):
|
||||
test_state = GlobalStateMarshaler()
|
||||
test_state = TestState()
|
||||
return multiprocessing.Process(target=self._restore_and_run, args=(self.fn, test_state))
|
||||
|
||||
|
||||
@@ -61,56 +65,49 @@ class PackageInstallContext:
|
||||
needs to be transmitted to a child process.
|
||||
"""
|
||||
|
||||
def __init__(self, pkg, *, ctx=None):
|
||||
ctx = ctx or multiprocessing.get_context()
|
||||
self.serialize = ctx.get_start_method() != "fork"
|
||||
if self.serialize:
|
||||
def __init__(self, pkg):
|
||||
if _SERIALIZE:
|
||||
self.serialized_pkg = serialize(pkg)
|
||||
self.global_state = GlobalStateMarshaler()
|
||||
self.serialized_env = serialize(spack.environment.active_environment())
|
||||
else:
|
||||
self.pkg = pkg
|
||||
self.global_state = None
|
||||
self.env = spack.environment.active_environment()
|
||||
self.spack_working_dir = spack.paths.spack_working_dir
|
||||
self.test_state = TestState()
|
||||
|
||||
def restore(self):
|
||||
self.test_state.restore()
|
||||
spack.paths.spack_working_dir = self.spack_working_dir
|
||||
env = pickle.load(self.serialized_env) if self.serialize else self.env
|
||||
# Activating the environment modifies the global configuration, so globals have to
|
||||
# be restored afterward, in case other modifications were applied on top (e.g. from
|
||||
# command line)
|
||||
env = pickle.load(self.serialized_env) if _SERIALIZE else self.env
|
||||
if env:
|
||||
spack.environment.activate(env)
|
||||
|
||||
if self.serialize:
|
||||
self.global_state.restore()
|
||||
|
||||
# Order of operation is important, since the package might be retrieved
|
||||
# from a repo defined within the environment configuration
|
||||
pkg = pickle.load(self.serialized_pkg) if self.serialize else self.pkg
|
||||
pkg = pickle.load(self.serialized_pkg) if _SERIALIZE else self.pkg
|
||||
return pkg
|
||||
|
||||
|
||||
class GlobalStateMarshaler:
|
||||
"""Class to serialize and restore global state for child processes.
|
||||
|
||||
Spack may modify state that is normally read from disk or command line in memory;
|
||||
this object is responsible for properly serializing that state to be applied to a subprocess.
|
||||
class TestState:
|
||||
"""Spack tests may modify state that is normally read from disk in memory;
|
||||
this object is responsible for properly serializing that state to be
|
||||
applied to a subprocess. This isn't needed outside of a testing environment
|
||||
but this logic is designed to behave the same inside or outside of tests.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.config = spack.config.CONFIG.ensure_unwrapped()
|
||||
self.platform = spack.platforms.host
|
||||
self.test_patches = store_patches()
|
||||
self.store = spack.store.STORE
|
||||
if _SERIALIZE:
|
||||
self.config = spack.config.CONFIG
|
||||
self.platform = spack.platforms.host
|
||||
self.test_patches = store_patches()
|
||||
self.store = spack.store.STORE
|
||||
|
||||
def restore(self):
|
||||
spack.config.CONFIG = self.config
|
||||
spack.repo.PATH = spack.repo.create(self.config)
|
||||
spack.platforms.host = self.platform
|
||||
spack.store.STORE = self.store
|
||||
self.test_patches.restore()
|
||||
if _SERIALIZE:
|
||||
spack.config.CONFIG = self.config
|
||||
spack.repo.PATH = spack.repo.create(self.config)
|
||||
spack.platforms.host = self.platform
|
||||
spack.store.STORE = self.store
|
||||
self.test_patches.restore()
|
||||
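The hunk above hinges on the multiprocessing start method; a minimal sketch of that check, using only the standard library:

import multiprocessing

# With the "fork" start method the child inherits in-memory globals, so nothing
# has to be pickled; with "spawn" (Windows, and the macOS default on Python >= 3.8)
# the relevant global state must be serialized and restored in the child process.
ctx = multiprocessing.get_context()
needs_serialization = ctx.get_start_method() != "fork"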
|
||||
|
||||
class TestPatches:
|
||||
|
@@ -1,247 +0,0 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
""" Test ABI-based splicing of dependencies """
|
||||
|
||||
from typing import List
|
||||
|
||||
import pytest
|
||||
|
||||
import spack.config
|
||||
import spack.deptypes as dt
|
||||
import spack.solver.asp
|
||||
from spack.installer import PackageInstaller
|
||||
from spack.spec import Spec
|
||||
|
||||
|
||||
class CacheManager:
|
||||
def __init__(self, specs: List[str]) -> None:
|
||||
self.req_specs = specs
|
||||
self.concr_specs: List[Spec]
|
||||
self.concr_specs = []
|
||||
|
||||
def __enter__(self):
|
||||
self.concr_specs = [Spec(s).concretized() for s in self.req_specs]
|
||||
for s in self.concr_specs:
|
||||
PackageInstaller([s.package], fake=True, explicit=True).install()
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
for s in self.concr_specs:
|
||||
s.package.do_uninstall()
|
||||
|
||||
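Usage pattern for the CacheManager context manager above, as the tests below employ it (the spec string comes from the mock packages used in those tests):

# Installs the requested specs (as fake packages) on entry and uninstalls them on
# exit, so concretizations inside the block can reuse them from the database
with CacheManager(["splice-h@1.0.2+compat ^splice-z@1.0.0"]):
    ...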
|
||||
# macOS and Windows only work if you pass this function pointer rather than a
|
||||
# closure
|
||||
def _mock_has_runtime_dependencies(_x):
|
||||
return True
|
||||
|
||||
|
||||
def _make_specs_non_buildable(specs: List[str]):
|
||||
output_config = {}
|
||||
for spec in specs:
|
||||
output_config[spec] = {"buildable": False}
|
||||
return output_config
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def splicing_setup(mutable_database, mock_packages, monkeypatch):
|
||||
spack.config.set("concretizer:reuse", True)
|
||||
monkeypatch.setattr(
|
||||
spack.solver.asp, "_has_runtime_dependencies", _mock_has_runtime_dependencies
|
||||
)
|
||||
|
||||
|
||||
def _enable_splicing():
|
||||
spack.config.set("concretizer:splice", {"automatic": True})
|
||||
|
||||
|
||||
def _has_build_dependency(spec: Spec, name: str):
|
||||
return any(s.name == name for s in spec.dependencies(None, dt.BUILD))
|
||||
|
||||
|
||||
def test_simple_reuse(splicing_setup):
|
||||
with CacheManager(["splice-z@1.0.0+compat"]):
|
||||
spack.config.set("packages", _make_specs_non_buildable(["splice-z"]))
|
||||
assert Spec("splice-z").concretized().satisfies(Spec("splice-z"))
|
||||
|
||||
|
||||
def test_simple_dep_reuse(splicing_setup):
|
||||
with CacheManager(["splice-z@1.0.0+compat"]):
|
||||
spack.config.set("packages", _make_specs_non_buildable(["splice-z"]))
|
||||
assert Spec("splice-h@1").concretized().satisfies(Spec("splice-h@1"))
|
||||
|
||||
|
||||
def test_splice_installed_hash(splicing_setup):
|
||||
cache = [
|
||||
"splice-t@1 ^splice-h@1.0.0+compat ^splice-z@1.0.0",
|
||||
"splice-h@1.0.2+compat ^splice-z@1.0.0",
|
||||
]
|
||||
with CacheManager(cache):
|
||||
packages_config = _make_specs_non_buildable(["splice-t", "splice-h"])
|
||||
spack.config.set("packages", packages_config)
|
||||
goal_spec = Spec("splice-t@1 ^splice-h@1.0.2+compat ^splice-z@1.0.0")
|
||||
with pytest.raises(Exception):
|
||||
goal_spec.concretized()
|
||||
_enable_splicing()
|
||||
assert goal_spec.concretized().satisfies(goal_spec)
|
||||
|
||||
|
||||
def test_splice_build_splice_node(splicing_setup):
|
||||
with CacheManager(["splice-t@1 ^splice-h@1.0.0+compat ^splice-z@1.0.0+compat"]):
|
||||
spack.config.set("packages", _make_specs_non_buildable(["splice-t"]))
|
||||
goal_spec = Spec("splice-t@1 ^splice-h@1.0.2+compat ^splice-z@1.0.0+compat")
|
||||
with pytest.raises(Exception):
|
||||
goal_spec.concretized()
|
||||
_enable_splicing()
|
||||
assert goal_spec.concretized().satisfies(goal_spec)
|
||||
|
||||
|
||||
def test_double_splice(splicing_setup):
|
||||
cache = [
|
||||
"splice-t@1 ^splice-h@1.0.0+compat ^splice-z@1.0.0+compat",
|
||||
"splice-h@1.0.2+compat ^splice-z@1.0.1+compat",
|
||||
"splice-z@1.0.2+compat",
|
||||
]
|
||||
with CacheManager(cache):
|
||||
freeze_builds_config = _make_specs_non_buildable(["splice-t", "splice-h", "splice-z"])
|
||||
spack.config.set("packages", freeze_builds_config)
|
||||
goal_spec = Spec("splice-t@1 ^splice-h@1.0.2+compat ^splice-z@1.0.2+compat")
|
||||
with pytest.raises(Exception):
|
||||
goal_spec.concretized()
|
||||
_enable_splicing()
|
||||
assert goal_spec.concretized().satisfies(goal_spec)
|
||||
|
||||
|
||||
# The next two tests are mirrors of one another
|
||||
def test_virtual_multi_splices_in(splicing_setup):
|
||||
cache = [
|
||||
"depends-on-virtual-with-abi ^virtual-abi-1",
|
||||
"depends-on-virtual-with-abi ^virtual-abi-2",
|
||||
]
|
||||
goal_specs = [
|
||||
"depends-on-virtual-with-abi ^virtual-abi-multi abi=one",
|
||||
"depends-on-virtual-with-abi ^virtual-abi-multi abi=two",
|
||||
]
|
||||
with CacheManager(cache):
|
||||
spack.config.set("packages", _make_specs_non_buildable(["depends-on-virtual-with-abi"]))
|
||||
for gs in goal_specs:
|
||||
with pytest.raises(Exception):
|
||||
Spec(gs).concretized()
|
||||
_enable_splicing()
|
||||
for gs in goal_specs:
|
||||
assert Spec(gs).concretized().satisfies(gs)
|
||||
|
||||
|
||||
def test_virtual_multi_can_be_spliced(splicing_setup):
|
||||
cache = [
|
||||
"depends-on-virtual-with-abi ^virtual-abi-multi abi=one",
|
||||
"depends-on-virtual-with-abi ^virtual-abi-multi abi=two",
|
||||
]
|
||||
goal_specs = [
|
||||
"depends-on-virtual-with-abi ^virtual-abi-1",
|
||||
"depends-on-virtual-with-abi ^virtual-abi-2",
|
||||
]
|
||||
with CacheManager(cache):
|
||||
spack.config.set("packages", _make_specs_non_buildable(["depends-on-virtual-with-abi"]))
|
||||
with pytest.raises(Exception):
|
||||
for gs in goal_specs:
|
||||
Spec(gs).concretized()
|
||||
_enable_splicing()
|
||||
for gs in goal_specs:
|
||||
assert Spec(gs).concretized().satisfies(gs)
|
||||
|
||||
|
||||
def test_manyvariant_star_matching_variant_splice(splicing_setup):
|
||||
cache = [
|
||||
# can_splice("manyvariants@1.0.0", when="@1.0.1", match_variants="*")
|
||||
"depends-on-manyvariants ^manyvariants@1.0.0+a+b c=v1 d=v2",
|
||||
"depends-on-manyvariants ^manyvariants@1.0.0~a~b c=v3 d=v3",
|
||||
]
|
||||
goal_specs = [
|
||||
Spec("depends-on-manyvariants ^manyvariants@1.0.1+a+b c=v1 d=v2"),
|
||||
Spec("depends-on-manyvariants ^manyvariants@1.0.1~a~b c=v3 d=v3"),
|
||||
]
|
||||
with CacheManager(cache):
|
||||
freeze_build_config = {"depends-on-manyvariants": {"buildable": False}}
|
||||
spack.config.set("packages", freeze_build_config)
|
||||
for goal in goal_specs:
|
||||
with pytest.raises(Exception):
|
||||
goal.concretized()
|
||||
_enable_splicing()
|
||||
for goal in goal_specs:
|
||||
assert goal.concretized().satisfies(goal)
|
||||
|
||||
|
||||
def test_manyvariant_limited_matching(splicing_setup):
|
||||
cache = [
|
||||
# can_splice("manyvariants@2.0.0+a~b", when="@2.0.1~a+b", match_variants=["c", "d"])
|
||||
"depends-on-manyvariants@2.0 ^manyvariants@2.0.0+a~b c=v3 d=v2",
|
||||
# can_splice("manyvariants@2.0.0 c=v1 d=v1", when="@2.0.1+a+b")
|
||||
"depends-on-manyvariants@2.0 ^manyvariants@2.0.0~a~b c=v1 d=v1",
|
||||
]
|
||||
goal_specs = [
|
||||
Spec("depends-on-manyvariants@2.0 ^manyvariants@2.0.1~a+b c=v3 d=v2"),
|
||||
Spec("depends-on-manyvariants@2.0 ^manyvariants@2.0.1+a+b c=v3 d=v3"),
|
||||
]
|
||||
with CacheManager(cache):
|
||||
freeze_build_config = {"depends-on-manyvariants": {"buildable": False}}
|
||||
spack.config.set("packages", freeze_build_config)
|
||||
for s in goal_specs:
|
||||
with pytest.raises(Exception):
|
||||
s.concretized()
|
||||
_enable_splicing()
|
||||
for s in goal_specs:
|
||||
assert s.concretized().satisfies(s)
|
||||
|
||||
|
||||
def test_external_splice_same_name(splicing_setup):
|
||||
cache = [
|
||||
"splice-h@1.0.0 ^splice-z@1.0.0+compat",
|
||||
"splice-t@1.0 ^splice-h@1.0.1 ^splice-z@1.0.1+compat",
|
||||
]
|
||||
packages_yaml = {
|
||||
"splice-z": {"externals": [{"spec": "splice-z@1.0.2+compat", "prefix": "/usr"}]}
|
||||
}
|
||||
goal_specs = [
|
||||
Spec("splice-h@1.0.0 ^splice-z@1.0.2"),
|
||||
Spec("splice-t@1.0 ^splice-h@1.0.1 ^splice-z@1.0.2"),
|
||||
]
|
||||
with CacheManager(cache):
|
||||
spack.config.set("packages", packages_yaml)
|
||||
_enable_splicing()
|
||||
for s in goal_specs:
|
||||
assert s.concretized().satisfies(s)
|
||||
|
||||
|
||||
def test_spliced_build_deps_only_in_build_spec(splicing_setup):
|
||||
cache = ["splice-t@1.0 ^splice-h@1.0.1 ^splice-z@1.0.0"]
|
||||
goal_spec = Spec("splice-t@1.0 ^splice-h@1.0.2 ^splice-z@1.0.0")
|
||||
|
||||
with CacheManager(cache):
|
||||
_enable_splicing()
|
||||
concr_goal = goal_spec.concretized()
|
||||
build_spec = concr_goal._build_spec
|
||||
# Spec has been spliced
|
||||
assert build_spec is not None
|
||||
# Build spec has spliced build dependencies
|
||||
assert _has_build_dependency(build_spec, "splice-h")
|
||||
assert _has_build_dependency(build_spec, "splice-z")
|
||||
# Spliced build dependencies are removed
|
||||
assert len(concr_goal.dependencies(None, dt.BUILD)) == 0
|
||||
|
||||
|
||||
def test_spliced_transitive_dependency(splicing_setup):
|
||||
cache = ["splice-depends-on-t@1.0 ^splice-h@1.0.1"]
|
||||
goal_spec = Spec("splice-depends-on-t^splice-h@1.0.2")
|
||||
|
||||
with CacheManager(cache):
|
||||
spack.config.set("packages", _make_specs_non_buildable(["splice-depends-on-t"]))
|
||||
_enable_splicing()
|
||||
concr_goal = goal_spec.concretized()
|
||||
# Spec has been spliced
|
||||
assert concr_goal._build_spec is not None
|
||||
assert concr_goal["splice-t"]._build_spec is not None
|
||||
assert concr_goal.satisfies(goal_spec)
|
||||
# Spliced build dependencies are removed
|
||||
assert len(concr_goal.dependencies(None, dt.BUILD)) == 0
|
@@ -15,8 +15,6 @@
|
||||
from llnl.util.filesystem import HeaderList, LibraryList
|
||||
|
||||
import spack.build_environment
|
||||
import spack.compiler
|
||||
import spack.compilers
|
||||
import spack.config
|
||||
import spack.deptypes as dt
|
||||
import spack.package_base
|
||||
|
@@ -199,7 +199,7 @@ def check_args(cc, args, expected):
|
||||
"""
|
||||
with set_env(SPACK_TEST_COMMAND="dump-args"):
|
||||
cc_modified_args = cc(*args, output=str).strip().split("\n")
|
||||
assert cc_modified_args == expected
|
||||
assert expected == cc_modified_args
|
||||
|
||||
|
||||
def check_args_contents(cc, args, must_contain, must_not_contain):
|
||||
@@ -272,43 +272,6 @@ def test_ld_mode(wrapper_environment):
|
||||
assert dump_mode(ld, ["foo.o", "bar.o", "baz.o", "-o", "foo", "-Wl,-rpath,foo"]) == "ld"
|
||||
|
||||
|
||||
def test_ld_unterminated_rpath(wrapper_environment):
|
||||
check_args(
|
||||
ld,
|
||||
["foo.o", "bar.o", "baz.o", "-o", "foo", "-rpath"],
|
||||
["ld", "--disable-new-dtags", "foo.o", "bar.o", "baz.o", "-o", "foo", "-rpath"],
|
||||
)
|
||||
|
||||
|
||||
def test_xlinker_unterminated_rpath(wrapper_environment):
|
||||
check_args(
|
||||
cc,
|
||||
["foo.o", "bar.o", "baz.o", "-o", "foo", "-Xlinker", "-rpath"],
|
||||
[real_cc]
|
||||
+ target_args
|
||||
+ [
|
||||
"-Wl,--disable-new-dtags",
|
||||
"foo.o",
|
||||
"bar.o",
|
||||
"baz.o",
|
||||
"-o",
|
||||
"foo",
|
||||
"-Xlinker",
|
||||
"-rpath",
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
def test_wl_unterminated_rpath(wrapper_environment):
|
||||
check_args(
|
||||
cc,
|
||||
["foo.o", "bar.o", "baz.o", "-o", "foo", "-Wl,-rpath"],
|
||||
[real_cc]
|
||||
+ target_args
|
||||
+ ["-Wl,--disable-new-dtags", "foo.o", "bar.o", "baz.o", "-o", "foo", "-Wl,-rpath"],
|
||||
)
|
||||
|
||||
|
||||
def test_ld_flags(wrapper_environment, wrapper_flags):
|
||||
check_args(
|
||||
ld,
|
||||
|
@@ -9,7 +9,6 @@
|
||||
import pathlib
|
||||
import shutil
|
||||
from argparse import Namespace
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -75,7 +74,7 @@ def setup_combined_multiple_env():
|
||||
env("create", "test1")
|
||||
test1 = ev.read("test1")
|
||||
with test1:
|
||||
add("mpich@1.0")
|
||||
add("zlib")
|
||||
test1.concretize()
|
||||
test1.write()
|
||||
|
||||
@@ -117,99 +116,6 @@ def check_viewdir_removal(viewdir):
|
||||
) == ["projections.yaml"]
|
||||
|
||||
|
||||
def test_env_track_nonexistant_path_fails(capfd):
|
||||
with pytest.raises(spack.main.SpackCommandError):
|
||||
env("track", "path/does/not/exist")
|
||||
|
||||
out, _ = capfd.readouterr()
|
||||
assert "doesn't contain an environment" in out
|
||||
|
||||
|
||||
def test_env_track_existing_env_fails(capfd):
|
||||
env("create", "track_test")
|
||||
|
||||
with pytest.raises(spack.main.SpackCommandError):
|
||||
env("track", "--name", "track_test", ev.environment_dir_from_name("track_test"))
|
||||
|
||||
out, _ = capfd.readouterr()
|
||||
assert "environment named track_test already exists" in out
|
||||
|
||||
|
||||
def test_env_track_valid(tmp_path):
|
||||
with fs.working_dir(str(tmp_path)):
|
||||
# create an independent environment
|
||||
env("create", "-d", ".")
|
||||
|
||||
# test tracking an environment in known store
|
||||
env("track", "--name", "test1", ".")
|
||||
|
||||
# test removing environment to ensure independent isn't deleted
|
||||
env("rm", "-y", "test1")
|
||||
|
||||
assert os.path.isfile("spack.yaml")
|
||||
|
||||
|
||||
def test_env_untrack_valid(tmp_path):
|
||||
with fs.working_dir(str(tmp_path)):
|
||||
# create an independent environment
|
||||
env("create", "-d", ".")
|
||||
|
||||
# test tracking an environment in known store
|
||||
env("track", "--name", "test_untrack", ".")
|
||||
env("untrack", "--yes-to-all", "test_untrack")
|
||||
|
||||
# check that environment was successfully untracked
|
||||
out = env("ls")
|
||||
assert "test_untrack" not in out
|
||||
|
||||
|
||||
def test_env_untrack_invalid_name():
|
||||
# test untracking an environment that doesn't exist
|
||||
env_name = "invalid_enviornment_untrack"
|
||||
|
||||
out = env("untrack", env_name)
|
||||
|
||||
assert f"Environment '{env_name}' does not exist" in out
|
||||
|
||||
|
||||
def test_env_untrack_when_active(tmp_path, capfd):
|
||||
env_name = "test_untrack_active"
|
||||
|
||||
with fs.working_dir(str(tmp_path)):
|
||||
# create an independent environment
|
||||
env("create", "-d", ".")
|
||||
|
||||
# test tracking an environment in known store
|
||||
env("track", "--name", env_name, ".")
|
||||
|
||||
active_env = ev.read(env_name)
|
||||
with active_env:
|
||||
with pytest.raises(spack.main.SpackCommandError):
|
||||
env("untrack", "--yes-to-all", env_name)
|
||||
|
||||
# check that environment could not be untracked while active
|
||||
out, _ = capfd.readouterr()
|
||||
assert f"'{env_name}' can't be untracked while activated" in out
|
||||
|
||||
env("untrack", "-f", env_name)
|
||||
out = env("ls")
|
||||
assert env_name not in out
|
||||
|
||||
|
||||
def test_env_untrack_managed(tmp_path, capfd):
|
||||
env_name = "test_untrack_managed"
|
||||
|
||||
# create a managed environment
|
||||
env("create", env_name)
|
||||
|
||||
with pytest.raises(spack.main.SpackCommandError):
|
||||
env("untrack", env_name)
|
||||
|
||||
# check that a managed environment cannot be untracked
|
||||
out, _ = capfd.readouterr()
|
||||
assert f"'{env_name}' is not a tracked env" in out
|
||||
|
||||
|
||||
def test_add():
|
||||
e = ev.create("test")
|
||||
e.add("mpileaks")
|
||||
@@ -221,7 +127,6 @@ def test_change_match_spec():
|
||||
|
||||
e = ev.read("test")
|
||||
with e:
|
||||
|
||||
add("mpileaks@2.1")
|
||||
add("mpileaks@2.2")
|
||||
|
||||
@@ -496,17 +401,14 @@ def test_env_install_single_spec(install_mockery, mock_fetch):
|
||||
|
||||
|
||||
@pytest.mark.parametrize("unify", [True, False, "when_possible"])
|
||||
def test_env_install_include_concrete_env(unify, install_mockery, mock_fetch, mutable_config):
|
||||
def test_env_install_include_concrete_env(unify, install_mockery, mock_fetch):
|
||||
test1, test2, combined = setup_combined_multiple_env()
|
||||
|
||||
combined.unify = unify
|
||||
if not unify:
|
||||
combined.manifest.set_default_view(False)
|
||||
|
||||
combined.add("mpileaks")
|
||||
combined.concretize()
|
||||
combined.write()
|
||||
|
||||
combined.unify = unify
|
||||
|
||||
with combined:
|
||||
install()
|
||||
|
||||
@@ -520,14 +422,6 @@ def test_env_install_include_concrete_env(unify, install_mockery, mock_fetch, mu
|
||||
assert test1_roots == combined_included_roots[test1.path]
|
||||
assert test2_roots == combined_included_roots[test2.path]
|
||||
|
||||
mpileaks = combined.specs_by_hash[combined.concretized_order[0]]
|
||||
if unify:
|
||||
assert mpileaks["mpi"].dag_hash() in test1_roots
|
||||
assert mpileaks["libelf"].dag_hash() in test2_roots
|
||||
else:
|
||||
# check that unification did not happen by accident
|
||||
assert mpileaks["mpi"].dag_hash() not in test1_roots
|
||||
|
||||
|
||||
def test_env_roots_marked_explicit(install_mockery, mock_fetch):
|
||||
install = SpackCommand("install")
|
||||
@@ -782,7 +676,7 @@ def test_force_remove_included_env():
|
||||
rm_output = env("remove", "-f", "-y", "test")
|
||||
list_output = env("list")
|
||||
|
||||
assert "'test' is used by environment 'combined_env'" in rm_output
|
||||
assert '"test" is being used by environment "combined_env"' in rm_output
|
||||
assert "test" not in list_output
|
||||
|
||||
|
||||
@@ -1975,7 +1869,7 @@ def test_env_include_concrete_envs_lockfile():
|
||||
def test_env_include_concrete_add_env():
|
||||
test1, test2, combined = setup_combined_multiple_env()
|
||||
|
||||
# create new env & concretize
|
||||
# crete new env & crecretize
|
||||
env("create", "new")
|
||||
new_env = ev.read("new")
|
||||
with new_env:
|
||||
@@ -2027,116 +1921,6 @@ def test_env_include_concrete_remove_env():
|
||||
assert test2.path not in lockfile_as_dict["include_concrete"].keys()
|
||||
|
||||
|
||||
def configure_reuse(reuse_mode, combined_env) -> Optional[ev.Environment]:
|
||||
override_env = None
|
||||
_config: Dict[Any, Any] = {}
|
||||
if reuse_mode == "true":
|
||||
_config = {"concretizer": {"reuse": True}}
|
||||
elif reuse_mode == "from_environment":
|
||||
_config = {"concretizer": {"reuse": {"from": [{"type": "environment"}]}}}
|
||||
elif reuse_mode == "from_environment_test1":
|
||||
_config = {"concretizer": {"reuse": {"from": [{"type": "environment", "path": "test1"}]}}}
|
||||
elif reuse_mode == "from_environment_external_test":
|
||||
# Create a new environment called external_test that enables the "debug" variant.
# The default is "~debug"
|
||||
env("create", "external_test")
|
||||
override_env = ev.read("external_test")
|
||||
with override_env:
|
||||
add("mpich@1.0 +debug")
|
||||
override_env.concretize()
|
||||
override_env.write()
|
||||
|
||||
# Reuse from the environment that is not included.
|
||||
# Specify the requirement for the debug variant. By default this would concretize to use
|
||||
# mpich@3.0 but with include concrete the mpich@1.0 +debug version from the
|
||||
# "external_test" environment will be used.
|
||||
_config = {
|
||||
"concretizer": {"reuse": {"from": [{"type": "environment", "path": "external_test"}]}},
|
||||
"packages": {"mpich": {"require": ["+debug"]}},
|
||||
}
|
||||
elif reuse_mode == "from_environment_raise":
|
||||
_config = {
|
||||
"concretizer": {"reuse": {"from": [{"type": "environment", "path": "not-a-real-env"}]}}
|
||||
}
|
||||
# Disable unification in these tests to avoid confusing reuse due to unification using an
|
||||
# include concrete spec vs reuse due to the reuse configuration
|
||||
_config["concretizer"].update({"unify": False})
|
||||
|
||||
combined_env.manifest.configuration.update(_config)
|
||||
combined_env.manifest.changed = True
|
||||
combined_env.write()
|
||||
|
||||
return override_env
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"reuse_mode",
|
||||
[
|
||||
"true",
|
||||
"from_environment",
|
||||
"from_environment_test1",
|
||||
"from_environment_external_test",
|
||||
"from_environment_raise",
|
||||
],
|
||||
)
|
||||
def test_env_include_concrete_reuse(monkeypatch, reuse_mode):
|
||||
|
||||
# The mock packages do not use the gcc-runtime
|
||||
def mock_has_runtime_dependencies(*args, **kwargs):
|
||||
return True
|
||||
|
||||
monkeypatch.setattr(
|
||||
spack.solver.asp, "_has_runtime_dependencies", mock_has_runtime_dependencies
|
||||
)
|
||||
# The default mpi version is 3.x provided by mpich in the mock repo.
|
||||
# This test verifies that, when concretizing with an included concrete
# environment and "concretizer:reuse:true", the included
# concrete spec overrides the default with mpi@1.0.
|
||||
test1, _, combined = setup_combined_multiple_env()
|
||||
|
||||
# Set the reuse mode for the environment
|
||||
override_env = configure_reuse(reuse_mode, combined)
|
||||
if override_env:
|
||||
# If there is an override environment (i.e. testing reuse with
|
||||
# an external environment) update it here.
|
||||
test1 = override_env
|
||||
|
||||
# Capture the test1 specs included by combined
|
||||
test1_specs_by_hash = test1.specs_by_hash
|
||||
|
||||
try:
|
||||
# Add mpileaks to the combined environment
|
||||
with combined:
|
||||
add("mpileaks")
|
||||
combined.concretize()
|
||||
comb_specs_by_hash = combined.specs_by_hash
|
||||
|
||||
# create reference env with mpileaks that does not use reuse
|
||||
# This should concretize to the default version of mpich (3.0)
|
||||
env("create", "new")
|
||||
ref_env = ev.read("new")
|
||||
with ref_env:
|
||||
add("mpileaks")
|
||||
ref_env.concretize()
|
||||
ref_specs_by_hash = ref_env.specs_by_hash
|
||||
|
||||
# Ensure that the mpich used by the mpileaks is the mpich from the reused test environment
|
||||
comb_mpileaks_spec = [s for s in comb_specs_by_hash.values() if s.name == "mpileaks"]
|
||||
test1_mpich_spec = [s for s in test1_specs_by_hash.values() if s.name == "mpich"]
|
||||
assert len(comb_mpileaks_spec) == 1
|
||||
assert len(test1_mpich_spec) == 1
|
||||
assert comb_mpileaks_spec[0]["mpich"].dag_hash() == test1_mpich_spec[0].dag_hash()
|
||||
|
||||
# None of the reference specs (using mpich@3) reuse specs from test1.
# This tests that the reuse is not happening coincidentally
|
||||
assert not any([s in test1_specs_by_hash for s in ref_specs_by_hash])
|
||||
|
||||
# Make sure the "raise" mode actually raises
|
||||
assert "raise" not in reuse_mode
|
||||
except ev.SpackEnvironmentError:
|
||||
assert "raise" in reuse_mode
|
||||
|
||||
|
||||
@pytest.mark.parametrize("unify", [True, False, "when_possible"])
|
||||
def test_env_include_concrete_env_reconcretized(unify):
|
||||
"""Double check to make sure that concrete_specs for the local specs is empty
|
||||
@@ -4333,13 +4117,13 @@ def test_spack_package_ids_variable(tmpdir, mock_packages):
|
||||
# Include in Makefile and create targets that depend on SPACK_PACKAGE_IDS
|
||||
with open(makefile_path, "w") as f:
|
||||
f.write(
|
||||
"""
|
||||
r"""
|
||||
all: post-install
|
||||
|
||||
include include.mk
|
||||
|
||||
example/post-install/%: example/install/%
|
||||
\t$(info post-install: $(HASH)) # noqa: W191,E101
|
||||
$(info post-install: $(HASH)) # noqa: W191,E101
|
||||
|
||||
post-install: $(addprefix example/post-install/,$(example/SPACK_PACKAGE_IDS))
|
||||
"""
|
||||
|
@@ -14,13 +14,10 @@
|
||||
import spack.cmd as cmd
|
||||
import spack.cmd.find
|
||||
import spack.environment as ev
|
||||
import spack.repo
|
||||
import spack.store
|
||||
import spack.user_environment as uenv
|
||||
from spack.main import SpackCommand
|
||||
from spack.spec import Spec
|
||||
from spack.test.conftest import create_test_repo
|
||||
from spack.test.utilities import SpackCommandArgs
|
||||
from spack.util.pattern import Bunch
|
||||
|
||||
find = SpackCommand("find")
|
||||
@@ -456,140 +453,3 @@ def test_environment_with_version_range_in_compiler_doesnt_fail(tmp_path):
|
||||
with test_environment:
|
||||
output = find()
|
||||
assert "zlib%gcc@12.1.0" in output
|
||||
|
||||
|
||||
_pkga = (
|
||||
"a0",
|
||||
"""\
|
||||
class A0(Package):
|
||||
version("1.2")
|
||||
version("1.1")
|
||||
|
||||
depends_on("b0")
|
||||
depends_on("c0")
|
||||
""",
|
||||
)
|
||||
|
||||
|
||||
_pkgb = (
|
||||
"b0",
|
||||
"""\
|
||||
class B0(Package):
|
||||
version("1.2")
|
||||
version("1.1")
|
||||
""",
|
||||
)
|
||||
|
||||
|
||||
_pkgc = (
|
||||
"c0",
|
||||
"""\
|
||||
class C0(Package):
|
||||
version("1.2")
|
||||
version("1.1")
|
||||
|
||||
tags = ["tag0", "tag1"]
|
||||
""",
|
||||
)
|
||||
|
||||
|
||||
_pkgd = (
|
||||
"d0",
|
||||
"""\
|
||||
class D0(Package):
|
||||
version("1.2")
|
||||
version("1.1")
|
||||
|
||||
depends_on("c0")
|
||||
depends_on("e0")
|
||||
""",
|
||||
)
|
||||
|
||||
|
||||
_pkge = (
|
||||
"e0",
|
||||
"""\
|
||||
class E0(Package):
|
||||
tags = ["tag1", "tag2"]
|
||||
|
||||
version("1.2")
|
||||
version("1.1")
|
||||
""",
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _create_test_repo(tmpdir, mutable_config):
|
||||
r"""
|
||||
a0 d0
|
||||
/ \ / \
|
||||
b0 c0 e0
|
||||
"""
|
||||
yield create_test_repo(tmpdir, [_pkga, _pkgb, _pkgc, _pkgd, _pkge])
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def test_repo(_create_test_repo, monkeypatch, mock_stage):
|
||||
with spack.repo.use_repositories(_create_test_repo) as mock_repo_path:
|
||||
yield mock_repo_path
|
||||
|
||||
|
||||
def test_find_concretized_not_installed(
|
||||
mutable_mock_env_path, install_mockery, mock_fetch, test_repo, mock_archive
|
||||
):
|
||||
"""Test queries against installs of specs against fake repo.
|
||||
|
||||
Given A, B, C, D, E, create an environment and install A.
|
||||
Add and concretize (but do not install) D.
|
||||
Test a few queries after force uninstalling a dependency of A (but not
|
||||
A itself).
|
||||
"""
|
||||
add = SpackCommand("add")
|
||||
concretize = SpackCommand("concretize")
|
||||
uninstall = SpackCommand("uninstall")
|
||||
|
||||
def _query(_e, *args):
|
||||
return spack.cmd.find._find_query(SpackCommandArgs("find")(*args), _e)
|
||||
|
||||
def _nresults(_qresult):
|
||||
return len(_qresult[0]), len(_qresult[1])
|
||||
|
||||
env("create", "test")
|
||||
with ev.read("test") as e:
|
||||
install("--fake", "--add", "a0")
|
||||
|
||||
assert _nresults(_query(e)) == (3, 0)
|
||||
assert _nresults(_query(e, "--explicit")) == (1, 0)
|
||||
|
||||
add("d0")
|
||||
concretize("--reuse")
|
||||
|
||||
# At this point d0 should use existing c0, but d/e
|
||||
# are not installed in the env
|
||||
|
||||
# --explicit, --deprecated, --start-date, etc. are all
|
||||
# filters on records, and therefore don't apply to
|
||||
# concretized-but-not-installed results
|
||||
assert _nresults(_query(e, "--explicit")) == (1, 2)
|
||||
|
||||
assert _nresults(_query(e)) == (3, 2)
|
||||
assert _nresults(_query(e, "-c", "d0")) == (0, 1)
|
||||
|
||||
uninstall("-f", "-y", "b0")
|
||||
|
||||
# b0 is now missing (it is not installed, but has an
|
||||
# installed parent)
|
||||
|
||||
assert _nresults(_query(e)) == (2, 3)
|
||||
# b0 is "double-counted" here: it meets the --missing
|
||||
# criteria, and also now qualifies as a
|
||||
# concretized-but-not-installed spec
|
||||
assert _nresults(_query(e, "--missing")) == (3, 3)
|
||||
assert _nresults(_query(e, "--only-missing")) == (1, 3)
|
||||
|
||||
# Tags are not attached to install records, so they
|
||||
# can modify the concretized-but-not-installed results
|
||||
|
||||
assert _nresults(_query(e, "--tag=tag0")) == (1, 0)
|
||||
assert _nresults(_query(e, "--tag=tag1")) == (1, 1)
|
||||
assert _nresults(_query(e, "--tag=tag2")) == (0, 1)
|
||||
|
@@ -4,17 +4,10 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import pytest
|
||||
|
||||
import spack.config
|
||||
import spack.environment as ev
|
||||
import spack.error
|
||||
import spack.solver.asp as asp
|
||||
import spack.store
|
||||
from spack.cmd import (
|
||||
CommandNameError,
|
||||
PythonNameError,
|
||||
cmd_name,
|
||||
matching_specs_from_env,
|
||||
parse_specs,
|
||||
python_name,
|
||||
require_cmd_name,
|
||||
require_python_name,
|
||||
@@ -41,99 +34,3 @@ def test_require_cmd_name():
|
||||
with pytest.raises(CommandNameError):
|
||||
require_cmd_name("okey_dokey")
|
||||
require_cmd_name(cmd_name("okey_dokey"))
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"unify,spec_strs,error",
|
||||
[
|
||||
# single spec
|
||||
(True, ["zmpi"], None),
|
||||
(False, ["mpileaks"], None),
|
||||
# multiple specs, some from hash some from file
|
||||
(True, ["zmpi", "mpileaks^zmpi", "libelf"], None),
|
||||
(True, ["mpileaks^zmpi", "mpileaks^mpich", "libelf"], spack.error.SpecError),
|
||||
(False, ["mpileaks^zmpi", "mpileaks^mpich", "libelf"], None),
|
||||
],
|
||||
)
|
||||
def test_special_cases_concretization_parse_specs(
|
||||
unify, spec_strs, error, monkeypatch, mutable_config, mutable_database, tmpdir
|
||||
):
|
||||
"""Test that special cases in parse_specs(concretize=True) bypass solver"""
|
||||
|
||||
# monkeypatch to ensure we do not call the actual concretizer
|
||||
def _fail(*args, **kwargs):
|
||||
assert False
|
||||
|
||||
monkeypatch.setattr(asp.SpackSolverSetup, "setup", _fail)
|
||||
|
||||
spack.config.set("concretizer:unify", unify)
|
||||
|
||||
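# Each argument is written as "/<dag-hash>": a leading slash makes Spack resolve the
# spec by the DAG hash of an already-installed spec instead of re-concretizing it.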
args = [f"/{spack.store.STORE.db.query(s)[0].dag_hash()}" for s in spec_strs]
|
||||
if len(args) > 1:
|
||||
# We convert the last one to a specfile input
|
||||
filename = tmpdir.join("spec.json")
|
||||
spec = parse_specs(args[-1], concretize=True)[0]
|
||||
with open(filename, "w") as f:
|
||||
spec.to_json(f)
|
||||
args[-1] = str(filename)
|
||||
|
||||
if error:
|
||||
with pytest.raises(error):
|
||||
parse_specs(args, concretize=True)
|
||||
else:
|
||||
# assertion error from monkeypatch above if test fails
|
||||
parse_specs(args, concretize=True)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"unify,spec_strs,error",
|
||||
[
|
||||
# single spec
|
||||
(True, ["zmpi"], None),
|
||||
(False, ["mpileaks"], None),
|
||||
# multiple specs, some from hash some from file
|
||||
(True, ["zmpi", "mpileaks^zmpi", "libelf"], None),
|
||||
(True, ["mpileaks^zmpi", "mpileaks^mpich", "libelf"], spack.error.SpecError),
|
||||
(False, ["mpileaks^zmpi", "mpileaks^mpich", "libelf"], None),
|
||||
],
|
||||
)
|
||||
def test_special_cases_concretization_matching_specs_from_env(
|
||||
unify,
|
||||
spec_strs,
|
||||
error,
|
||||
monkeypatch,
|
||||
mutable_config,
|
||||
mutable_database,
|
||||
tmpdir,
|
||||
mutable_mock_env_path,
|
||||
):
|
||||
"""Test that special cases in parse_specs(concretize=True) bypass solver"""
|
||||
|
||||
# monkeypatch to ensure we do not call the actual concretizer
|
||||
def _fail(*args, **kwargs):
|
||||
assert False
|
||||
|
||||
monkeypatch.setattr(asp.SpackSolverSetup, "setup", _fail)
|
||||
|
||||
spack.config.set("concretizer:unify", unify)
|
||||
|
||||
ev.create("test")
|
||||
env = ev.read("test")
|
||||
|
||||
args = [f"/{spack.store.STORE.db.query(s)[0].dag_hash()}" for s in spec_strs]
|
||||
if len(args) > 1:
|
||||
# We convert the last one to a specfile input
|
||||
filename = tmpdir.join("spec.json")
|
||||
spec = parse_specs(args[-1], concretize=True)[0]
|
||||
with open(filename, "w") as f:
|
||||
spec.to_json(f)
|
||||
args[-1] = str(filename)
|
||||
|
||||
with env:
|
||||
specs = parse_specs(args, concretize=False)
|
||||
if error:
|
||||
with pytest.raises(error):
|
||||
matching_specs_from_env(specs)
|
||||
else:
|
||||
# assertion error from monkeypatch above if test fails
|
||||
matching_specs_from_env(specs)
|
||||
|
@@ -906,7 +906,7 @@ def test_cdash_configure_warning(tmpdir, mock_fetch, install_mockery, capfd):
|
||||
specfile = "./spec.json"
|
||||
with open(specfile, "w") as f:
|
||||
f.write(spec.to_json())
|
||||
print(spec.to_json())
|
||||
|
||||
install("--log-file=cdash_reports", "--log-format=cdash", specfile)
|
||||
# Verify Configure.xml exists with expected contents.
|
||||
report_dir = tmpdir.join("cdash_reports")
|
||||
|
@@ -17,7 +17,6 @@
|
||||
import spack.version
|
||||
from spack.main import SpackCommand, SpackCommandError
|
||||
|
||||
config = SpackCommand("config")
|
||||
mirror = SpackCommand("mirror")
|
||||
env = SpackCommand("env")
|
||||
add = SpackCommand("add")
|
||||
@@ -182,122 +181,20 @@ def test_mirror_crud(mutable_config, capsys):
|
||||
output = mirror("remove", "mirror")
|
||||
assert "Removed mirror" in output
|
||||
|
||||
# Test S3 connection info token as variable
|
||||
mirror("add", "--s3-access-token-variable", "aaaaaazzzzz", "mirror", "s3://spack-public")
|
||||
# Test S3 connection info id/key
|
||||
mirror(
|
||||
"add",
|
||||
"--s3-access-key-id",
|
||||
"foo",
|
||||
"--s3-access-key-secret",
|
||||
"bar",
|
||||
"mirror",
|
||||
"s3://spack-public",
|
||||
)
|
||||
|
||||
output = mirror("remove", "mirror")
|
||||
assert "Removed mirror" in output
|
||||
|
||||
def do_add_set_seturl_access_pair(
|
||||
id_arg, secret_arg, mirror_name="mirror", mirror_url="s3://spack-public"
|
||||
):
|
||||
# Test S3 connection info id/key
|
||||
output = mirror("add", id_arg, "foo", secret_arg, "bar", mirror_name, mirror_url)
|
||||
if "variable" not in secret_arg:
|
||||
assert (
|
||||
f"Configuring mirror secrets as plain text with {secret_arg} is deprecated. "
|
||||
in output
|
||||
)
|
||||
|
||||
output = config("blame", "mirrors")
|
||||
assert all([x in output for x in ("foo", "bar", mirror_name, mirror_url)])
|
||||
# Mirror access_pair deprecation warning should not be in blame output
|
||||
assert "support for plain text secrets" not in output
|
||||
|
||||
output = mirror("set", id_arg, "foo_set", secret_arg, "bar_set", mirror_name)
|
||||
if "variable" not in secret_arg:
|
||||
assert "support for plain text secrets" in output
|
||||
output = config("blame", "mirrors")
|
||||
assert all([x in output for x in ("foo_set", "bar_set", mirror_name, mirror_url)])
|
||||
if "variable" not in secret_arg:
|
||||
output = mirror(
|
||||
"set", id_arg, "foo_set", secret_arg + "-variable", "bar_set_var", mirror_name
|
||||
)
|
||||
assert "support for plain text secrets" not in output
|
||||
output = config("blame", "mirrors")
|
||||
assert all(
|
||||
[x in output for x in ("foo_set", "bar_set_var", mirror_name, mirror_url)]
|
||||
)
|
||||
|
||||
output = mirror(
|
||||
"set-url",
|
||||
id_arg,
|
||||
"foo_set_url",
|
||||
secret_arg,
|
||||
"bar_set_url",
|
||||
"--push",
|
||||
mirror_name,
|
||||
mirror_url + "-push",
|
||||
)
|
||||
output = config("blame", "mirrors")
|
||||
assert all(
|
||||
[
|
||||
x in output
|
||||
for x in ("foo_set_url", "bar_set_url", mirror_name, mirror_url + "-push")
|
||||
]
|
||||
)
|
||||
|
||||
output = mirror("set", id_arg, "a", mirror_name)
|
||||
assert "No changes made to mirror" not in output
|
||||
|
||||
output = mirror("set", secret_arg, "b", mirror_name)
|
||||
assert "No changes made to mirror" not in output
|
||||
|
||||
output = mirror("set-url", id_arg, "c", mirror_name, mirror_url)
|
||||
assert "No changes made to mirror" not in output
|
||||
|
||||
output = mirror("set-url", secret_arg, "d", mirror_name, mirror_url)
|
||||
assert "No changes made to mirror" not in output
|
||||
|
||||
output = mirror("remove", mirror_name)
|
||||
assert "Removed mirror" in output
|
||||
|
||||
output = mirror("add", id_arg, "foo", mirror_name, mirror_url)
|
||||
assert "Expected both parts of the access pair to be specified. " in output
|
||||
|
||||
output = mirror("set-url", id_arg, "bar", mirror_name, mirror_url)
|
||||
assert "Expected both parts of the access pair to be specified. " in output
|
||||
|
||||
output = mirror("set", id_arg, "bar", mirror_name)
|
||||
assert "Expected both parts of the access pair to be specified. " in output
|
||||
|
||||
output = mirror("remove", mirror_name)
|
||||
assert "Removed mirror" in output
|
||||
|
||||
output = mirror("add", secret_arg, "bar", mirror_name, mirror_url)
|
||||
assert "Expected both parts of the access pair to be specified. " in output
|
||||
|
||||
output = mirror("set-url", secret_arg, "bar", mirror_name, mirror_url)
|
||||
assert "Expected both parts of the access pair to be specified. " in output
|
||||
|
||||
output = mirror("set", secret_arg, "bar", mirror_name)
|
||||
assert "Expected both parts of the access pair to be specified. " in output
|
||||
|
||||
output = mirror("remove", mirror_name)
|
||||
assert "Removed mirror" in output
|
||||
|
||||
output = mirror("list")
|
||||
assert "No mirrors configured" in output
|
||||
|
||||
do_add_set_seturl_access_pair("--s3-access-key-id", "--s3-access-key-secret")
|
||||
do_add_set_seturl_access_pair("--s3-access-key-id", "--s3-access-key-secret-variable")
|
||||
do_add_set_seturl_access_pair(
|
||||
"--s3-access-key-id-variable", "--s3-access-key-secret-variable"
|
||||
)
|
||||
with pytest.raises(
|
||||
spack.error.SpackError, match="Cannot add mirror with a variable id and text secret"
|
||||
):
|
||||
do_add_set_seturl_access_pair("--s3-access-key-id-variable", "--s3-access-key-secret")
|
||||
|
||||
# Test OCI connection info user/password
|
||||
do_add_set_seturl_access_pair("--oci-username", "--oci-password")
|
||||
do_add_set_seturl_access_pair("--oci-username", "--oci-password-variable")
|
||||
do_add_set_seturl_access_pair("--oci-username-variable", "--oci-password-variable")
|
||||
with pytest.raises(
|
||||
spack.error.SpackError, match="Cannot add mirror with a variable id and text secret"
|
||||
):
|
||||
do_add_set_seturl_access_pair("--s3-access-key-id-variable", "--s3-access-key-secret")
|
||||
|
||||
# Test S3 connection info with endpoint URL
|
||||
mirror(
|
||||
"add",
|
||||
@@ -321,9 +218,6 @@ def do_add_set_seturl_access_pair(
|
||||
output = mirror("remove", "mirror")
|
||||
assert "Removed mirror" in output
|
||||
|
||||
output = mirror("list")
|
||||
assert "No mirrors configured" in output
|
||||
|
||||
|
||||
def test_mirror_nonexisting(mutable_config):
|
||||
with pytest.raises(SpackCommandError):
|
||||
|
@@ -311,20 +311,7 @@ def test_pkg_grep(mock_packages, capfd):
|
||||
output, _ = capfd.readouterr()
|
||||
assert output.strip() == "\n".join(
|
||||
spack.repo.PATH.get_pkg_class(name).module.__file__
|
||||
for name in [
|
||||
"depends-on-manyvariants",
|
||||
"manyvariants",
|
||||
"splice-a",
|
||||
"splice-depends-on-t",
|
||||
"splice-h",
|
||||
"splice-t",
|
||||
"splice-vh",
|
||||
"splice-vt",
|
||||
"splice-z",
|
||||
"virtual-abi-1",
|
||||
"virtual-abi-2",
|
||||
"virtual-abi-multi",
|
||||
]
|
||||
for name in ["splice-a", "splice-h", "splice-t", "splice-vh", "splice-vt", "splice-z"]
|
||||
)
|
||||
|
||||
# ensure that this string isn't found
|
||||
|
@@ -7,7 +7,6 @@
|
||||
|
||||
import pytest
|
||||
|
||||
import spack.config
|
||||
import spack.environment as ev
|
||||
import spack.error
|
||||
import spack.spec
|
||||
@@ -180,43 +179,3 @@ def test_spec_version_assigned_git_ref_as_version(name, version, error):
|
||||
else:
|
||||
output = spec(name + "@" + version)
|
||||
assert version in output
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"unify, spec_hash_args, match, error",
|
||||
[
|
||||
# success cases with unify:true
|
||||
(True, ["mpileaks_mpich"], "mpich", None),
|
||||
(True, ["mpileaks_zmpi"], "zmpi", None),
|
||||
(True, ["mpileaks_mpich", "dyninst"], "mpich", None),
|
||||
(True, ["mpileaks_zmpi", "dyninst"], "zmpi", None),
|
||||
# same success cases with unify:false
|
||||
(False, ["mpileaks_mpich"], "mpich", None),
|
||||
(False, ["mpileaks_zmpi"], "zmpi", None),
|
||||
(False, ["mpileaks_mpich", "dyninst"], "mpich", None),
|
||||
(False, ["mpileaks_zmpi", "dyninst"], "zmpi", None),
|
||||
# conflicting roots: error with unify:true, success with unify:false
|
||||
(True, ["mpileaks_mpich", "mpileaks_zmpi"], "callpath, mpileaks", spack.error.SpecError),
|
||||
(False, ["mpileaks_mpich", "mpileaks_zmpi"], "zmpi", None),
|
||||
],
|
||||
)
|
||||
def test_spec_unification_from_cli(
|
||||
install_mockery, mutable_config, mutable_database, unify, spec_hash_args, match, error
|
||||
):
|
||||
"""Ensure specs grouped together on the CLI are concretized together when unify:true."""
|
||||
spack.config.set("concretizer:unify", unify)
|
||||
|
||||
db = spack.store.STORE.db
|
||||
spec_lookup = {
|
||||
"mpileaks_mpich": db.query_one("mpileaks ^mpich").dag_hash(),
|
||||
"mpileaks_zmpi": db.query_one("mpileaks ^zmpi").dag_hash(),
|
||||
"dyninst": db.query_one("dyninst").dag_hash(),
|
||||
}
|
||||
|
||||
hashes = [f"/{spec_lookup[name]}" for name in spec_hash_args]
|
||||
if error:
|
||||
with pytest.raises(error, match=match):
|
||||
output = spec(*hashes)
|
||||
else:
|
||||
output = spec(*hashes)
|
||||
assert match in output
|
||||
|
@@ -3,10 +3,8 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""Test basic behavior of compilers in Spack"""
|
||||
import json
|
||||
import os
|
||||
from copy import copy
|
||||
from typing import Optional
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -19,7 +17,6 @@
|
||||
import spack.util.module_cmd
|
||||
from spack.compiler import Compiler
|
||||
from spack.util.executable import Executable, ProcessError
|
||||
from spack.util.file_cache import FileCache
|
||||
|
||||
|
||||
def test_multiple_conflicting_compiler_definitions(mutable_config):
|
||||
@@ -104,14 +101,11 @@ def verbose_flag(self):
|
||||
|
||||
|
||||
@pytest.mark.not_on_windows("Not supported on Windows (yet)")
|
||||
def test_implicit_rpaths(dirs_with_libfiles, monkeypatch):
|
||||
def test_implicit_rpaths(dirs_with_libfiles):
|
||||
lib_to_dirs, all_dirs = dirs_with_libfiles
|
||||
monkeypatch.setattr(
|
||||
MockCompiler,
|
||||
"_compile_dummy_c_source",
|
||||
lambda self: "ld " + " ".join(f"-L{d}" for d in all_dirs),
|
||||
)
|
||||
retrieved_rpaths = MockCompiler().implicit_rpaths()
|
||||
compiler = MockCompiler()
|
||||
compiler._compile_c_source_output = "ld " + " ".join(f"-L{d}" for d in all_dirs)
|
||||
retrieved_rpaths = compiler.implicit_rpaths()
|
||||
assert set(retrieved_rpaths) == set(lib_to_dirs["libstdc++"] + lib_to_dirs["libgfortran"])
|
||||
|
||||
|
||||
@@ -653,7 +647,6 @@ def test_raising_if_compiler_target_is_over_specific(config):
|
||||
|
||||
|
||||
@pytest.mark.not_on_windows("Not supported on Windows (yet)")
|
||||
@pytest.mark.enable_compiler_execution
|
||||
def test_compiler_get_real_version(working_env, monkeypatch, tmpdir):
|
||||
# Test variables
|
||||
test_version = "2.2.2"
|
||||
@@ -743,7 +736,6 @@ def test_get_compilers(config):
|
||||
) == [spack.compilers._compiler_from_config_entry(without_suffix)]
|
||||
|
||||
|
||||
@pytest.mark.enable_compiler_execution
|
||||
def test_compiler_get_real_version_fails(working_env, monkeypatch, tmpdir):
|
||||
# Test variables
|
||||
test_version = "2.2.2"
|
||||
@@ -792,13 +784,15 @@ def _call(*args, **kwargs):
|
||||
compilers = spack.compilers.get_compilers([compiler_dict])
|
||||
assert len(compilers) == 1
|
||||
compiler = compilers[0]
|
||||
assert compiler.get_real_version() == "unknown"
|
||||
# Confirm environment does not change after failed call
|
||||
assert "SPACK_TEST_CMP_ON" not in os.environ
|
||||
try:
|
||||
_ = compiler.get_real_version()
|
||||
assert False
|
||||
except ProcessError:
|
||||
# Confirm environment does not change after failed call
|
||||
assert "SPACK_TEST_CMP_ON" not in os.environ
|
||||
|
||||
|
||||
@pytest.mark.not_on_windows("Bash scripting unsupported on Windows (for now)")
|
||||
@pytest.mark.enable_compiler_execution
|
||||
def test_compiler_flags_use_real_version(working_env, monkeypatch, tmpdir):
|
||||
# Create compiler
|
||||
gcc = str(tmpdir.join("gcc"))
|
||||
@@ -901,57 +895,3 @@ def test_compiler_environment(working_env):
|
||||
)
|
||||
with compiler.compiler_environment():
|
||||
assert os.environ["TEST"] == "yes"
|
||||
|
||||
|
||||
class MockCompilerWithoutExecutables(MockCompiler):
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self._compile_dummy_c_source_count = 0
|
||||
self._get_real_version_count = 0
|
||||
|
||||
def _compile_dummy_c_source(self) -> Optional[str]:
|
||||
self._compile_dummy_c_source_count += 1
|
||||
return "gcc helloworld.c -o helloworld"
|
||||
|
||||
def get_real_version(self) -> str:
|
||||
self._get_real_version_count += 1
|
||||
return "1.0.0"
|
||||
|
||||
|
||||
def test_compiler_output_caching(tmp_path):
|
||||
"""Test that compiler output is cached on the filesystem."""
|
||||
# The first call should trigger the cache to be updated.
|
||||
a = MockCompilerWithoutExecutables()
|
||||
cache = spack.compiler.FileCompilerCache(FileCache(str(tmp_path)))
|
||||
assert cache.get(a).c_compiler_output == "gcc helloworld.c -o helloworld"
|
||||
assert cache.get(a).real_version == "1.0.0"
|
||||
assert a._compile_dummy_c_source_count == 1
|
||||
assert a._get_real_version_count == 1
|
||||
|
||||
# The second call on an equivalent but distinct object should not trigger compiler calls.
|
||||
b = MockCompilerWithoutExecutables()
|
||||
cache = spack.compiler.FileCompilerCache(FileCache(str(tmp_path)))
|
||||
assert cache.get(b).c_compiler_output == "gcc helloworld.c -o helloworld"
|
||||
assert cache.get(b).real_version == "1.0.0"
|
||||
assert b._compile_dummy_c_source_count == 0
|
||||
assert b._get_real_version_count == 0
|
||||
|
||||
# Cache schema change should be handled gracefully.
|
||||
with open(cache.cache.cache_path(cache.name), "w") as f:
|
||||
for k in cache._data:
|
||||
cache._data[k] = "corrupted entry"
|
||||
f.write(json.dumps(cache._data))
|
||||
|
||||
c = MockCompilerWithoutExecutables()
|
||||
cache = spack.compiler.FileCompilerCache(FileCache(str(tmp_path)))
|
||||
assert cache.get(c).c_compiler_output == "gcc helloworld.c -o helloworld"
|
||||
assert cache.get(c).real_version == "1.0.0"
|
||||
|
||||
# Cache corruption should be handled gracefully.
|
||||
with open(cache.cache.cache_path(cache.name), "w") as f:
|
||||
f.write("corrupted cache")
|
||||
|
||||
d = MockCompilerWithoutExecutables()
|
||||
cache = spack.compiler.FileCompilerCache(FileCache(str(tmp_path)))
|
||||
assert cache.get(d).c_compiler_output == "gcc helloworld.c -o helloworld"
|
||||
assert cache.get(d).real_version == "1.0.0"
|
||||
|
@@ -14,7 +14,6 @@
|
||||
import llnl.util.lang
|
||||
|
||||
import spack.binary_distribution
|
||||
import spack.cmd
|
||||
import spack.compiler
|
||||
import spack.compilers
|
||||
import spack.concretize
|
||||
@@ -33,6 +32,7 @@
|
||||
import spack.store
|
||||
import spack.util.file_cache
|
||||
import spack.variant as vt
|
||||
from spack.concretize import find_spec
|
||||
from spack.installer import PackageInstaller
|
||||
from spack.spec import CompilerSpec, Spec
|
||||
from spack.version import Version, VersionList, ver
|
||||
@@ -540,17 +540,21 @@ def test_concretize_two_virtuals_with_dual_provider_and_a_conflict(self):
|
||||
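# In the spec strings below, doubled sigils ("++shared", "~~shared") ask Spack to
# propagate that variant value to the package's dependencies, while a single "+"/"~"
# sets the value only on the node it is attached to.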
@pytest.mark.parametrize(
|
||||
"spec_str,expected_propagation",
|
||||
[
|
||||
("hypre~~shared ^openblas+shared", [("hypre", "~shared"), ("openblas", "+shared")]),
|
||||
# Propagates past a node that doesn't have the variant
|
||||
("hypre~~shared ^openblas", [("hypre", "~shared"), ("openblas", "~shared")]),
|
||||
# Propagates from root node to all nodes
|
||||
(
|
||||
"ascent~~shared +adios2",
|
||||
[("ascent", "~shared"), ("adios2", "~shared"), ("bzip2", "~shared")],
|
||||
),
|
||||
# Propagate from a node that is not the root node
|
||||
# Propagates below a node that uses the other value explicitly
|
||||
(
|
||||
"ascent +adios2 ^adios2~~shared",
|
||||
[("ascent", "+shared"), ("adios2", "~shared"), ("bzip2", "~shared")],
|
||||
"ascent~~shared +adios2 ^adios2+shared",
|
||||
[("ascent", "~shared"), ("adios2", "+shared"), ("bzip2", "~shared")],
|
||||
),
|
||||
(
|
||||
"ascent++shared +adios2 ^adios2~shared",
|
||||
[("ascent", "+shared"), ("adios2", "~shared"), ("bzip2", "+shared")],
|
||||
),
|
||||
],
|
||||
)
|
||||
@@ -560,109 +564,21 @@ def test_concretize_propagate_disabled_variant(self, spec_str, expected_propagat
|
||||
for key, expected_satisfies in expected_propagation:
|
||||
spec[key].satisfies(expected_satisfies)
|
||||
|
||||
def test_concretize_propagate_variant_not_dependencies(self):
|
||||
"""Test that when propagating a variant it is not propagated to dependencies that
|
||||
do not have that variant"""
|
||||
spec = Spec("quantum-espresso~~invino")
|
||||
def test_concretize_propagated_variant_is_not_passed_to_dependent(self):
|
||||
"""Test a package variant value was passed from its parent."""
|
||||
spec = Spec("ascent~~shared +adios2 ^adios2+shared")
|
||||
spec.concretize()
|
||||
|
||||
for dep in spec.traverse(root=False):
|
||||
assert "invino" not in dep.variants.keys()
|
||||
|
||||
def test_concretize_propagate_variant_exclude_dependency_fail(self):
|
||||
"""Tests that a propagating variant cannot be allowed to be excluded by any of
|
||||
the source package's dependencies"""
|
||||
spec = Spec("hypre ~~shared ^openblas +shared")
|
||||
with pytest.raises(spack.error.UnsatisfiableSpecError):
|
||||
spec.concretize()
|
||||
|
||||
def test_concretize_propagate_same_variant_from_direct_dep_fail(self):
|
||||
"""Test that when propagating a variant from the source package and a direct
|
||||
dependency also propagates the same variant with a different value. Raises error"""
|
||||
spec = Spec("ascent +adios2 ++shared ^adios2 ~~shared")
|
||||
with pytest.raises(spack.error.UnsatisfiableSpecError):
|
||||
spec.concretize()
|
||||
|
||||
def test_concretize_propagate_same_variant_in_dependency_fail(self):
|
||||
"""Test that when propagating a variant from the source package, none of it's
|
||||
dependencies can propagate that variant with a different value. Raises error."""
|
||||
spec = Spec("ascent +adios2 ++shared ^bzip2 ~~shared")
|
||||
with pytest.raises(spack.error.UnsatisfiableSpecError):
|
||||
spec.concretize()
|
||||
|
||||
def test_concretize_propagate_same_variant_virtual_dependency_fail(self):
|
||||
"""Test that when propagating a variant from the source package and a direct
|
||||
dependency (that is a virtual pkg) also propagates the same variant with a
|
||||
different value. Raises error"""
|
||||
spec = Spec("hypre ++shared ^openblas ~~shared")
|
||||
with pytest.raises(spack.error.UnsatisfiableSpecError):
|
||||
spec.concretize()
|
||||
|
||||
def test_concretize_propagate_same_variant_multiple_sources_diamond_dep_fail(self):
|
||||
"""Test that fails when propagating the same variant with different values from multiple
|
||||
sources that share a dependency"""
|
||||
spec = Spec("parent-foo-bar ^dependency-foo-bar++bar ^direct-dep-foo-bar~~bar")
|
||||
with pytest.raises(spack.error.UnsatisfiableSpecError):
|
||||
spec.concretize()
|
||||
assert spec.satisfies("^adios2+shared")
|
||||
assert spec.satisfies("^bzip2~shared")
|
||||
|
||||
def test_concretize_propagate_specified_variant(self):
|
||||
"""Test that only the specified variant is propagated to the dependencies"""
|
||||
spec = Spec("parent-foo-bar ~~foo")
|
||||
spec.concretize()
|
||||
|
||||
assert spec.satisfies("^dependency-foo-bar~foo")
|
||||
assert spec.satisfies("^second-dependency-foo-bar-fee~foo")
|
||||
assert spec.satisfies("^direct-dep-foo-bar~foo")
|
||||
|
||||
assert not spec.satisfies("^dependency-foo-bar+bar")
|
||||
assert not spec.satisfies("^second-dependency-foo-bar-fee+bar")
|
||||
assert not spec.satisfies("^direct-dep-foo-bar+bar")
|
||||
|
||||
def test_concretize_propagate_one_variant(self):
|
||||
"""Test that you can specify to propagate one variant and not all"""
|
||||
spec = Spec("parent-foo-bar ++bar ~foo")
|
||||
spec.concretize()
|
||||
|
||||
assert spec.satisfies("~foo") and not spec.satisfies("^dependency-foo-bar~foo")
|
||||
assert spec.satisfies("+bar") and spec.satisfies("^dependency-foo-bar+bar")
|
||||
|
||||
def test_concretize_propagate_through_first_level_deps(self):
|
||||
"""Test that boolean valued variants can be propagated past first level
|
||||
dependecies even if the first level dependency does have the variant"""
|
||||
spec = Spec("parent-foo-bar-fee ++fee")
|
||||
spec.concretize()
|
||||
|
||||
assert spec.satisfies("+fee") and not spec.satisfies("dependency-foo-bar+fee")
|
||||
assert spec.satisfies("^second-dependency-foo-bar-fee+fee")
|
||||
|
||||
def test_concretize_propagate_multiple_variants(self):
|
||||
"""Test that multiple boolean valued variants can be propagated from
|
||||
the same source package"""
|
||||
spec = Spec("parent-foo-bar-fee ~~foo ++bar")
|
||||
spec.concretize()
|
||||
|
||||
assert spec.satisfies("~foo") and spec.satisfies("+bar")
|
||||
assert spec.satisfies("^dependency-foo-bar ~foo +bar")
|
||||
assert spec.satisfies("^second-dependency-foo-bar-fee ~foo +bar")
|
||||
|
||||
def test_concretize_propagate_multiple_variants_mulitple_sources(self):
|
||||
"""Test the propagates multiple different variants for multiple sources
|
||||
in a diamond dependency"""
|
||||
spec = Spec("parent-foo-bar ^dependency-foo-bar++bar ^direct-dep-foo-bar~~foo")
|
||||
spec.concretize()
|
||||
|
||||
assert spec.satisfies("^second-dependency-foo-bar-fee+bar")
|
||||
assert spec.satisfies("^second-dependency-foo-bar-fee~foo")
|
||||
assert not spec.satisfies("^dependency-foo-bar~foo")
|
||||
assert not spec.satisfies("^direct-dep-foo-bar+bar")
|
||||
|
||||
def test_concretize_propagate_single_valued_variant(self):
|
||||
"""Test propagation for single valued variants"""
|
||||
spec = Spec("multivalue-variant libs==static")
|
||||
spec.concretize()
|
||||
|
||||
assert spec.satisfies("libs=static")
|
||||
assert spec.satisfies("^pkg-a libs=static")
|
||||
assert spec.satisfies("~foo") and spec.satisfies("^dependency-foo-bar~foo")
|
||||
assert spec.satisfies("+bar") and not spec.satisfies("^dependency-foo-bar+bar")
|
||||
|
||||
def test_concretize_propagate_multivalue_variant(self):
|
||||
"""Test that multivalue variants are propagating the specified value(s)
|
||||
@@ -675,46 +591,6 @@ def test_concretize_propagate_multivalue_variant(self):
|
||||
assert not spec.satisfies("^pkg-a foo=bar")
|
||||
assert not spec.satisfies("^pkg-b foo=bar")
|
||||
|
||||
def test_concretize_propagate_multiple_multivalue_variant(self):
|
||||
"""Tests propagating the same mulitvalued variant from different sources allows
|
||||
the dependents to accept all propagated values"""
|
||||
spec = Spec("multivalue-variant foo==bar ^pkg-a foo==baz")
|
||||
spec.concretize()
|
||||
|
||||
assert spec.satisfies("multivalue-variant foo=bar")
|
||||
assert spec.satisfies("^pkg-a foo=bar,baz")
|
||||
assert spec.satisfies("^pkg-b foo=bar,baz")
|
||||
|
||||
def test_concretize_propagate_variant_not_in_source(self):
|
||||
"""Test that variant is still propagated even if the source pkg
|
||||
doesn't have the variant"""
|
||||
spec = Spec("callpath++debug")
|
||||
spec.concretize()
|
||||
|
||||
assert spec.satisfies("^mpich+debug")
|
||||
assert not spec.satisfies("callpath+debug")
|
||||
assert not spec.satisfies("^dyninst+debug")
|
||||
|
||||
def test_concretize_propagate_variant_multiple_deps_not_in_source(self):
|
||||
"""Test that a variant can be propagated to multiple dependencies
|
||||
when the variant is not in the source package"""
|
||||
spec = Spec("netlib-lapack++shared")
|
||||
spec.concretize()
|
||||
|
||||
assert spec.satisfies("^openblas+shared")
|
||||
assert spec.satisfies("^perl+shared")
|
||||
assert not spec.satisfies("netlib-lapack+shared")
|
||||
|
||||
def test_concretize_propagate_variant_second_level_dep_not_in_source(self):
|
||||
"""Test that a variant can be propagated past first level dependencies
|
||||
when the variant is not in the source package or any of the first level
|
||||
dependencies"""
|
||||
spec = Spec("parent-foo-bar ++fee")
|
||||
spec.concretize()
|
||||
|
||||
assert spec.satisfies("^second-dependency-foo-bar-fee +fee")
|
||||
assert not spec.satisfies("parent-foo-bar +fee")
|
||||
|
||||
def test_no_matching_compiler_specs(self, mock_low_high_config):
|
||||
# only relevant when not building compilers as needed
|
||||
with spack.concretize.enable_compiler_existence_check():
|
||||
@@ -797,6 +673,39 @@ def test_external_and_virtual(self, mutable_config):
|
||||
assert spec["externaltool"].compiler.satisfies("gcc")
|
||||
assert spec["stuff"].compiler.satisfies("gcc")
|
||||
|
||||
def test_find_spec_parents(self):
|
||||
"""Tests the spec finding logic used by concretization."""
|
||||
s = Spec.from_literal({"a +foo": {"b +foo": {"c": None, "d+foo": None}, "e +foo": None}})
|
||||
|
||||
assert "a" == find_spec(s["b"], lambda s: "+foo" in s).name
|
||||
|
||||
def test_find_spec_children(self):
|
||||
s = Spec.from_literal({"a": {"b +foo": {"c": None, "d+foo": None}, "e +foo": None}})
|
||||
|
||||
assert "d" == find_spec(s["b"], lambda s: "+foo" in s).name
|
||||
|
||||
s = Spec.from_literal({"a": {"b +foo": {"c+foo": None, "d": None}, "e +foo": None}})
|
||||
|
||||
assert "c" == find_spec(s["b"], lambda s: "+foo" in s).name
|
||||
|
||||
def test_find_spec_sibling(self):
|
||||
s = Spec.from_literal({"a": {"b +foo": {"c": None, "d": None}, "e +foo": None}})
|
||||
|
||||
assert "e" == find_spec(s["b"], lambda s: "+foo" in s).name
|
||||
assert "b" == find_spec(s["e"], lambda s: "+foo" in s).name
|
||||
|
||||
s = Spec.from_literal({"a": {"b +foo": {"c": None, "d": None}, "e": {"f +foo": None}}})
|
||||
|
||||
assert "f" == find_spec(s["b"], lambda s: "+foo" in s).name
|
||||
|
||||
def test_find_spec_self(self):
|
||||
s = Spec.from_literal({"a": {"b +foo": {"c": None, "d": None}, "e": None}})
|
||||
assert "b" == find_spec(s["b"], lambda s: "+foo" in s).name
|
||||
|
||||
def test_find_spec_none(self):
|
||||
s = Spec.from_literal({"a": {"b": {"c": None, "d": None}, "e": None}})
|
||||
assert find_spec(s["b"], lambda s: "+foo" in s) is None
|
||||
|
||||
def test_compiler_child(self):
|
||||
s = Spec("mpileaks%clang target=x86_64 ^dyninst%gcc")
|
||||
s.concretize()
|
||||
@@ -905,7 +814,7 @@ def test_regression_issue_7941(self):
|
||||
)
|
||||
def test_simultaneous_concretization_of_specs(self, abstract_specs):
|
||||
abstract_specs = [Spec(x) for x in abstract_specs]
|
||||
concrete_specs = spack.concretize.concretize_specs_together(abstract_specs)
|
||||
concrete_specs = spack.concretize.concretize_specs_together(*abstract_specs)
|
||||
|
||||
# Check there's only one configuration of each package in the DAG
|
||||
names = set(dep.name for spec in concrete_specs for dep in spec.traverse())
|
||||
@@ -2227,7 +2136,7 @@ def test_external_python_extension_find_unified_python(self):
|
||||
spack.config.set("packages", external_conf)
|
||||
|
||||
abstract_specs = [Spec(s) for s in ["py-extension1", "python"]]
|
||||
specs = spack.concretize.concretize_specs_together(abstract_specs)
|
||||
specs = spack.concretize.concretize_specs_together(*abstract_specs)
|
||||
assert specs[0]["python"] == specs[1]["python"]
|
||||
|
||||
@pytest.mark.regression("36190")
|
||||
@@ -2316,7 +2225,6 @@ def test_compiler_match_constraints_when_selected(self):
|
||||
|
||||
@pytest.mark.regression("36339")
|
||||
@pytest.mark.not_on_windows("Not supported on Windows")
|
||||
@pytest.mark.enable_compiler_execution
|
||||
def test_compiler_with_custom_non_numeric_version(self, mock_executable):
|
||||
"""Test that, when a compiler has a completely made up version, we can use its
|
||||
'real version' to detect targets and don't raise during concretization.
|
||||
@@ -3198,20 +3106,3 @@ def test_reuse_prefers_standard_over_git_versions(
|
||||
test_spec = spack.spec.Spec("git-ref-package@2").concretized()
|
||||
assert git_spec.dag_hash() != test_spec.dag_hash()
|
||||
assert standard_spec.dag_hash() == test_spec.dag_hash()
|
||||
|
||||
|
||||
@pytest.mark.parametrize("unify", [True, "when_possible", False])
|
||||
def test_spec_unification(unify, mutable_config, mock_packages):
|
||||
spack.config.set("concretizer:unify", unify)
|
||||
a = "pkg-a"
|
||||
a_restricted = "pkg-a^pkg-b foo=baz"
|
||||
b = "pkg-b foo=none"
|
||||
|
||||
unrestricted = spack.cmd.parse_specs([a, b], concretize=True)
|
||||
a_concrete_unrestricted = [s for s in unrestricted if s.name == "pkg-a"][0]
|
||||
b_concrete_unrestricted = [s for s in unrestricted if s.name == "pkg-b"][0]
|
||||
assert (a_concrete_unrestricted["pkg-b"] == b_concrete_unrestricted) == (unify is not False)
|
||||
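# With unify:true or unify:when_possible both roots end up sharing a single pkg-b
# node; only with unify:false may each root concretize its own pkg-b.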
|
||||
maybe_fails = pytest.raises if unify is True else llnl.util.lang.nullcontext
|
||||
with maybe_fails(spack.solver.asp.UnsatisfiableSpecError):
|
||||
_ = spack.cmd.parse_specs([a_restricted, b], concretize=True)
|
@@ -10,10 +10,8 @@
|
||||
import spack.config
|
||||
import spack.error
|
||||
import spack.package_base
|
||||
import spack.paths
|
||||
import spack.repo
|
||||
import spack.solver.asp
|
||||
import spack.store
|
||||
import spack.util.spack_yaml as syaml
|
||||
import spack.version
|
||||
from spack.installer import PackageInstaller
|
@@ -1532,30 +1532,3 @@ def test_config_path_dsl(path, it_should_work, expected_parsed):
|
||||
else:
|
||||
with pytest.raises(ValueError):
|
||||
spack.config.ConfigPath._validate(path)
|
||||
|
||||
|
||||
@pytest.mark.regression("48254")
|
||||
def test_env_activation_preserves_config_scopes(mutable_mock_env_path):
|
||||
"""Check that the "command_line" scope remains the highest priority scope, when we activate,
|
||||
or deactivate, environments.
|
||||
"""
|
||||
expected_cl_scope = spack.config.CONFIG.highest()
|
||||
assert expected_cl_scope.name == "command_line"
|
||||
|
||||
# Creating an environment pushes a new scope
|
||||
ev.create("test")
|
||||
with ev.read("test"):
|
||||
assert spack.config.CONFIG.highest() == expected_cl_scope
|
||||
|
||||
# No active environment pops the scope
|
||||
with ev.no_active_environment():
|
||||
assert spack.config.CONFIG.highest() == expected_cl_scope
|
||||
assert spack.config.CONFIG.highest() == expected_cl_scope
|
||||
|
||||
# Switch the environment to another one
|
||||
ev.create("test-2")
|
||||
with ev.read("test-2"):
|
||||
assert spack.config.CONFIG.highest() == expected_cl_scope
|
||||
assert spack.config.CONFIG.highest() == expected_cl_scope
|
||||
|
||||
assert spack.config.CONFIG.highest() == expected_cl_scope
|
||||
|
@@ -973,26 +973,12 @@ def _return_none(*args):
|
||||
return None
|
||||
|
||||
|
||||
def _compiler_output(self):
|
||||
return ""
|
||||
|
||||
|
||||
def _get_real_version(self):
|
||||
return str(self.version)
|
||||
|
||||
|
||||
@pytest.fixture(scope="function", autouse=True)
|
||||
def disable_compiler_execution(monkeypatch, request):
|
||||
"""Disable compiler execution to determine implicit link paths and libc flavor and version.
|
||||
To re-enable use `@pytest.mark.enable_compiler_execution`"""
|
||||
if "enable_compiler_execution" not in request.keywords:
|
||||
monkeypatch.setattr(spack.compiler.Compiler, "_compile_dummy_c_source", _compiler_output)
|
||||
monkeypatch.setattr(spack.compiler.Compiler, "get_real_version", _get_real_version)
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def disable_compiler_output_cache(monkeypatch):
|
||||
monkeypatch.setattr(spack.compiler, "COMPILER_CACHE", spack.compiler.CompilerCache())
|
||||
monkeypatch.setattr(spack.compiler.Compiler, "_compile_dummy_c_source", _return_none)
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
|
@@ -906,18 +906,3 @@ def test_only_roots_are_explicitly_installed(tmp_path, mock_packages, config, te
|
||||
assert callpath in temporary_store.db.query(explicit=False)
|
||||
env.install_specs([mpileaks], fake=True)
|
||||
assert temporary_store.db.query(explicit=True) == [mpileaks]
|
||||
|
||||
|
||||
def test_environment_from_name_or_dir(mock_packages, mutable_mock_env_path, tmp_path):
|
||||
test_env = ev.create("test")
|
||||
|
||||
name_env = ev.environment_from_name_or_dir(test_env.name)
|
||||
assert name_env.name == test_env.name
|
||||
assert name_env.path == test_env.path
|
||||
|
||||
dir_env = ev.environment_from_name_or_dir(test_env.path)
|
||||
assert dir_env.name == test_env.name
|
||||
assert dir_env.path == test_env.path
|
||||
|
||||
with pytest.raises(ev.SpackEnvironmentError, match="no such environment"):
|
||||
_ = ev.environment_from_name_or_dir("fake-env")
|
||||
|
@@ -8,7 +8,6 @@
|
||||
|
||||
import spack.config
|
||||
import spack.environment as ev
|
||||
import spack.paths
|
||||
import spack.repo
|
||||
import spack.util.spack_yaml as syaml
|
||||
from spack.spec import Spec
|
@@ -644,12 +644,13 @@ def test_prepare_for_install_on_installed(install_mockery, monkeypatch):
|
||||
def test_installer_init_requests(install_mockery):
|
||||
"""Test of installer initial requests."""
|
||||
spec_name = "dependent-install"
|
||||
installer = create_installer([spec_name], {})
|
||||
with spack.config.override("config:install_missing_compilers", True):
|
||||
installer = create_installer([spec_name], {})
|
||||
|
||||
# There is only one explicit request in this case
|
||||
assert len(installer.build_requests) == 1
|
||||
request = installer.build_requests[0]
|
||||
assert request.pkg.name == spec_name
|
||||
# There is only one explicit request in this case
|
||||
assert len(installer.build_requests) == 1
|
||||
request = installer.build_requests[0]
|
||||
assert request.pkg.name == spec_name
|
||||
|
||||
|
||||
@pytest.mark.parametrize("transitive", [True, False])
|
||||
@@ -742,20 +743,21 @@ def _missing(*args, **kwargs):
|
||||
|
||||
# Set the configuration to ensure _requeue_with_build_spec_tasks actually
|
||||
# does something.
|
||||
installer = create_installer(["depb"], {})
|
||||
installer._init_queue()
|
||||
request = installer.build_requests[0]
|
||||
task = create_build_task(request.pkg)
|
||||
with spack.config.override("config:install_missing_compilers", True):
|
||||
installer = create_installer(["depb"], {})
|
||||
installer._init_queue()
|
||||
request = installer.build_requests[0]
|
||||
task = create_build_task(request.pkg)
|
||||
|
||||
# Drop one of the specs so its task is missing before _install_task
|
||||
popped_task = installer._pop_task()
|
||||
assert inst.package_id(popped_task.pkg.spec) not in installer.build_tasks
|
||||
# Drop one of the specs so its task is missing before _install_task
|
||||
popped_task = installer._pop_task()
|
||||
assert inst.package_id(popped_task.pkg.spec) not in installer.build_tasks
|
||||
|
||||
monkeypatch.setattr(task, "execute", _missing)
|
||||
installer._install_task(task, None)
|
||||
monkeypatch.setattr(task, "execute", _missing)
|
||||
installer._install_task(task, None)
|
||||
|
||||
# Ensure the dropped task/spec was added back by _install_task
|
||||
assert inst.package_id(popped_task.pkg.spec) in installer.build_tasks
|
||||
# Ensure the dropped task/spec was added back by _install_task
|
||||
assert inst.package_id(popped_task.pkg.spec) in installer.build_tasks
|
||||
|
||||
|
||||
def test_release_lock_write_n_exception(install_mockery, tmpdir, capsys):
|
||||
|
@@ -9,7 +9,7 @@
|
||||
|
||||
import pytest
|
||||
|
||||
from llnl.util.filesystem import HeaderList, LibraryList, find_headers, find_libraries
|
||||
from llnl.util.filesystem import HeaderList, LibraryList, find, find_headers, find_libraries
|
||||
|
||||
import spack.paths
|
||||
|
||||
@@ -324,3 +324,33 @@ def test_searching_order(search_fn, search_list, root, kwargs):
    # List should be empty here
    assert len(rlist) == 0


@pytest.mark.parametrize(
    "root,search_list,kwargs,expected",
    [
        (
            search_dir,
            "*/*bar.tx?",
            {"recursive": False},
            [
                os.path.join(search_dir, os.path.join("a", "foobar.txt")),
                os.path.join(search_dir, os.path.join("b", "bar.txp")),
                os.path.join(search_dir, os.path.join("c", "bar.txt")),
            ],
        ),
        (
            search_dir,
            "*/*bar.tx?",
            {"recursive": True},
            [
                os.path.join(search_dir, os.path.join("a", "foobar.txt")),
                os.path.join(search_dir, os.path.join("b", "bar.txp")),
                os.path.join(search_dir, os.path.join("c", "bar.txt")),
            ],
        ),
    ],
)
def test_find_with_globbing(root, search_list, kwargs, expected):
    matches = find(root, search_list, **kwargs)
    assert sorted(matches) == sorted(expected)
@@ -6,7 +6,6 @@
"""Tests for ``llnl/util/filesystem.py``"""
import filecmp
import os
import pathlib
import shutil
import stat
import sys
@@ -15,8 +14,7 @@
import pytest

import llnl.util.filesystem as fs
import llnl.util.symlink
from llnl.util.symlink import _windows_can_symlink, islink, readlink, symlink
from llnl.util.symlink import islink, readlink, symlink

import spack.paths
@@ -1037,226 +1035,3 @@ def test_windows_sfn(tmpdir):
    assert "d\\LONGER~1" in fs.windows_sfn(d)
    assert "d\\LONGER~2" in fs.windows_sfn(e)
    shutil.rmtree(tmpdir.join("d"))


@pytest.fixture
def dir_structure_with_things_to_find(tmpdir):
    """
    <root>/
        dir_one/
            file_one
        dir_two/
        dir_three/
            dir_four/
                file_two
            file_three
        file_four
    """
    dir_one = tmpdir.join("dir_one").ensure(dir=True)
    tmpdir.join("dir_two").ensure(dir=True)
    dir_three = tmpdir.join("dir_three").ensure(dir=True)
    dir_four = dir_three.join("dir_four").ensure(dir=True)

    locations = {}
    locations["file_one"] = str(dir_one.join("file_one").ensure())
    locations["file_two"] = str(dir_four.join("file_two").ensure())
    locations["file_three"] = str(dir_three.join("file_three").ensure())
    locations["file_four"] = str(tmpdir.join("file_four").ensure())

    return str(tmpdir), locations


def test_find_path_glob_matches(dir_structure_with_things_to_find):
    root, locations = dir_structure_with_things_to_find
    # both file name and path match
    assert (
        fs.find(root, "file_two")
        == fs.find(root, "*/*/file_two")
        == fs.find(root, "dir_t*/*/*two")
        == [locations["file_two"]]
    )
    # ensure that * does not match directory separators
    assert fs.find(root, "dir*file_two") == []
    # ensure that file name matches after / are matched from the start of the file name
    assert fs.find(root, "*/ile_two") == []
    # file name matches exist, but not with these paths
    assert fs.find(root, "dir_one/*/*two") == fs.find(root, "*/*/*/*/file_two") == []


def test_find_max_depth(dir_structure_with_things_to_find):
    root, locations = dir_structure_with_things_to_find

    # Make sure the paths we use to verify are absolute
    assert os.path.isabs(locations["file_one"])

    assert set(fs.find(root, "file_*", max_depth=0)) == {locations["file_four"]}
    assert set(fs.find(root, "file_*", max_depth=1)) == {
        locations["file_one"],
        locations["file_three"],
        locations["file_four"],
    }
    assert set(fs.find(root, "file_two", max_depth=2)) == {locations["file_two"]}
    assert not set(fs.find(root, "file_two", max_depth=1))
    assert set(fs.find(root, "file_two")) == {locations["file_two"]}
    assert set(fs.find(root, "file_*")) == set(locations.values())


def test_find_max_depth_relative(dir_structure_with_things_to_find):
    """find_max_depth should return absolute paths even if the provided path is relative."""
    root, locations = dir_structure_with_things_to_find
    with fs.working_dir(root):
        assert set(fs.find(".", "file_*", max_depth=0)) == {locations["file_four"]}
        assert set(fs.find(".", "file_two", max_depth=2)) == {locations["file_two"]}


@pytest.mark.parametrize("recursive,max_depth", [(False, -1), (False, 1)])
def test_max_depth_and_recursive_errors(tmpdir, recursive, max_depth):
    root = str(tmpdir)
    error_str = "cannot be set if recursive is False"
    with pytest.raises(ValueError, match=error_str):
        fs.find(root, ["some_file"], recursive=recursive, max_depth=max_depth)

    with pytest.raises(ValueError, match=error_str):
        fs.find_libraries(["some_lib"], root, recursive=recursive, max_depth=max_depth)


@pytest.fixture(params=[True, False])
def complex_dir_structure(request, tmpdir):
    """
    "lx-dy" means "level x, directory y"
    "lx-fy" means "level x, file y"
    "lx-sy" means "level x, symlink y"

    <root>/
        l1-d1/
            l2-d1/
                l3-d2/
                    l4-f1
                l3-d4/
                    l4-f2
                l3-s1 -> l1-d2 # points to directory above l2-d1
                l3-s3 -> l1-d1 # cyclic link
        l1-d2/
            l2-d2/
                l3-f3
            l2-f1
            l2-s3 -> l2-d2
        l1-s3 -> l3-d4 # a link that "skips" a directory level
        l1-s4 -> l2-s3 # a link to a link to a dir
    """
    use_junctions = request.param
    if sys.platform == "win32" and not use_junctions and not _windows_can_symlink():
        pytest.skip("This Windows instance is not configured with symlink support")
    elif sys.platform != "win32" and use_junctions:
        pytest.skip("Junctions are a Windows-only feature")

    l1_d1 = tmpdir.join("l1-d1").ensure(dir=True)
    l2_d1 = l1_d1.join("l2-d1").ensure(dir=True)
    l3_d2 = l2_d1.join("l3-d2").ensure(dir=True)
    l3_d4 = l2_d1.join("l3-d4").ensure(dir=True)
    l1_d2 = tmpdir.join("l1-d2").ensure(dir=True)
    l2_d2 = l1_d2.join("l2-d2").ensure(dir=True)

    if use_junctions:
        link_fn = llnl.util.symlink._windows_create_junction
    else:
        link_fn = os.symlink

    link_fn(l1_d2, pathlib.Path(l2_d1) / "l3-s1")
    link_fn(l1_d1, pathlib.Path(l2_d1) / "l3-s3")
    link_fn(l3_d4, pathlib.Path(tmpdir) / "l1-s3")
    l2_s3 = pathlib.Path(l1_d2) / "l2-s3"
    link_fn(l2_d2, l2_s3)
    link_fn(l2_s3, pathlib.Path(tmpdir) / "l1-s4")

    locations = {
        "l4-f1": str(l3_d2.join("l4-f1").ensure()),
        "l4-f2-full": str(l3_d4.join("l4-f2").ensure()),
        "l4-f2-link": str(pathlib.Path(tmpdir) / "l1-s3" / "l4-f2"),
        "l2-f1": str(l1_d2.join("l2-f1").ensure()),
        "l2-f1-link": str(pathlib.Path(tmpdir) / "l1-d1" / "l2-d1" / "l3-s1" / "l2-f1"),
        "l3-f3-full": str(l2_d2.join("l3-f3").ensure()),
        "l3-f3-link-l1": str(pathlib.Path(tmpdir) / "l1-s4" / "l3-f3"),
    }

    return str(tmpdir), locations


def test_find_max_depth_symlinks(complex_dir_structure):
    root, locations = complex_dir_structure
    root = pathlib.Path(root)
    assert set(fs.find(root, "l4-f1")) == {locations["l4-f1"]}
    assert set(fs.find(root / "l1-s3", "l4-f2", max_depth=0)) == {locations["l4-f2-link"]}
    assert set(fs.find(root / "l1-d1", "l2-f1")) == {locations["l2-f1-link"]}
    # File is accessible via symlink and subdir, the link path will be
    # searched first, and the directory will not be searched again when
    # it is encountered the second time (via not-link) in the traversal
    assert set(fs.find(root, "l4-f2")) == {locations["l4-f2-link"]}
    # File is accessible only via the dir, so the full file path should
    # be reported
    assert set(fs.find(root / "l1-d1", "l4-f2")) == {locations["l4-f2-full"]}
    # Check following links to links
    assert set(fs.find(root, "l3-f3")) == {locations["l3-f3-link-l1"]}


def test_find_max_depth_multiple_and_repeated_entry_points(complex_dir_structure):
    root, locations = complex_dir_structure

    fst = str(pathlib.Path(root) / "l1-d1" / "l2-d1")
    snd = str(pathlib.Path(root) / "l1-d2")
    nonexistent = str(pathlib.Path(root) / "nonexistent")

    assert set(fs.find([fst, snd, fst, snd, nonexistent], ["l*-f*"], max_depth=1)) == {
        locations["l2-f1"],
        locations["l4-f1"],
        locations["l4-f2-full"],
        locations["l3-f3-full"],
    }


def test_multiple_patterns(complex_dir_structure):
    root, _ = complex_dir_structure
    paths = fs.find(root, ["l2-f1", "l*-d*/l3-f3", "*-f*", "*/*-f*"])
    # There shouldn't be duplicate results with multiple, overlapping patterns
    assert len(set(paths)) == len(paths)
    # All files should be found
    filenames = [os.path.basename(p) for p in paths]
    assert set(filenames) == {"l2-f1", "l3-f3", "l4-f1", "l4-f2"}
    # They are ordered by first matching pattern (this is a bit of an implementation detail,
    # and we could decide to change the exact order in the future)
    assert filenames[0] == "l2-f1"
    assert filenames[1] == "l3-f3"


def test_find_input_types(tmp_path: pathlib.Path):
    """test that find only accepts sequences and instances of pathlib.Path and str for root, and
    only sequences and instances of str for patterns. In principle mypy catches these issues, but
    it is not enabled on all call-sites."""
    (tmp_path / "file.txt").write_text("")
    assert (
        fs.find(tmp_path, "file.txt")
        == fs.find(str(tmp_path), "file.txt")
        == fs.find([tmp_path, str(tmp_path)], "file.txt")
        == fs.find((tmp_path, str(tmp_path)), "file.txt")
        == fs.find(tmp_path, "file.txt")
        == fs.find(tmp_path, ["file.txt"])
        == fs.find(tmp_path, ("file.txt",))
        == [str(tmp_path / "file.txt")]
    )

    with pytest.raises(TypeError):
        fs.find(tmp_path, pathlib.Path("file.txt"))  # type: ignore

    with pytest.raises(TypeError):
        fs.find(1, "file.txt")  # type: ignore


def test_edit_in_place_through_temporary_file(tmp_path):
    (tmp_path / "example.txt").write_text("Hello")
    current_ino = os.stat(tmp_path / "example.txt").st_ino
    with fs.edit_in_place_through_temporary_file(tmp_path / "example.txt") as temporary:
        os.unlink(temporary)
        with open(temporary, "w") as f:
            f.write("World")
    assert (tmp_path / "example.txt").read_text() == "World"
    assert os.stat(tmp_path / "example.txt").st_ino == current_ino
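For orientation, here is a minimal usage sketch of the `fs.find` interface these tests exercise. The call shape (one or more roots, one or more glob patterns, optional `max_depth`/`recursive` keywords, a list of absolute path strings as the result) is inferred from the tests above; the paths used are made up for the example.

# Sketch only: fs.find as exercised by the tests above; example paths are hypothetical.
import llnl.util.filesystem as fs

# One root, one glob pattern, recursive search (the default).
matches = fs.find("/tmp/example-root", "*.txt")

# Several roots and several patterns; max_depth bounds how deep the traversal goes.
shallow = fs.find(["/tmp/example-root", "/tmp/other-root"], ["lib*.so", "*.h"], max_depth=1)

# Both calls return (possibly empty) lists of absolute path strings.
assert isinstance(matches, list) and isinstance(shallow, list)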
@@ -298,6 +298,30 @@ def inner():
top-level raised TypeError: ok"""
    )

    full_message = h.grouped_message(with_tracebacks=True)
    no_line_numbers = re.sub(r"line [0-9]+,", "line xxx,", full_message)

    assert (
        no_line_numbers
        == dedent(
            """\
due to the following failures:
inner method raised ValueError: wow!
File "{0}", \
line xxx, in test_grouped_exception
inner()
File "{0}", \
line xxx, in inner
raise ValueError("wow!")

top-level raised TypeError: ok
File "{0}", \
line xxx, in test_grouped_exception
raise TypeError("ok")
"""
        ).format(__file__)
    )


def test_grouped_exception_base_type():
    h = llnl.util.lang.GroupedExceptionHandler()
@@ -349,19 +373,3 @@ class _SomeClass:
    _SomeClass.deprecated.error_lvl = 2
    with pytest.raises(AttributeError):
        _ = s.deprecated


def test_fnmatch_multiple():
    named_patterns = {"a": "libf*o.so", "b": "libb*r.so"}
    regex = re.compile(llnl.util.lang.fnmatch_translate_multiple(named_patterns))

    a = regex.match("libfoo.so")
    assert a and a.group("a") == "libfoo.so"

    b = regex.match("libbar.so")
    assert b and b.group("b") == "libbar.so"

    assert not regex.match("libfoo.so.1")
    assert not regex.match("libbar.so.1")
    assert not regex.match("libfoo.solibbar.so")
    assert not regex.match("libbaz.so")
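As a small usage sketch of `fnmatch_translate_multiple` from the test above: the function combines several named fnmatch patterns into one regular expression whose named groups identify which pattern matched. The pattern names and file names below are invented for illustration.

# Sketch only: mapping file names back to the named pattern that matched them.
import re

import llnl.util.lang

named_patterns = {"cfg": "*.yaml", "src": "*.py"}
regex = re.compile(llnl.util.lang.fnmatch_translate_multiple(named_patterns))

for name in ["settings.yaml", "build.py", "readme.txt"]:
    match = regex.match(name)
    if match:
        # match.lastgroup is the name of the group (i.e. the pattern) that matched
        print(name, "->", match.lastgroup)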
@@ -57,16 +57,18 @@ def test_log_python_output_without_echo(capfd, tmpdir):
    assert capfd.readouterr()[0] == ""


def test_log_python_output_with_invalid_utf8(capfd, tmp_path):
    tmp_file = str(tmp_path / "foo.txt")
    with log.log_output(tmp_file, echo=True):
        sys.stdout.buffer.write(b"\xc3helloworld\n")
def test_log_python_output_with_invalid_utf8(capfd, tmpdir):
    with tmpdir.as_cwd():
        with log.log_output("foo.txt"):
            sys.stdout.buffer.write(b"\xc3\x28\n")

    # we should be able to read this as valid utf-8
    with open(tmp_file, "r", encoding="utf-8") as f:
        assert f.read() == "�helloworld\n"
        expected = b"<line lost: output was not encoded as UTF-8>\n"
        with open("foo.txt", "rb") as f:
            written = f.read()
        assert written == expected

    assert capfd.readouterr().out == "�helloworld\n"
        # nothing on stdout or stderr
        assert capfd.readouterr()[0] == ""


def test_log_python_output_and_echo_output(capfd, tmpdir):
@@ -329,9 +329,9 @@ def test_update_4():


@pytest.mark.parametrize("direction", ["fetch", "push"])
def test_update_connection_params(direction, tmpdir, monkeypatch):
def test_update_connection_params(direction):
    """Test whether new connection params expand the mirror config to a dict."""
    m = spack.mirror.Mirror("https://example.com", "example")
    m = spack.mirror.Mirror("https://example.com")

    assert m.update(
        {
@@ -354,64 +354,12 @@ def test_update_connection_params(direction, tmpdir, monkeypatch):
            "endpoint_url": "https://example.com",
        },
    }
    assert m.get_access_pair(direction) == ("username", "password")

    assert m.get_access_pair(direction) == ["username", "password"]
    assert m.get_access_token(direction) == "token"
    assert m.get_profile(direction) == "profile"
    assert m.get_endpoint_url(direction) == "https://example.com"

    # Expand environment variables
    os.environ["_SPACK_TEST_PAIR_USERNAME"] = "expanded_username"
    os.environ["_SPACK_TEST_PAIR_PASSWORD"] = "expanded_password"
    os.environ["_SPACK_TEST_TOKEN"] = "expanded_token"

    assert m.update(
        {
            "access_pair": {
                "id_variable": "_SPACK_TEST_PAIR_USERNAME",
                "secret_variable": "_SPACK_TEST_PAIR_PASSWORD",
            }
        },
        direction,
    )

    assert m.to_dict() == {
        "url": "https://example.com",
        direction: {
            "url": "http://example.org",
            "access_pair": {
                "id_variable": "_SPACK_TEST_PAIR_USERNAME",
                "secret_variable": "_SPACK_TEST_PAIR_PASSWORD",
            },
            "access_token": "token",
            "profile": "profile",
            "endpoint_url": "https://example.com",
        },
    }

    assert m.get_access_pair(direction) == ("expanded_username", "expanded_password")

    assert m.update(
        {
            "access_pair": {"id": "username", "secret_variable": "_SPACK_TEST_PAIR_PASSWORD"},
            "access_token_variable": "_SPACK_TEST_TOKEN",
        },
        direction,
    )

    assert m.to_dict() == {
        "url": "https://example.com",
        direction: {
            "url": "http://example.org",
            "access_pair": {"id": "username", "secret_variable": "_SPACK_TEST_PAIR_PASSWORD"},
            "access_token_variable": "_SPACK_TEST_TOKEN",
            "profile": "profile",
            "endpoint_url": "https://example.com",
        },
    }

    assert m.get_access_pair(direction) == ("username", "expanded_password")
    assert m.get_access_token(direction) == "expanded_token"


def test_mirror_name_or_url_dir_parsing(tmp_path):
    curdir = tmp_path / "mirror"
@@ -302,48 +302,3 @@ def test_get_repo(self, mock_test_cache):
        # foo is not there, raise
        with pytest.raises(spack.repo.UnknownNamespaceError):
            repo.get_repo("foo")


def test_parse_package_api_version():
    """Test that we raise an error if a repository has a version that is not supported."""
    # valid version
    assert spack.repo._parse_package_api_version(
        {"api": "v1.2"}, min_api=(1, 0), max_api=(2, 3)
    ) == (1, 2)
    # too new and too old
    with pytest.raises(
        spack.repo.BadRepoError,
        match=r"Package API v2.4 is not supported .* \(must be between v1.0 and v2.3\)",
    ):
        spack.repo._parse_package_api_version({"api": "v2.4"}, min_api=(1, 0), max_api=(2, 3))
    with pytest.raises(
        spack.repo.BadRepoError,
        match=r"Package API v0.9 is not supported .* \(must be between v1.0 and v2.3\)",
    ):
        spack.repo._parse_package_api_version({"api": "v0.9"}, min_api=(1, 0), max_api=(2, 3))
    # default to v1.0 if not specified
    assert spack.repo._parse_package_api_version({}, min_api=(1, 0), max_api=(2, 3)) == (1, 0)
    # if v1.0 support is dropped we should also raise
    with pytest.raises(
        spack.repo.BadRepoError,
        match=r"Package API v1.0 is not supported .* \(must be between v2.0 and v2.3\)",
    ):
        spack.repo._parse_package_api_version({}, min_api=(2, 0), max_api=(2, 3))
    # finally test invalid input
    with pytest.raises(spack.repo.BadRepoError, match="Invalid Package API version"):
        spack.repo._parse_package_api_version({"api": "v2"}, min_api=(1, 0), max_api=(3, 3))
    with pytest.raises(spack.repo.BadRepoError, match="Invalid Package API version"):
        spack.repo._parse_package_api_version({"api": 2.0}, min_api=(1, 0), max_api=(3, 3))


def test_repo_package_api_version(tmp_path: pathlib.Path):
    """Test that we can specify the API version of a repository."""
    (tmp_path / "example" / "packages").mkdir(parents=True)
    (tmp_path / "example" / "repo.yaml").write_text(
        """\
repo:
    namespace: example
"""
    )
    cache = spack.util.file_cache.FileCache(str(tmp_path / "cache"))
    assert spack.repo.Repo(str(tmp_path / "example"), cache=cache).package_api == (1, 0)
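Based on the parsing test above, a repository appears to opt into a newer Package API through an `api:` entry in its `repo.yaml`. The following is a hypothetical sketch of that layout; the directory location and the `api: v1.2` value are illustrative assumptions, not taken from the diff.

# Hypothetical sketch: a repo.yaml that declares a Package API version.
import pathlib

import spack.repo
import spack.util.file_cache

repo_root = pathlib.Path("/tmp/example-repo")  # made-up location
(repo_root / "packages").mkdir(parents=True, exist_ok=True)
(repo_root / "repo.yaml").write_text(
    """\
repo:
    namespace: example
    api: v1.2
"""
)
cache = spack.util.file_cache.FileCache(str(repo_root / "cache"))
assert spack.repo.Repo(str(repo_root), cache=cache).package_api == (1, 2)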
@@ -138,19 +138,3 @@ def test_round_trip_configuration(initial_content, expected_final_content, tmp_p
        expected_final_content = initial_content

    assert final_content.getvalue() == expected_final_content


def test_sorted_dict():
    assert syaml.sorted_dict(
        {
            "z": 0,
            "y": [{"x": 0, "w": [2, 1, 0]}, 0],
            "v": ({"u": 0, "t": 0, "s": 0}, 0, {"r": 0, "q": 0}),
            "p": 0,
        }
    ) == {
        "p": 0,
        "v": ({"s": 0, "t": 0, "u": 0}, 0, {"q": 0, "r": 0}),
        "y": [{"w": [2, 1, 0], "x": 0}, 0],
        "z": 0,
    }
Some files were not shown because too many files have changed in this diff.