Compare commits
278 Commits
show-sha25...solve-prof
75e599490c
c6d4037758
08f1cf9ae2
48dfa3c95e
e5c411d8f0
020e30f3e6
181c404af5
9642b04513
bf16f0bf74
ad518d975c
a76e3f2030
1809b81e1d
a02b40b670
6d8fdbcf82
3dadf569a4
751585f1e3
f6d6a5a480
57a1ebc77e
acdcd1016a
e7c9bb5258
e083acdc5d
99fd37931c
00e68af794
e33cbac01f
ada4c208d4
91310d3ae6
def1613741
ac703bc88d
f0f5ffa9de
65929888de
2987efa93c
37de92e7a2
42fd1cafe6
370694f112
fc7125fdf3
3fed708618
0614ded2ef
e38e51a6bc
c44c938caf
cdaacce4db
b98e5886e5
09a88ad3bd
4d91d3f77f
b748907a61
cbd9fad66e
82dd33c04c
31b2b790e7
9fd698edcb
247446a8f3
993f743245
786f8dfcce
4691301eba
a55073e7b0
484c9cf47c
9ed5e1de8e
4eb7b998e8
3b423a67a2
b803dabb2c
33dd894eff
f458392c1b
8c962a94b0
8b165c2cfe
01edde35be
84d33fccce
c4a5a996a5
6961514122
a9e6074996
30db764449
f5b8b0ac5d
913dcd97bc
68570b7587
2da4366ba6
2713b0c216
16b01c5661
ebd4ef934c
97b5ec6e4f
4c9bc8d879
825fd1ccf6
33109ce9b9
fb5910d139
fa6b8a4ceb
97acf2614a
e99bf48d28
b97015b791
1884520f7b
7fbfb0f6dc
11d276ab6f
da1d533877
c6997e11a7
4322cf56b1
907a37145f
4778d2d332
eb256476d2
ff26d2f833
ed916ffe6c
4fbdf2f2c0
60ba61f6b2
0a4563fd02
754408ca2b
0d817878ea
bf11fb037b
074b845cd3
dd26732897
3665c5c01b
73219e4b02
57a90c91a4
8f4a0718bf
9049ffdc7a
d1f313342e
e62cf9c45b
ee2723dc46
d09b185522
a31c525778
2aa5a16433
0c164d2740
801390f6be
c601692bc7
2b9c6790f2
09ae2516d5
eb9ff5d7a7
dadb30f0e2
d45f682573
b7601f3042
6b5a479d1e
1297dd7fbc
75c169d870
afe431cfb5
14bc900e9d
e42e541605
9310fcabd8
6822f99cc6
703cd6a313
5b59a53545
b862eec6bc
dcc199ae63
f8da72cffe
8650ba3cea
54aaa95a35
5a29c9d82b
c8873ea35c
c7659df4af
0de6c17477
6924c530e2
38c8069ab4
5cc07522ab
575a006ca3
23ac56edfb
8c3068809f
2214fc855d
d44bdc40c9
e952f6be8e
b95936f752
8d0856d1cc
10f7014add
c9ed91758d
2c1d74db9b
5b93466340
1ee344c75c
754011643c
2148292bdb
cf3576a9bb
a86f164835
2782ae6d7e
b1fd6dbb6d
18936771ff
9a94ea7dfe
a93bd6cee4
4c247e206c
fcdaccfeb6
2fc056e27c
417c48b07a
f05033b0d2
d63f06e4b7
8296aaf175
86ebcabd46
87329639f2
0acd6ae7b2
395c911689
2664303d7a
ff9568fa2f
632c009569
55918c31d2
b8461f3d2d
133895e785
19e3ab83cf
e42a4a8bac
1462c35761
0cf8cb70f4
7b2450c22a
8f09f523cc
24d3ed8c18
492c52089f
5df7dc88fc
4a75c3c87a
35aa02771a
b38a29f4df
9a25a58219
c0c9743300
a69af3c71f
cb92d70d6d
76ed4578e7
504cc808d6
8076134c91
b4b3320f71
e35bc1f82d
0de1ddcbe8
e3aca49e25
94c29e1cfc
0c00a297e1
c6a1ec996c
0437c5314e
ffde309a99
a08b4ae538
404b1c6c19
275339ab4c
877930c4ef
89d0215d5b
f003d8c0c3
6ab92b119d
f809b56f81
ec058556ad
ce78e8a1f8
c3435b4e7d
02d2c4a9ff
9d03170cb2
8892c878ce
cbf4d3967a
8bc0b2e086
354615d491
9ac261af58
34b2f28a5e
8a10eff757
44d09f2b2b
161b2d7cb0
4de5b664cd
5d0c6c3350
8391c8eb87
3108849121
52471bab02
b8e3246e89
60cb628283
5bca7187a5
65daf17b54
d776dead56
741a4a5d4f
dbe7b6bc6b
ffc904aa6b
f889b2a95e
7f609ba934
ffd7830bfa
20a6b22f78
1bff2f7034
ca48233ef7
c302049b5d
360dbe41f7
ea1aa0714b
7af1a3d240
962115b386
f81ca0cd89
25a5585f7d
e81ce18cad
d48d993ae7
fbd5c3d589
11aa02b37a
b9ebf8cc9c
1229d5a3cc
be5a096665
32ce278a51
e83536de38
ff058377c5
e855bb011d
dbab4828ed
fac92dceca
035b890b17
2a7e5cafa1
49845760b6
ce6255c0bb
f0d54ba39d
.github/workflows/ci.yaml (26 changes, vendored)
@@ -83,10 +83,17 @@ jobs:

all-prechecks:
needs: [ prechecks ]
if: ${{ always() }}
runs-on: ubuntu-latest
steps:
- name: Success
run: "true"
run: |
if [ "${{ needs.prechecks.result }}" == "failure" ] || [ "${{ needs.prechecks.result }}" == "canceled" ]; then
echo "Unit tests failed."
exit 1
else
exit 0
fi

coverage:
needs: [ unit-tests, prechecks ]
@@ -94,8 +101,19 @@ jobs:
secrets: inherit

all:
needs: [ coverage, bootstrap ]
needs: [ unit-tests, coverage, bootstrap ]
if: ${{ always() }}
runs-on: ubuntu-latest
# See https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/accessing-contextual-information-about-workflow-runs#needs-context
steps:
- name: Success
run: "true"
- name: Status summary
run: |
if [ "${{ needs.unit-tests.result }}" == "failure" ] || [ "${{ needs.unit-tests.result }}" == "canceled" ]; then
echo "Unit tests failed."
exit 1
elif [ "${{ needs.bootstrap.result }}" == "failure" ] || [ "${{ needs.bootstrap.result }}" == "canceled" ]; then
echo "Bootstrap tests failed."
exit 1
else
exit 0
fi
@@ -3,5 +3,5 @@ clingo==5.7.1
flake8==7.1.1
isort==5.13.2
mypy==1.8.0
types-six==1.16.21.20241009
types-six==1.16.21.20241105
vermin==1.6.0
.github/workflows/unit_tests.yaml (2 changes, vendored)
@@ -174,7 +174,7 @@ jobs:
spack bootstrap disable github-actions-v0.6
spack bootstrap status
spack solve zlib
spack unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml lib/spack/spack/test/concretize.py
spack unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml lib/spack/spack/test/concretization/core.py
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
with:
name: coverage-clingo-cffi
CHANGELOG.md (364 changes)
@@ -1,3 +1,365 @@
# v0.23.0 (2024-11-13)

`v0.23.0` is a major feature release.

We are planning to make this the last major release before Spack `v1.0`
in June 2025. Alongside `v0.23`, we will be making pre-releases (alpha,
beta, etc.) of `v1.0`, and we encourage users to try them and send us
feedback, either on GitHub or on Slack. You can track the road to
`v1.0` here:

* https://github.com/spack/spack/releases
* https://github.com/spack/spack/discussions/30634

## Features in this Release

1. **Language virtuals**

Your packages can now explicitly depend on the languages they require.
Historically, Spack has considered C, C++, and Fortran compiler
dependencies to be implicit. In `v0.23`, you should ensure that
new packages add relevant C, C++, and Fortran dependencies like this:

```python
depends_on("c", type="build")
depends_on("cxx", type="build")
depends_on("fortran", type="build")
```

We encourage you to add these annotations to your packages now, to prepare
for Spack `v1.0.0`. In `v1.0.0`, these annotations will be necessary for
your package to use C, C++, and Fortran compilers. Note that you should
*not* add language dependencies to packages that don't need them, e.g.,
pure Python packages.

We have already auto-generated these dependencies for packages in the
`builtin` repository (see #45217), based on the types of source files
present in each package's source code. We *may* have added too many or too
few language dependencies, so please submit pull requests to correct
packages if you find that the language dependencies are incorrect.

Note that we have also backported support for these dependencies to
`v0.21.3` and `v0.22.2`, to make all of them forward-compatible with
`v0.23`. This should allow you to move easily between older and newer Spack
releases without breaking your packages.

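As an illustration, a package that needs C and C++ unconditionally but Fortran only for an optional feature might carry annotations like the sketch below; the package name and variant are hypothetical:

```python
from spack.package import *


class ExampleLib(CMakePackage):
    """Hypothetical package illustrating explicit language dependencies."""

    variant("fortran", default=False, description="Build Fortran bindings")

    # Languages are now regular build dependencies of the package.
    depends_on("c", type="build")
    depends_on("cxx", type="build")
    # Only pulled in when the Fortran bindings are actually built.
    depends_on("fortran", type="build", when="+fortran")
```
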
2. **Spec splicing**

We are working to make binary installation more seamless in Spack. `v0.23`
introduces "splicing", which allows users to deploy binaries using local,
optimized versions of a binary interface, even if they were not built with
that interface. For example, this would allow you to build binaries in the
cloud using `mpich` and install them on a system using a local, optimized
version of `mvapich2` *without rebuilding*. Spack preserves full provenance
for the installed packages and knows that they were built one way but
deployed another.

Our intent is to leverage this across many key HPC binary packages,
e.g. MPI, CUDA, ROCm, and libfabric.

Fundamentally, splicing allows Spack to redeploy an existing spec with
different dependencies than how it was built. There are two interfaces to
splicing.

a. Explicit Splicing

#39136 introduced the explicit splicing interface. In the
concretizer config, you can specify a target spec and a replacement
by hash.

```yaml
concretizer:
  splice:
    explicit:
    - target: mpi
      replacement: mpich/abcdef
```

Here, every installation that would normally use the target spec will
instead use its replacement. Above, any spec using *any* `mpi` will be
spliced to depend on the specific `mpich` installation requested. This
*can* go wrong if you try to replace something built with, e.g.,
`openmpi` with `mpich`, and it is on the user to ensure ABI
compatibility between target and replacement specs. This currently
requires some expertise to use, but it will allow users to reuse the
binaries they create across more machines and environments.

b. Automatic Splicing (experimental)

#46729 introduced automatic splicing. In the concretizer config, enable
automatic splicing:

```yaml
concretizer:
  splice:
    automatic: true
```

or run:

```console
spack config add concretizer:splice:automatic:true
```

The concretizer will select splices for ABI compatibility to maximize
package reuse. Packages can denote ABI compatibility using the
`can_splice` directive. No packages in Spack yet use this directive, so
if you want to use this feature you will need to add `can_splice`
annotations to your packages. We are working on ways to add more ABI
compatibility information to the Spack package repository, and this
directive may change in the future.

See the documentation for more details:
* https://spack.readthedocs.io/en/latest/build_settings.html#splicing
* https://spack.readthedocs.io/en/latest/packaging_guide.html#specifying-abi-compatibility

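For a preview of what those annotations look like, the sketch below uses the `can_splice` directive with a placeholder package name; the same examples appear in the new packaging guide section further down in this diff:

```python
class Foo(Package):
    """Hypothetical package declaring ABI compatibility between its own versions."""

    # foo@1.1 may be spliced in wherever foo@1.0 was requested.
    can_splice("foo@1.0", when="@1.1")

    # Optionally require all single-value variants to match for the splice to apply.
    can_splice("foo@1.0", when="@1.1", match_variants="*")
```
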
3. Broader variant propagation

Since #42931, you can specify propagated variants like `hdf5
build_type==RelWithDebInfo` or `trilinos ++openmp` to propagate a variant
to all dependencies for which it is relevant. This is valid *even* if the
variant does not exist on the package or its dependencies.

See https://spack.readthedocs.io/en/latest/basic_usage.html#variants.

4. Query specs by namespace

#45416 allows a package's namespace (indicating the repository it came from)
to be treated like a variant. You can request packages from particular repos
like this:

```console
spack find zlib namespace=builtin
spack find zlib namespace=myrepo
```

Previously, the spec syntax only allowed namespaces to be prefixes of spec
names, e.g. `builtin.zlib`. The previous syntax still works.

5. `spack spec` respects environment settings and `unify:true`

`spack spec` did not previously respect environment lockfiles or
unification settings, which made it difficult to see exactly how a spec
would concretize within an environment. Now it does, so the output you get
with `spack spec` will be *the same* as what your environment will
concretize to when you run `spack concretize`. Similarly, if you provide
multiple specs on the command line with `spack spec`, it will concretize
them together if `unify:true` is set.

See #47556 and #44843.

6. Less noisy `spack spec` output

`spack spec` previously showed output like this:

```console
> spack spec /v5fn6xo
Input spec
--------------------------------
 -   /v5fn6xo

Concretized
--------------------------------
[+]  openssl@3.3.1%apple-clang@16.0.0~docs+shared arch=darwin-sequoia-m1
...
```

But the input spec is redundant, and we know we run `spack spec` to concretize
the input spec. `spack spec` now *only* shows the concretized spec. See #47574.

7. Better output for `spack find -c`

In an environment, `spack find -c` lets you search the concretized, but not
yet installed, specs, just as you would the installed ones. As with `spack
spec`, this should make it easier for you to see what *will* be built
before building and installing it. See #44713.

8. `spack -C <env>`: use an environment's configuration without activation

Spack environments allow you to associate:
1. a set of (possibly concretized) specs, and
2. configuration

When you activate an environment, you're using both of these. Previously, we
supported:
* `spack -e <env>` to run spack in the context of a specific environment, and
* `spack -C <directory>` to run spack using a directory with configuration files.

You can now also pass an environment to `spack -C` to use *only* the environment's
configuration, but not the specs or lockfile. See #45046.

## New commands, options, and directives

* The new `spack env track` command (#41897) takes a non-managed Spack
environment and adds a symlink to Spack's `$environments_root` directory, so
that it will be included for reference counting for commands like `spack
uninstall` and `spack gc`. If you use free-standing directory environments,
this is useful for preventing Spack from removing things required by your
environments. You can undo this tracking with the `spack env untrack`
command.

* Add `-t` short option for `spack --backtrace` (#47227)

`spack -d / --debug` enables backtraces on error, but it can be very
verbose, and sometimes you just want the backtrace. `spack -t / --backtrace`
provides that option.

* `gc`: restrict to specific specs (#46790)

If you only want to garbage-collect specific packages, you can now provide
them on the command line. This gives users finer-grained control over what
is uninstalled.

* oci buildcaches now support `--only=package`. You can now push *just* a
package and not its dependencies to an OCI registry. This allows dependents
of non-redistributable specs to be stored in OCI registries without an
error. See #45775.

## Notable refactors
* Variants are now fully conditional

The `variants` dictionary on packages was previously keyed by variant name,
and allowed only one definition of any given variant. Spack is now smart
enough to understand that variants may have different values and defaults
for different versions. For example, `warpx` prior to `23.06` only supported
builds for one dimensionality, and newer `warpx` versions could be built
with support for many different dimensions:

```python
variant(
    "dims",
    default="3",
    values=("1", "2", "3", "rz"),
    multi=False,
    description="Number of spatial dimensions",
    when="@:23.05",
)
variant(
    "dims",
    default="1,2,rz,3",
    values=("1", "2", "3", "rz"),
    multi=True,
    description="Number of spatial dimensions",
    when="@23.06:",
)
```

Previously, the default for the old version of `warpx` was not respected and
had to be specified manually. Now, Spack will select the right variant
definition for each version at concretization time. This allows variants to
evolve more smoothly over time. See #44425 for details.

## Highlighted bugfixes

1. Externals no longer override the preferred provider (#45025).

External definitions could interfere with package preferences. Now, if
`openmpi` is the preferred `mpi`, and an external `mpich` is defined, a new
`openmpi` *will* be built if building it is possible. Previously we would
prefer `mpich` despite the preference.

2. Composable `cflags` (#41049).

This release fixes a longstanding bug that concretization would fail if
there were different `cflags` specified in `packages.yaml`,
`compilers.yaml`, or on the CLI. Flags and their ordering are now tracked
in the concretizer and flags from multiple sources will be merged.

3. Fix concretizer unification for included environments (#45139).

## Deprecations, removals, and syntax changes

1. The old concretizer has been removed from Spack, along with the
`config:concretizer` config option. Spack will emit a warning if the option
is present in user configuration, since it now has no effect. Spack now
uses a simpler bootstrapping mechanism, where a JSON prototype is tweaked
slightly to get an initial concrete spec to download. See #45215.

2. Best-effort expansion of spec matrices has been removed. This feature did
not work with the "new" ASP-based concretizer, and did not work with
`unify: True` or `unify: when_possible`. Use the
[exclude key](https://spack.readthedocs.io/en/latest/environments.html#spec-matrices)
for the environment to exclude invalid components, or use multiple spec
matrices to combine the list of specs for which the constraint is valid and
the list of specs for which it is not. See #40792.

3. The old Cray `platform` (based on Cray PE modules) has been removed, and
`platform=cray` is no longer supported. Since `v0.19`, Spack has handled
Cray machines like Linux clusters with extra packages, and we have
encouraged using this option to support Cray. The new approach allows us to
correctly handle Cray machines with non-SLES operating systems, and it is
much more reliable than making assumptions about Cray modules. See the
`v0.19` release notes and #43796 for more details.

4. The `config:install_missing_compilers` config option has been deprecated,
and it is a no-op when set in `v0.23`. Our new compiler dependency model
will replace it with a much more reliable and robust mechanism in `v1.0`.
See #46237.

5. Config options that were deprecated in `v0.21` have been removed in `v0.23`. You
can now only specify preferences for `compilers`, `targets`, and
`providers` globally via the `packages:all:` section. Similarly, you can
only specify `versions:` locally for a specific package. See #44061 and
#31261 for details.

6. Spack's old test interface has been removed (#45752), having been
deprecated in `v0.22.0` (#34236). All `builtin` packages have been updated
to use the new interface. See the [stand-alone test documentation](
https://spack.readthedocs.io/en/latest/packaging_guide.html#stand-alone-tests).

7. The `spack versions --safe-only` option, deprecated since `v0.21.0`, has
been removed. See #45765.

* The `--dependencies` and `--optimize` arguments to `spack ci` have been
deprecated. See #45005.

## Binary caches
1. Public binary caches now include an ML stack for Linux/aarch64 (#39666). We
now build an ML stack for Linux/aarch64 for all pull requests and on
develop. The ML stack includes both CPU-only and CUDA builds for Horovod,
Hugging Face, JAX, Keras, PyTorch, scikit-learn, TensorBoard, and
TensorFlow, and related packages. The CPU-only stack also includes XGBoost.
See https://cache.spack.io/tag/develop/?stack=ml-linux-aarch64-cuda.

2. There is also now a stack of developer tools for macOS (#46910), which is
analogous to the Linux devtools stack. You can use this to avoid building
many common build dependencies. See
https://cache.spack.io/tag/develop/?stack=developer-tools-darwin.

## Architecture support
* archspec has been updated to `v0.2.5`, with support for `zen5`
* Spack's CUDA package now supports the Grace Hopper `9.0a` compute capability (#45540)

## Windows
* Windows bootstrapping: `file` and `gpg` (#41810)
* `scripts` directory added to PATH on Windows for python extensions (#45427)
* Fix `spack load --list` and `spack unload` on Windows (#35720)

## Other notable changes
* Bugfix: `spack find -x` in environments (#46798)
* Spec splices are now robust to duplicate nodes with the same name in a spec (#46382)
* Cache per-compiler libc calculations for performance (#47213)
* Fixed a bug in external detection for openmpi (#47541)
* Mirror configuration allows username/password as environment variables (#46549)
* Default library search caps maximum depth (#41945)
* Unify interface for `spack spec` and `spack solve` commands (#47182)
* Spack no longer RPATHs directories in the default library search path (#44686)
* Improved performance of Spack database (#46554)
* Enable package reuse for packages with versions from git refs (#43859)
* Improved handling for `uuid` virtual on macOS (#43002)
* Improved tracking of task queueing/requeueing in the installer (#46293)

## Spack community stats

* Over 2,000 pull requests updated package recipes
* 8,307 total packages, 329 new since `v0.22.0`
* 140 new Python packages
* 14 new R packages
* 373 people contributed to this release
* 357 committers to packages
* 60 committers to core


# v0.22.2 (2024-09-21)

## Bugfixes
@@ -419,7 +781,7 @@
- spack graph: fix coloring with environments (#41240)
- spack info: sort variants in --variants-by-name (#41389)
- Spec.format: error on old style format strings (#41934)
- ASP-based solver:
- ASP-based solver:
  - fix infinite recursion when computing concretization errors (#41061)
  - don't error for type mismatch on preferences (#41138)
  - don't emit spurious debug output (#41218)
@@ -39,11 +39,19 @@ concretizer:
# Option to deal with possible duplicate nodes (i.e. different nodes from the same package) in the DAG.
duplicates:
  # "none": allows a single node for any package in the DAG.
  # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
  # "minimal": allows the duplication of 'build-tools' nodes only
  # (e.g. py-setuptools, cmake etc.)
  # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
  strategy: minimal
# Option to specify compatiblity between operating systems for reuse of compilers and packages
# Specified as a key: [list] where the key is the os that is being targeted, and the list contains the OS's
# it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's
# Option to specify compatibility between operating systems for reuse of compilers and packages
# Specified as a key: [list] where the key is the os that is being targeted, and the list contains the OS's
# it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's
# requires two entries i.e. os_compatible: {sonoma: [monterey], monterey: [sonoma]}
os_compatible: {}

# Option to specify whether to support splicing. Splicing allows for
# the relinking of concrete package dependencies in order to better
# reuse already built packages with ABI compatible dependencies
splice:
  explicit: []
  automatic: false

@@ -40,9 +40,9 @@ packages:
jpeg: [libjpeg-turbo, libjpeg]
lapack: [openblas, amdlibflame]
libc: [glibc, musl]
libgfortran: [ gcc-runtime ]
libgfortran: [gcc-runtime]
libglx: [mesa+glx]
libifcore: [ intel-oneapi-runtime ]
libifcore: [intel-oneapi-runtime]
libllvm: [llvm]
lua-lang: [lua, lua-luajit-openresty, lua-luajit]
luajit: [lua-luajit-openresty, lua-luajit]
@@ -1359,6 +1359,10 @@ For example, for the ``stackstart`` variant:
mpileaks stackstart==4 # variant will be propagated to dependencies
mpileaks stackstart=4  # only mpileaks will have this variant value

Spack also allows variants to be propagated from a package that does
not have that variant.


^^^^^^^^^^^^^^
Compiler Flags
^^^^^^^^^^^^^^
@@ -237,3 +237,35 @@ is optional -- by default, splices will be transitive.
``mpich/abcdef`` instead of ``mvapich2`` as the MPI provider. Spack
will warn the user in this case, but will not fail the
concretization.

.. _automatic_splicing:

^^^^^^^^^^^^^^^^^^
Automatic Splicing
^^^^^^^^^^^^^^^^^^

The Spack solver can be configured to do automatic splicing for
ABI-compatible packages. Automatic splices are enabled in the concretizer
config section

.. code-block:: yaml

   concretizer:
     splice:
       automatic: True

Packages can include ABI-compatibility information using the
``can_splice`` directive. See :ref:`the packaging
guide<abi_compatibility>` for instructions on specifying ABI
compatibility using the ``can_splice`` directive.

.. note::

   The ``can_splice`` directive is experimental and may be changed in
   future versions.

When automatic splicing is enabled, the concretizer will combine any
number of ABI-compatible specs if possible to reuse installed packages
and packages available from binary caches. The end result of these
specs is equivalent to a series of transitive/intransitive splices,
but the series may be non-obvious.
@@ -214,12 +214,14 @@ def setup(sphinx):
# Spack classes that intersphinx is unable to resolve
("py:class", "spack.version.StandardVersion"),
("py:class", "spack.spec.DependencySpec"),
("py:class", "spack.spec.ArchSpec"),
("py:class", "spack.spec.InstallStatus"),
("py:class", "spack.spec.SpecfileReaderBase"),
("py:class", "spack.install_test.Pb"),
("py:class", "spack.filesystem_view.SimpleFilesystemView"),
("py:class", "spack.traverse.EdgeAndDepth"),
("py:class", "archspec.cpu.microarchitecture.Microarchitecture"),
("py:class", "spack.compiler.CompilerCache"),
# TypeVar that is not handled correctly
("py:class", "llnl.util.lang.T"),
]
@@ -184,7 +184,7 @@ Style Tests

Spack uses `Flake8 <http://flake8.pycqa.org/en/latest/>`_ to test for
`PEP 8 <https://www.python.org/dev/peps/pep-0008/>`_ conformance and
`mypy <https://mypy.readthedocs.io/en/stable/>` for type checking. PEP 8 is
`mypy <https://mypy.readthedocs.io/en/stable/>`_ for type checking. PEP 8 is
a series of style guides for Python that provide suggestions for everything
from variable naming to indentation. In order to limit the number of PRs that
were mostly style changes, we decided to enforce PEP 8 conformance. Your PR
@@ -333,13 +333,9 @@ inserting them at different places in the spack code base. Whenever a hook
type triggers by way of a function call, we find all the hooks of that type,
and run them.

Spack defines hooks by way of a module at ``lib/spack/spack/hooks`` where we can define
types of hooks in the ``__init__.py``, and then python files in that folder
can use hook functions. The files are automatically parsed, so if you write
a new file for some integration (e.g., ``lib/spack/spack/hooks/myintegration.py``
you can then write hook functions in that file that will be automatically detected,
and run whenever your hook is called. This section will cover the basic kind
of hooks, and how to write them.
Spack defines hooks by way of a module in the ``lib/spack/spack/hooks`` directory.
This module has to be registered in ``__init__.py`` so that Spack is aware of it.
This section will cover the basic kind of hooks, and how to write them.

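For instance, a new hook module might look like the sketch below; `post_install` is used here only as an example, and the authoritative list of hook names and signatures lives in `hooks/__init__.py`:

```python
# lib/spack/spack/hooks/myintegration.py (hypothetical integration module)


def post_install(spec, explicit):
    """Runs after a package is installed, assuming a post_install hook with
    this signature is declared in lib/spack/spack/hooks/__init__.py."""
    print(f"myintegration: installed {spec.name}")
```
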
^^^^^^^^^^^^^^
Types of Hooks
@@ -35,7 +35,7 @@ A build matrix showing which packages are working on which systems is shown belo
.. code-block:: console

   apt update
   apt install build-essential ca-certificates coreutils curl environment-modules gfortran git gpg lsb-release python3 python3-distutils python3-venv unzip zip
   apt install bzip2 ca-certificates file g++ gcc gfortran git gzip lsb-release patch python3 tar unzip xz-utils zstd

.. tab-item:: RHEL

@@ -43,14 +43,14 @@ A build matrix showing which packages are working on which systems is shown belo

   dnf install epel-release
   dnf group install "Development Tools"
   dnf install curl findutils gcc-gfortran gnupg2 hostname iproute redhat-lsb-core python3 python3-pip python3-setuptools unzip python3-boto3
   dnf install gcc-gfortran redhat-lsb-core python3 unzip

.. tab-item:: macOS Brew

.. code-block:: console

   brew update
   brew install curl gcc git gnupg zip
   brew install gcc git zip

------------
Installation
@@ -12,10 +12,6 @@
Spack
===================

.. epigraph::

   `These are docs for the Spack package manager. For sphere packing, see` `pyspack <https://pyspack.readthedocs.io>`_.

Spack is a package management tool designed to support multiple
versions and configurations of software on a wide variety of platforms
and environments. It was designed for large supercomputing centers,
@@ -1267,7 +1267,7 @@ Git fetching supports the following parameters to ``version``:
This feature requires ``git`` to be version ``2.25.0`` or later but is useful for
large repositories that have separate portions that can be built independently.
If paths provided are directories then all the subdirectories and associated files
will also be cloned.
will also be cloned.

Only one of ``tag``, ``branch``, or ``commit`` can be used at a time.
@@ -1367,8 +1367,8 @@ Submodules
git-submodule``.

Sparse-Checkout
You can supply ``git_sparse_paths`` at the package or version level to utilize git's
sparse-checkout feature. This will only clone the paths that are specified in the
You can supply ``git_sparse_paths`` at the package or version level to utilize git's
sparse-checkout feature. This will only clone the paths that are specified in the
``git_sparse_paths`` attribute for the package along with the files in the top level directory.
This feature allows you to only clone what you need from a large repository.
Note that this is a newer feature in git and requires git ``2.25.0`` or greater.
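For example, a package-level setting might look like the following sketch; the repository URL and paths are made up, and per the text above the same attribute can also be supplied at the version level:

```python
class BigRepoPackage(Package):
    """Hypothetical package that only needs part of a large repository."""

    git = "https://example.com/org/big-repo.git"

    # Only these paths (plus files in the top-level directory) are cloned.
    git_sparse_paths = ["docs", "lib/component-a"]

    version("1.0", tag="v1.0")
```
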
@@ -2392,7 +2392,7 @@ by the ``--jobs`` option:
.. code-block:: python
   :emphasize-lines: 7, 11
   :linenos:


   class Xios(Package):
      ...
      def install(self, spec, prefix):
@@ -2503,15 +2503,14 @@ with. For example, suppose that in the ``libdwarf`` package you write:

   depends_on("libelf@0.8")

Now ``libdwarf`` will require ``libelf`` at *exactly* version ``0.8``.
You can also specify a requirement for a particular variant or for
specific compiler flags:
Now ``libdwarf`` will require ``libelf`` in the range ``0.8``, which
includes patch versions ``0.8.1``, ``0.8.2``, etc. Apart from version
restrictions, you can also specify variants if this package requires
optional features of the dependency.

.. code-block:: python

   depends_on("libelf@0.8+debug")
   depends_on("libelf debug=True")
   depends_on("libelf cppflags='-fPIC'")
   depends_on("libelf@0.8 +parser +pic")

Both users *and* package authors can use the same spec syntax to refer
to different package configurations. Users use the spec syntax on the
@@ -2519,46 +2518,82 @@ command line to find installed packages or to install packages with
particular constraints, and package authors can use specs to describe
relationships between packages.

^^^^^^^^^^^^^^
Version ranges
^^^^^^^^^^^^^^
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Specifying backward and forward compatibility
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Although some packages require a specific version for their dependencies,
most can be built with a range of versions. For example, if you are
writing a package for a legacy Python module that only works with Python
2.4 through 2.6, this would look like:
Packages are often compatible with a range of versions of their
dependencies. This is typically referred to as backward and forward
compatibility. Spack allows you to specify this in the ``depends_on``
directive using version ranges.

**Backwards compatibility** means that the package requires at least a
certain version of its dependency:

.. code-block:: python

   depends_on("python@2.4:2.6")
   depends_on("python@3.10:")

Version ranges in Spack are *inclusive*, so ``2.4:2.6`` means any version
greater than or equal to ``2.4`` and up to and including any ``2.6.x``. If
you want to specify that a package works with any version of Python 3 (or
higher), this would look like:
In this case, the package requires Python 3.10 or newer.

Commonly, packages drop support for older versions of a dependency as
they release new versions. In Spack you can conveniently add every
backward compatibility rule as a separate line:

.. code-block:: python

   depends_on("python@3:")
   # backward compatibility with Python
   depends_on("python@3.8:")
   depends_on("python@3.9:", when="@1.2:")
   depends_on("python@3.10:", when="@1.4:")

Here we leave out the upper bound. If you want to say that a package
requires Python 2, you can similarly leave out the lower bound:
This means that in general we need Python 3.8 or newer; from version
1.2 onwards we need Python 3.9 or newer; from version 1.4 onwards we
need Python 3.10 or newer. Notice that it's fine to have overlapping
ranges in the ``when`` clauses.

**Forward compatibility** means that the package requires at most a
certain version of its dependency. Forward compatibility rules are
necessary when there are breaking changes in the dependency that the
package cannot handle. In Spack we often add forward compatibility
bounds only at the time a new, breaking version of a dependency is
released. As with backward compatibility, it is typical to see a list
of forward compatibility bounds in a package file as separate lines:

.. code-block:: python

   depends_on("python@:2")
   # forward compatibility with Python
   depends_on("python@:3.12", when="@:1.10")
   depends_on("python@:3.13", when="@:1.12")

Notice that we didn't use ``@:3``. Version ranges are *inclusive*, so
``@:3`` means "up to and including any 3.x version".
Notice how the ``:`` now appears before the version number both in the
dependency and in the ``when`` clause. This tells Spack that in general
we need Python 3.13 or older up to version ``1.12.x``, and up to version
``1.10.x`` we need Python 3.12 or older. Said differently, forward compatibility
with Python 3.13 was added in version 1.11, while version 1.13 added forward
compatibility with Python 3.14.

You can also simply write
Notice that a version range ``@:3.12`` includes *any* patch version
number ``3.12.x``, which is often useful when specifying forward compatibility
bounds.

So far we have seen open-ended version ranges, which is by far the most
common use case. It is also possible to specify both a lower and an upper bound
on the version of a dependency, like this:

.. code-block:: python

   depends_on("python@2.7")
   depends_on("python@3.10:3.12")

to tell Spack that the package needs Python 2.7.x. This is equivalent to
``@2.7:2.7``.
There is short syntax to specify that a package is compatible with say any
``3.x`` version:

.. code-block:: python

   depends_on("python@3")

The above is equivalent to ``depends_on("python@3:3")``, which means at least
Python version 3 and at most any version ``3.x.y``.

In very rare cases, you may need to specify an exact version, for example
if you need to distinguish between ``3.2`` and ``3.2.1``:
@@ -5385,7 +5420,7 @@ by build recipes. Examples of checking :ref:`variant settings <variants>` and
determine whether it needs to also set up build dependencies (see
:ref:`test-build-tests`).

The ``MyPackage`` package below provides two basic test examples:
The ``MyPackage`` package below provides two basic test examples:
``test_example`` and ``test_example2``. The first runs the installed
``example`` and ensures its output contains an expected string. The second
runs ``example2`` without checking output so is only concerned with confirming

@@ -5702,7 +5737,7 @@ subdirectory of the installation prefix. They are automatically copied to
the appropriate relative paths under the test stage directory prior to
executing stand-alone tests.

.. tip::
.. tip::

   *Perform test-related conversions once when copying files.*
@@ -7078,6 +7113,46 @@ might write:
   CXXFLAGS += -I$DWARF_PREFIX/include
   CXXFLAGS += -L$DWARF_PREFIX/lib

.. _abi_compatibility:

----------------------------
Specifying ABI Compatibility
----------------------------

Packages can include ABI-compatibility information using the
``can_splice`` directive. For example, if ``Foo`` version 1.1 can
always replace version 1.0, then the package could have:

.. code-block:: python

   can_splice("foo@1.0", when="@1.1")

For virtual packages, packages can also specify ABI-compatibility with
other packages providing the same virtual. For example, ``zlib-ng``
could specify:

.. code-block:: python

   can_splice("zlib@1.3.1", when="@2.2+compat")

Some packages have ABI-compatibility that is dependent on matching
variant values, either for all variants or for some set of
ABI-relevant variants. In those cases, it is not necessary to specify
the full combinatorial explosion. The ``match_variants`` keyword can
cover all single-value variants.

.. code-block:: python

   can_splice("foo@1.1", when="@1.2", match_variants=["bar"])  # any value for bar as long as they're the same
   can_splice("foo@1.2", when="@1.3", match_variants="*")  # any variant values if all single-value variants match

The concretizer will use ABI compatibility to determine automatic
splices when :ref:`automatic splicing<automatic_splicing>` is enabled.

.. note::

   The ``can_splice`` directive is experimental, and may be replaced
   by a higher-level interface in future versions of Spack.

.. _package_class_structure:

@@ -2,8 +2,8 @@ sphinx==8.1.3
sphinxcontrib-programoutput==0.17
sphinx_design==0.6.1
sphinx-rtd-theme==3.0.1
python-levenshtein==0.26.0
docutils==0.20.1
python-levenshtein==0.26.1
docutils==0.21.2
pygments==2.18.0
urllib3==2.2.3
pytest==8.3.3
lib/spack/env/cc (238 changes, vendored)
@@ -101,10 +101,9 @@ setsep() {
esac
}

# prepend LISTNAME ELEMENT [SEP]
# prepend LISTNAME ELEMENT
#
# Prepend ELEMENT to the list stored in the variable LISTNAME,
# assuming the list is separated by SEP.
# Prepend ELEMENT to the list stored in the variable LISTNAME.
# Handles empty lists and single-element lists.
prepend() {
varname="$1"
@@ -238,6 +237,36 @@ esac
}
"

# path_list functions. Path_lists have 3 parts: spack_store_<list>, <list> and system_<list>,
# which are used to prioritize paths when assembling the final command line.

# init_path_lists LISTNAME
# Set <LISTNAME>, spack_store_<LISTNAME>, and system_<LISTNAME> to "".
init_path_lists() {
    eval "spack_store_$1=\"\""
    eval "$1=\"\""
    eval "system_$1=\"\""
}

# assign_path_lists LISTNAME1 LISTNAME2
# Copy contents of LISTNAME2 into LISTNAME1, for each path_list prefix.
assign_path_lists() {
    eval "spack_store_$1=\"\${spack_store_$2}\""
    eval "$1=\"\${$2}\""
    eval "system_$1=\"\${system_$2}\""
}

# append_path_lists LISTNAME ELT
# Append the provided ELT to the appropriate list, based on the result of path_order().
append_path_lists() {
    path_order "$2"
    case $? in
        0) eval "append spack_store_$1 \"\$2\"" ;;
        1) eval "append $1 \"\$2\"" ;;
        2) eval "append system_$1 \"\$2\"" ;;
    esac
}

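The three buckets correspond to the return value of the wrapper's `path_order` function: 0 for paths inside the Spack store, 1 for other paths, and 2 for system directories. A rough Python sketch of the same partitioning idea, with made-up prefixes, might look like this:

```python
# Illustration only: the real logic is the shell path_order() in this wrapper.
SPACK_STORE_PREFIX = "/opt/spack/opt/spack"  # hypothetical store location
SYSTEM_PREFIXES = ("/usr/lib", "/usr/lib64", "/lib", "/lib64")


def path_order(path: str) -> int:
    """0 = Spack store, 1 = other, 2 = system, mirroring the shell helper."""
    if path.startswith(SPACK_STORE_PREFIX):
        return 0
    if path.startswith(SYSTEM_PREFIXES):
        return 2
    return 1


def append_path_lists(lists: dict, name: str, path: str) -> None:
    """Append path to spack_store_<name>, <name>, or system_<name>."""
    prefix = ("spack_store_", "", "system_")[path_order(path)]
    lists.setdefault(prefix + name, []).append(path)
```
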
# Check if optional parameters are defined
# If we aren't asking for debug flags, don't add them
if [ -z "${SPACK_ADD_DEBUG_FLAGS:-}" ]; then

@@ -470,12 +499,7 @@ input_command="$*"
parse_Wl() {
while [ $# -ne 0 ]; do
if [ "$wl_expect_rpath" = yes ]; then
path_order "$1"
case $? in
0) append return_spack_store_rpath_dirs_list "$1" ;;
1) append return_rpath_dirs_list "$1" ;;
2) append return_system_rpath_dirs_list "$1" ;;
esac
append_path_lists return_rpath_dirs_list "$1"
wl_expect_rpath=no
else
case "$1" in

@@ -484,24 +508,14 @@ parse_Wl() {
if [ -z "$arg" ]; then
shift; continue
fi
path_order "$arg"
case $? in
0) append return_spack_store_rpath_dirs_list "$arg" ;;
1) append return_rpath_dirs_list "$arg" ;;
2) append return_system_rpath_dirs_list "$arg" ;;
esac
append_path_lists return_rpath_dirs_list "$arg"
;;
--rpath=*)
arg="${1#--rpath=}"
if [ -z "$arg" ]; then
shift; continue
fi
path_order "$arg"
case $? in
0) append return_spack_store_rpath_dirs_list "$arg" ;;
1) append return_rpath_dirs_list "$arg" ;;
2) append return_system_rpath_dirs_list "$arg" ;;
esac
append_path_lists return_rpath_dirs_list "$arg"
;;
-rpath|--rpath)
wl_expect_rpath=yes
@@ -509,8 +523,7 @@ parse_Wl() {
"$dtags_to_strip")
;;
-Wl)
# Nested -Wl,-Wl means we're in NAG compiler territory, we don't support
# it.
# Nested -Wl,-Wl means we're in NAG compiler territory. We don't support it.
return 1
;;
*)

@@ -529,21 +542,10 @@ categorize_arguments() {
return_other_args_list=""
return_isystem_was_used=""

return_isystem_spack_store_include_dirs_list=""
return_isystem_system_include_dirs_list=""
return_isystem_include_dirs_list=""

return_spack_store_include_dirs_list=""
return_system_include_dirs_list=""
return_include_dirs_list=""

return_spack_store_lib_dirs_list=""
return_system_lib_dirs_list=""
return_lib_dirs_list=""

return_spack_store_rpath_dirs_list=""
return_system_rpath_dirs_list=""
return_rpath_dirs_list=""
init_path_lists return_isystem_include_dirs_list
init_path_lists return_include_dirs_list
init_path_lists return_lib_dirs_list
init_path_lists return_rpath_dirs_list

# Global state for keeping track of -Wl,-rpath -Wl,/path
wl_expect_rpath=no
@@ -609,32 +611,17 @@ categorize_arguments() {
arg="${1#-isystem}"
return_isystem_was_used=true
if [ -z "$arg" ]; then shift; arg="$1"; fi
path_order "$arg"
case $? in
0) append return_isystem_spack_store_include_dirs_list "$arg" ;;
1) append return_isystem_include_dirs_list "$arg" ;;
2) append return_isystem_system_include_dirs_list "$arg" ;;
esac
append_path_lists return_isystem_include_dirs_list "$arg"
;;
-I*)
arg="${1#-I}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
path_order "$arg"
case $? in
0) append return_spack_store_include_dirs_list "$arg" ;;
1) append return_include_dirs_list "$arg" ;;
2) append return_system_include_dirs_list "$arg" ;;
esac
append_path_lists return_include_dirs_list "$arg"
;;
-L*)
arg="${1#-L}"
if [ -z "$arg" ]; then shift; arg="$1"; fi
path_order "$arg"
case $? in
0) append return_spack_store_lib_dirs_list "$arg" ;;
1) append return_lib_dirs_list "$arg" ;;
2) append return_system_lib_dirs_list "$arg" ;;
esac
append_path_lists return_lib_dirs_list "$arg"
;;
-l*)
# -loopopt=0 is generated erroneously in autoconf <= 2.69,
@@ -667,32 +654,17 @@ categorize_arguments() {
break
elif [ "$xlinker_expect_rpath" = yes ]; then
# Register the path of -Xlinker -rpath <other args> -Xlinker <path>
path_order "$1"
case $? in
0) append return_spack_store_rpath_dirs_list "$1" ;;
1) append return_rpath_dirs_list "$1" ;;
2) append return_system_rpath_dirs_list "$1" ;;
esac
append_path_lists return_rpath_dirs_list "$1"
xlinker_expect_rpath=no
else
case "$1" in
-rpath=*)
arg="${1#-rpath=}"
path_order "$arg"
case $? in
0) append return_spack_store_rpath_dirs_list "$arg" ;;
1) append return_rpath_dirs_list "$arg" ;;
2) append return_system_rpath_dirs_list "$arg" ;;
esac
append_path_lists return_rpath_dirs_list "$arg"
;;
--rpath=*)
arg="${1#--rpath=}"
path_order "$arg"
case $? in
0) append return_spack_store_rpath_dirs_list "$arg" ;;
1) append return_rpath_dirs_list "$arg" ;;
2) append return_system_rpath_dirs_list "$arg" ;;
esac
append_path_lists return_rpath_dirs_list "$arg"
;;
-rpath|--rpath)
xlinker_expect_rpath=yes
@@ -709,7 +681,36 @@ categorize_arguments() {
"$dtags_to_strip")
;;
*)
append return_other_args_list "$1"
# if mode is not ld, we can just add to other args
if [ "$mode" != "ld" ]; then
append return_other_args_list "$1"
shift
continue
fi

# if we're in linker mode, we need to parse raw RPATH args
case "$1" in
-rpath=*)
arg="${1#-rpath=}"
append_path_lists return_rpath_dirs_list "$arg"
;;
--rpath=*)
arg="${1#--rpath=}"
append_path_lists return_rpath_dirs_list "$arg"
;;
-rpath|--rpath)
if [ $# -eq 1 ]; then
# -rpath without value: let the linker raise an error.
append return_other_args_list "$1"
break
fi
shift
append_path_lists return_rpath_dirs_list "$1"
;;
*)
append return_other_args_list "$1"
;;
esac
;;
esac
shift
@@ -731,21 +732,10 @@ categorize_arguments() {

categorize_arguments "$@"

spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
system_include_dirs_list="$return_system_include_dirs_list"
include_dirs_list="$return_include_dirs_list"

spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
system_lib_dirs_list="$return_system_lib_dirs_list"
lib_dirs_list="$return_lib_dirs_list"

spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
system_rpath_dirs_list="$return_system_rpath_dirs_list"
rpath_dirs_list="$return_rpath_dirs_list"

isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
isystem_include_dirs_list="$return_isystem_include_dirs_list"
assign_path_lists isystem_include_dirs_list return_isystem_include_dirs_list
assign_path_lists include_dirs_list return_include_dirs_list
assign_path_lists lib_dirs_list return_lib_dirs_list
assign_path_lists rpath_dirs_list return_rpath_dirs_list

isystem_was_used="$return_isystem_was_used"
other_args_list="$return_other_args_list"

@@ -821,21 +811,10 @@ IFS="$lsep"
categorize_arguments $spack_flags_list
unset IFS

spack_flags_isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"

spack_flags_spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
spack_flags_include_dirs_list="$return_include_dirs_list"

spack_flags_spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
spack_flags_lib_dirs_list="$return_lib_dirs_list"

spack_flags_spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
assign_path_lists spack_flags_isystem_include_dirs_list return_isystem_include_dirs_list
assign_path_lists spack_flags_include_dirs_list return_include_dirs_list
assign_path_lists spack_flags_lib_dirs_list return_lib_dirs_list
assign_path_lists spack_flags_rpath_dirs_list return_rpath_dirs_list

spack_flags_isystem_was_used="$return_isystem_was_used"
spack_flags_other_args_list="$return_other_args_list"
@@ -894,7 +873,7 @@ esac
case "$mode" in
cpp|cc|as|ccld)
if [ "$spack_flags_isystem_was_used" = "true" ] || [ "$isystem_was_used" = "true" ]; then
extend isystem_spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
extend spack_store_isystem_include_dirs_list SPACK_STORE_INCLUDE_DIRS
extend isystem_include_dirs_list SPACK_INCLUDE_DIRS
else
extend spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
@@ -910,64 +889,63 @@ args_list="$flags_list"

# Include search paths partitioned by (in store, non-sytem, system)
# NOTE: adding ${lsep} to the prefix here turns every added element into two
extend args_list spack_flags_spack_store_include_dirs_list -I
extend args_list spack_store_spack_flags_include_dirs_list -I
extend args_list spack_store_include_dirs_list -I

extend args_list spack_flags_include_dirs_list -I
extend args_list include_dirs_list -I

extend args_list spack_flags_isystem_spack_store_include_dirs_list "-isystem${lsep}"
extend args_list isystem_spack_store_include_dirs_list "-isystem${lsep}"
extend args_list spack_store_spack_flags_isystem_include_dirs_list "-isystem${lsep}"
extend args_list spack_store_isystem_include_dirs_list "-isystem${lsep}"

extend args_list spack_flags_isystem_include_dirs_list "-isystem${lsep}"
extend args_list isystem_include_dirs_list "-isystem${lsep}"

extend args_list spack_flags_system_include_dirs_list -I
extend args_list system_spack_flags_include_dirs_list -I
extend args_list system_include_dirs_list -I

extend args_list spack_flags_isystem_system_include_dirs_list "-isystem${lsep}"
extend args_list isystem_system_include_dirs_list "-isystem${lsep}"
extend args_list system_spack_flags_isystem_include_dirs_list "-isystem${lsep}"
extend args_list system_isystem_include_dirs_list "-isystem${lsep}"

# Library search paths partitioned by (in store, non-sytem, system)
extend args_list spack_flags_spack_store_lib_dirs_list "-L"
extend args_list spack_store_spack_flags_lib_dirs_list "-L"
extend args_list spack_store_lib_dirs_list "-L"

extend args_list spack_flags_lib_dirs_list "-L"
extend args_list lib_dirs_list "-L"

extend args_list spack_flags_system_lib_dirs_list "-L"
extend args_list system_spack_flags_lib_dirs_list "-L"
extend args_list system_lib_dirs_list "-L"

# RPATHs arguments
rpath_prefix=""
case "$mode" in
ccld)
if [ -n "$dtags_to_add" ] ; then
append args_list "$linker_arg$dtags_to_add"
fi
extend args_list spack_flags_spack_store_rpath_dirs_list "$rpath"
extend args_list spack_store_rpath_dirs_list "$rpath"

extend args_list spack_flags_rpath_dirs_list "$rpath"
extend args_list rpath_dirs_list "$rpath"

extend args_list spack_flags_system_rpath_dirs_list "$rpath"
extend args_list system_rpath_dirs_list "$rpath"
rpath_prefix="$rpath"
;;
ld)
if [ -n "$dtags_to_add" ] ; then
append args_list "$dtags_to_add"
fi
extend args_list spack_flags_spack_store_rpath_dirs_list "-rpath${lsep}"
extend args_list spack_store_rpath_dirs_list "-rpath${lsep}"

extend args_list spack_flags_rpath_dirs_list "-rpath${lsep}"
extend args_list rpath_dirs_list "-rpath${lsep}"

extend args_list spack_flags_system_rpath_dirs_list "-rpath${lsep}"
extend args_list system_rpath_dirs_list "-rpath${lsep}"
rpath_prefix="-rpath${lsep}"
;;
esac

# if mode is ccld or ld, extend RPATH lists with the prefix determined above
if [ -n "$rpath_prefix" ]; then
extend args_list spack_store_spack_flags_rpath_dirs_list "$rpath_prefix"
extend args_list spack_store_rpath_dirs_list "$rpath_prefix"

extend args_list spack_flags_rpath_dirs_list "$rpath_prefix"
extend args_list rpath_dirs_list "$rpath_prefix"

extend args_list system_spack_flags_rpath_dirs_list "$rpath_prefix"
extend args_list system_rpath_dirs_list "$rpath_prefix"
fi

# Other arguments from the input command
extend args_list other_args_list
extend args_list spack_flags_other_args_list
@@ -20,11 +20,23 @@
import tempfile
from contextlib import contextmanager
from itertools import accumulate
from typing import Callable, Iterable, List, Match, Optional, Tuple, Union
from typing import (
    Callable,
    Deque,
    Dict,
    Iterable,
    List,
    Match,
    Optional,
    Sequence,
    Set,
    Tuple,
    Union,
)

import llnl.util.symlink
from llnl.util import tty
from llnl.util.lang import dedupe, memoized
from llnl.util.lang import dedupe, fnmatch_translate_multiple, memoized
from llnl.util.symlink import islink, readlink, resolve_link_target_relative_to_the_link, symlink

from ..path import path_to_os_path, system_path_filter
@@ -85,6 +97,8 @@
    "visit_directory_tree",
]

Path = Union[str, pathlib.Path]

if sys.version_info < (3, 7, 4):
    # monkeypatch shutil.copystat to fix PermissionError when copying read-only
    # files on Lustre when using Python < 3.7.4
@@ -1673,105 +1687,203 @@ def find_first(root: str, files: Union[Iterable[str], str], bfs_depth: int = 2)
    return FindFirstFile(root, *files, bfs_depth=bfs_depth).find()


def find(root, files, recursive=True):
    """Search for ``files`` starting from the ``root`` directory.

    Like GNU/BSD find but written entirely in Python.

    Examples:

    .. code-block:: console

       $ find /usr -name python

    is equivalent to:

    >>> find('/usr', 'python')

    .. code-block:: console

       $ find /usr/local/bin -maxdepth 1 -name python

    is equivalent to:

    >>> find('/usr/local/bin', 'python', recursive=False)
def find(
    root: Union[Path, Sequence[Path]],
    files: Union[str, Sequence[str]],
    recursive: bool = True,
    max_depth: Optional[int] = None,
) -> List[str]:
    """Finds all files matching the patterns from ``files`` starting from ``root``. This function
    returns a deterministic result for the same input and directory structure when run multiple
    times. Symlinked directories are followed, and unique directories are searched only once. Each
    matching file is returned only once at lowest depth in case multiple paths exist due to
    symlinked directories.

    Accepts any glob characters accepted by fnmatch:

    ==========  ====================================
    Pattern     Meaning
    ==========  ====================================
    ``*``       matches everything
    ``*``       matches one or more characters
    ``?``       matches any single character
    ``[seq]``   matches any character in ``seq``
    ``[!seq]``  matches any character not in ``seq``
    ==========  ====================================

    Parameters:
        root (str): The root directory to start searching from
        files (str or collections.abc.Sequence): Library name(s) to search for
        recursive (bool): if False search only root folder,
            if True descends top-down from the root. Defaults to True.
    Examples:

    Returns:
        list: The files that have been found
    >>> find("/usr", "*.txt", recursive=True, max_depth=2)

    finds all files with the extension ``.txt`` in the directory ``/usr`` and subdirectories up to
    depth 2.

    >>> find(["/usr", "/var"], ["*.txt", "*.log"], recursive=True)

    finds all files with the extension ``.txt`` or ``.log`` in the directories ``/usr`` and
    ``/var`` at any depth.

    >>> find("/usr", "GL/*.h", recursive=True)

    finds all header files in a directory GL at any depth in the directory ``/usr``.

    Parameters:
        root: One or more root directories to start searching from
        files: One or more filename patterns to search for
        recursive: if False search only root, if True descends from roots. Defaults to True.
        max_depth: if set, don't search below this depth. Cannot be set if recursive is False

    Returns a list of absolute, matching file paths.
    """
    if isinstance(root, (str, pathlib.Path)):
        root = [root]
    elif not isinstance(root, collections.abc.Sequence):
        raise TypeError(f"'root' arg must be a path or a sequence of paths, not '{type(root)}']")

    if isinstance(files, str):
        files = [files]
    elif not isinstance(files, collections.abc.Sequence):
        raise TypeError(f"'files' arg must be str or a sequence of str, not '{type(files)}']")

    if recursive:
        tty.debug(f"Find (recursive): {root} {str(files)}")
        result = _find_recursive(root, files)
    else:
        tty.debug(f"Find (not recursive): {root} {str(files)}")
        result = _find_non_recursive(root, files)
    # If recursive is false, max_depth can only be None or 0
    if max_depth and not recursive:
        raise ValueError(f"max_depth ({max_depth}) cannot be set if recursive is False")

    tty.debug(f"Find complete: {root} {str(files)}")
    tty.debug(f"Find (max depth = {max_depth}): {root} {files}")
    if not recursive:
        max_depth = 0
    elif max_depth is None:
        max_depth = sys.maxsize
    result = _find_max_depth(root, files, max_depth)
    tty.debug(f"Find complete: {root} {files}")
    return result

@system_path_filter
def _find_recursive(root, search_files):
    # The variable here is **on purpose** a defaultdict. The idea is that
    # we want to poke the filesystem as little as possible, but still maintain
    # stability in the order of the answer. Thus we are recording each library
    # found in a key, and reconstructing the stable order later.
    found_files = collections.defaultdict(list)

    # Make the path absolute to have os.walk also return an absolute path
    root = os.path.abspath(root)
    for path, _, list_files in os.walk(root):
        for search_file in search_files:
            matches = glob.glob(os.path.join(path, search_file))
            matches = [os.path.join(path, x) for x in matches]
            found_files[search_file].extend(matches)

    answer = []
    for search_file in search_files:
        answer.extend(found_files[search_file])

    return answer
def _log_file_access_issue(e: OSError, path: str) -> None:
    errno_name = errno.errorcode.get(e.errno, "UNKNOWN")
    tty.debug(f"find must skip {path}: {errno_name} {e}")


@system_path_filter
def _find_non_recursive(root, search_files):
    # The variable here is **on purpose** a defaultdict as os.list_dir
    # can return files in any order (does not preserve stability)
    found_files = collections.defaultdict(list)
def _file_id(s: os.stat_result) -> Tuple[int, int]:
    # Note: on windows, st_ino is the file index and st_dev is the volume serial number. See
    # https://github.com/python/cpython/blob/3.9/Python/fileutils.c
    return (s.st_ino, s.st_dev)

    # Make the path absolute to have absolute path returned
    root = os.path.abspath(root)

    for search_file in search_files:
        matches = glob.glob(os.path.join(root, search_file))
        matches = [os.path.join(root, x) for x in matches]
        found_files[search_file].extend(matches)
def _dedupe_files(paths: List[str]) -> List[str]:
    """Deduplicate files by inode and device, dropping files that cannot be accessed."""
    unique_files: List[str] = []
    # tuple of (inode, device) for each file without following symlinks
    visited: Set[Tuple[int, int]] = set()
    for path in paths:
        try:
            stat_info = os.lstat(path)
        except OSError as e:
            _log_file_access_issue(e, path)
            continue
        file_id = _file_id(stat_info)
        if file_id not in visited:
            unique_files.append(path)
            visited.add(file_id)
    return unique_files

    answer = []
    for search_file in search_files:
        answer.extend(found_files[search_file])

    return answer
def _find_max_depth(
    roots: Sequence[Path], globs: Sequence[str], max_depth: int = sys.maxsize
) -> List[str]:
    """See ``find`` for the public API."""
    # We optimize for the common case of simple filename only patterns: a single, combined regex
    # is used. For complex patterns that include path components, we use a slower glob call from
    # every directory we visit within max_depth.
    filename_only_patterns = {
        f"pattern_{i}": os.path.normcase(x) for i, x in enumerate(globs) if "/" not in x
    }
    complex_patterns = {f"pattern_{i}": x for i, x in enumerate(globs) if "/" in x}
    regex = re.compile(fnmatch_translate_multiple(filename_only_patterns))
    # Ordered dictionary that keeps track of what pattern found which files
    matched_paths: Dict[str, List[str]] = {f"pattern_{i}": [] for i, _ in enumerate(globs)}
    # Ensure returned paths are always absolute
    roots = [os.path.abspath(r) for r in roots]
    # Breadth-first search queue. Each element is a tuple of (depth, dir)
    dir_queue: Deque[Tuple[int, str]] = collections.deque()
    # Set of visited directories. Each element is a tuple of (inode, device)
    visited_dirs: Set[Tuple[int, int]] = set()

    for root in roots:
        try:
            stat_root = os.stat(root)
        except OSError as e:
            _log_file_access_issue(e, root)
            continue
        dir_id = _file_id(stat_root)
        if dir_id not in visited_dirs:
            dir_queue.appendleft((0, root))
            visited_dirs.add(dir_id)

    while dir_queue:
        depth, curr_dir = dir_queue.pop()
        try:
            dir_iter = os.scandir(curr_dir)
        except OSError as e:
            _log_file_access_issue(e, curr_dir)
            continue

        # Use glob.glob for complex patterns.
        for pattern_name, pattern in complex_patterns.items():
            matched_paths[pattern_name].extend(
                path for path in glob.glob(os.path.join(curr_dir, pattern))
            )

        # List of subdirectories by path and (inode, device) tuple
        subdirs: List[Tuple[str, Tuple[int, int]]] = []

        with dir_iter:
            for dir_entry in dir_iter:

                # Match filename only patterns
                if filename_only_patterns:
                    m = regex.match(os.path.normcase(dir_entry.name))
                    if m:
                        for pattern_name in filename_only_patterns:
                            if m.group(pattern_name):
                                matched_paths[pattern_name].append(dir_entry.path)
                                break

                # Collect subdirectories
                if depth >= max_depth:
                    continue

                try:
                    if not dir_entry.is_dir(follow_symlinks=True):
                        continue
                    if sys.platform == "win32":
                        # Note: st_ino/st_dev on DirEntry.stat are not set on Windows, so we have
                        # to call os.stat
                        stat_info = os.stat(dir_entry.path, follow_symlinks=True)
                    else:
                        stat_info = dir_entry.stat(follow_symlinks=True)
                except OSError as e:
                    # Possible permission issue, or a symlink that cannot be resolved (ELOOP).
                    _log_file_access_issue(e, dir_entry.path)
                    continue

                subdirs.append((dir_entry.path, _file_id(stat_info)))

        # Enqueue subdirectories in a deterministic order
        if subdirs:
            subdirs.sort(key=lambda s: os.path.basename(s[0]))
            for subdir, subdir_id in subdirs:
                if subdir_id not in visited_dirs:
                    dir_queue.appendleft((depth + 1, subdir))
                    visited_dirs.add(subdir_id)

    # Sort the matched paths for deterministic output
    for paths in matched_paths.values():
        paths.sort()
    all_matching_paths = [path for paths in matched_paths.values() for path in paths]

    # We only dedupe files if we have any complex patterns, since only they can match the same file
    # multiple times
    return _dedupe_files(all_matching_paths) if complex_patterns else all_matching_paths
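
Editor's note: to make the reworked interface concrete, here is a hedged usage sketch of the new ``find`` signature shown above. It assumes Spack's ``llnl.util.filesystem`` module is importable and that the example paths exist on the machine; the patterns and paths are purely illustrative.

    # Illustrative only: "*.h" is a filename-only pattern (fast combined-regex path),
    # while "GL/*.h" contains a path separator and falls back to per-directory glob calls.
    from llnl.util.filesystem import find

    headers = find(["/usr", "/usr/local"], ["*.h", "GL/*.h"], max_depth=2)
    python_bins = find("/usr/bin", "python*", recursive=False)
    print(len(headers), python_bins[:3])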

# Utilities for libraries and headers
@@ -2210,7 +2322,9 @@ def find_system_libraries(libraries, shared=True):
    return libraries_found


def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
def find_libraries(
    libraries, root, shared=True, recursive=False, runtime=True, max_depth: Optional[int] = None
):
    """Returns an iterable of full paths to libraries found in a root dir.

    Accepts any glob characters accepted by fnmatch:
@@ -2231,6 +2345,8 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
        otherwise for static. Defaults to True.
        recursive (bool): if False search only root folder,
            if True descends top-down from the root. Defaults to False.
        max_depth (int): if set, don't search below this depth. Cannot be set
            if recursive is False
        runtime (bool): Windows only option, no-op elsewhere. If true,
            search for runtime shared libs (.DLL), otherwise, search
            for .Lib files. If shared is false, this has no meaning.
@@ -2239,6 +2355,7 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
    Returns:
        LibraryList: The libraries that have been found
    """

    if isinstance(libraries, str):
        libraries = [libraries]
    elif not isinstance(libraries, collections.abc.Sequence):
@@ -2271,8 +2388,10 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
    libraries = ["{0}.{1}".format(lib, suffix) for lib in libraries for suffix in suffixes]

    if not recursive:
        if max_depth:
            raise ValueError(f"max_depth ({max_depth}) cannot be set if recursive is False")
        # If not recursive, look for the libraries directly in root
        return LibraryList(find(root, libraries, False))
        return LibraryList(find(root, libraries, recursive=False))

    # To speedup the search for external packages configured e.g. in /usr,
    # perform first non-recursive search in root/lib then in root/lib64 and
@@ -2290,7 +2409,7 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
        if found_libs:
            break
    else:
        found_libs = find(root, libraries, True)
        found_libs = find(root, libraries, recursive=True, max_depth=max_depth)

    return LibraryList(found_libs)


@@ -5,14 +5,17 @@

import collections.abc
import contextlib
import fnmatch
import functools
import itertools
import os
import re
import sys
import traceback
import typing
import warnings
from datetime import datetime, timedelta
from typing import Callable, Iterable, List, Tuple, TypeVar
from typing import Callable, Dict, Iterable, List, Tuple, TypeVar

# Ignore emacs backups when listing modules
ignore_modules = r"^\.#|~$"
@@ -858,6 +861,19 @@ def elide_list(line_list: List[str], max_num: int = 10) -> List[str]:
    return line_list


if sys.version_info >= (3, 9):
    PatternStr = re.Pattern[str]
else:
    PatternStr = typing.Pattern[str]


def fnmatch_translate_multiple(named_patterns: Dict[str, str]) -> str:
    """Similar to ``fnmatch.translate``, but takes an ordered dictionary where keys are pattern
    names, and values are filename patterns. The output is a regex that matches any of the
    patterns in order, and named capture groups are used to identify which pattern matched."""
    return "|".join(f"(?P<{n}>{fnmatch.translate(p)})" for n, p in named_patterns.items())
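
Editor's note: the helper above simply joins per-pattern translations into one alternation with named groups. A minimal, self-contained sketch of the same idea (pattern names and globs invented for illustration, no Spack imports needed):

    import fnmatch
    import re

    named = {"libs": "lib*.so", "headers": "*.h"}
    combined = "|".join(f"(?P<{n}>{fnmatch.translate(p)})" for n, p in named.items())
    regex = re.compile(combined)

    m = regex.match("libfoo.so")
    assert m is not None and m.group("libs")      # matched the "libs" pattern
    assert regex.match("foo.h").group("headers")  # matched the "headers" pattern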

@contextlib.contextmanager
def nullcontext(*args, **kwargs):
    """Empty context manager.
@@ -870,15 +886,6 @@ class UnhashableArguments(TypeError):
    """Raise when an @memoized function receives unhashable arg or kwarg values."""


def enum(**kwargs):
    """Return an enum-like class.

    Args:
        **kwargs: explicit dictionary of enums
    """
    return type("Enum", (object,), kwargs)


T = TypeVar("T")


@@ -914,6 +921,21 @@ def ensure_last(lst, *elements):
    lst.append(lst.pop(lst.index(elt)))


class Const:
    """Class level constant, raises when trying to set the attribute"""

    __slots__ = ["value"]

    def __init__(self, value):
        self.value = value

    def __get__(self, instance, owner):
        return self.value

    def __set__(self, instance, value):
        raise TypeError(f"Const value does not support assignment [value={self.value}]")
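
Editor's note: a hypothetical usage sketch for the descriptor above (class and attribute names are invented; it assumes ``Const`` is in scope). Reads succeed, assignments raise:

    class Settings:
        MAX_JOBS = Const(16)

    s = Settings()
    assert s.MAX_JOBS == 16
    try:
        s.MAX_JOBS = 8
    except TypeError as e:
        print(e)  # Const value does not support assignment [value=16]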

class TypedMutableSequence(collections.abc.MutableSequence):
    """Base class that behaves like a list, just with a different type.

@@ -1018,3 +1040,42 @@ def __init__(self, callback):

    def __get__(self, instance, owner):
        return self.callback(owner)


class DeprecatedProperty:
    """Data descriptor to error or warn when a deprecated property is accessed.

    Derived classes must define a factory method to return an adaptor for the deprecated
    property, if the descriptor is not set to error.
    """

    __slots__ = ["name"]

    #: 0 - Nothing
    #: 1 - Warning
    #: 2 - Error
    error_lvl = 0

    def __init__(self, name: str) -> None:
        self.name = name

    def __get__(self, instance, owner):
        if instance is None:
            return self

        if self.error_lvl == 1:
            warnings.warn(
                f"accessing the '{self.name}' property of '{instance}', which is deprecated"
            )
        elif self.error_lvl == 2:
            raise AttributeError(f"cannot access the '{self.name}' attribute of '{instance}'")

        return self.factory(instance, owner)

    def __set__(self, instance, value):
        raise TypeError(
            f"the deprecated property '{self.name}' of '{instance}' does not support assignment"
        )

    def factory(self, instance, owner):
        raise NotImplementedError("must be implemented by derived classes")
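
Editor's note: a hypothetical subclass sketch following the contract stated in the docstring (derived classes provide ``factory``). All names are invented and it assumes ``DeprecatedProperty`` is in scope:

    class DeprecatedBaz(DeprecatedProperty):
        error_lvl = 1  # warn on access instead of raising

        def factory(self, instance, owner):
            # return an adaptor for the deprecated attribute
            return instance.new_baz


    class Holder:
        baz = DeprecatedBaz("baz")

        def __init__(self):
            self.new_baz = 42


    assert Holder().baz == 42  # access emits a deprecation warning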

@@ -11,7 +11,7 @@
import spack.util.git

#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
__version__ = "0.23.0.dev0"
__version__ = "0.23.0"
spack_version = __version__


@@ -714,17 +714,16 @@ def _ensure_env_methods_are_ported_to_builders(pkgs, error_cls):
    for pkg_name in pkgs:
        pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)

        # values are either Value objects (for conditional values) or the values themselves
        # values are either ConditionalValue objects or the values themselves
        build_system_names = set(
            v.value if isinstance(v, spack.variant.Value) else v
            v.value if isinstance(v, spack.variant.ConditionalValue) else v
            for _, variant in pkg_cls.variant_definitions("build_system")
            for v in variant.values
        )
        builder_cls_names = [spack.builder.BUILDER_CLS[x].__name__ for x in build_system_names]

        module = pkg_cls.module
        has_builders_in_package_py = any(
            getattr(module, name, False) for name in builder_cls_names
            spack.builder.get_builder_class(pkg_cls, name) for name in builder_cls_names
        )
        if not has_builders_in_package_py:
            continue
@@ -806,7 +805,7 @@ def _uses_deprecated_globals(pkgs, error_cls):

        file = spack.repo.PATH.filename_for_package_name(pkg_name)
        tree = ast.parse(open(file).read())
        visitor = DeprecatedMagicGlobals(("std_cmake_args",))
        visitor = DeprecatedMagicGlobals(("std_cmake_args", "std_meson_args", "std_pip_args"))
        visitor.visit(tree)
        if visitor.references_to_globals:
            errors.append(

@@ -1182,6 +1182,9 @@ def __init__(self, mirror: spack.mirror.Mirror, force: bool, update_index: bool)
        self.tmpdir: str
        self.executor: concurrent.futures.Executor

        # Verify if the mirror meets the requirements to push
        self.mirror.ensure_mirror_usable("push")

    def __enter__(self):
        self._tmpdir = tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root())
        self._executor = spack.util.parallel.make_concurrent_executor()

@@ -602,7 +602,10 @@ def bootstrapping_sources(scope: Optional[str] = None):
        current = copy.copy(entry)
        metadata_dir = spack.util.path.canonicalize_path(entry["metadata"])
        metadata_yaml = os.path.join(metadata_dir, METADATA_YAML_FILENAME)
        with open(metadata_yaml, encoding="utf-8") as stream:
            current.update(spack.util.spack_yaml.load(stream))
        list_of_sources.append(current)
        try:
            with open(metadata_yaml, encoding="utf-8") as stream:
                current.update(spack.util.spack_yaml.load(stream))
            list_of_sources.append(current)
        except OSError:
            pass
    return list_of_sources
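
Editor's note: the change above wraps the metadata read in try/except so a missing or unreadable metadata directory is skipped instead of aborting the listing. A stand-alone sketch of the same pattern, using JSON files instead of Spack's YAML loader (directory and file names are made up):

    import json
    import os

    def readable_sources(dirs, filename="metadata.json"):
        sources = []
        for d in dirs:
            try:
                with open(os.path.join(d, filename), encoding="utf-8") as f:
                    sources.append(json.load(f))
            except OSError:
                # a missing or unreadable metadata file no longer aborts the listing
                continue
        return sources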

@@ -12,6 +12,7 @@

import spack.error
import spack.multimethod
import spack.repo

#: Builder classes, as registered by the "builder" decorator
BUILDER_CLS = {}
@@ -74,6 +75,14 @@ def __call__(self, spec, prefix):
        return self.phase_fn(self.builder.pkg, spec, prefix)


def get_builder_class(pkg, name: str) -> Optional[type]:
    """Return the builder class if a package module defines it."""
    cls = getattr(pkg.module, name, None)
    if cls and cls.__module__.startswith(spack.repo.ROOT_PYTHON_NAMESPACE):
        return cls
    return None


def _create(pkg):
    """Return a new builder object for the package object being passed as argument.

@@ -99,9 +108,10 @@ class hierarchy (look at AspellDictPackage for an example of that)
    package_buildsystem = buildsystem_name(pkg)
    default_builder_cls = BUILDER_CLS[package_buildsystem]
    builder_cls_name = default_builder_cls.__name__
    builder_cls = getattr(pkg.module, builder_cls_name, None)
    if builder_cls:
        return builder_cls(pkg)
    builder_class = get_builder_class(pkg, builder_cls_name)

    if builder_class:
        return builder_class(pkg)

    # Specialized version of a given buildsystem can subclass some
    # base classes and specialize certain phases or methods or attributes.

@@ -5,7 +5,6 @@

"""Caches used by Spack to store data"""
import os
from typing import Union

import llnl.util.lang
from llnl.util.filesystem import mkdirp
@@ -32,12 +31,8 @@ def _misc_cache():
    return spack.util.file_cache.FileCache(path)


FileCacheType = Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton]

#: Spack's cache for small data
MISC_CACHE: Union[spack.util.file_cache.FileCache, llnl.util.lang.Singleton] = (
    llnl.util.lang.Singleton(_misc_cache)
)
MISC_CACHE: spack.util.file_cache.FileCache = llnl.util.lang.Singleton(_misc_cache)  # type: ignore


def fetch_cache_location():
@@ -74,6 +69,4 @@ def store(self, fetcher, relative_dest):


#: Spack's local cache for downloaded source archives
FETCH_CACHE: Union["spack.fetch_strategy.FsCache", llnl.util.lang.Singleton] = (
    llnl.util.lang.Singleton(_fetch_cache)
)
FETCH_CACHE: spack.fetch_strategy.FsCache = llnl.util.lang.Singleton(_fetch_cache)  # type: ignore

@@ -8,6 +8,7 @@
import os
import re
import sys
from collections import Counter
from typing import List, Union

import llnl.string
@@ -17,12 +18,14 @@
from llnl.util.tty.colify import colify
from llnl.util.tty.color import colorize

import spack.concretize
import spack.config  # breaks a cycle.
import spack.environment as ev
import spack.error
import spack.extensions
import spack.parser
import spack.paths
import spack.repo
import spack.spec
import spack.store
import spack.traverse as traverse
@@ -173,10 +176,66 @@ def parse_specs(
    arg_string = " ".join([quote_kvp(arg) for arg in args])

    specs = spack.parser.parse(arg_string)
    for spec in specs:
        if concretize:
            spec.concretize(tests=tests)
    return specs
    if not concretize:
        return specs

    to_concretize = [(s, None) for s in specs]
    return _concretize_spec_pairs(to_concretize, tests=tests)


def _concretize_spec_pairs(to_concretize, tests=False):
    """Helper method that concretizes abstract specs from a list of abstract,concrete pairs.

    Any spec with a concrete spec associated with it will concretize to that spec. Any spec
    with ``None`` for its concrete spec will be newly concretized. This method respects unification
    rules from config."""
    unify = spack.config.get("concretizer:unify", False)

    # Special case for concretizing a single spec
    if len(to_concretize) == 1:
        abstract, concrete = to_concretize[0]
        return [concrete or abstract.concretized()]

    # Special case if every spec is either concrete or has an abstract hash
    if all(
        concrete or abstract.concrete or abstract.abstract_hash
        for abstract, concrete in to_concretize
    ):
        # Get all the concrete specs
        ret = [
            concrete or (abstract if abstract.concrete else abstract.lookup_hash())
            for abstract, concrete in to_concretize
        ]

        # If unify: true, check that specs don't conflict
        # Since all concrete, "when_possible" is not relevant
        if unify is True:  # True, "when_possible", False are possible values
            runtimes = spack.repo.PATH.packages_with_tags("runtime")
            specs_per_name = Counter(
                spec.name
                for spec in traverse.traverse_nodes(
                    ret, deptype=("link", "run"), key=traverse.by_dag_hash
                )
                if spec.name not in runtimes  # runtimes are allowed multiple times
            )

            conflicts = sorted(name for name, count in specs_per_name.items() if count > 1)
            if conflicts:
                raise spack.error.SpecError(
                    "Specs conflict and `concretizer:unify` is configured true.",
                    f" specs depend on multiple versions of {', '.join(conflicts)}",
                )
        return ret

    # Standard case
    concretize_method = spack.concretize.concretize_separately  # unify: false
    if unify is True:
        concretize_method = spack.concretize.concretize_together
    elif unify == "when_possible":
        concretize_method = spack.concretize.concretize_together_when_possible

    concretized = concretize_method(to_concretize, tests=tests)
    return [concrete for _, concrete in concretized]


def matching_spec_from_env(spec):
@@ -192,6 +251,22 @@ def matching_spec_from_env(spec):
    return spec.concretized()


def matching_specs_from_env(specs):
    """
    Same as ``matching_spec_from_env`` but respects spec unification rules.

    For each spec, if there is a matching spec in the environment it is used. If no
    matching spec is found, this will return the given spec but concretized in the
    context of the active environment and other given specs, with unification rules applied.
    """
    env = ev.active_environment()
    spec_pairs = [(spec, env.matching_spec(spec) if env else None) for spec in specs]
    additional_concrete_specs = (
        [(concrete, concrete) for _, concrete in env.concretized_specs()] if env else []
    )
    return _concretize_spec_pairs(spec_pairs + additional_concrete_specs)[: len(spec_pairs)]


def disambiguate_spec(spec, env, local=False, installed=True, first=False):
    """Given a spec, figure out which installed package it refers to.

@@ -509,6 +584,18 @@ def __init__(self, name):
        super().__init__("{0} is not a permissible Spack command name.".format(name))


class MultipleSpecsMatch(Exception):
    """Raised when multiple specs match a constraint, in a context where
    this is not allowed.
    """


class NoSpecMatches(Exception):
    """Raised when no spec matches a constraint, in a context where
    this is not allowed.
    """


########################################
# argparse types for argument validation
########################################

@@ -105,7 +105,8 @@ def clean(parser, args):
    # Then do the cleaning falling through the cases
    if args.specs:
        specs = spack.cmd.parse_specs(args.specs, concretize=False)
        specs = list(spack.cmd.matching_spec_from_env(x) for x in specs)
        specs = spack.cmd.matching_specs_from_env(specs)

        for spec in specs:
            msg = "Cleaning build stage [{0}]"
            tty.msg(msg.format(spec.short_spec))

@@ -581,23 +581,51 @@ def add_concretizer_args(subparser):


def add_connection_args(subparser, add_help):
    subparser.add_argument(
        "--s3-access-key-id", help="ID string to use to connect to this S3 mirror"
def add_argument_string_or_variable(parser, arg: str, *, deprecate_str: bool = True, **kwargs):
    group = parser.add_mutually_exclusive_group()
    group.add_argument(arg, **kwargs)
    # Update help string
    if "help" in kwargs:
        kwargs["help"] = "environment variable containing " + kwargs["help"]
    group.add_argument(arg + "-variable", **kwargs)

    s3_connection_parser = subparser.add_argument_group("S3 Connection")

    add_argument_string_or_variable(
        s3_connection_parser,
        "--s3-access-key-id",
        help="ID string to use to connect to this S3 mirror",
    )
    subparser.add_argument(
        "--s3-access-key-secret", help="secret string to use to connect to this S3 mirror"
    add_argument_string_or_variable(
        s3_connection_parser,
        "--s3-access-key-secret",
        help="secret string to use to connect to this S3 mirror",
    )
    subparser.add_argument(
        "--s3-access-token", help="access token to use to connect to this S3 mirror"
    add_argument_string_or_variable(
        s3_connection_parser,
        "--s3-access-token",
        help="access token to use to connect to this S3 mirror",
    )
    subparser.add_argument(
    s3_connection_parser.add_argument(
        "--s3-profile", help="S3 profile name to use to connect to this S3 mirror", default=None
    )
    subparser.add_argument(
    s3_connection_parser.add_argument(
        "--s3-endpoint-url", help="endpoint URL to use to connect to this S3 mirror"
    )
    subparser.add_argument("--oci-username", help="username to use to connect to this OCI mirror")
    subparser.add_argument("--oci-password", help="password to use to connect to this OCI mirror")

    oci_connection_parser = subparser.add_argument_group("OCI Connection")

    add_argument_string_or_variable(
        oci_connection_parser,
        "--oci-username",
        deprecate_str=False,
        help="username to use to connect to this OCI mirror",
    )
    add_argument_string_or_variable(
        oci_connection_parser,
        "--oci-password",
        help="password to use to connect to this OCI mirror",
    )


def use_buildcache(cli_arg_value):
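
Editor's note: a minimal, stand-alone sketch of the "--flag vs --flag-variable" pattern introduced in the hunk above, using plain argparse (option names are illustrative, not the real Spack parser):

    import argparse

    parser = argparse.ArgumentParser()
    group = parser.add_mutually_exclusive_group()
    group.add_argument("--s3-access-key-secret", help="secret string")
    group.add_argument("--s3-access-key-secret-variable",
                       help="environment variable containing secret string")

    args = parser.parse_args(["--s3-access-key-secret-variable", "MY_SECRET_ENV"])
    print(args.s3_access_key_secret_variable)  # "MY_SECRET_ENV"
    # passing both options at once makes argparse exit with an error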

@@ -10,11 +10,12 @@
import sys
import tempfile
from pathlib import Path
from typing import List, Optional
from typing import List, Optional, Set

import llnl.string as string
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.symlink import islink, symlink
from llnl.util.tty.colify import colify
from llnl.util.tty.color import cescape, colorize

@@ -50,6 +51,8 @@
    "update",
    "revert",
    "depfile",
    "track",
    "untrack",
]


@@ -446,6 +449,193 @@ def env_deactivate(args):
    sys.stdout.write(cmds)


#
# env track
#
def env_track_setup_parser(subparser):
    """track an environment from a directory in Spack"""
    subparser.add_argument("-n", "--name", help="custom environment name")
    subparser.add_argument("dir", help="path to environment")
    arguments.add_common_arguments(subparser, ["yes_to_all"])


def env_track(args):
    src_path = os.path.abspath(args.dir)
    if not ev.is_env_dir(src_path):
        tty.die("Cannot track environment. Path doesn't contain an environment")

    if args.name:
        name = args.name
    else:
        name = os.path.basename(src_path)

    try:
        dst_path = ev.environment_dir_from_name(name, exists_ok=False)
    except ev.SpackEnvironmentError:
        tty.die(
            f"An environment named {name} already exists. Set a name with:"
            "\n\n"
            f"    spack env track --name NAME {src_path}\n"
        )

    symlink(src_path, dst_path)

    tty.msg(f"Tracking environment in {src_path}")
    tty.msg(
        "You can now activate this environment with the following command:\n\n"
        f"    spack env activate {name}\n"
    )


#
# env remove & untrack helpers
#
def filter_managed_env_names(env_names: Set[str]) -> Set[str]:
    tracked_env_names = {e for e in env_names if islink(ev.environment_dir_from_name(e))}
    managed_env_names = env_names - set(tracked_env_names)

    num_managed_envs = len(managed_env_names)
    managed_envs_str = " ".join(managed_env_names)
    if num_managed_envs >= 2:
        tty.error(
            f"The following are not tracked environments. "
            "To remove them completely run,"
            "\n\n"
            f"    spack env rm {managed_envs_str}\n"
        )

    elif num_managed_envs > 0:
        tty.error(
            f"'{managed_envs_str}' is not a tracked env. "
            "To remove it completely run,"
            "\n\n"
            f"    spack env rm {managed_envs_str}\n"
        )

    return tracked_env_names


def get_valid_envs(env_names: Set[str]) -> Set[ev.Environment]:
    valid_envs = set()
    for env_name in env_names:
        try:
            env = ev.read(env_name)
            valid_envs.add(env)

        except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
            pass

    return valid_envs


def _env_untrack_or_remove(
    env_names: List[str], remove: bool = False, force: bool = False, yes_to_all: bool = False
):
    all_env_names = set(ev.all_environment_names())
    known_env_names = set(env_names).intersection(all_env_names)
    unknown_env_names = set(env_names) - known_env_names

    # print error for unknown environments
    for env_name in unknown_env_names:
        tty.error(f"Environment '{env_name}' does not exist")

    # if only unlinking is allowed, remove all environments
    # which do not point internally at symlinks
    if not remove:
        env_names_to_remove = filter_managed_env_names(known_env_names)
    else:
        env_names_to_remove = known_env_names

    # initialize all environments with valid spack.yaml configs
    all_valid_envs = get_valid_envs(all_env_names)

    # build a task list of environments and bad env names to remove
    envs_to_remove = [e for e in all_valid_envs if e.name in env_names_to_remove]
    bad_env_names_to_remove = env_names_to_remove - {e.name for e in envs_to_remove}
    for remove_env in envs_to_remove:
        for env in all_valid_envs:
            # don't check if an environment is included to itself
            if env.name == remove_env.name:
                continue

            # check if an environment is included in another
            if remove_env.path in env.included_concrete_envs:
                msg = f"Environment '{remove_env.name}' is used by environment '{env.name}'"
                if force:
                    tty.warn(msg)
                else:
                    tty.error(msg)
                    envs_to_remove.remove(remove_env)

    # ask the user if they really want to remove the known environments
    # force should do the same as yes to all here following the semantics of rm
    if not (yes_to_all or force) and (envs_to_remove or bad_env_names_to_remove):
        environments = string.plural(len(env_names_to_remove), "environment", show_n=False)
        envs = string.comma_and(list(env_names_to_remove))
        answer = tty.get_yes_or_no(
            f"Really {'remove' if remove else 'untrack'} {environments} {envs}?", default=False
        )
        if not answer:
            tty.die("Will not remove any environments")

    # keep track of the environments we remove for later printing the exit code
    removed_env_names = []
    for env in envs_to_remove:
        name = env.name
        if not force and env.active:
            tty.error(
                f"Environment '{name}' can't be "
                f"{'removed' if remove else 'untracked'} while activated."
            )
            continue
        # Get path to check if environment is a tracked / symlinked environment
        if islink(env.path):
            real_env_path = os.path.realpath(env.path)
            os.unlink(env.path)
            tty.msg(
                f"Successfully untracked environment '{name}', "
                "but it can still be found at:\n\n"
                f"      {real_env_path}\n"
            )
        else:
            env.destroy()
            tty.msg(f"Successfully removed environment '{name}'")

        removed_env_names.append(env.name)

    for bad_env_name in bad_env_names_to_remove:
        shutil.rmtree(
            spack.environment.environment.environment_dir_from_name(bad_env_name, exists_ok=True)
        )
        tty.msg(f"Successfully removed environment '{bad_env_name}'")
        removed_env_names.append(env.name)

    # Following the design of linux rm we should exit with a status of 1
    # anytime we cannot delete every environment the user asks for.
    # However, we should still process all the environments we know about
    # and delete them instead of failing on the first unknown environment.
    if len(removed_env_names) < len(known_env_names):
        sys.exit(1)


#
# env untrack
#
def env_untrack_setup_parser(subparser):
    """untrack an environment from a directory in Spack"""
    subparser.add_argument("env", nargs="+", help="tracked environment name")
    subparser.add_argument(
        "-f", "--force", action="store_true", help="force unlink even when environment is active"
    )
    arguments.add_common_arguments(subparser, ["yes_to_all"])


def env_untrack(args):
    _env_untrack_or_remove(
        env_names=args.env, force=args.force, yes_to_all=args.yes_to_all, remove=False
    )

#
# env remove
#
@@ -471,54 +661,9 @@ def env_remove_setup_parser(subparser):

def env_remove(args):
    """remove existing environment(s)"""
    remove_envs = []
    valid_envs = []
    bad_envs = []

    for env_name in ev.all_environment_names():
        try:
            env = ev.read(env_name)
            valid_envs.append(env)

            if env_name in args.rm_env:
                remove_envs.append(env)
        except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
            if env_name in args.rm_env:
                bad_envs.append(env_name)

    # Check if remove_env is included from another env before trying to remove
    for env in valid_envs:
        for remove_env in remove_envs:
            # don't check if environment is included to itself
            if env.name == remove_env.name:
                continue

            if remove_env.path in env.included_concrete_envs:
                msg = f'Environment "{remove_env.name}" is being used by environment "{env.name}"'
                if args.force:
                    tty.warn(msg)
                else:
                    tty.die(msg)

    if not args.yes_to_all:
        environments = string.plural(len(args.rm_env), "environment", show_n=False)
        envs = string.comma_and(args.rm_env)
        answer = tty.get_yes_or_no(f"Really remove {environments} {envs}?", default=False)
        if not answer:
            tty.die("Will not remove any environments")

    for env in remove_envs:
        name = env.name
        if env.active:
            tty.die(f"Environment {name} can't be removed while activated.")
        env.destroy()
        tty.msg(f"Successfully removed environment '{name}'")

    for bad_env_name in bad_envs:
        shutil.rmtree(
            spack.environment.environment.environment_dir_from_name(bad_env_name, exists_ok=True)
        )
        tty.msg(f"Successfully removed environment '{bad_env_name}'")
    _env_untrack_or_remove(
        env_names=args.rm_env, remove=True, force=args.force, yes_to_all=args.yes_to_all
    )


#
@@ -222,11 +222,9 @@ def decorator(spec, fmt):
def display_env(env, args, decorator, results):
    """Display extra find output when running in an environment.

    Find in an environment outputs 2 or 3 sections:

    1. Root specs
    2. Concretized roots (if asked for with -c)
    3. Installed specs
    In an environment, `spack find` outputs a preliminary section
    showing the root specs of the environment (this is in addition
    to the section listing out specs matching the query parameters).

    """
    tty.msg("In environment %s" % env.name)
@@ -299,6 +297,56 @@ def root_decorator(spec, string):
    print()


def _find_query(args, env):
    q_args = query_arguments(args)
    concretized_but_not_installed = list()
    if env:
        all_env_specs = env.all_specs()
        if args.constraint:
            init_specs = cmd.parse_specs(args.constraint)
            env_specs = env.all_matching_specs(*init_specs)
        else:
            env_specs = all_env_specs

        spec_hashes = set(x.dag_hash() for x in env_specs)
        specs_meeting_q_args = set(spack.store.STORE.db.query(hashes=spec_hashes, **q_args))

        results = list()
        with spack.store.STORE.db.read_transaction():
            for spec in env_specs:
                if not spec.installed:
                    concretized_but_not_installed.append(spec)
                if spec in specs_meeting_q_args:
                    results.append(spec)
    else:
        results = args.specs(**q_args)

    # use groups by default except with format.
    if args.groups is None:
        args.groups = not args.format

    # Exit early with an error code if no package matches the constraint
    if concretized_but_not_installed and args.show_concretized:
        pass
    elif results:
        pass
    elif args.constraint:
        raise cmd.NoSpecMatches()

    # If tags have been specified on the command line, filter by tags
    if args.tags:
        packages_with_tags = spack.repo.PATH.packages_with_tags(*args.tags)
        results = [x for x in results if x.name in packages_with_tags]
        concretized_but_not_installed = [
            x for x in concretized_but_not_installed if x.name in packages_with_tags
        ]

    if args.loaded:
        results = cmd.filter_loaded_specs(results)

    return results, concretized_but_not_installed


def find(parser, args):
    env = ev.active_environment()

@@ -307,34 +355,12 @@ def find(parser, args):
    if not env and args.show_concretized:
        tty.die("-c / --show-concretized requires an active environment")

    if env:
        if args.constraint:
            init_specs = spack.cmd.parse_specs(args.constraint)
            results = env.all_matching_specs(*init_specs)
        else:
            results = env.all_specs()
    else:
        q_args = query_arguments(args)
        results = args.specs(**q_args)

    decorator = make_env_decorator(env) if env else lambda s, f: f

    # use groups by default except with format.
    if args.groups is None:
        args.groups = not args.format

    # Exit early with an error code if no package matches the constraint
    if not results and args.constraint:
        constraint_str = " ".join(str(s) for s in args.constraint_specs)
        tty.die(f"No package matches the query: {constraint_str}")

    # If tags have been specified on the command line, filter by tags
    if args.tags:
        packages_with_tags = spack.repo.PATH.packages_with_tags(*args.tags)
        results = [x for x in results if x.name in packages_with_tags]

    if args.loaded:
        results = spack.cmd.filter_loaded_specs(results)
    try:
        results, concretized_but_not_installed = _find_query(args, env)
    except cmd.NoSpecMatches:
        # Note: this uses args.constraint vs. args.constraint_specs because
        # the latter only exists if you call args.specs()
        tty.die(f"No package matches the query: {' '.join(args.constraint)}")

    if args.install_status or args.show_concretized:
        status_fn = spack.spec.Spec.install_status
@@ -345,14 +371,16 @@ def find(parser, args):
    if args.json:
        cmd.display_specs_as_json(results, deps=args.deps)
    else:
        decorator = make_env_decorator(env) if env else lambda s, f: f

        if not args.format:
            if env:
                display_env(env, args, decorator, results)

        if not args.only_roots:
            display_results = results
            if not args.show_concretized:
                display_results = list(x for x in results if x.installed)
            display_results = list(results)
            if args.show_concretized:
                display_results += concretized_but_not_installed
            cmd.display_specs(
                display_results, args, decorator=decorator, all_headers=True, status_fn=status_fn
            )
@@ -370,13 +398,9 @@ def find(parser, args):
        concretized_suffix += " (show with `spack find -c`)"

    pkg_type = "loaded" if args.loaded else "installed"
    spack.cmd.print_how_many_pkgs(
        list(x for x in results if x.installed), pkg_type, suffix=installed_suffix
    )
    cmd.print_how_many_pkgs(results, pkg_type, suffix=installed_suffix)

    if env:
        spack.cmd.print_how_many_pkgs(
            list(x for x in results if not x.installed),
            "concretized",
            suffix=concretized_suffix,
        cmd.print_how_many_pkgs(
            concretized_but_not_installed, "concretized", suffix=concretized_suffix
        )
@@ -98,8 +98,9 @@ def do_mark(specs, explicit):
        specs (list): list of specs to be marked
        explicit (bool): whether to mark specs as explicitly installed
    """
    for spec in specs:
        spack.store.STORE.db.update_explicit(spec, explicit)
    with spack.store.STORE.db.write_transaction():
        for spec in specs:
            spack.store.STORE.db.mark(spec, "explicit", explicit)


def mark_specs(args, specs):
@@ -231,31 +231,133 @@ def setup_parser(subparser):
    )


def _configure_access_pair(
    args, id_tok, id_variable_tok, secret_tok, secret_variable_tok, default=None
):
    """Configure the access_pair options"""

    # Check if any of the arguments are set to update this access_pair.
    # If none are set, then skip computing the new access pair
    args_id = getattr(args, id_tok)
    args_id_variable = getattr(args, id_variable_tok)
    args_secret = getattr(args, secret_tok)
    args_secret_variable = getattr(args, secret_variable_tok)
    if not any([args_id, args_id_variable, args_secret, args_secret_variable]):
        return None

    def _default_value(id_):
        if isinstance(default, list):
            return default[0] if id_ == "id" else default[1]
        elif isinstance(default, dict):
            return default.get(id_)
        else:
            return None

    def _default_variable(id_):
        if isinstance(default, dict):
            return default.get(id_ + "_variable")
        else:
            return None

    id_ = None
    id_variable = None
    secret = None
    secret_variable = None

    # Get the value/default value if the inverse argument is not set
    if not args_id_variable:
        id_ = getattr(args, id_tok) or _default_value("id")
    if not args_id:
        id_variable = getattr(args, id_variable_tok) or _default_variable("id")
    if not args_secret_variable:
        secret = getattr(args, secret_tok) or _default_value("secret")
    if not args_secret:
        secret_variable = getattr(args, secret_variable_tok) or _default_variable("secret")

    if (id_ or id_variable) and (secret or secret_variable):
        if secret:
            if not id_:
                raise SpackError("Cannot add mirror with a variable id and text secret")

            return [id_, secret]
        else:
            return dict(
                [
                    (("id", id_) if id_ else ("id_variable", id_variable)),
                    ("secret_variable", secret_variable),
                ]
            )
    else:
        if id_ or id_variable or secret or secret_variable is not None:
            id_arg_tok = id_tok.replace("_", "-")
            secret_arg_tok = secret_tok.replace("_", "-")
            tty.warn(
                "Expected both parts of the access pair to be specified. "
                f"(i.e. --{id_arg_tok} and --{secret_arg_tok})"
            )

        return None
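
Editor's note: the helper above returns one of two shapes, depending on whether the secret is given as literal text or should be read from an environment variable at use time. Illustrative values only (not real credentials):

    # literal id and secret -> a plain [id, secret] pair
    access_pair_plain = ["AKIA123", "s3cr3t"]
    # literal id, secret taken from an environment variable at use time
    access_pair_from_env = {"id": "AKIA123", "secret_variable": "S3_SECRET_ENV"}
    # both parts resolved from environment variables
    access_pair_both_env = {"id_variable": "S3_KEY_ID_ENV", "secret_variable": "S3_SECRET_ENV"}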

def mirror_add(args):
    """add a mirror to Spack"""
    if (
        args.s3_access_key_id
        or args.s3_access_key_secret
        or args.s3_access_token
        or args.s3_access_key_id_variable
        or args.s3_access_key_secret_variable
        or args.s3_access_token_variable
        or args.s3_profile
        or args.s3_endpoint_url
        or args.type
        or args.oci_username
        or args.oci_password
        or args.oci_username_variable
        or args.oci_password_variable
        or args.autopush
        or args.signed is not None
    ):
        connection = {"url": args.url}
        if args.s3_access_key_id and args.s3_access_key_secret:
            connection["access_pair"] = [args.s3_access_key_id, args.s3_access_key_secret]
        # S3 Connection
        if args.s3_access_key_secret:
            tty.warn(
                "Configuring mirror secrets as plain text with --s3-access-key-secret is "
                "deprecated. Use --s3-access-key-secret-variable instead"
            )
        if args.oci_password:
            tty.warn(
                "Configuring mirror secrets as plain text with --oci-password is deprecated. "
                "Use --oci-password-variable instead"
            )
        access_pair = _configure_access_pair(
            args,
            "s3_access_key_id",
            "s3_access_key_id_variable",
            "s3_access_key_secret",
            "s3_access_key_secret_variable",
        )
        if access_pair:
            connection["access_pair"] = access_pair

        if args.s3_access_token:
            connection["access_token"] = args.s3_access_token
        elif args.s3_access_token_variable:
            connection["access_token_variable"] = args.s3_access_token_variable

        if args.s3_profile:
            connection["profile"] = args.s3_profile

        if args.s3_endpoint_url:
            connection["endpoint_url"] = args.s3_endpoint_url
        if args.oci_username and args.oci_password:
            connection["access_pair"] = [args.oci_username, args.oci_password]

        # OCI Connection
        access_pair = _configure_access_pair(
            args, "oci_username", "oci_username_variable", "oci_password", "oci_password_variable"
        )
        if access_pair:
            connection["access_pair"] = access_pair

        if args.type:
            connection["binary"] = "binary" in args.type
            connection["source"] = "source" in args.type
@@ -285,16 +387,35 @@ def _configure_mirror(args):
    changes = {}
    if args.url:
        changes["url"] = args.url
    if args.s3_access_key_id and args.s3_access_key_secret:
        changes["access_pair"] = [args.s3_access_key_id, args.s3_access_key_secret]

    default_access_pair = entry._get_value("access_pair", direction or "fetch")
    # TODO: Init access_pair args with the fetch/push/base values in the current mirror state
    access_pair = _configure_access_pair(
        args,
        "s3_access_key_id",
        "s3_access_key_id_variable",
        "s3_access_key_secret",
        "s3_access_key_secret_variable",
        default=default_access_pair,
    )
    if access_pair:
        changes["access_pair"] = access_pair
    if args.s3_access_token:
        changes["access_token"] = args.s3_access_token
    if args.s3_profile:
        changes["profile"] = args.s3_profile
    if args.s3_endpoint_url:
        changes["endpoint_url"] = args.s3_endpoint_url
    if args.oci_username and args.oci_password:
        changes["access_pair"] = [args.oci_username, args.oci_password]
    access_pair = _configure_access_pair(
        args,
        "oci_username",
        "oci_username_variable",
        "oci_password",
        "oci_password_variable",
        default=default_access_pair,
    )
    if access_pair:
        changes["access_pair"] = access_pair
    if getattr(args, "signed", None) is not None:
        changes["signed"] = args.signed
    if getattr(args, "autopush", None) is not None:
@@ -19,6 +19,7 @@
|
||||
import spack.modules
|
||||
import spack.modules.common
|
||||
import spack.repo
|
||||
from spack.cmd import MultipleSpecsMatch, NoSpecMatches
|
||||
from spack.cmd.common import arguments
|
||||
|
||||
description = "manipulate module files"
|
||||
@@ -91,18 +92,6 @@ def add_loads_arguments(subparser):
|
||||
arguments.add_common_arguments(subparser, ["recurse_dependencies"])
|
||||
|
||||
|
||||
class MultipleSpecsMatch(Exception):
|
||||
"""Raised when multiple specs match a constraint, in a context where
|
||||
this is not allowed.
|
||||
"""
|
||||
|
||||
|
||||
class NoSpecMatches(Exception):
|
||||
"""Raised when no spec matches a constraint, in a context where
|
||||
this is not allowed.
|
||||
"""
|
||||
|
||||
|
||||
def one_spec_or_raise(specs):
|
||||
"""Ensures exactly one spec has been selected, or raises the appropriate
|
||||
exception.
|
||||
|
@@ -33,8 +33,9 @@ def patch(parser, args):
|
||||
spack.config.set("config:checksum", False, scope="command_line")
|
||||
|
||||
specs = spack.cmd.parse_specs(args.specs, concretize=False)
|
||||
specs = spack.cmd.matching_specs_from_env(specs)
|
||||
for spec in specs:
|
||||
_patch(spack.cmd.matching_spec_from_env(spec).package)
|
||||
_patch(spec.package)
|
||||
|
||||
|
||||
def _patch_env(env: ev.Environment):
|
||||
|
@@ -12,13 +12,12 @@
|
||||
|
||||
import spack
|
||||
import spack.cmd
|
||||
import spack.cmd.common.arguments
|
||||
import spack.cmd.spec
|
||||
import spack.config
|
||||
import spack.environment
|
||||
import spack.hash_types as ht
|
||||
import spack.solver.asp as asp
|
||||
import spack.spec
|
||||
from spack.cmd.common import arguments
|
||||
|
||||
description = "concretize a specs using an ASP solver"
|
||||
section = "developer"
|
||||
@@ -41,42 +40,6 @@ def setup_parser(subparser):
|
||||
" solutions models found by asp program\n"
|
||||
" all all of the above",
|
||||
)
|
||||
|
||||
# Below are arguments w.r.t. spec display (like spack spec)
|
||||
arguments.add_common_arguments(subparser, ["long", "very_long", "namespaces"])
|
||||
|
||||
install_status_group = subparser.add_mutually_exclusive_group()
|
||||
arguments.add_common_arguments(install_status_group, ["install_status", "no_install_status"])
|
||||
|
||||
subparser.add_argument(
|
||||
"-y",
|
||||
"--yaml",
|
||||
action="store_const",
|
||||
dest="format",
|
||||
default=None,
|
||||
const="yaml",
|
||||
help="print concrete spec as yaml",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-j",
|
||||
"--json",
|
||||
action="store_const",
|
||||
dest="format",
|
||||
default=None,
|
||||
const="json",
|
||||
help="print concrete spec as json",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-c",
|
||||
"--cover",
|
||||
action="store",
|
||||
default="nodes",
|
||||
choices=["nodes", "edges", "paths"],
|
||||
help="how extensively to traverse the DAG (default: nodes)",
|
||||
)
|
||||
subparser.add_argument(
|
||||
"-t", "--types", action="store_true", default=False, help="show dependency types"
|
||||
)
|
||||
subparser.add_argument(
|
||||
"--timers",
|
||||
action="store_true",
|
||||
@@ -86,9 +49,15 @@ def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
"--stats", action="store_true", default=False, help="print out statistics from clingo"
|
||||
)
|
||||
subparser.add_argument(
|
||||
"--profile",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="profile the solve phase and print out statistics on atoms",
|
||||
)
|
||||
subparser.add_argument("specs", nargs=argparse.REMAINDER, help="specs of packages")
|
||||
|
||||
spack.cmd.common.arguments.add_concretizer_args(subparser)
|
||||
spack.cmd.spec.setup_parser(subparser)
|
||||
|
||||
|
||||
def _process_result(result, show, required_format, kwargs):
|
||||
@@ -164,11 +133,12 @@ def solve(parser, args):
|
||||
|
||||
# If we have an active environment, pick the specs from there
|
||||
env = spack.environment.active_environment()
|
||||
if env and args.specs:
|
||||
msg = "cannot give explicit specs when an environment is active"
|
||||
raise RuntimeError(msg)
|
||||
|
||||
specs = list(env.user_specs) if env else spack.cmd.parse_specs(args.specs)
|
||||
if args.specs:
|
||||
specs = spack.cmd.parse_specs(args.specs)
|
||||
elif env:
|
||||
specs = list(env.user_specs)
|
||||
else:
|
||||
tty.die("spack solve requires at least one spec or an active environment")
|
||||
|
||||
solver = asp.Solver()
|
||||
output = sys.stdout if "asp" in show else None
|
||||
@@ -185,6 +155,7 @@ def solve(parser, args):
|
||||
stats=args.stats,
|
||||
setup_only=setup_only,
|
||||
allow_deprecated=allow_deprecated,
|
||||
profile=args.profile,
|
||||
)
|
||||
if not setup_only:
|
||||
_process_result(result, show, required_format, kwargs)
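For context, `args.profile` above is forwarded untouched to the solver. A hedged sketch of driving the same entry point from Python, using only the keyword arguments visible in this hunk (everything else about the setup assumes a working Spack checkout on the import path):

import spack.cmd
import spack.solver.asp as asp

# Sketch only: profile/stats/allow_deprecated mirror the call in the hunk above.
specs = spack.cmd.parse_specs(["zlib"])
solver = asp.Solver()
result = solver.solve(specs, stats=True, allow_deprecated=False, profile=True)
for spec in result.specs:
    print(spec.format("{name}{@version}"))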
|
||||
|
@@ -82,64 +82,44 @@ def spec(parser, args):
|
||||
if args.namespaces:
|
||||
fmt = "{namespace}." + fmt
|
||||
|
||||
tree_kwargs = {
|
||||
"cover": args.cover,
|
||||
"format": fmt,
|
||||
"hashlen": None if args.very_long else 7,
|
||||
"show_types": args.types,
|
||||
"status_fn": install_status_fn if args.install_status else None,
|
||||
}
|
||||
|
||||
# use a read transaction if we are getting install status for every
|
||||
# spec in the DAG. This avoids repeatedly querying the DB.
|
||||
tree_context = lang.nullcontext
|
||||
if args.install_status:
|
||||
tree_context = spack.store.STORE.db.read_transaction
|
||||
|
||||
# Use command line specified specs, otherwise try to use environment specs.
|
||||
env = ev.active_environment()
|
||||
|
||||
if args.specs:
|
||||
input_specs = spack.cmd.parse_specs(args.specs)
|
||||
concretized_specs = spack.cmd.parse_specs(args.specs, concretize=True)
|
||||
specs = list(zip(input_specs, concretized_specs))
|
||||
concrete_specs = spack.cmd.parse_specs(args.specs, concretize=True)
|
||||
elif env:
|
||||
env.concretize()
|
||||
concrete_specs = env.concrete_roots()
|
||||
else:
|
||||
env = ev.active_environment()
|
||||
if env:
|
||||
env.concretize()
|
||||
specs = env.concretized_specs()
|
||||
tty.die("spack spec requires at least one spec or an active environment")
|
||||
|
||||
# environments are printed together in a combined tree() invocation,
|
||||
# except when using --yaml or --json, which we print spec by spec below.
|
||||
if not args.format:
|
||||
tree_kwargs["key"] = spack.traverse.by_dag_hash
|
||||
tree_kwargs["hashes"] = args.long or args.very_long
|
||||
print(spack.spec.tree([concrete for _, concrete in specs], **tree_kwargs))
|
||||
return
|
||||
else:
|
||||
tty.die("spack spec requires at least one spec or an active environment")
|
||||
|
||||
for input, output in specs:
|
||||
# With --yaml or --json, just print the raw specs to output
|
||||
if args.format:
|
||||
# With --yaml, --json, or --format, just print the raw specs to output
|
||||
if args.format:
|
||||
for spec in concrete_specs:
|
||||
if args.format == "yaml":
|
||||
# use write because to_yaml already has a newline.
|
||||
sys.stdout.write(output.to_yaml(hash=ht.dag_hash))
|
||||
sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
|
||||
elif args.format == "json":
|
||||
print(output.to_json(hash=ht.dag_hash))
|
||||
print(spec.to_json(hash=ht.dag_hash))
|
||||
else:
|
||||
print(output.format(args.format))
|
||||
continue
|
||||
print(spec.format(args.format))
|
||||
return
|
||||
|
||||
with tree_context():
|
||||
# Only show the headers for input specs that are not concrete to avoid
|
||||
# repeated output. This happens because parse_specs outputs concrete
|
||||
# specs for `/hash` inputs.
|
||||
if not input.concrete:
|
||||
tree_kwargs["hashes"] = False # Always False for input spec
|
||||
print("Input spec")
|
||||
print("--------------------------------")
|
||||
print(input.tree(**tree_kwargs))
|
||||
print("Concretized")
|
||||
print("--------------------------------")
|
||||
|
||||
tree_kwargs["hashes"] = args.long or args.very_long
|
||||
print(output.tree(**tree_kwargs))
|
||||
with tree_context():
|
||||
print(
|
||||
spack.spec.tree(
|
||||
concrete_specs,
|
||||
cover=args.cover,
|
||||
format=fmt,
|
||||
hashlen=None if args.very_long else 7,
|
||||
show_types=args.types,
|
||||
status_fn=install_status_fn if args.install_status else None,
|
||||
hashes=args.long or args.very_long,
|
||||
key=spack.traverse.by_dag_hash,
|
||||
)
|
||||
)
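The rewrite above routes both the environment case and the command-line case through a single `spack.spec.tree` call. A minimal sketch of that code path outside the command, reusing only the parameter names visible in the hunk (treat the rest of the setup as an assumption):

import spack.cmd
import spack.spec
import spack.traverse

# Sketch: print a concretized DAG the way the new `spack spec` path does.
concrete_specs = spack.cmd.parse_specs(["zlib"], concretize=True)
print(
    spack.spec.tree(
        concrete_specs,
        cover="nodes",
        hashlen=7,
        hashes=True,
        key=spack.traverse.by_dag_hash,
    )
)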
|
||||
|
@@ -47,8 +47,8 @@ def stage(parser, args):
|
||||
if len(specs) > 1 and custom_path:
|
||||
tty.die("`--path` requires a single spec, but multiple were provided")
|
||||
|
||||
specs = spack.cmd.matching_specs_from_env(specs)
|
||||
for spec in specs:
|
||||
spec = spack.cmd.matching_spec_from_env(spec)
|
||||
pkg = spec.package
|
||||
|
||||
if custom_path:
|
||||
|
@@ -24,7 +24,7 @@
|
||||
|
||||
|
||||
# tutorial configuration parameters
tutorial_branch = "releases/v0.22"
tutorial_branch = "releases/v0.23"
tutorial_mirror = "file:///mirror"
tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")
|
||||
|
||||
|
@@ -4,20 +4,23 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import contextlib
|
||||
import hashlib
|
||||
import itertools
|
||||
import json
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
from typing import List, Optional, Sequence
|
||||
from typing import Dict, List, Optional, Sequence
|
||||
|
||||
import llnl.path
|
||||
import llnl.util.lang
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.filesystem import path_contains_subdirectory, paths_containing_libs
|
||||
|
||||
import spack.caches
|
||||
import spack.error
|
||||
import spack.schema.environment
|
||||
import spack.spec
|
||||
@@ -26,6 +29,7 @@
|
||||
import spack.util.module_cmd
|
||||
import spack.version
|
||||
from spack.util.environment import filter_system_paths
|
||||
from spack.util.file_cache import FileCache
|
||||
|
||||
__all__ = ["Compiler"]
|
||||
|
||||
@@ -34,7 +38,7 @@
|
||||
|
||||
|
||||
@llnl.util.lang.memoized
|
||||
def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
|
||||
def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()) -> str:
|
||||
"""Invokes the compiler at a given path passing a single
|
||||
version argument and returns the output.
|
||||
|
||||
@@ -57,7 +61,7 @@ def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
|
||||
return output
|
||||
|
||||
|
||||
def get_compiler_version_output(compiler_path, *args, **kwargs):
|
||||
def get_compiler_version_output(compiler_path, *args, **kwargs) -> str:
|
||||
"""Wrapper for _get_compiler_version_output()."""
|
||||
# This ensures that we memoize compiler output by *absolute path*,
|
||||
# not just executable name. If we don't do this, and the path changes
|
||||
@@ -290,6 +294,7 @@ def __init__(
|
||||
self.environment = environment or {}
|
||||
self.extra_rpaths = extra_rpaths or []
|
||||
self.enable_implicit_rpaths = enable_implicit_rpaths
|
||||
self.cache = COMPILER_CACHE
|
||||
|
||||
self.cc = paths[0]
|
||||
self.cxx = paths[1]
|
||||
@@ -390,15 +395,11 @@ def real_version(self):
|
||||
|
||||
E.g. C++11 flag checks.
|
||||
"""
|
||||
if not self._real_version:
|
||||
try:
|
||||
real_version = spack.version.Version(self.get_real_version())
|
||||
if real_version == spack.version.Version("unknown"):
|
||||
return self.version
|
||||
self._real_version = real_version
|
||||
except spack.util.executable.ProcessError:
|
||||
self._real_version = self.version
|
||||
return self._real_version
|
||||
real_version_str = self.cache.get(self).real_version
|
||||
if not real_version_str or real_version_str == "unknown":
|
||||
return self.version
|
||||
|
||||
return spack.version.StandardVersion.from_string(real_version_str)
|
||||
|
||||
def implicit_rpaths(self) -> List[str]:
|
||||
if self.enable_implicit_rpaths is False:
|
||||
@@ -427,6 +428,11 @@ def default_dynamic_linker(self) -> Optional[str]:
|
||||
@property
|
||||
def default_libc(self) -> Optional["spack.spec.Spec"]:
|
||||
"""Determine libc targeted by the compiler from link line"""
|
||||
# technically this should be testing the target platform of the compiler, but we don't have
|
||||
# that, so stick to host platform for now.
|
||||
if sys.platform in ("darwin", "win32"):
|
||||
return None
|
||||
|
||||
dynamic_linker = self.default_dynamic_linker
|
||||
|
||||
if not dynamic_linker:
|
||||
@@ -445,19 +451,23 @@ def required_libs(self):
|
||||
@property
|
||||
def compiler_verbose_output(self) -> Optional[str]:
|
||||
"""Verbose output from compiling a dummy C source file. Output is cached."""
|
||||
if not hasattr(self, "_compile_c_source_output"):
|
||||
self._compile_c_source_output = self._compile_dummy_c_source()
|
||||
return self._compile_c_source_output
|
||||
return self.cache.get(self).c_compiler_output
|
||||
|
||||
def _compile_dummy_c_source(self) -> Optional[str]:
|
||||
cc = self.cc if self.cc else self.cxx
|
||||
if self.cc:
|
||||
cc = self.cc
|
||||
ext = "c"
|
||||
else:
|
||||
cc = self.cxx
|
||||
ext = "cc"
|
||||
|
||||
if not cc or not self.verbose_flag:
|
||||
return None
|
||||
|
||||
try:
|
||||
tmpdir = tempfile.mkdtemp(prefix="spack-implicit-link-info")
|
||||
fout = os.path.join(tmpdir, "output")
|
||||
fin = os.path.join(tmpdir, "main.c")
|
||||
fin = os.path.join(tmpdir, f"main.{ext}")
|
||||
|
||||
with open(fin, "w") as csource:
|
||||
csource.write(
|
||||
@@ -559,7 +569,7 @@ def fc_pic_flag(self):
|
||||
# Note: This is not a class method. The class methods are used to detect
|
||||
# compilers on PATH based systems, and do not set up the run environment of
|
||||
# the compiler. This method can be called on `module` based systems as well
|
||||
def get_real_version(self):
|
||||
def get_real_version(self) -> str:
|
||||
"""Query the compiler for its version.
|
||||
|
||||
This is the "real" compiler version, regardless of what is in the
|
||||
@@ -569,14 +579,17 @@ def get_real_version(self):
|
||||
modifications) to enable the compiler to run properly on any platform.
|
||||
"""
|
||||
cc = spack.util.executable.Executable(self.cc)
|
||||
with self.compiler_environment():
|
||||
output = cc(
|
||||
self.version_argument,
|
||||
output=str,
|
||||
error=str,
|
||||
ignore_errors=tuple(self.ignore_version_errors),
|
||||
)
|
||||
return self.extract_version_from_output(output)
|
||||
try:
|
||||
with self.compiler_environment():
|
||||
output = cc(
|
||||
self.version_argument,
|
||||
output=str,
|
||||
error=str,
|
||||
ignore_errors=tuple(self.ignore_version_errors),
|
||||
)
|
||||
return self.extract_version_from_output(output)
|
||||
except spack.util.executable.ProcessError:
|
||||
return "unknown"
|
||||
|
||||
@property
|
||||
def prefix(self):
|
||||
@@ -603,7 +616,7 @@ def default_version(cls, cc):
|
||||
|
||||
@classmethod
|
||||
@llnl.util.lang.memoized
|
||||
def extract_version_from_output(cls, output):
|
||||
def extract_version_from_output(cls, output: str) -> str:
|
||||
"""Extracts the version from compiler's output."""
|
||||
match = re.search(cls.version_regex, output)
|
||||
return match.group(1) if match else "unknown"
|
||||
@@ -732,3 +745,106 @@ def __init__(self, compiler, feature, flag_name, ver_string=None):
|
||||
)
|
||||
+ " implement the {0} property and submit a pull request or issue.".format(flag_name),
|
||||
)
|
||||
|
||||
|
||||
class CompilerCacheEntry:
|
||||
"""Deserialized cache entry for a compiler"""
|
||||
|
||||
__slots__ = ["c_compiler_output", "real_version"]
|
||||
|
||||
def __init__(self, c_compiler_output: Optional[str], real_version: str):
|
||||
self.c_compiler_output = c_compiler_output
|
||||
self.real_version = real_version
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, data: Dict[str, Optional[str]]):
|
||||
if not isinstance(data, dict):
|
||||
raise ValueError(f"Invalid {cls.__name__} data")
|
||||
c_compiler_output = data.get("c_compiler_output")
|
||||
real_version = data.get("real_version")
|
||||
if not isinstance(real_version, str) or not isinstance(
|
||||
c_compiler_output, (str, type(None))
|
||||
):
|
||||
raise ValueError(f"Invalid {cls.__name__} data")
|
||||
return cls(c_compiler_output, real_version)
|
||||
|
||||
|
||||
class CompilerCache:
|
||||
"""Base class for compiler output cache. Default implementation does not cache anything."""
|
||||
|
||||
def value(self, compiler: Compiler) -> Dict[str, Optional[str]]:
|
||||
return {
|
||||
"c_compiler_output": compiler._compile_dummy_c_source(),
|
||||
"real_version": compiler.get_real_version(),
|
||||
}
|
||||
|
||||
def get(self, compiler: Compiler) -> CompilerCacheEntry:
|
||||
return CompilerCacheEntry.from_dict(self.value(compiler))
|
||||
|
||||
|
||||
class FileCompilerCache(CompilerCache):
|
||||
"""Cache for compiler output, which is used to determine implicit link paths, the default libc
|
||||
version, and the compiler version."""
|
||||
|
||||
name = os.path.join("compilers", "compilers.json")
|
||||
|
||||
def __init__(self, cache: "FileCache") -> None:
|
||||
self.cache = cache
|
||||
self.cache.init_entry(self.name)
|
||||
self._data: Dict[str, Dict[str, Optional[str]]] = {}
|
||||
|
||||
def _get_entry(self, key: str) -> Optional[CompilerCacheEntry]:
|
||||
try:
|
||||
return CompilerCacheEntry.from_dict(self._data[key])
|
||||
except ValueError:
|
||||
del self._data[key]
|
||||
except KeyError:
|
||||
pass
|
||||
return None
|
||||
|
||||
def get(self, compiler: Compiler) -> CompilerCacheEntry:
|
||||
# Cache hit
|
||||
try:
|
||||
with self.cache.read_transaction(self.name) as f:
|
||||
assert f is not None
|
||||
self._data = json.loads(f.read())
|
||||
assert isinstance(self._data, dict)
|
||||
except (json.JSONDecodeError, AssertionError):
|
||||
self._data = {}
|
||||
|
||||
key = self._key(compiler)
|
||||
value = self._get_entry(key)
|
||||
if value is not None:
|
||||
return value
|
||||
|
||||
# Cache miss
|
||||
with self.cache.write_transaction(self.name) as (old, new):
|
||||
try:
|
||||
assert old is not None
|
||||
self._data = json.loads(old.read())
|
||||
assert isinstance(self._data, dict)
|
||||
except (json.JSONDecodeError, AssertionError):
|
||||
self._data = {}
|
||||
|
||||
# Use cache entry that may have been created by another process in the meantime.
|
||||
entry = self._get_entry(key)
|
||||
|
||||
# Finally compute the cache entry
|
||||
if entry is None:
|
||||
self._data[key] = self.value(compiler)
|
||||
entry = CompilerCacheEntry.from_dict(self._data[key])
|
||||
|
||||
new.write(json.dumps(self._data, separators=(",", ":")))
|
||||
|
||||
return entry
|
||||
|
||||
def _key(self, compiler: Compiler) -> str:
|
||||
as_bytes = json.dumps(compiler.to_dict(), separators=(",", ":")).encode("utf-8")
|
||||
return hashlib.sha256(as_bytes).hexdigest()
|
||||
|
||||
|
||||
def _make_compiler_cache():
|
||||
return FileCompilerCache(spack.caches.MISC_CACHE)
|
||||
|
||||
|
||||
COMPILER_CACHE: CompilerCache = llnl.util.lang.Singleton(_make_compiler_cache) # type: ignore
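Callers never touch the JSON file directly: `Compiler.real_version` and `compiler_verbose_output` above read the two fields of the `CompilerCacheEntry` returned by `COMPILER_CACHE.get`. A hedged sketch of that consumption; it assumes a configured Spack with at least one detected compiler, and `all_compilers` is assumed to be the usual lookup helper:

import spack.compilers
from spack.compiler import COMPILER_CACHE

# Sketch: fetch (and, on a miss, compute and persist) the cached data for a compiler.
compilers = spack.compilers.all_compilers()
if compilers:
    entry = COMPILER_CACHE.get(compilers[0])
    print(entry.real_version)               # cached `cc --version` result
    print(entry.c_compiler_output is None)  # verbose link output may be absent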
|
||||
|
@@ -116,5 +116,5 @@ def fflags(self):
|
||||
def _handle_default_flag_addtions(self):
|
||||
# This is a known issue for AOCC 3.0 see:
|
||||
# https://developer.amd.com/wp-content/resources/AOCC-3.0-Install-Guide.pdf
|
||||
if self.real_version.satisfies(ver("3.0.0")):
|
||||
if self.version.satisfies(ver("3.0.0")):
|
||||
return "-Wno-unused-command-line-argument " "-mllvm -eliminate-similar-expr=false"
|
||||
|
@@ -2,14 +2,20 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""
|
||||
(DEPRECATED) Used to contain the code for the original concretizer
|
||||
"""
|
||||
"""High-level functions to concretize list of specs"""
|
||||
import sys
|
||||
import time
|
||||
from contextlib import contextmanager
|
||||
from itertools import chain
|
||||
from typing import Iterable, Optional, Sequence, Tuple, Union
|
||||
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.compilers
|
||||
import spack.config
|
||||
import spack.error
|
||||
import spack.repo
|
||||
import spack.util.parallel
|
||||
from spack.spec import ArchSpec, CompilerSpec, Spec
|
||||
|
||||
CHECK_COMPILER_EXISTENCE = True
|
||||
|
||||
@@ -30,67 +36,167 @@ def enable_compiler_existence_check():
|
||||
CHECK_COMPILER_EXISTENCE = saved
|
||||
|
||||
|
||||
def find_spec(spec, condition, default=None):
|
||||
"""Searches the dag from spec in an intelligent order and looks
|
||||
for a spec that matches a condition"""
|
||||
# First search parents, then search children
|
||||
deptype = ("build", "link")
|
||||
dagiter = chain(
|
||||
spec.traverse(direction="parents", deptype=deptype, root=False),
|
||||
spec.traverse(direction="children", deptype=deptype, root=False),
|
||||
)
|
||||
visited = set()
|
||||
for relative in dagiter:
|
||||
if condition(relative):
|
||||
return relative
|
||||
visited.add(id(relative))
|
||||
|
||||
# Then search all other relatives in the DAG *except* spec
|
||||
for relative in spec.root.traverse(deptype="all"):
|
||||
if relative is spec:
|
||||
continue
|
||||
if id(relative) in visited:
|
||||
continue
|
||||
if condition(relative):
|
||||
return relative
|
||||
|
||||
# Finally search spec itself.
|
||||
if condition(spec):
|
||||
return spec
|
||||
|
||||
return default # Nothing matched the condition; return default.
|
||||
SpecPair = Tuple[Spec, Spec]
|
||||
SpecLike = Union[Spec, str]
|
||||
TestsType = Union[bool, Iterable[str]]
|
||||
|
||||
|
||||
def concretize_specs_together(*abstract_specs, **kwargs):
|
||||
def concretize_specs_together(
|
||||
abstract_specs: Sequence[SpecLike], tests: TestsType = False
|
||||
) -> Sequence[Spec]:
|
||||
"""Given a number of specs as input, tries to concretize them together.
|
||||
|
||||
Args:
|
||||
tests (bool or list or set): False to run no tests, True to test
|
||||
all packages, or a list of package names to run tests for some
|
||||
*abstract_specs: abstract specs to be concretized, given either
|
||||
as Specs or strings
|
||||
|
||||
Returns:
|
||||
List of concretized specs
|
||||
abstract_specs: abstract specs to be concretized
|
||||
tests: list of package names for which to consider test dependencies. If True, all nodes
|
||||
will have test dependencies. If False, test dependencies will be disregarded.
|
||||
"""
|
||||
import spack.solver.asp
|
||||
|
||||
allow_deprecated = spack.config.get("config:deprecated", False)
|
||||
solver = spack.solver.asp.Solver()
|
||||
result = solver.solve(
|
||||
abstract_specs, tests=kwargs.get("tests", False), allow_deprecated=allow_deprecated
|
||||
)
|
||||
result = solver.solve(abstract_specs, tests=tests, allow_deprecated=allow_deprecated)
|
||||
return [s.copy() for s in result.specs]
|
||||
|
||||
|
||||
def concretize_together(
|
||||
spec_list: Sequence[SpecPair], tests: TestsType = False
|
||||
) -> Sequence[SpecPair]:
|
||||
"""Given a number of specs as input, tries to concretize them together.
|
||||
|
||||
Args:
|
||||
spec_list: list of tuples to concretize. First entry is abstract spec, second entry is
|
||||
already concrete spec or None if not yet concretized
|
||||
tests: list of package names for which to consider test dependencies. If True, all nodes
|
||||
will have test dependencies. If False, test dependencies will be disregarded.
|
||||
"""
|
||||
to_concretize = [concrete if concrete else abstract for abstract, concrete in spec_list]
|
||||
abstract_specs = [abstract for abstract, _ in spec_list]
|
||||
concrete_specs = concretize_specs_together(to_concretize, tests=tests)
|
||||
return list(zip(abstract_specs, concrete_specs))
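`concretize_together` works on (abstract, concrete-or-None) pairs so that already-concretized roots are carried along unchanged. A small usage sketch, assuming a configured Spack instance (the package names are arbitrary):

import spack.concretize
from spack.spec import Spec

# Two new roots, nothing concretized yet, solved in a single DAG.
spec_list = [(Spec("zlib"), None), (Spec("libpng"), None)]
pairs = spack.concretize.concretize_together(spec_list, tests=False)
for abstract, concrete in pairs:
    print(f"{abstract} -> {concrete.dag_hash(7)}")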
|
||||
|
||||
|
||||
def concretize_together_when_possible(
|
||||
spec_list: Sequence[SpecPair], tests: TestsType = False
|
||||
) -> Sequence[SpecPair]:
|
||||
"""Given a number of specs as input, tries to concretize them together to the extent possible.
|
||||
|
||||
See documentation for ``unify: when_possible`` concretization for the precise definition of
|
||||
"to the extent possible".
|
||||
|
||||
Args:
|
||||
spec_list: list of tuples to concretize. First entry is abstract spec, second entry is
|
||||
already concrete spec or None if not yet concretized
|
||||
tests: list of package names for which to consider test dependencies. If True, all nodes
|
||||
will have test dependencies. If False, test dependencies will be disregarded.
|
||||
"""
|
||||
import spack.solver.asp
|
||||
|
||||
to_concretize = [concrete if concrete else abstract for abstract, concrete in spec_list]
|
||||
old_concrete_to_abstract = {
|
||||
concrete: abstract for (abstract, concrete) in spec_list if concrete
|
||||
}
|
||||
|
||||
result_by_user_spec = {}
|
||||
solver = spack.solver.asp.Solver()
|
||||
allow_deprecated = spack.config.get("config:deprecated", False)
|
||||
for result in solver.solve_in_rounds(
|
||||
to_concretize, tests=tests, allow_deprecated=allow_deprecated
|
||||
):
|
||||
result_by_user_spec.update(result.specs_by_input)
|
||||
|
||||
# If the "abstract" spec is a concrete spec from the previous concretization
|
||||
# translate it back to an abstract spec. Otherwise, keep the abstract spec
|
||||
return [
|
||||
(old_concrete_to_abstract.get(abstract, abstract), concrete)
|
||||
for abstract, concrete in sorted(result_by_user_spec.items())
|
||||
]
|
||||
|
||||
|
||||
def concretize_separately(
|
||||
spec_list: Sequence[SpecPair], tests: TestsType = False
|
||||
) -> Sequence[SpecPair]:
|
||||
"""Concretizes the input specs separately from each other.
|
||||
|
||||
Args:
|
||||
spec_list: list of tuples to concretize. First entry is abstract spec, second entry is
|
||||
already concrete spec or None if not yet concretized
|
||||
tests: list of package names for which to consider test dependencies. If True, all nodes
|
||||
will have test dependencies. If False, test dependencies will be disregarded.
|
||||
"""
|
||||
import spack.bootstrap
|
||||
|
||||
to_concretize = [abstract for abstract, concrete in spec_list if not concrete]
|
||||
args = [
|
||||
(i, str(abstract), tests)
|
||||
for i, abstract in enumerate(to_concretize)
|
||||
if not abstract.concrete
|
||||
]
|
||||
ret = [(i, abstract) for i, abstract in enumerate(to_concretize) if abstract.concrete]
|
||||
# Ensure we don't try to bootstrap clingo in parallel
|
||||
with spack.bootstrap.ensure_bootstrap_configuration():
|
||||
spack.bootstrap.ensure_clingo_importable_or_raise()
|
||||
|
||||
# Ensure all the indexes have been built or updated, since
|
||||
# otherwise the processes in the pool may timeout on waiting
|
||||
# for a write lock. We do this indirectly by retrieving the
|
||||
# provider index, which should in turn trigger the update of
|
||||
# all the indexes if there's any need for that.
|
||||
_ = spack.repo.PATH.provider_index
|
||||
|
||||
# Ensure we have compilers in compilers.yaml to avoid that
|
||||
# processes try to write the config file in parallel
|
||||
_ = spack.compilers.all_compilers_config(spack.config.CONFIG)
|
||||
|
||||
# Early return if there is nothing to do
|
||||
if len(args) == 0:
|
||||
# Still have to combine the things that were passed in as abstract with the things
|
||||
# that were passed in as pairs
|
||||
return [(abstract, concrete) for abstract, (_, concrete) in zip(to_concretize, ret)] + [
|
||||
(abstract, concrete) for abstract, concrete in spec_list if concrete
|
||||
]
|
||||
|
||||
# Solve the environment in parallel on Linux
|
||||
# TODO: support parallel concretization on macOS and Windows
|
||||
num_procs = min(len(args), spack.config.determine_number_of_jobs(parallel=True))
|
||||
|
||||
for j, (i, concrete, duration) in enumerate(
|
||||
spack.util.parallel.imap_unordered(
|
||||
_concretize_task, args, processes=num_procs, debug=tty.is_debug(), maxtaskperchild=1
|
||||
)
|
||||
):
|
||||
ret.append((i, concrete))
|
||||
percentage = (j + 1) / len(args) * 100
|
||||
tty.verbose(
|
||||
f"{duration:6.1f}s [{percentage:3.0f}%] {concrete.cformat('{hash:7}')} "
|
||||
f"{to_concretize[i].colored_str}"
|
||||
)
|
||||
sys.stdout.flush()
|
||||
|
||||
# Add specs in original order
|
||||
ret.sort(key=lambda x: x[0])
|
||||
|
||||
return [(abstract, concrete) for abstract, (_, concrete) in zip(to_concretize, ret)] + [
|
||||
(abstract, concrete) for abstract, concrete in spec_list if concrete
|
||||
]
|
||||
|
||||
|
||||
def _concretize_task(packed_arguments: Tuple[int, str, TestsType]) -> Tuple[int, Spec, float]:
|
||||
index, spec_str, tests = packed_arguments
|
||||
with tty.SuppressOutput(msg_enabled=False):
|
||||
start = time.time()
|
||||
spec = Spec(spec_str).concretized(tests=tests)
|
||||
return index, spec, time.time() - start
|
||||
|
||||
|
||||
class UnavailableCompilerVersionError(spack.error.SpackError):
|
||||
"""Raised when there is no available compiler that satisfies a
|
||||
compiler spec."""
|
||||
|
||||
def __init__(self, compiler_spec, arch=None):
|
||||
err_msg = "No compilers with spec {0} found".format(compiler_spec)
|
||||
def __init__(self, compiler_spec: CompilerSpec, arch: Optional[ArchSpec] = None) -> None:
|
||||
err_msg = f"No compilers with spec {compiler_spec} found"
|
||||
if arch:
|
||||
err_msg += " for operating system {0} and target {1}.".format(arch.os, arch.target)
|
||||
err_msg += f" for operating system {arch.os} and target {arch.target}."
|
||||
|
||||
super().__init__(
|
||||
err_msg,
|
||||
|
@@ -427,6 +427,10 @@ def __init__(self, *scopes: ConfigScope) -> None:
|
||||
self.push_scope(scope)
|
||||
self.format_updates: Dict[str, List[ConfigScope]] = collections.defaultdict(list)
|
||||
|
||||
def ensure_unwrapped(self) -> "Configuration":
|
||||
"""Ensure we unwrap this object from any dynamic wrapper (like Singleton)"""
|
||||
return self
|
||||
|
||||
@_config_mutator
|
||||
def push_scope(self, scope: ConfigScope) -> None:
|
||||
"""Add a higher precedence scope to the Configuration."""
|
||||
@@ -714,7 +718,7 @@ def print_section(self, section: str, blame: bool = False, *, scope=None) -> Non
|
||||
@contextlib.contextmanager
|
||||
def override(
|
||||
path_or_scope: Union[ConfigScope, str], value: Optional[Any] = None
|
||||
) -> Generator[Union[lang.Singleton, Configuration], None, None]:
|
||||
) -> Generator[Configuration, None, None]:
|
||||
"""Simple way to override config settings within a context.
|
||||
|
||||
Arguments:
|
||||
@@ -752,13 +756,7 @@ def override(
|
||||
assert scope is overrides
|
||||
|
||||
|
||||
#: configuration scopes added on the command line set by ``spack.main.main()``
|
||||
COMMAND_LINE_SCOPES: List[str] = []
|
||||
|
||||
|
||||
def _add_platform_scope(
|
||||
cfg: Union[Configuration, lang.Singleton], name: str, path: str, writable: bool = True
|
||||
) -> None:
|
||||
def _add_platform_scope(cfg: Configuration, name: str, path: str, writable: bool = True) -> None:
|
||||
"""Add a platform-specific subdirectory for the current platform."""
|
||||
platform = spack.platforms.host().name
|
||||
scope = DirectoryConfigScope(
|
||||
@@ -792,9 +790,7 @@ def config_paths_from_entry_points() -> List[Tuple[str, str]]:
|
||||
return config_paths
|
||||
|
||||
|
||||
def _add_command_line_scopes(
|
||||
cfg: Union[Configuration, lang.Singleton], command_line_scopes: List[str]
|
||||
) -> None:
|
||||
def _add_command_line_scopes(cfg: Configuration, command_line_scopes: List[str]) -> None:
|
||||
"""Add additional scopes from the --config-scope argument, either envs or dirs."""
|
||||
import spack.environment.environment as env # circular import
|
||||
|
||||
@@ -864,18 +860,11 @@ def create() -> Configuration:
|
||||
# Each scope can have per-platform overrides in subdirectories
|
||||
_add_platform_scope(cfg, name, path)
|
||||
|
||||
# add command-line scopes
|
||||
_add_command_line_scopes(cfg, COMMAND_LINE_SCOPES)
|
||||
|
||||
# we make a special scope for spack commands so that they can
|
||||
# override configuration options.
|
||||
cfg.push_scope(InternalConfigScope("command_line"))
|
||||
|
||||
return cfg
|
||||
|
||||
|
||||
#: This is the singleton configuration instance for Spack.
|
||||
CONFIG: Union[Configuration, lang.Singleton] = lang.Singleton(create)
|
||||
CONFIG: Configuration = lang.Singleton(create) # type: ignore
|
||||
|
||||
|
||||
def add_from_file(filename: str, scope: Optional[str] = None) -> None:
|
||||
|
@@ -1336,7 +1336,7 @@ def _deprecate(self, spec: "spack.spec.Spec", deprecator: "spack.spec.Spec") ->
|
||||
self._data[spec_key] = spec_rec
|
||||
|
||||
@_autospec
|
||||
def mark(self, spec: "spack.spec.Spec", key, value) -> None:
|
||||
def mark(self, spec: "spack.spec.Spec", key: str, value: Any) -> None:
|
||||
"""Mark an arbitrary record on a spec."""
|
||||
with self.write_transaction():
|
||||
return self._mark(spec, key, value)
|
||||
@@ -1771,24 +1771,6 @@ def root(key, record):
|
||||
if id(rec.spec) not in needed and rec.installed
|
||||
]
|
||||
|
||||
def update_explicit(self, spec, explicit):
|
||||
"""
|
||||
Update the spec's explicit state in the database.
|
||||
|
||||
Args:
|
||||
spec (spack.spec.Spec): the spec whose install record is being updated
|
||||
explicit (bool): ``True`` if the package was requested explicitly
|
||||
by the user, ``False`` if it was pulled in as a dependency of
|
||||
an explicit package.
|
||||
"""
|
||||
rec = self.get_record(spec)
|
||||
if explicit != rec.explicit:
|
||||
with self.write_transaction():
|
||||
message = "{s.name}@{s.version} : marking the package {0}"
|
||||
status = "explicit" if explicit else "implicit"
|
||||
tty.debug(message.format(status, s=spec))
|
||||
rec.explicit = explicit
|
||||
|
||||
|
||||
class NoUpstreamVisitor:
|
||||
"""Gives edges to upstream specs, but does follow edges from upstream specs."""
|
||||
|
@@ -64,6 +64,7 @@ class OpenMpi(Package):
|
||||
"DirectiveMeta",
|
||||
"DisableRedistribute",
|
||||
"version",
|
||||
"conditional",
|
||||
"conflicts",
|
||||
"depends_on",
|
||||
"extends",
|
||||
@@ -76,6 +77,7 @@ class OpenMpi(Package):
|
||||
"build_system",
|
||||
"requires",
|
||||
"redistribute",
|
||||
"can_splice",
|
||||
]
|
||||
|
||||
_patch_order_index = 0
|
||||
@@ -504,6 +506,43 @@ def _execute_provides(pkg: "spack.package_base.PackageBase"):
|
||||
return _execute_provides
|
||||
|
||||
|
||||
@directive("splice_specs")
|
||||
def can_splice(
|
||||
target: SpecType, *, when: SpecType, match_variants: Union[None, str, List[str]] = None
|
||||
):
|
||||
"""Packages can declare whether they are ABI-compatible with another package
|
||||
and thus can be spliced into concrete versions of that package.
|
||||
|
||||
Args:
|
||||
target: The spec that the current package is ABI-compatible with.
|
||||
|
||||
when: An anonymous spec constraining current package for when it is
|
||||
ABI-compatible with target.
|
||||
|
||||
match_variants: A list of variants that must match
|
||||
between target spec and current package, with special value '*'
|
||||
which matches all variants. Example: a variant is defined on both
|
||||
packages called json, and they are ABI-compatible whenever they agree on
|
||||
the json variant (regardless of whether it is turned on or off). Note
|
||||
that this cannot be applied to multi-valued variants and multi-valued
|
||||
variants will be skipped by '*'.
|
||||
"""
|
||||
|
||||
def _execute_can_splice(pkg: "spack.package_base.PackageBase"):
|
||||
when_spec = _make_when_spec(when)
|
||||
if isinstance(match_variants, str) and match_variants != "*":
|
||||
raise ValueError(
|
||||
"* is the only valid string for match_variants "
|
||||
"if looking to provide a single variant, use "
|
||||
f"[{match_variants}] instead"
|
||||
)
|
||||
if when_spec is None:
|
||||
return
|
||||
pkg.splice_specs[when_spec] = (spack.spec.Spec(target), match_variants)
|
||||
|
||||
return _execute_can_splice
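In a package recipe the new directive reads as a one-line ABI claim. A hypothetical example follows; the package name, versions, and checksums are placeholders, and whether the ABI claim actually holds is the packager's responsibility:

from spack.package import *


class MyLib(Package):
    """Placeholder package illustrating the can_splice directive."""

    homepage = "https://example.com/my-lib"
    url = "https://example.com/my-lib-2.1.tar.gz"

    version("2.1", sha256="0" * 64)
    variant("shared", default=True, description="Build shared libraries")

    # my-lib@2.1 may be spliced in for an installed my-lib@2.0 as long as the
    # two specs agree on every single-valued variant.
    can_splice("my-lib@2.0", when="@2.1", match_variants="*")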
|
||||
|
||||
|
||||
@directive("patches")
|
||||
def patch(
|
||||
url_or_filename: str,
|
||||
@@ -577,6 +616,15 @@ def _execute_patch(pkg_or_dep: Union["spack.package_base.PackageBase", Dependenc
|
||||
return _execute_patch
|
||||
|
||||
|
||||
def conditional(*values: List[Any], when: Optional[WhenType] = None):
|
||||
"""Conditional values that can be used in variant declarations."""
|
||||
# _make_when_spec returns None when the condition is statically false.
|
||||
when = _make_when_spec(when)
|
||||
return spack.variant.ConditionalVariantValues(
|
||||
spack.variant.ConditionalValue(x, when=when) for x in values
|
||||
)
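`conditional` is consumed inside `variant(...)` declarations to restrict which values are legal under a condition. A hypothetical excerpt (names, versions, and checksums are placeholders):

from spack.package import *


class DemoPkg(Package):
    """Placeholder package illustrating conditional variant values."""

    homepage = "https://example.com/demo"
    url = "https://example.com/demo-2.0.tar.gz"

    version("2.0", sha256="0" * 64)
    version("1.0", sha256="1" * 64)

    # "cmake" is only an allowed value for @2:, while "autotools" always is.
    variant(
        "build_tool",
        values=(conditional("cmake", when="@2:"), "autotools"),
        default="autotools",
        description="Tool used to build the package",
    )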
|
||||
|
||||
|
||||
@directive("variants")
|
||||
def variant(
|
||||
name: str,
|
||||
|
@@ -10,6 +10,7 @@
|
||||
import llnl.util.lang
|
||||
|
||||
import spack.error
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
|
||||
#: Names of possible directives. This list is mostly populated using the @directive decorator.
|
||||
@@ -63,7 +64,7 @@ def __init__(cls, name, bases, attr_dict):
|
||||
# The instance is being initialized: if it is a package we must ensure
|
||||
# that the directives are called to set it up.
|
||||
|
||||
if "spack.pkg" in cls.__module__:
|
||||
if cls.__module__.startswith(spack.repo.ROOT_PYTHON_NAMESPACE):
|
||||
# Ensure the presence of the dictionaries associated with the directives.
|
||||
# All dictionaries are defaultdicts that create lists for missing keys.
|
||||
for d in DirectiveMeta._directive_dict_names:
|
||||
|
@@ -473,6 +473,7 @@
|
||||
active_environment,
|
||||
all_environment_names,
|
||||
all_environments,
|
||||
as_env_dir,
|
||||
create,
|
||||
create_in_dir,
|
||||
deactivate,
|
||||
@@ -480,6 +481,7 @@
|
||||
default_view_name,
|
||||
display_specs,
|
||||
environment_dir_from_name,
|
||||
environment_from_name_or_dir,
|
||||
exists,
|
||||
initialize_environment_dir,
|
||||
installed_specs,
|
||||
@@ -507,6 +509,7 @@
|
||||
"active_environment",
|
||||
"all_environment_names",
|
||||
"all_environments",
|
||||
"as_env_dir",
|
||||
"create",
|
||||
"create_in_dir",
|
||||
"deactivate",
|
||||
@@ -514,6 +517,7 @@
|
||||
"default_view_name",
|
||||
"display_specs",
|
||||
"environment_dir_from_name",
|
||||
"environment_from_name_or_dir",
|
||||
"exists",
|
||||
"initialize_environment_dir",
|
||||
"installed_specs",
|
||||
|
@@ -11,22 +11,19 @@
|
||||
import re
|
||||
import shutil
|
||||
import stat
|
||||
import sys
|
||||
import time
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
import warnings
|
||||
from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Union
|
||||
from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple, Union
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
import llnl.util.tty.color as clr
|
||||
from llnl.util.link_tree import ConflictingSpecsError
|
||||
from llnl.util.symlink import readlink, symlink
|
||||
from llnl.util.symlink import islink, readlink, symlink
|
||||
|
||||
import spack
|
||||
import spack.caches
|
||||
import spack.compilers
|
||||
import spack.concretize
|
||||
import spack.config
|
||||
import spack.deptypes as dt
|
||||
@@ -45,7 +42,6 @@
|
||||
import spack.util.environment
|
||||
import spack.util.hash
|
||||
import spack.util.lock as lk
|
||||
import spack.util.parallel
|
||||
import spack.util.path
|
||||
import spack.util.spack_json as sjson
|
||||
import spack.util.spack_yaml as syaml
|
||||
@@ -57,6 +53,8 @@
|
||||
from spack.spec_list import SpecList
|
||||
from spack.util.path import substitute_path_variables
|
||||
|
||||
SpecPair = spack.concretize.SpecPair
|
||||
|
||||
#: environment variable used to indicate the active environment
|
||||
spack_env_var = "SPACK_ENV"
|
||||
|
||||
@@ -277,6 +275,22 @@ def is_env_dir(path):
|
||||
return os.path.isdir(path) and os.path.exists(os.path.join(path, manifest_name))
|
||||
|
||||
|
||||
def as_env_dir(name_or_dir):
|
||||
"""Translate an environment name or directory to the environment directory"""
|
||||
if is_env_dir(name_or_dir):
|
||||
return name_or_dir
|
||||
else:
|
||||
validate_env_name(name_or_dir)
|
||||
if not exists(name_or_dir):
|
||||
raise SpackEnvironmentError("no such environment '%s'" % name_or_dir)
|
||||
return root(name_or_dir)
|
||||
|
||||
|
||||
def environment_from_name_or_dir(name_or_dir):
|
||||
"""Get an environment with the supplied name."""
|
||||
return Environment(as_env_dir(name_or_dir))
|
||||
|
||||
|
||||
def read(name):
|
||||
"""Get an environment with the supplied name."""
|
||||
validate_env_name(name)
|
||||
@@ -654,7 +668,7 @@ def from_dict(base_path, d):
|
||||
|
||||
@property
|
||||
def _current_root(self):
|
||||
if not os.path.islink(self.root):
|
||||
if not islink(self.root):
|
||||
return None
|
||||
|
||||
root = readlink(self.root)
|
||||
@@ -1494,7 +1508,7 @@ def deconcretize(self, spec: spack.spec.Spec, concrete: bool = True):
|
||||
|
||||
def _get_specs_to_concretize(
|
||||
self,
|
||||
) -> Tuple[Set[spack.spec.Spec], Set[spack.spec.Spec], List[spack.spec.Spec]]:
|
||||
) -> Tuple[List[spack.spec.Spec], List[spack.spec.Spec], List[SpecPair]]:
|
||||
"""Compute specs to concretize for unify:true and unify:when_possible.
|
||||
|
||||
This includes new user specs and any already concretized specs.
|
||||
@@ -1504,23 +1518,20 @@ def _get_specs_to_concretize(
|
||||
|
||||
"""
|
||||
# Exit early if the set of concretized specs is the set of user specs
|
||||
new_user_specs = set(self.user_specs) - set(self.concretized_user_specs)
|
||||
kept_user_specs = set(self.user_specs) & set(self.concretized_user_specs)
|
||||
new_user_specs = list(set(self.user_specs) - set(self.concretized_user_specs))
|
||||
kept_user_specs = list(set(self.user_specs) & set(self.concretized_user_specs))
|
||||
kept_user_specs += self.included_user_specs
|
||||
if not new_user_specs:
|
||||
return new_user_specs, kept_user_specs, []
|
||||
|
||||
concrete_specs_to_keep = [
|
||||
concrete
|
||||
specs_to_concretize = [(s, None) for s in new_user_specs] + [
|
||||
(abstract, concrete)
|
||||
for abstract, concrete in self.concretized_specs()
|
||||
if abstract in kept_user_specs
|
||||
]
|
||||
|
||||
specs_to_concretize = list(new_user_specs) + concrete_specs_to_keep
|
||||
return new_user_specs, kept_user_specs, specs_to_concretize
|
||||
|
||||
def _concretize_together_where_possible(
|
||||
self, tests: bool = False
|
||||
) -> List[Tuple[spack.spec.Spec, spack.spec.Spec]]:
|
||||
def _concretize_together_where_possible(self, tests: bool = False) -> Sequence[SpecPair]:
|
||||
# Avoid cyclic dependency
|
||||
import spack.solver.asp
|
||||
|
||||
@@ -1529,36 +1540,26 @@ def _concretize_together_where_possible(
|
||||
if not new_user_specs:
|
||||
return []
|
||||
|
||||
old_concrete_to_abstract = {
|
||||
concrete: abstract for (abstract, concrete) in self.concretized_specs()
|
||||
}
|
||||
|
||||
self.concretized_user_specs = []
|
||||
self.concretized_order = []
|
||||
self.specs_by_hash = {}
|
||||
|
||||
result_by_user_spec = {}
|
||||
solver = spack.solver.asp.Solver()
|
||||
allow_deprecated = spack.config.get("config:deprecated", False)
|
||||
for result in solver.solve_in_rounds(
|
||||
specs_to_concretize, tests=tests, allow_deprecated=allow_deprecated
|
||||
):
|
||||
result_by_user_spec.update(result.specs_by_input)
|
||||
ret = []
|
||||
result = spack.concretize.concretize_together_when_possible(
|
||||
specs_to_concretize, tests=tests
|
||||
)
|
||||
for abstract, concrete in result:
|
||||
# Only add to the environment if it's from this environment (not included in)
|
||||
if abstract in self.user_specs:
|
||||
self._add_concrete_spec(abstract, concrete)
|
||||
|
||||
result = []
|
||||
for abstract, concrete in sorted(result_by_user_spec.items()):
|
||||
# If the "abstract" spec is a concrete spec from the previous concretization
|
||||
# translate it back to an abstract spec. Otherwise, keep the abstract spec
|
||||
abstract = old_concrete_to_abstract.get(abstract, abstract)
|
||||
# Return only the new specs
|
||||
if abstract in new_user_specs:
|
||||
result.append((abstract, concrete))
|
||||
self._add_concrete_spec(abstract, concrete)
|
||||
ret.append((abstract, concrete))
|
||||
|
||||
return result
|
||||
return ret
|
||||
|
||||
def _concretize_together(
|
||||
self, tests: bool = False
|
||||
) -> List[Tuple[spack.spec.Spec, spack.spec.Spec]]:
|
||||
def _concretize_together(self, tests: bool = False) -> Sequence[SpecPair]:
|
||||
"""Concretization strategy that concretizes all the specs
|
||||
in the same DAG.
|
||||
"""
|
||||
@@ -1572,8 +1573,8 @@ def _concretize_together(
|
||||
self.specs_by_hash = {}
|
||||
|
||||
try:
|
||||
concrete_specs: List[spack.spec.Spec] = spack.concretize.concretize_specs_together(
|
||||
*specs_to_concretize, tests=tests
|
||||
concretized_specs = spack.concretize.concretize_together(
|
||||
specs_to_concretize, tests=tests
|
||||
)
|
||||
except spack.error.UnsatisfiableSpecError as e:
|
||||
# "Enhance" the error message for multiple root specs, suggest a less strict
|
||||
@@ -1591,14 +1592,13 @@ def _concretize_together(
|
||||
)
|
||||
raise
|
||||
|
||||
# set() | set() does not preserve ordering, even though sets are ordered
|
||||
ordered_user_specs = list(new_user_specs) + list(kept_user_specs)
|
||||
concretized_specs = [x for x in zip(ordered_user_specs, concrete_specs)]
|
||||
for abstract, concrete in concretized_specs:
|
||||
self._add_concrete_spec(abstract, concrete)
|
||||
# Don't add if it's just included
|
||||
if abstract in self.user_specs:
|
||||
self._add_concrete_spec(abstract, concrete)
|
||||
|
||||
# zip truncates the longer list, which is exactly what we want here
|
||||
return list(zip(new_user_specs, concrete_specs))
|
||||
# Return the portion of the return value that is new
|
||||
return concretized_specs[: len(new_user_specs)]
|
||||
|
||||
def _concretize_separately(self, tests=False):
|
||||
"""Concretization strategy that concretizes separately one
|
||||
@@ -1620,71 +1620,16 @@ def _concretize_separately(self, tests=False):
|
||||
concrete = old_specs_by_hash[h]
|
||||
self._add_concrete_spec(s, concrete, new=False)
|
||||
|
||||
# Concretize any new user specs that we haven't concretized yet
|
||||
args, root_specs, i = [], [], 0
|
||||
for uspec in self.user_specs:
|
||||
if uspec not in old_concretized_user_specs:
|
||||
root_specs.append(uspec)
|
||||
args.append((i, str(uspec), tests))
|
||||
i += 1
|
||||
to_concretize = [
|
||||
(root, None) for root in self.user_specs if root not in old_concretized_user_specs
|
||||
]
|
||||
concretized_specs = spack.concretize.concretize_separately(to_concretize, tests=tests)
|
||||
|
||||
# Ensure we don't try to bootstrap clingo in parallel
|
||||
with spack.bootstrap.ensure_bootstrap_configuration():
|
||||
spack.bootstrap.ensure_clingo_importable_or_raise()
|
||||
|
||||
# Ensure all the indexes have been built or updated, since
|
||||
# otherwise the processes in the pool may timeout on waiting
|
||||
# for a write lock. We do this indirectly by retrieving the
|
||||
# provider index, which should in turn trigger the update of
|
||||
# all the indexes if there's any need for that.
|
||||
_ = spack.repo.PATH.provider_index
|
||||
|
||||
# Ensure we have compilers in compilers.yaml to avoid that
|
||||
# processes try to write the config file in parallel
|
||||
_ = spack.compilers.all_compilers_config(spack.config.CONFIG)
|
||||
|
||||
# Early return if there is nothing to do
|
||||
if len(args) == 0:
|
||||
return []
|
||||
|
||||
# Solve the environment in parallel on Linux
|
||||
start = time.time()
|
||||
num_procs = min(len(args), spack.config.determine_number_of_jobs(parallel=True))
|
||||
|
||||
# TODO: support parallel concretization on macOS and Windows
|
||||
msg = "Starting concretization"
|
||||
if sys.platform not in ("darwin", "win32") and num_procs > 1:
|
||||
msg += f" pool with {num_procs} processes"
|
||||
tty.msg(msg)
|
||||
|
||||
batch = []
|
||||
for j, (i, concrete, duration) in enumerate(
|
||||
spack.util.parallel.imap_unordered(
|
||||
_concretize_task,
|
||||
args,
|
||||
processes=num_procs,
|
||||
debug=tty.is_debug(),
|
||||
maxtaskperchild=1,
|
||||
)
|
||||
):
|
||||
batch.append((i, concrete))
|
||||
percentage = (j + 1) / len(args) * 100
|
||||
tty.verbose(
|
||||
f"{duration:6.1f}s [{percentage:3.0f}%] {concrete.cformat('{hash:7}')} "
|
||||
f"{root_specs[i].colored_str}"
|
||||
)
|
||||
sys.stdout.flush()
|
||||
|
||||
# Add specs in original order
|
||||
batch.sort(key=lambda x: x[0])
|
||||
by_hash = {} # for attaching information on test dependencies
|
||||
for root, (_, concrete) in zip(root_specs, batch):
|
||||
self._add_concrete_spec(root, concrete)
|
||||
by_hash = {}
|
||||
for abstract, concrete in concretized_specs:
|
||||
self._add_concrete_spec(abstract, concrete)
|
||||
by_hash[concrete.dag_hash()] = concrete
|
||||
|
||||
finish = time.time()
|
||||
tty.msg(f"Environment concretized in {finish - start:.2f} seconds")
|
||||
|
||||
# Unify the specs objects, so we get correct references to all parents
|
||||
self._read_lockfile_dict(self._to_lockfile_dict())
|
||||
|
||||
@@ -1704,11 +1649,7 @@ def _concretize_separately(self, tests=False):
|
||||
test_dependency.copy(), depflag=dt.TEST, virtuals=current_edge.virtuals
|
||||
)
|
||||
|
||||
results = [
|
||||
(abstract, self.specs_by_hash[h])
|
||||
for abstract, h in zip(self.concretized_user_specs, self.concretized_order)
|
||||
]
|
||||
return results
|
||||
return concretized_specs
|
||||
|
||||
@property
|
||||
def default_view(self):
|
||||
@@ -2515,14 +2456,6 @@ def display_specs(specs):
|
||||
print(tree_string)
|
||||
|
||||
|
||||
def _concretize_task(packed_arguments) -> Tuple[int, Spec, float]:
|
||||
index, spec_str, tests = packed_arguments
|
||||
with tty.SuppressOutput(msg_enabled=False):
|
||||
start = time.time()
|
||||
spec = Spec(spec_str).concretized(tests=tests)
|
||||
return index, spec, time.time() - start
|
||||
|
||||
|
||||
def make_repo_path(root):
|
||||
"""Make a RepoPath from the repo subdirectories in an environment."""
|
||||
path = spack.repo.RepoPath(cache=spack.caches.MISC_CACHE)
|
||||
|
@@ -33,7 +33,7 @@
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
from pathlib import PurePath
|
||||
from typing import Dict, List, Optional
|
||||
from typing import List, Optional
|
||||
|
||||
import llnl.url
|
||||
import llnl.util
|
||||
@@ -49,7 +49,6 @@
|
||||
import spack.util.archive
|
||||
import spack.util.crypto as crypto
|
||||
import spack.util.git
|
||||
import spack.util.spack_yaml as syaml
|
||||
import spack.util.url as url_util
|
||||
import spack.util.web as web_util
|
||||
import spack.version
|
||||
@@ -111,28 +110,6 @@ def __init__(self, **kwargs):
|
||||
|
||||
self.package = None
|
||||
|
||||
def source_provenance(self) -> Dict:
|
||||
"""Create a metadata dictionary that describes the artifacts fetched by this FetchStrategy.
|
||||
|
||||
The returned dictionary is added to the content used to determine the full hash
|
||||
for a package. It should be serializable as JSON.
|
||||
|
||||
It should include data like sha256 hashes for archives, commits for source
|
||||
repositories, and any information needed to describe exactly what artifacts went
|
||||
into a build.
|
||||
|
||||
If a package has no source artifacts, it should return an empty dictionary.
|
||||
|
||||
"""
|
||||
attrs = syaml.syaml_dict()
|
||||
if self.url_attr:
|
||||
attrs["type"] = "archive" if self.url_attr == "url" else self.url_attr
|
||||
for attr in self.optional_attrs:
|
||||
value = getattr(self, attr, None)
|
||||
if value:
|
||||
attrs[attr] = value
|
||||
return attrs
|
||||
|
||||
def set_package(self, package):
|
||||
self.package = package
|
||||
|
||||
@@ -175,6 +152,17 @@ def cachable(self):
|
||||
bool: True if can cache, False otherwise.
|
||||
"""
|
||||
|
||||
def source_id(self):
|
||||
"""A unique ID for the source.
|
||||
|
||||
It is intended that a human could easily generate this themselves using
|
||||
the information available to them in the Spack package.
|
||||
|
||||
The returned value is added to the content which determines the full
|
||||
hash for a package using `str()`.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def mirror_id(self):
|
||||
"""This is a unique ID for a source that is intended to help identify
|
||||
reuse of resources across packages.
|
||||
@@ -225,9 +213,9 @@ def cachable(self):
|
||||
"""Report False as there is no code to cache."""
|
||||
return False
|
||||
|
||||
def source_provenance(self) -> Dict:
|
||||
"""BundlePackages don't have a source of their own."""
|
||||
return {}
|
||||
def source_id(self):
|
||||
"""BundlePackages don't have a source id."""
|
||||
return ""
|
||||
|
||||
def mirror_id(self):
|
||||
"""BundlePackages don't have a mirror id."""
|
||||
@@ -272,15 +260,8 @@ def curl(self) -> Executable:
|
||||
self._curl = web_util.require_curl()
|
||||
return self._curl
|
||||
|
||||
def source_provenance(self) -> Dict:
|
||||
attrs = super().source_provenance()
|
||||
if self.digest:
|
||||
try:
|
||||
hash_type = spack.util.crypto.hash_algo_for_digest(self.digest)
|
||||
except ValueError:
|
||||
hash_type = "digest"
|
||||
attrs[hash_type] = self.digest
|
||||
return attrs
|
||||
def source_id(self):
|
||||
return self.digest
|
||||
|
||||
def mirror_id(self):
|
||||
if not self.digest:
|
||||
@@ -791,15 +772,9 @@ def git(self):
|
||||
def cachable(self):
|
||||
return self.cache_enabled and bool(self.commit)
|
||||
|
||||
def source_provenance(self) -> Dict:
|
||||
attrs = super().source_provenance()
|
||||
|
||||
# need to fully resolve submodule callbacks for node dicts
|
||||
submodules = attrs.get("submodules", None)
|
||||
if submodules and callable(submodules):
|
||||
attrs["submodules"] = submodules(self.package)
|
||||
|
||||
return attrs
|
||||
def source_id(self):
|
||||
# TODO: tree-hash would secure download cache and mirrors, commit only secures checkouts.
|
||||
return self.commit
|
||||
|
||||
def mirror_id(self):
|
||||
if self.commit:
|
||||
@@ -1109,6 +1084,17 @@ def cvs(self):
|
||||
def cachable(self):
|
||||
return self.cache_enabled and (bool(self.branch) or bool(self.date))
|
||||
|
||||
def source_id(self):
|
||||
if not (self.branch or self.date):
|
||||
# We need a branch or a date to make a checkout reproducible
|
||||
return None
|
||||
id = "id"
|
||||
if self.branch:
|
||||
id += "-branch=" + self.branch
|
||||
if self.date:
|
||||
id += "-date=" + self.date
|
||||
return id
|
||||
|
||||
def mirror_id(self):
|
||||
if not (self.branch or self.date):
|
||||
# We need a branch or a date to make a checkout reproducible
|
||||
@@ -1211,6 +1197,9 @@ def svn(self):
|
||||
def cachable(self):
|
||||
return self.cache_enabled and bool(self.revision)
|
||||
|
||||
def source_id(self):
|
||||
return self.revision
|
||||
|
||||
def mirror_id(self):
|
||||
if self.revision:
|
||||
repo_path = urllib.parse.urlparse(self.url).path
|
||||
@@ -1318,6 +1307,9 @@ def hg(self):
|
||||
def cachable(self):
|
||||
return self.cache_enabled and bool(self.revision)
|
||||
|
||||
def source_id(self):
|
||||
return self.revision
|
||||
|
||||
def mirror_id(self):
|
||||
if self.revision:
|
||||
repo_path = urllib.parse.urlparse(self.url).path
|
||||
|
@@ -5,6 +5,7 @@
|
||||
"""Definitions that control how Spack creates Spec hashes."""
|
||||
|
||||
import spack.deptypes as dt
|
||||
import spack.repo
|
||||
|
||||
hashes = []
|
||||
|
||||
@@ -12,17 +13,20 @@
|
||||
class SpecHashDescriptor:
|
||||
"""This class defines how hashes are generated on Spec objects.
|
||||
|
||||
Spec hashes in Spack are generated from a serialized JSON representation of the DAG.
|
||||
The representation may only include certain dependency types, and it may optionally
|
||||
include a canonicalized hash of the ``package.py`` for each node in the graph.
|
||||
Spec hashes in Spack are generated from a serialized (e.g., with
|
||||
YAML) representation of the Spec graph. The representation may only
|
||||
include certain dependency types, and it may optionally include a
|
||||
canonicalized hash of the package.py for each node in the graph.
|
||||
|
||||
"""
|
||||
We currently use different hashes for different use cases."""
|
||||
|
||||
def __init__(self, depflag: dt.DepFlag, package_hash, name):
|
||||
def __init__(self, depflag: dt.DepFlag, package_hash, name, override=None):
|
||||
self.depflag = depflag
|
||||
self.package_hash = package_hash
|
||||
self.name = name
|
||||
hashes.append(self)
|
||||
# Allow spec hashes to have an alternate computation method
|
||||
self.override = override
|
||||
|
||||
@property
|
||||
def attr(self):
|
||||
@@ -50,6 +54,18 @@ def __repr__(self):
|
||||
)
|
||||
|
||||
|
||||
def _content_hash_override(spec):
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
|
||||
pkg = pkg_cls(spec)
|
||||
return pkg.content_hash()
|
||||
|
||||
|
||||
#: Package hash used as part of dag hash
|
||||
package_hash = SpecHashDescriptor(
|
||||
depflag=0, package_hash=True, name="package_hash", override=_content_hash_override
|
||||
)
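Not part of the changeset itself — a minimal sketch of how a descriptor's optional ``override`` is consumed, assuming a ``Spec.spec_hash``-style entry point (the fallback call is illustrative); for ``package_hash`` the override delegates to ``PackageBase.content_hash()`` via ``_content_hash_override`` above.

import spack.hash_types as ht

def compute_hash(descriptor: ht.SpecHashDescriptor, spec) -> str:
    # An override bypasses the serialized-node hashing path entirely
    if descriptor.override is not None:
        return descriptor.override(spec)
    # Otherwise hash the serialized node representation (illustrative call)
    return spec.spec_hash(descriptor)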
|
||||
|
||||
|
||||
# Deprecated hash types, no longer used, but needed to understand old serialized
|
||||
# spec formats
|
||||
|
||||
|
@@ -21,43 +21,40 @@
|
||||
features.
|
||||
"""
|
||||
import importlib
|
||||
|
||||
from llnl.util.lang import ensure_last, list_modules
|
||||
|
||||
import spack.paths
|
||||
import types
|
||||
from typing import List, Optional
|
||||
|
||||
|
||||
class _HookRunner:
|
||||
#: Stores all hooks on first call, shared among
|
||||
#: all HookRunner objects
|
||||
_hooks = None
|
||||
#: Order in which hooks are executed
|
||||
HOOK_ORDER = [
|
||||
"spack.hooks.module_file_generation",
|
||||
"spack.hooks.licensing",
|
||||
"spack.hooks.sbang",
|
||||
"spack.hooks.windows_runtime_linkage",
|
||||
"spack.hooks.drop_redundant_rpaths",
|
||||
"spack.hooks.absolutify_elf_sonames",
|
||||
"spack.hooks.permissions_setters",
|
||||
# after all mutations to the install prefix, write metadata
|
||||
"spack.hooks.write_install_manifest",
|
||||
# after all metadata is written
|
||||
"spack.hooks.autopush",
|
||||
]
|
||||
|
||||
#: Contains all hook modules after first call, shared among all HookRunner objects
|
||||
_hooks: Optional[List[types.ModuleType]] = None
|
||||
|
||||
def __init__(self, hook_name):
|
||||
self.hook_name = hook_name
|
||||
|
||||
@classmethod
|
||||
def _populate_hooks(cls):
|
||||
# Lazily populate the list of hooks
|
||||
cls._hooks = []
|
||||
|
||||
relative_names = list(list_modules(spack.paths.hooks_path))
|
||||
|
||||
# Ensure that write_install_manifest comes last
|
||||
ensure_last(relative_names, "absolutify_elf_sonames", "write_install_manifest")
|
||||
|
||||
for name in relative_names:
|
||||
module_name = __name__ + "." + name
|
||||
module_obj = importlib.import_module(module_name)
|
||||
cls._hooks.append((module_name, module_obj))
|
||||
|
||||
@property
|
||||
def hooks(self):
|
||||
def hooks(self) -> List[types.ModuleType]:
|
||||
if not self._hooks:
|
||||
self._populate_hooks()
|
||||
self._hooks = [importlib.import_module(module_name) for module_name in self.HOOK_ORDER]
|
||||
return self._hooks
|
||||
|
||||
def __call__(self, *args, **kwargs):
|
||||
for _, module in self.hooks:
|
||||
for module in self.hooks:
|
||||
if hasattr(module, self.hook_name):
|
||||
hook = getattr(module, self.hook_name)
|
||||
if hasattr(hook, "__call__"):
|
||||
|
@@ -36,7 +36,6 @@
|
||||
import shutil
|
||||
import sys
|
||||
import time
|
||||
import traceback
|
||||
from collections import defaultdict
|
||||
from gzip import GzipFile
|
||||
from typing import Dict, Iterator, List, Optional, Set, Tuple, Union
|
||||
@@ -413,7 +412,7 @@ def _process_external_package(pkg: "spack.package_base.PackageBase", explicit: b
|
||||
tty.debug(f"{pre} already registered in DB")
|
||||
record = spack.store.STORE.db.get_record(spec)
|
||||
if explicit and not record.explicit:
|
||||
spack.store.STORE.db.update_explicit(spec, explicit)
|
||||
spack.store.STORE.db.mark(spec, "explicit", True)
|
||||
|
||||
except KeyError:
|
||||
# If not, register it and generate the module file.
|
||||
@@ -1508,8 +1507,8 @@ def _prepare_for_install(self, task: Task) -> None:
|
||||
self._update_installed(task)
|
||||
|
||||
# Only update the explicit entry once for the explicit package
|
||||
if task.explicit:
|
||||
spack.store.STORE.db.update_explicit(task.pkg.spec, True)
|
||||
if task.explicit and not rec.explicit:
|
||||
spack.store.STORE.db.mark(task.pkg.spec, "explicit", True)
|
||||
|
||||
def _cleanup_all_tasks(self) -> None:
|
||||
"""Cleanup all tasks to include releasing their locks."""
|
||||
@@ -2215,7 +2214,7 @@ def install(self) -> None:
|
||||
if task.is_build_request:
|
||||
if single_requested_spec:
|
||||
raise
|
||||
failed_build_requests.append((pkg, pkg_id, exc))
|
||||
failed_build_requests.append((pkg, pkg_id, str(exc)))
|
||||
|
||||
finally:
|
||||
# Remove the install prefix if anything went wrong during
|
||||
@@ -2242,9 +2241,6 @@ def install(self) -> None:
|
||||
if failed_build_requests or missing:
|
||||
for _, pkg_id, err in failed_build_requests:
|
||||
tty.error(f"{pkg_id}: {err}")
|
||||
if spack.error.debug:
|
||||
# note: in python 3.10+ this can just be print_exception(err)
|
||||
traceback.print_exception(type(err), err, err.__traceback__)
|
||||
|
||||
for _, pkg_id in missing:
|
||||
tty.error(f"{pkg_id}: Package was not installed")
|
||||
|
@@ -911,13 +911,6 @@ def _main(argv=None):
|
||||
# Make spack load / env activate work on macOS
|
||||
restore_macos_dyld_vars()
|
||||
|
||||
# make spack.config aware of any command line configuration scopes
|
||||
if args.config_scopes:
|
||||
spack.config.COMMAND_LINE_SCOPES = args.config_scopes
|
||||
|
||||
# ensure options on spack command come before everything
|
||||
setup_main_options(args)
|
||||
|
||||
# activate an environment if one was specified on the command line
|
||||
env_format_error = None
|
||||
if not args.no_env:
|
||||
@@ -931,6 +924,12 @@ def _main(argv=None):
|
||||
e.print_context()
|
||||
env_format_error = e
|
||||
|
||||
# Push scopes from the command line last
|
||||
if args.config_scopes:
|
||||
spack.config._add_command_line_scopes(spack.config.CONFIG, args.config_scopes)
|
||||
spack.config.CONFIG.push_scope(spack.config.InternalConfigScope("command_line"))
|
||||
setup_main_options(args)
|
||||
|
||||
# ------------------------------------------------------------------------
|
||||
# Things that require configuration should go below here
|
||||
# ------------------------------------------------------------------------
|
||||
|
@@ -18,7 +18,7 @@
|
||||
import sys
|
||||
import traceback
|
||||
import urllib.parse
|
||||
from typing import List, Optional, Union
|
||||
from typing import Any, Dict, Optional, Tuple, Union
|
||||
|
||||
import llnl.url
|
||||
import llnl.util.symlink
|
||||
@@ -153,8 +153,66 @@ def push_url(self):
|
||||
"""Get the valid, canonicalized fetch URL"""
|
||||
return self.get_url("push")
|
||||
|
||||
def ensure_mirror_usable(self, direction: str = "push"):
|
||||
access_pair = self._get_value("access_pair", direction)
|
||||
access_token_variable = self._get_value("access_token_variable", direction)
|
||||
|
||||
errors = []
|
||||
|
||||
# Verify that credentials supplied via environment variables can be resolved
|
||||
if access_pair and isinstance(access_pair, dict):
|
||||
if "id_variable" in access_pair and access_pair["id_variable"] not in os.environ:
|
||||
errors.append(f"id_variable {access_pair['id_variable']} not set in environment")
|
||||
if "secret_variable" in access_pair:
|
||||
if access_pair["secret_variable"] not in os.environ:
|
||||
errors.append(
|
||||
f"environment variable `{access_pair['secret_variable']}` "
|
||||
"(secret_variable) not set"
|
||||
)
|
||||
|
||||
if access_token_variable:
|
||||
if access_token_variable not in os.environ:
|
||||
errors.append(
|
||||
f"environment variable `{access_pair['access_token_variable']}` "
|
||||
"(access_token_variable) not set"
|
||||
)
|
||||
|
||||
if errors:
|
||||
msg = f"invalid {direction} configuration for mirror {self.name}: "
|
||||
msg += "\n ".join(errors)
|
||||
raise MirrorError(msg)
|
||||
|
||||
def _update_connection_dict(self, current_data: dict, new_data: dict, top_level: bool):
|
||||
keys = ["url", "access_pair", "access_token", "profile", "endpoint_url"]
|
||||
# Only allow one to exist in the config
|
||||
if "access_token" in current_data and "access_token_variable" in new_data:
|
||||
current_data.pop("access_token")
|
||||
elif "access_token_variable" in current_data and "access_token" in new_data:
|
||||
current_data.pop("access_token_variable")
|
||||
|
||||
# If updating to a new access_pair that is the deprecated list, warn
|
||||
warn_deprecated_access_pair = False
|
||||
if "access_pair" in new_data:
|
||||
warn_deprecated_access_pair = isinstance(new_data["access_pair"], list)
|
||||
# If not updating the current access_pair, and it is the deprecated list, warn
|
||||
elif "access_pair" in current_data:
|
||||
warn_deprecated_access_pair = isinstance(current_data["access_pair"], list)
|
||||
|
||||
if warn_deprecated_access_pair:
|
||||
tty.warn(
|
||||
f"in mirror {self.name}: support for plain text secrets in config files "
|
||||
"(access_pair: [id, secret]) is deprecated and will be removed in a future Spack "
|
||||
"version. Use environment variables instead (access_pair: "
|
||||
"{id: ..., secret_variable: ...})"
|
||||
)
|
||||
|
||||
keys = [
|
||||
"url",
|
||||
"access_pair",
|
||||
"access_token",
|
||||
"access_token_variable",
|
||||
"profile",
|
||||
"endpoint_url",
|
||||
]
|
||||
if top_level:
|
||||
keys += ["binary", "source", "signed", "autopush"]
|
||||
changed = False
|
||||
@@ -270,11 +328,53 @@ def get_url(self, direction: str) -> str:
|
||||
|
||||
return _url_or_path_to_url(url)
|
||||
|
||||
def get_access_token(self, direction: str) -> Optional[str]:
|
||||
return self._get_value("access_token", direction)
|
||||
def get_credentials(self, direction: str) -> Dict[str, Any]:
|
||||
"""Get the mirror credentials from the mirror config
|
||||
|
||||
def get_access_pair(self, direction: str) -> Optional[List]:
|
||||
return self._get_value("access_pair", direction)
|
||||
Args:
|
||||
direction: fetch or push mirror config
|
||||
|
||||
Returns:
|
||||
Dictionary from credential type string to value
|
||||
|
||||
Credential Type Map:
|
||||
access_token -> str
|
||||
access_pair -> tuple(str,str)
|
||||
profile -> str
|
||||
"""
|
||||
creddict: Dict[str, Any] = {}
|
||||
access_token = self.get_access_token(direction)
|
||||
if access_token:
|
||||
creddict["access_token"] = access_token
|
||||
|
||||
access_pair = self.get_access_pair(direction)
|
||||
if access_pair:
|
||||
creddict.update({"access_pair": access_pair})
|
||||
|
||||
profile = self.get_profile(direction)
|
||||
if profile:
|
||||
creddict["profile"] = profile
|
||||
|
||||
return creddict
|
||||
|
||||
def get_access_token(self, direction: str) -> Optional[str]:
|
||||
tok = self._get_value("access_token_variable", direction)
|
||||
if tok:
|
||||
return os.environ.get(tok)
|
||||
else:
|
||||
return self._get_value("access_token", direction)
|
||||
return None
|
||||
|
||||
def get_access_pair(self, direction: str) -> Optional[Tuple[str, str]]:
|
||||
pair = self._get_value("access_pair", direction)
|
||||
if isinstance(pair, (tuple, list)) and len(pair) == 2:
|
||||
return (pair[0], pair[1]) if all(pair) else None
|
||||
elif isinstance(pair, dict):
|
||||
id_ = os.environ.get(pair["id_variable"]) if "id_variable" in pair else pair["id"]
|
||||
secret = os.environ.get(pair["secret_variable"])
|
||||
return (id_, secret) if id_ and secret else None
|
||||
else:
|
||||
return None
|
||||
|
||||
def get_profile(self, direction: str) -> Optional[str]:
|
||||
return self._get_value("profile", direction)
|
||||
|
@@ -377,9 +377,10 @@ def credentials_from_mirrors(
|
||||
# Prefer push credentials over fetch. Unlikely that those are different
|
||||
# but our config format allows it.
|
||||
for direction in ("push", "fetch"):
|
||||
pair = mirror.get_access_pair(direction)
|
||||
if pair is None:
|
||||
pair = mirror.get_credentials(direction).get("access_pair")
|
||||
if not pair:
|
||||
continue
|
||||
|
||||
url = mirror.get_url(direction)
|
||||
if not url.startswith("oci://"):
|
||||
continue
|
||||
|
@@ -103,12 +103,7 @@
|
||||
from spack.spec import InvalidSpecDetected, Spec
|
||||
from spack.util.executable import *
|
||||
from spack.util.filesystem import file_command, fix_darwin_install_name, mime_type
|
||||
from spack.variant import (
|
||||
any_combination_of,
|
||||
auto_or_any_combination_of,
|
||||
conditional,
|
||||
disjoint_sets,
|
||||
)
|
||||
from spack.variant import any_combination_of, auto_or_any_combination_of, disjoint_sets
|
||||
from spack.version import Version, ver
|
||||
|
||||
# These are just here for editor support; they will be replaced when the build env
|
||||
|
@@ -9,10 +9,12 @@
|
||||
packages.
|
||||
"""
|
||||
|
||||
import base64
|
||||
import collections
|
||||
import copy
|
||||
import functools
|
||||
import glob
|
||||
import hashlib
|
||||
import importlib
|
||||
import io
|
||||
import os
|
||||
@@ -47,15 +49,14 @@
|
||||
import spack.store
|
||||
import spack.url
|
||||
import spack.util.environment
|
||||
import spack.util.package_hash as ph
|
||||
import spack.util.path
|
||||
import spack.util.spack_yaml as syaml
|
||||
import spack.util.web
|
||||
from spack.error import InstallError, NoURLError, PackageError
|
||||
from spack.filesystem_view import YamlFilesystemView
|
||||
from spack.install_test import PackageTest, TestSuite
|
||||
from spack.solver.version_order import concretization_version_order
|
||||
from spack.stage import DevelopStage, ResourceStage, Stage, StageComposite, compute_stage_name
|
||||
from spack.util.package_hash import package_hash
|
||||
from spack.version import GitVersion, StandardVersion
|
||||
|
||||
FLAG_HANDLER_RETURN_TYPE = Tuple[
|
||||
@@ -621,6 +622,7 @@ class PackageBase(WindowsRPath, PackageViewMixin, RedistributionMixin, metaclass
|
||||
patches: Dict["spack.spec.Spec", List["spack.patch.Patch"]]
|
||||
variants: Dict["spack.spec.Spec", Dict[str, "spack.variant.Variant"]]
|
||||
languages: Dict["spack.spec.Spec", Set[str]]
|
||||
splice_specs: Dict["spack.spec.Spec", Tuple["spack.spec.Spec", Union[None, str, List[str]]]]
|
||||
|
||||
#: By default, packages are not virtual
|
||||
#: Virtual packages override this attribute
|
||||
@@ -1753,78 +1755,65 @@ def all_patches(cls):
|
||||
|
||||
return patches
|
||||
|
||||
def artifact_hashes(self, content=None):
|
||||
"""Create a dictionary of hashes of artifacts used in the build of this package.
|
||||
def content_hash(self, content=None):
|
||||
"""Create a hash based on the artifacts and patches used to build this package.
|
||||
|
||||
This includes:
|
||||
* source artifacts (tarballs, repositories) used to build;
|
||||
* content hashes (``sha256``'s) of all patches applied by Spack; and
|
||||
* canonicalized contents of the ``package.py`` recipe used to build.
|
||||
|
||||
Example::
|
||||
|
||||
{
|
||||
"package_hash": "qovi2hm2n2qsatng2r4n55yzjlhnwflx",
|
||||
"sources": [
|
||||
{
|
||||
"sha256": "fc5fd69bb8736323f026672b1b7235da613d7177e72558893a0bdcd320466d60",
|
||||
"type": "archive"
|
||||
},
|
||||
{
|
||||
"sha256": "56ab9b90f5acbc42eb7a94cf482e6c058a63e8a1effdf572b8b2a6323a06d923",
|
||||
"type": "archive"
|
||||
}
|
||||
}
|
||||
|
||||
All hashes are added to concrete specs at the end of concretization. If this
|
||||
method is called on an abstract spec, only hashes that can be known from the
|
||||
abstract spec will be included.
|
||||
This hash is only included in Spack's DAG hash for concrete specs, but if it
|
||||
happens to be called on a package with an abstract spec, only applicable (i.e.,
|
||||
determinable) portions of the hash will be included.
|
||||
|
||||
"""
|
||||
hashes = syaml.syaml_dict()
|
||||
# list of components to make up the hash
|
||||
hash_content = []
|
||||
|
||||
# source artifacts/repositories
|
||||
# TODO: resources
|
||||
if self.spec.versions.concrete:
|
||||
sources = []
|
||||
try:
|
||||
fetcher = fs.for_package_version(self)
|
||||
provenance_dict = fetcher.source_provenance()
|
||||
if provenance_dict:
|
||||
sources.append(provenance_dict)
|
||||
|
||||
source_id = fs.for_package_version(self).source_id()
|
||||
except (fs.ExtrapolationError, fs.InvalidArgsError):
|
||||
# ExtrapolationError happens if the package has no fetchers defined.
|
||||
# InvalidArgsError happens when there are version directives with args,
|
||||
# but none of them identifies an actual fetcher.
|
||||
source_id = None
|
||||
|
||||
# if this is a develop spec, say so
|
||||
if not source_id:
|
||||
# TODO? in cases where a digest or source_id isn't available,
|
||||
# should this attempt to download the source and set one? This
|
||||
# probably only happens for source repositories which are
|
||||
# referenced by branch name rather than tag or commit ID.
|
||||
from_local_sources = "dev_path" in self.spec.variants
|
||||
|
||||
# don't bother setting a hash if none is available, but warn if
|
||||
# it seems like there should be one.
|
||||
if self.has_code and not self.spec.external and not from_local_sources:
|
||||
message = "Missing a hash for {s.name}@{s.version}"
|
||||
message = "Missing a source id for {s.name}@{s.version}"
|
||||
tty.debug(message.format(s=self))
|
||||
|
||||
for resource in self._get_needed_resources():
|
||||
sources.append(resource.fetcher.source_provenance())
|
||||
|
||||
if sources:
|
||||
hashes["sources"] = sources
|
||||
hash_content.append("".encode("utf-8"))
|
||||
else:
|
||||
hash_content.append(source_id.encode("utf-8"))
|
||||
|
||||
# patch sha256's
|
||||
# Only include these if they've been assigned by the concretizer.
|
||||
# We check spec._patches_assigned instead of spec.concrete because
|
||||
# we have to call package_hash *before* marking specs concrete
|
||||
if self.spec._patches_assigned():
|
||||
hashes["patches"] = [
|
||||
{"sha256": patch.sha256, "level": patch.level} for patch in self.spec.patches
|
||||
]
|
||||
hash_content.extend(
|
||||
":".join((p.sha256, str(p.level))).encode("utf-8") for p in self.spec.patches
|
||||
)
|
||||
|
||||
# package.py contents
|
||||
hashes["package_hash"] = ph.package_hash(self.spec, source=content)
|
||||
hash_content.append(package_hash(self.spec, source=content).encode("utf-8"))
|
||||
|
||||
return hashes
|
||||
# put it all together and encode as base32
|
||||
b32_hash = base64.b32encode(
|
||||
hashlib.sha256(bytes().join(sorted(hash_content))).digest()
|
||||
).lower()
|
||||
b32_hash = b32_hash.decode("utf-8")
|
||||
|
||||
return b32_hash
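The encoding used just above, in isolation: a sha256 over the sorted byte components, rendered as lowercase base32. The component strings below are placeholders.

import base64
import hashlib

components = [b"archive-sha256", b"patch-sha256:1", b"package-hash"]
digest = hashlib.sha256(b"".join(sorted(components))).digest()
print(base64.b32encode(digest).lower().decode("utf-8"))  # padded lowercase base32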
|
||||
|
||||
@property
|
||||
def cmake_prefix_paths(self):
|
||||
|
@@ -41,6 +41,7 @@
|
||||
import spack.provider_index
|
||||
import spack.spec
|
||||
import spack.tag
|
||||
import spack.util.file_cache
|
||||
import spack.util.git
|
||||
import spack.util.naming as nm
|
||||
import spack.util.path
|
||||
@@ -589,7 +590,7 @@ def __init__(
|
||||
self,
|
||||
package_checker: FastPackageChecker,
|
||||
namespace: str,
|
||||
cache: "spack.caches.FileCacheType",
|
||||
cache: spack.util.file_cache.FileCache,
|
||||
):
|
||||
self.checker = package_checker
|
||||
self.packages_path = self.checker.packages_path
|
||||
@@ -682,7 +683,7 @@ class RepoPath:
|
||||
def __init__(
|
||||
self,
|
||||
*repos: Union[str, "Repo"],
|
||||
cache: Optional["spack.caches.FileCacheType"],
|
||||
cache: Optional[spack.util.file_cache.FileCache],
|
||||
overrides: Optional[Dict[str, Any]] = None,
|
||||
) -> None:
|
||||
self.repos: List[Repo] = []
|
||||
@@ -805,7 +806,7 @@ def tag_index(self) -> spack.tag.TagIndex:
|
||||
return self._tag_index
|
||||
|
||||
@property
|
||||
def patch_index(self) -> "spack.patch.PatchCache":
|
||||
def patch_index(self) -> spack.patch.PatchCache:
|
||||
"""Merged PatchIndex from all Repos in the RepoPath."""
|
||||
if self._patch_index is None:
|
||||
self._patch_index = spack.patch.PatchCache(repository=self)
|
||||
@@ -964,7 +965,7 @@ def __init__(
|
||||
self,
|
||||
root: str,
|
||||
*,
|
||||
cache: "spack.caches.FileCacheType",
|
||||
cache: spack.util.file_cache.FileCache,
|
||||
overrides: Optional[Dict[str, Any]] = None,
|
||||
) -> None:
|
||||
"""Instantiate a package repository from a filesystem path.
|
||||
@@ -1158,7 +1159,7 @@ def tag_index(self) -> spack.tag.TagIndex:
|
||||
return self.index["tags"]
|
||||
|
||||
@property
|
||||
def patch_index(self) -> "spack.patch.PatchCache":
|
||||
def patch_index(self) -> spack.patch.PatchCache:
|
||||
"""Index of patches and packages they're defined on."""
|
||||
return self.index["patches"]
|
||||
|
||||
@@ -1439,9 +1440,7 @@ def _path(configuration=None):
|
||||
return create(configuration=configuration)
|
||||
|
||||
|
||||
def create(
|
||||
configuration: Union["spack.config.Configuration", llnl.util.lang.Singleton]
|
||||
) -> RepoPath:
|
||||
def create(configuration: spack.config.Configuration) -> RepoPath:
|
||||
"""Create a RepoPath from a configuration object.
|
||||
|
||||
Args:
|
||||
@@ -1464,7 +1463,7 @@ def create(
|
||||
|
||||
|
||||
#: Singleton repo path instance
|
||||
PATH: Union[RepoPath, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(_path)
|
||||
PATH: RepoPath = llnl.util.lang.Singleton(_path) # type: ignore
|
||||
|
||||
# Add the finder to sys.meta_path
|
||||
REPOS_FINDER = ReposFinder()
|
||||
|
@@ -33,8 +33,14 @@
|
||||
"properties": {
|
||||
"type": {
|
||||
"type": "string",
|
||||
"enum": ["local", "buildcache", "external"],
|
||||
"enum": [
|
||||
"local",
|
||||
"buildcache",
|
||||
"external",
|
||||
"environment",
|
||||
],
|
||||
},
|
||||
"path": {"type": "string"},
|
||||
"include": LIST_OF_SPECS,
|
||||
"exclude": LIST_OF_SPECS,
|
||||
},
|
||||
@@ -72,7 +78,8 @@
|
||||
"transitive": {"type": "boolean", "default": False},
|
||||
},
|
||||
},
|
||||
}
|
||||
},
|
||||
"automatic": {"type": "boolean"},
|
||||
},
|
||||
},
|
||||
"duplicates": {
|
||||
|
@@ -19,6 +19,8 @@
|
||||
#: Top level key in a manifest file
|
||||
TOP_LEVEL_KEY = "spack"
|
||||
|
||||
include_concrete = {"type": "array", "default": [], "items": {"type": "string"}}
|
||||
|
||||
properties: Dict[str, Any] = {
|
||||
"spack": {
|
||||
"type": "object",
|
||||
@@ -31,7 +33,7 @@
|
||||
{
|
||||
"include": {"type": "array", "default": [], "items": {"type": "string"}},
|
||||
"specs": spec_list_schema,
|
||||
"include_concrete": {"type": "array", "default": [], "items": {"type": "string"}},
|
||||
"include_concrete": include_concrete,
|
||||
},
|
||||
),
|
||||
}
|
||||
|
@@ -15,14 +15,42 @@
|
||||
"url": {"type": "string"},
|
||||
# todo: replace this with named keys "username" / "password" or "id" / "secret"
|
||||
"access_pair": {
|
||||
"type": "array",
|
||||
"items": {"type": ["string", "null"], "minItems": 2, "maxItems": 2},
|
||||
"oneOf": [
|
||||
{
|
||||
"type": "array",
|
||||
"items": {"minItems": 2, "maxItems": 2, "type": ["string", "null"]},
|
||||
}, # deprecated
|
||||
{
|
||||
"type": "object",
|
||||
"required": ["secret_variable"],
|
||||
# Only allow id or id_variable to be set, not both
|
||||
"oneOf": [{"required": ["id"]}, {"required": ["id_variable"]}],
|
||||
"properties": {
|
||||
"id": {"type": "string"},
|
||||
"id_variable": {"type": "string"},
|
||||
"secret_variable": {"type": "string"},
|
||||
},
|
||||
},
|
||||
]
|
||||
},
|
||||
"access_token": {"type": ["string", "null"]},
|
||||
"profile": {"type": ["string", "null"]},
|
||||
"endpoint_url": {"type": ["string", "null"]},
|
||||
"access_token": {"type": ["string", "null"]}, # deprecated
|
||||
"access_token_variable": {"type": ["string", "null"]},
|
||||
}
|
||||
|
||||
connection_ext = {
|
||||
"deprecatedProperties": [
|
||||
{
|
||||
"names": ["access_token"],
|
||||
"message": "Use of plain text `access_token` in mirror config is deprecated, use "
|
||||
"environment variables instead (access_token_variable)",
|
||||
"error": False,
|
||||
}
|
||||
]
|
||||
}
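For illustration, mirror connection entries that the updated ``access_pair`` schema accepts, written as the dicts Spack would read from YAML (URLs and credential names are placeholders):

new_style = {
    "url": "s3://my-mirror",
    "access_pair": {"id": "mirror-user", "secret_variable": "MY_MIRROR_SECRET"},
}
deprecated_style = {
    "url": "s3://my-mirror",
    "access_pair": ["mirror-user", "plaintext-secret"],  # list form, marked deprecated above
}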
|
||||
|
||||
|
||||
#: Mirror connection inside pull/push keys
|
||||
fetch_and_push = {
|
||||
"anyOf": [
|
||||
@@ -31,6 +59,7 @@
|
||||
"type": "object",
|
||||
"additionalProperties": False,
|
||||
"properties": {**connection}, # type: ignore
|
||||
**connection_ext, # type: ignore
|
||||
},
|
||||
]
|
||||
}
|
||||
@@ -49,6 +78,7 @@
|
||||
"autopush": {"type": "boolean"},
|
||||
**connection, # type: ignore
|
||||
},
|
||||
**connection_ext, # type: ignore
|
||||
}
|
||||
|
||||
#: Properties for inclusion in other schemas
|
||||
@@ -70,3 +100,28 @@
|
||||
"additionalProperties": False,
|
||||
"properties": properties,
|
||||
}
|
||||
|
||||
|
||||
def update(data):
|
||||
import jsonschema
|
||||
|
||||
errors = []
|
||||
|
||||
def check_access_pair(name, section):
|
||||
if not section or not isinstance(section, dict):
|
||||
return
|
||||
|
||||
if "access_token" in section and "access_token_variable" in section:
|
||||
errors.append(
|
||||
f'{name}: mirror credential "access_token" conflicts with "access_token_variable"'
|
||||
)
|
||||
|
||||
# Check all of the sections
|
||||
for name, section in data.items():
|
||||
check_access_pair(name, section)
|
||||
if isinstance(section, dict):
|
||||
check_access_pair(name, section.get("fetch"))
|
||||
check_access_pair(name, section.get("push"))
|
||||
|
||||
if errors:
|
||||
raise jsonschema.ValidationError("\n".join(errors))
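An example (placeholder values) of a mirrors configuration that the ``update()`` check above rejects, because a section sets both the plain token and its environment-variable form:

bad_mirrors_config = {
    "my-mirror": {
        "url": "s3://my-mirror",
        "fetch": {"access_token": "plaintext", "access_token_variable": "TOKEN_VAR"},
    }
}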
|
||||
|
@@ -27,7 +27,6 @@
|
||||
|
||||
import spack
|
||||
import spack.binary_distribution
|
||||
import spack.bootstrap.core
|
||||
import spack.compilers
|
||||
import spack.concretize
|
||||
import spack.config
|
||||
@@ -53,6 +52,7 @@
|
||||
|
||||
from .core import (
|
||||
AspFunction,
|
||||
AspVar,
|
||||
NodeArgument,
|
||||
ast_sym,
|
||||
ast_type,
|
||||
@@ -64,6 +64,7 @@
|
||||
parse_term,
|
||||
)
|
||||
from .counter import FullDuplicatesCounter, MinimalDuplicatesCounter, NoDuplicatesCounter
|
||||
from .profiler import ProfilePropagator
|
||||
from .version_order import concretization_version_order
|
||||
|
||||
GitOrStandardVersion = Union[spack.version.GitVersion, spack.version.StandardVersion]
|
||||
@@ -515,6 +516,8 @@ def _compute_specs_from_answer_set(self):
|
||||
best = min(self.answers)
|
||||
opt, _, answer = best
|
||||
for input_spec in self.abstract_specs:
|
||||
# The specs must be unified to get here, so it is safe to associate any satisfying spec
|
||||
# with the input. Multiple inputs may be matched to the same concrete spec
|
||||
node = SpecBuilder.make_node(pkg=input_spec.name)
|
||||
if input_spec.virtual:
|
||||
providers = [
|
||||
@@ -523,12 +526,14 @@ def _compute_specs_from_answer_set(self):
|
||||
node = SpecBuilder.make_node(pkg=providers[0])
|
||||
candidate = answer.get(node)
|
||||
|
||||
if candidate and candidate.build_spec.satisfies(input_spec):
|
||||
if not candidate.satisfies(input_spec):
|
||||
tty.warn(
|
||||
"explicit splice configuration has caused the concretized spec"
|
||||
f" {candidate} not to satisfy the input spec {input_spec}"
|
||||
)
|
||||
if candidate and candidate.satisfies(input_spec):
|
||||
self._concrete_specs.append(answer[node])
|
||||
self._concrete_specs_by_input[input_spec] = answer[node]
|
||||
elif candidate and candidate.build_spec.satisfies(input_spec):
|
||||
tty.warn(
|
||||
"explicit splice configuration has caused the concretized spec"
|
||||
f" {candidate} not to satisfy the input spec {input_spec}"
|
||||
)
|
||||
self._concrete_specs.append(answer[node])
|
||||
self._concrete_specs_by_input[input_spec] = answer[node]
|
||||
else:
|
||||
@@ -796,7 +801,16 @@ def __init__(self, cores=True):
|
||||
# This attribute will be reset at each call to solve
|
||||
self.control = None
|
||||
|
||||
def solve(self, setup, specs, reuse=None, output=None, control=None, allow_deprecated=False):
|
||||
def solve(
|
||||
self,
|
||||
setup,
|
||||
specs,
|
||||
reuse=None,
|
||||
output=None,
|
||||
control=None,
|
||||
allow_deprecated=False,
|
||||
profile=False,
|
||||
):
|
||||
"""Set up the input and solve for dependencies of ``specs``.
|
||||
|
||||
Arguments:
|
||||
@@ -814,7 +828,7 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre
|
||||
solve, and the internal statistics from clingo.
|
||||
"""
|
||||
# avoid circular import
|
||||
import spack.bootstrap
|
||||
import spack.bootstrap.core
|
||||
|
||||
output = output or DEFAULT_OUTPUT_CONFIGURATION
|
||||
timer = spack.util.timer.Timer()
|
||||
@@ -822,6 +836,11 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre
|
||||
# Initialize the control object for the solver
|
||||
self.control = control or default_clingo_control()
|
||||
|
||||
# if profiling is enabled, register a profiling propagator
|
||||
if profile:
|
||||
propagator = ProfilePropagator()
|
||||
self.control.register_propagator(propagator)
|
||||
|
||||
# ensure core deps are present on Windows
|
||||
# needs to modify active config scope, so cannot be run within
|
||||
# bootstrap config scope
|
||||
@@ -853,6 +872,8 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre
|
||||
self.control.load(os.path.join(parent_dir, "libc_compatibility.lp"))
|
||||
else:
|
||||
self.control.load(os.path.join(parent_dir, "os_compatibility.lp"))
|
||||
if setup.enable_splicing:
|
||||
self.control.load(os.path.join(parent_dir, "splices.lp"))
|
||||
|
||||
timer.stop("load")
|
||||
|
||||
@@ -887,6 +908,7 @@ def on_model(model):
|
||||
result.satisfiable = solve_result.satisfiable
|
||||
|
||||
if result.satisfiable:
|
||||
timer.start("construct_specs")
|
||||
# get the best model
|
||||
builder = SpecBuilder(specs, hash_lookup=setup.reusable_and_possible)
|
||||
min_cost, best_model = min(models)
|
||||
@@ -911,19 +933,25 @@ def on_model(model):
|
||||
|
||||
# record the possible dependencies in the solve
|
||||
result.possible_dependencies = setup.pkgs
|
||||
|
||||
timer.stop("construct_specs")
|
||||
timer.stop()
|
||||
elif cores:
|
||||
result.control = self.control
|
||||
result.cores.extend(cores)
|
||||
|
||||
if output.timers:
|
||||
tty.msg("Timers:")
|
||||
timer.write_tty()
|
||||
print()
|
||||
|
||||
if output.stats:
|
||||
print("Statistics:")
|
||||
tty.msg("Statistics:")
|
||||
pprint.pprint(self.control.statistics)
|
||||
|
||||
if profile:
|
||||
tty.msg("Profile:")
|
||||
propagator.print_profile(40)
|
||||
|
||||
result.raise_if_unsat()
|
||||
|
||||
if result.satisfiable and result.unsolved_specs and setup.concretize_everything:
|
||||
@@ -1163,6 +1191,9 @@ def __init__(self, tests: bool = False):
|
||||
# list of unique libc specs targeted by compilers (or an educated guess if no compiler)
|
||||
self.libcs: List[spack.spec.Spec] = []
|
||||
|
||||
# If true, we have to load the code for synthesizing splices
|
||||
self.enable_splicing: bool = spack.config.CONFIG.get("concretizer:splice:automatic")
|
||||
|
||||
def pkg_version_rules(self, pkg):
|
||||
"""Output declared versions of a package.
|
||||
|
||||
@@ -1333,6 +1364,10 @@ def pkg_rules(self, pkg, tests):
|
||||
# dependencies
|
||||
self.package_dependencies_rules(pkg)
|
||||
|
||||
# splices
|
||||
if self.enable_splicing:
|
||||
self.package_splice_rules(pkg)
|
||||
|
||||
# virtual preferences
|
||||
self.virtual_preferences(
|
||||
pkg.name,
|
||||
@@ -1433,14 +1468,13 @@ def define_variant(
|
||||
for value in sorted(values):
|
||||
pkg_fact(fn.variant_possible_value(vid, value))
|
||||
|
||||
# when=True means unconditional, so no need for conditional values
|
||||
if getattr(value, "when", True) is True:
|
||||
# we're done here for unconditional values
|
||||
if not isinstance(value, vt.ConditionalValue):
|
||||
continue
|
||||
|
||||
# now we have to handle conditional values
|
||||
quoted_value = spack.parser.quote_if_needed(str(value))
|
||||
vstring = f"{name}={quoted_value}"
|
||||
variant_has_value = spack.spec.Spec(vstring)
|
||||
# make a spec indicating whether the variant has this conditional value
|
||||
variant_has_value = spack.spec.Spec()
|
||||
variant_has_value.variants[name] = spack.variant.AbstractVariant(name, value.value)
|
||||
|
||||
if value.when:
|
||||
# the conditional value is always "possible", but it imposes its when condition as
|
||||
@@ -1451,10 +1485,12 @@ def define_variant(
|
||||
imposed_spec=value.when,
|
||||
required_name=pkg.name,
|
||||
imposed_name=pkg.name,
|
||||
msg=f"{pkg.name} variant {name} has value '{quoted_value}' when {value.when}",
|
||||
msg=f"{pkg.name} variant {name} has value '{value.value}' when {value.when}",
|
||||
)
|
||||
else:
|
||||
# We know the value is never allowed statically (when was false), but we can't just
|
||||
vstring = f"{name}='{value.value}'"
|
||||
|
||||
# We know the value is never allowed statically (when was None), but we can't just
|
||||
# ignore it b/c it could come in as a possible value and we need a good error msg.
|
||||
# So, it's a conflict -- if the value is somehow used, it'll trigger an error.
|
||||
trigger_id = self.condition(
|
||||
@@ -1670,6 +1706,94 @@ def dependency_holds(input_spec, requirements):
|
||||
|
||||
self.gen.newline()
|
||||
|
||||
def _gen_match_variant_splice_constraints(
|
||||
self,
|
||||
pkg,
|
||||
cond_spec: "spack.spec.Spec",
|
||||
splice_spec: "spack.spec.Spec",
|
||||
hash_asp_var: "AspVar",
|
||||
splice_node,
|
||||
match_variants: List[str],
|
||||
):
|
||||
# If there are no variants to match, no constraints are needed
|
||||
variant_constraints = []
|
||||
for i, variant_name in enumerate(match_variants):
|
||||
vari_defs = pkg.variant_definitions(variant_name)
|
||||
# the spliceable config of the package always includes the variant
|
||||
if vari_defs != [] and any(cond_spec.satisfies(s) for (s, _) in vari_defs):
|
||||
variant = vari_defs[0][1]
|
||||
if variant.multi:
|
||||
continue # cannot automatically match multi-valued variants
|
||||
value_var = AspVar(f"VariValue{i}")
|
||||
attr_constraint = fn.attr("variant_value", splice_node, variant_name, value_var)
|
||||
hash_attr_constraint = fn.hash_attr(
|
||||
hash_asp_var, "variant_value", splice_spec.name, variant_name, value_var
|
||||
)
|
||||
variant_constraints.append(attr_constraint)
|
||||
variant_constraints.append(hash_attr_constraint)
|
||||
return variant_constraints
|
||||
|
||||
def package_splice_rules(self, pkg):
|
||||
self.gen.h2("Splice rules")
|
||||
for i, (cond, (spec_to_splice, match_variants)) in enumerate(
|
||||
sorted(pkg.splice_specs.items())
|
||||
):
|
||||
with named_spec(cond, pkg.name):
|
||||
self.version_constraints.add((cond.name, cond.versions))
|
||||
self.version_constraints.add((spec_to_splice.name, spec_to_splice.versions))
|
||||
hash_var = AspVar("Hash")
|
||||
splice_node = fn.node(AspVar("NID"), cond.name)
|
||||
when_spec_attrs = [
|
||||
fn.attr(c.args[0], splice_node, *(c.args[2:]))
|
||||
for c in self.spec_clauses(cond, body=True, required_from=None)
|
||||
if c.args[0] != "node"
|
||||
]
|
||||
splice_spec_hash_attrs = [
|
||||
fn.hash_attr(hash_var, *(c.args))
|
||||
for c in self.spec_clauses(spec_to_splice, body=True, required_from=None)
|
||||
if c.args[0] != "node"
|
||||
]
|
||||
if match_variants is None:
|
||||
variant_constraints = []
|
||||
elif match_variants == "*":
|
||||
filt_match_variants = set()
|
||||
for map in pkg.variants.values():
|
||||
for k in map:
|
||||
filt_match_variants.add(k)
|
||||
filt_match_variants = list(filt_match_variants)
|
||||
variant_constraints = self._gen_match_variant_splice_constraints(
|
||||
pkg, cond, spec_to_splice, hash_var, splice_node, filt_match_variants
|
||||
)
|
||||
else:
|
||||
if any(
|
||||
v in cond.variants or v in spec_to_splice.variants for v in match_variants
|
||||
):
|
||||
raise Exception(
|
||||
"Overlap between match_variants and explicitly set variants"
|
||||
)
|
||||
variant_constraints = self._gen_match_variant_splice_constraints(
|
||||
pkg, cond, spec_to_splice, hash_var, splice_node, match_variants
|
||||
)
|
||||
|
||||
rule_head = fn.abi_splice_conditions_hold(
|
||||
i, splice_node, spec_to_splice.name, hash_var
|
||||
)
|
||||
rule_body_components = (
|
||||
[
|
||||
# splice_set_fact,
|
||||
fn.attr("node", splice_node),
|
||||
fn.installed_hash(spec_to_splice.name, hash_var),
|
||||
]
|
||||
+ when_spec_attrs
|
||||
+ splice_spec_hash_attrs
|
||||
+ variant_constraints
|
||||
)
|
||||
rule_body = ",\n ".join(str(r) for r in rule_body_components)
|
||||
rule = f"{rule_head} :-\n {rule_body}."
|
||||
self.gen.append(rule)
|
||||
|
||||
self.gen.newline()
|
||||
|
||||
def virtual_preferences(self, pkg_name, func):
|
||||
"""Call func(vspec, provider, i) for each of pkg's provider prefs."""
|
||||
config = spack.config.get("packages")
|
||||
@@ -2028,9 +2152,12 @@ def _spec_clauses(
|
||||
for variant_def in variant_defs:
|
||||
self.variant_values_from_specs.add((spec.name, id(variant_def), value))
|
||||
|
||||
clauses.append(f.variant_value(spec.name, vname, value))
|
||||
if variant.propagate:
|
||||
clauses.append(f.propagate(spec.name, fn.variant_value(vname, value)))
|
||||
if self.pkg_class(spec.name).has_variant(vname):
|
||||
clauses.append(f.variant_value(spec.name, vname, value))
|
||||
else:
|
||||
clauses.append(f.variant_value(spec.name, vname, value))
|
||||
|
||||
# compiler and compiler version
|
||||
if spec.compiler:
|
||||
@@ -2529,8 +2656,9 @@ def concrete_specs(self):
|
||||
for h, spec in self.reusable_and_possible.explicit_items():
|
||||
# this indicates that there is a spec like this installed
|
||||
self.gen.fact(fn.installed_hash(spec.name, h))
|
||||
# this describes what constraints it imposes on the solve
|
||||
self.impose(h, spec, body=True)
|
||||
# indirection layer between hash constraints and imposition to allow for splicing
|
||||
for pred in self.spec_clauses(spec, body=True, required_from=None):
|
||||
self.gen.fact(fn.hash_attr(h, *pred.args))
|
||||
self.gen.newline()
|
||||
# Declare as possible parts of specs that are not in package.py
|
||||
# - Add versions to possible versions
|
||||
@@ -2616,6 +2744,7 @@ def setup(
|
||||
)
|
||||
for name, info in env.dev_specs.items()
|
||||
)
|
||||
|
||||
specs = tuple(specs) # ensure compatible types to add
|
||||
|
||||
self.gen.h1("Reusable concrete specs")
|
||||
@@ -3470,6 +3599,14 @@ def consume_facts(self):
|
||||
self._setup.effect_rules()
|
||||
|
||||
|
||||
# This should be a dataclass, but dataclasses don't work on Python 3.6
|
||||
class Splice:
|
||||
def __init__(self, splice_node: NodeArgument, child_name: str, child_hash: str):
|
||||
self.splice_node = splice_node
|
||||
self.child_name = child_name
|
||||
self.child_hash = child_hash
|
||||
|
||||
|
||||
class SpecBuilder:
|
||||
"""Class with actions to rebuild a spec from ASP results."""
|
||||
|
||||
@@ -3505,10 +3642,11 @@ def make_node(*, pkg: str) -> NodeArgument:
|
||||
"""
|
||||
return NodeArgument(id="0", pkg=pkg)
|
||||
|
||||
def __init__(
|
||||
self, specs: List[spack.spec.Spec], *, hash_lookup: Optional[ConcreteSpecsByHash] = None
|
||||
):
|
||||
def __init__(self, specs, hash_lookup=None):
|
||||
self._specs: Dict[NodeArgument, spack.spec.Spec] = {}
|
||||
|
||||
# Matches parent nodes to splice node
|
||||
self._splices: Dict[NodeArgument, List[Splice]] = {}
|
||||
self._result = None
|
||||
self._command_line_specs = specs
|
||||
self._flag_sources: Dict[Tuple[NodeArgument, str], Set[str]] = collections.defaultdict(
|
||||
@@ -3592,16 +3730,8 @@ def external_spec_selected(self, node, idx):
|
||||
|
||||
def depends_on(self, parent_node, dependency_node, type):
|
||||
dependency_spec = self._specs[dependency_node]
|
||||
edges = self._specs[parent_node].edges_to_dependencies(name=dependency_spec.name)
|
||||
edges = [x for x in edges if id(x.spec) == id(dependency_spec)]
|
||||
depflag = dt.flag_from_string(type)
|
||||
|
||||
if not edges:
|
||||
self._specs[parent_node].add_dependency_edge(
|
||||
self._specs[dependency_node], depflag=depflag, virtuals=()
|
||||
)
|
||||
else:
|
||||
edges[0].update_deptypes(depflag=depflag)
|
||||
self._specs[parent_node].add_dependency_edge(dependency_spec, depflag=depflag, virtuals=())
|
||||
|
||||
def virtual_on_edge(self, parent_node, provider_node, virtual):
|
||||
dependencies = self._specs[parent_node].edges_to_dependencies(name=(provider_node.pkg))
|
||||
@@ -3718,6 +3848,57 @@ def _order_index(flag_group):
|
||||
def deprecated(self, node: NodeArgument, version: str) -> None:
|
||||
tty.warn(f'using "{node.pkg}@{version}" which is a deprecated version')
|
||||
|
||||
def splice_at_hash(
|
||||
self,
|
||||
parent_node: NodeArgument,
|
||||
splice_node: NodeArgument,
|
||||
child_name: str,
|
||||
child_hash: str,
|
||||
):
|
||||
splice = Splice(splice_node, child_name=child_name, child_hash=child_hash)
|
||||
self._splices.setdefault(parent_node, []).append(splice)
|
||||
|
||||
def _resolve_automatic_splices(self):
|
||||
"""After all of the specs have been concretized, apply all immediate splices.
|
||||
|
||||
Use reverse topological order to ensure that all dependencies are resolved
|
||||
before their parents, allowing for maximal sharing and minimal copying.
|
||||
|
||||
"""
|
||||
fixed_specs = {}
|
||||
|
||||
# create a mapping from dag hash to an integer representing position in reverse topo order.
|
||||
specs = self._specs.values()
|
||||
topo_order = list(traverse.traverse_nodes(specs, order="topo", key=traverse.by_dag_hash))
|
||||
topo_lookup = {spec.dag_hash(): index for index, spec in enumerate(reversed(topo_order))}
|
||||
|
||||
# iterate over specs, children before parents
|
||||
for node, spec in sorted(self._specs.items(), key=lambda x: topo_lookup[x[1].dag_hash()]):
|
||||
immediate = self._splices.get(node, [])
|
||||
if not immediate and not any(
|
||||
edge.spec in fixed_specs for edge in spec.edges_to_dependencies()
|
||||
):
|
||||
continue
|
||||
new_spec = spec.copy(deps=False)
|
||||
new_spec.build_spec = spec
|
||||
for edge in spec.edges_to_dependencies():
|
||||
depflag = edge.depflag & ~dt.BUILD
|
||||
if any(edge.spec.dag_hash() == splice.child_hash for splice in immediate):
|
||||
splice = [s for s in immediate if s.child_hash == edge.spec.dag_hash()][0]
|
||||
new_spec.add_dependency_edge(
|
||||
self._specs[splice.splice_node], depflag=depflag, virtuals=edge.virtuals
|
||||
)
|
||||
elif edge.spec in fixed_specs:
|
||||
new_spec.add_dependency_edge(
|
||||
fixed_specs[edge.spec], depflag=depflag, virtuals=edge.virtuals
|
||||
)
|
||||
else:
|
||||
new_spec.add_dependency_edge(
|
||||
edge.spec, depflag=depflag, virtuals=edge.virtuals
|
||||
)
|
||||
self._specs[node] = new_spec
|
||||
fixed_specs[spec] = new_spec
|
||||
|
||||
@staticmethod
|
||||
def sort_fn(function_tuple) -> Tuple[int, int]:
|
||||
"""Ensure attributes are evaluated in the correct order.
|
||||
@@ -3747,7 +3928,6 @@ def build_specs(self, function_tuples):
|
||||
# them here so that directives that build objects (like node and
|
||||
# node_compiler) are called in the right order.
|
||||
self.function_tuples = sorted(set(function_tuples), key=self.sort_fn)
|
||||
|
||||
self._specs = {}
|
||||
for name, args in self.function_tuples:
|
||||
if SpecBuilder.ignored_attributes.match(name):
|
||||
@@ -3777,10 +3957,14 @@ def build_specs(self, function_tuples):
|
||||
continue
|
||||
|
||||
# if we've already gotten a concrete spec for this pkg,
|
||||
# do not bother calling actions on it
|
||||
# do not bother calling actions on it except for node_flag_source,
|
||||
# since node_flag_source is tracking information not in the spec itself
|
||||
# we also need to keep track of splicing information.
|
||||
spec = self._specs.get(args[0])
|
||||
if spec and spec.concrete:
|
||||
continue
|
||||
do_not_ignore_attrs = ["node_flag_source", "splice_at_hash"]
|
||||
if name not in do_not_ignore_attrs:
|
||||
continue
|
||||
|
||||
action(*args)
|
||||
|
||||
@@ -3790,7 +3974,7 @@ def build_specs(self, function_tuples):
|
||||
# inject patches -- note that we can't use set() to unique the
|
||||
# roots here, because the specs aren't complete, and the hash
|
||||
# function will loop forever.
|
||||
roots = [spec.root for spec in self._specs.values() if not spec.root.installed]
|
||||
roots = [spec.root for spec in self._specs.values()]
|
||||
roots = dict((id(r), r) for r in roots)
|
||||
for root in roots.values():
|
||||
spack.spec.Spec.inject_patches_variant(root)
|
||||
@@ -3806,6 +3990,8 @@ def build_specs(self, function_tuples):
|
||||
for root in roots.values():
|
||||
root._finalize_concretization()
|
||||
|
||||
self._resolve_automatic_splices()
|
||||
|
||||
for s in self._specs.values():
|
||||
spack.spec.Spec.ensure_no_deprecated(s)
|
||||
|
||||
@@ -3820,7 +4006,6 @@ def build_specs(self, function_tuples):
|
||||
)
|
||||
|
||||
specs = self.execute_explicit_splices()
|
||||
|
||||
return specs
|
||||
|
||||
def execute_explicit_splices(self):
|
||||
@@ -3978,7 +4163,7 @@ def selected_specs(self) -> List[spack.spec.Spec]:
|
||||
return [s for s in self.factory() if self.is_selected(s)]
|
||||
|
||||
@staticmethod
|
||||
def from_store(configuration, include, exclude) -> "SpecFilter":
|
||||
def from_store(configuration, *, include, exclude) -> "SpecFilter":
|
||||
"""Constructs a filter that takes the specs from the current store."""
|
||||
packages = _external_config_with_implicit_externals(configuration)
|
||||
is_reusable = functools.partial(_is_reusable, packages=packages, local=True)
|
||||
@@ -3986,7 +4171,7 @@ def from_store(configuration, include, exclude) -> "SpecFilter":
|
||||
return SpecFilter(factory=factory, is_usable=is_reusable, include=include, exclude=exclude)
|
||||
|
||||
@staticmethod
|
||||
def from_buildcache(configuration, include, exclude) -> "SpecFilter":
|
||||
def from_buildcache(configuration, *, include, exclude) -> "SpecFilter":
|
||||
"""Constructs a filter that takes the specs from the configured buildcaches."""
|
||||
packages = _external_config_with_implicit_externals(configuration)
|
||||
is_reusable = functools.partial(_is_reusable, packages=packages, local=False)
|
||||
@@ -3994,6 +4179,29 @@ def from_buildcache(configuration, include, exclude) -> "SpecFilter":
|
||||
factory=_specs_from_mirror, is_usable=is_reusable, include=include, exclude=exclude
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def from_environment(configuration, *, include, exclude, env) -> "SpecFilter":
|
||||
packages = _external_config_with_implicit_externals(configuration)
|
||||
is_reusable = functools.partial(_is_reusable, packages=packages, local=True)
|
||||
factory = functools.partial(_specs_from_environment, env=env)
|
||||
return SpecFilter(factory=factory, is_usable=is_reusable, include=include, exclude=exclude)
|
||||
|
||||
@staticmethod
|
||||
def from_environment_included_concrete(
|
||||
configuration,
|
||||
*,
|
||||
include: List[str],
|
||||
exclude: List[str],
|
||||
env: ev.Environment,
|
||||
included_concrete: str,
|
||||
) -> "SpecFilter":
|
||||
packages = _external_config_with_implicit_externals(configuration)
|
||||
is_reusable = functools.partial(_is_reusable, packages=packages, local=True)
|
||||
factory = functools.partial(
|
||||
_specs_from_environment_included_concrete, env=env, included_concrete=included_concrete
|
||||
)
|
||||
return SpecFilter(factory=factory, is_usable=is_reusable, include=include, exclude=exclude)
|
||||
|
||||
|
||||
def _specs_from_store(configuration):
|
||||
store = spack.store.create(configuration)
|
||||
@@ -4011,6 +4219,23 @@ def _specs_from_mirror():
|
||||
return []
|
||||
|
||||
|
||||
def _specs_from_environment(env):
|
||||
"""Return all concrete specs from the environment. This includes all included concrete"""
|
||||
if env:
|
||||
return [concrete for _, concrete in env.concretized_specs()]
|
||||
else:
|
||||
return []
|
||||
|
||||
|
||||
def _specs_from_environment_included_concrete(env, included_concrete):
|
||||
"""Return only concrete specs from the environment included from the included_concrete"""
|
||||
if env:
|
||||
assert included_concrete in env.included_concrete_envs
|
||||
return [concrete for concrete in env.included_specs_by_hash[included_concrete].values()]
|
||||
else:
|
||||
return []
|
||||
|
||||
|
||||
class ReuseStrategy(enum.Enum):
|
||||
ROOTS = enum.auto()
|
||||
DEPENDENCIES = enum.auto()
|
||||
@@ -4040,6 +4265,12 @@ def __init__(self, configuration: spack.config.Configuration) -> None:
|
||||
SpecFilter.from_buildcache(
|
||||
configuration=self.configuration, include=[], exclude=[]
|
||||
),
|
||||
SpecFilter.from_environment(
|
||||
configuration=self.configuration,
|
||||
include=[],
|
||||
exclude=[],
|
||||
env=ev.active_environment(), # includes all concrete includes
|
||||
),
|
||||
]
|
||||
)
|
||||
else:
|
||||
@@ -4054,7 +4285,46 @@ def __init__(self, configuration: spack.config.Configuration) -> None:
|
||||
for source in reuse_yaml.get("from", default_sources):
|
||||
include = source.get("include", default_include)
|
||||
exclude = source.get("exclude", default_exclude)
|
||||
if source["type"] == "local":
|
||||
if source["type"] == "environment" and "path" in source:
|
||||
env_dir = ev.as_env_dir(source["path"])
|
||||
active_env = ev.active_environment()
|
||||
if active_env and env_dir in active_env.included_concrete_envs:
|
||||
# If environment is included as a concrete environment, use the local copy
|
||||
# of specs in the active environment.
|
||||
# note: included concrete environments are only updated at concretization
|
||||
# time, and reuse needs to match the included specs.
|
||||
self.reuse_sources.append(
|
||||
SpecFilter.from_environment_included_concrete(
|
||||
self.configuration,
|
||||
include=include,
|
||||
exclude=exclude,
|
||||
env=active_env,
|
||||
included_concrete=env_dir,
|
||||
)
|
||||
)
|
||||
else:
|
||||
# If the environment is not included as a concrete environment, use the
|
||||
# current specs from its lockfile.
|
||||
self.reuse_sources.append(
|
||||
SpecFilter.from_environment(
|
||||
self.configuration,
|
||||
include=include,
|
||||
exclude=exclude,
|
||||
env=ev.environment_from_name_or_dir(env_dir),
|
||||
)
|
||||
)
|
||||
elif source["type"] == "environment":
|
||||
# reusing from the current environment implicitly reuses from all of the
|
||||
# included concrete environments
|
||||
self.reuse_sources.append(
|
||||
SpecFilter.from_environment(
|
||||
self.configuration,
|
||||
include=include,
|
||||
exclude=exclude,
|
||||
env=ev.active_environment(),
|
||||
)
|
||||
)
|
||||
elif source["type"] == "local":
|
||||
self.reuse_sources.append(
|
||||
SpecFilter.from_store(self.configuration, include=include, exclude=exclude)
|
||||
)
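A concretizer ``reuse`` configuration exercising the new ``environment`` source type handled above, expressed as the dict Spack would read from YAML (paths and spec names are placeholders):

reuse = {
    "from": [
        {"type": "local"},                                      # installed store
        {"type": "buildcache", "exclude": ["openssl"]},         # configured mirrors
        {"type": "environment", "path": "/path/to/other/env"},  # another environment's concrete specs
    ]
}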
|
||||
@@ -4072,7 +4342,6 @@ def reusable_specs(self, specs: List[spack.spec.Spec]) -> List[spack.spec.Spec]:
|
||||
result = []
|
||||
for reuse_source in self.reuse_sources:
|
||||
result.extend(reuse_source.selected_specs())
|
||||
|
||||
# If we only want to reuse dependencies, remove the root specs
|
||||
if self.reuse_strategy == ReuseStrategy.DEPENDENCIES:
|
||||
result = [spec for spec in result if not any(root in spec for root in specs)]
|
||||
@@ -4103,7 +4372,7 @@ def _check_input_and_extract_concrete_specs(specs):
|
||||
spack.spec.Spec.ensure_valid_variants(s)
|
||||
return reusable
|
||||
|
||||
def solve(
|
||||
def solve_with_stats(
|
||||
self,
|
||||
specs,
|
||||
out=None,
|
||||
@@ -4112,8 +4381,11 @@ def solve(
|
||||
tests=False,
|
||||
setup_only=False,
|
||||
allow_deprecated=False,
|
||||
profile=False,
|
||||
):
|
||||
"""
|
||||
Concretize a set of specs and track the timing and statistics for the solve
|
||||
|
||||
Arguments:
|
||||
specs (list): List of ``Spec`` objects to solve for.
|
||||
out: Optionally write the generated ASP program to a file-like object.
|
||||
@@ -4125,15 +4397,27 @@ def solve(
|
||||
setup_only (bool): if True, stop after setup and don't solve (default False).
|
||||
allow_deprecated (bool): allow deprecated version in the solve
|
||||
"""
|
||||
# Check upfront that the variants are admissible
|
||||
specs = [s.lookup_hash() for s in specs]
|
||||
reusable_specs = self._check_input_and_extract_concrete_specs(specs)
|
||||
reusable_specs.extend(self.selector.reusable_specs(specs))
|
||||
setup = SpackSolverSetup(tests=tests)
|
||||
output = OutputConfiguration(timers=timers, stats=stats, out=out, setup_only=setup_only)
|
||||
result, _, _ = self.driver.solve(
|
||||
setup, specs, reuse=reusable_specs, output=output, allow_deprecated=allow_deprecated
|
||||
return self.driver.solve(
|
||||
setup,
|
||||
specs,
|
||||
reuse=reusable_specs,
|
||||
output=output,
|
||||
allow_deprecated=allow_deprecated,
|
||||
profile=profile,
|
||||
)
|
||||
|
||||
def solve(self, specs, **kwargs):
|
||||
"""
|
||||
Convenience function for concretizing a set of specs and ignoring timing
|
||||
and statistics. Uses the same kwargs as solve_with_stats.
|
||||
"""
|
||||
# Check upfront that the variants are admissible
|
||||
result, _, _ = self.solve_with_stats(specs, **kwargs)
|
||||
return result
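A hedged usage sketch of the split API (requires a working Spack import; the spec name is arbitrary): ``solve_with_stats`` returns the full triple from the driver, while ``solve`` keeps the old single-result behaviour.

import spack.solver.asp as asp
import spack.spec

solver = asp.Solver()
specs = [spack.spec.Spec("zlib")]

result, timer, stats = solver.solve_with_stats(specs, profile=True)  # full detail
result_only = solver.solve(specs)  # convenience wrapper, discards timer/stats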
|
||||
|
||||
def solve_in_rounds(
|
||||
@@ -4233,11 +4517,10 @@ def __init__(self, provided, conflicts):
|
||||
|
||||
super().__init__(msg)
|
||||
|
||||
self.provided = provided
|
||||
|
||||
# Add attribute expected of the superclass interface
|
||||
self.required = None
|
||||
self.constraint_type = None
|
||||
self.provided = provided
|
||||
|
||||
|
||||
class InvalidSpliceError(spack.error.SpackError):
|
||||
|
@@ -57,6 +57,12 @@
|
||||
internal_error("provider with no virtual node").
|
||||
:- provider(PackageNode, _), not attr("node", PackageNode),
|
||||
internal_error("provider with no real node").
|
||||
:- node_has_variant(PackageNode, _, _), not attr("node", PackageNode),
|
||||
internal_error("node has variant for a non-node").
|
||||
:- attr("variant_set", PackageNode, _, _), not attr("node", PackageNode),
|
||||
internal_error("variant_set for a non-node").
|
||||
:- variant_is_propagated(PackageNode, _), not attr("node", PackageNode),
|
||||
internal_error("variant_is_propagated for a non-node").
|
||||
|
||||
:- attr("root", node(ID, PackageNode)), ID > min_dupe_id,
|
||||
internal_error("root with a non-minimal duplicate ID").
|
||||
@@ -575,7 +581,8 @@ attr("virtual_on_edge", PackageNode, ProviderNode, Virtual)
|
||||
% or used somewhere
|
||||
:- attr("virtual_node", node(_, Virtual)),
|
||||
not attr("virtual_on_incoming_edges", _, Virtual),
|
||||
not attr("virtual_root", node(_, Virtual)).
|
||||
not attr("virtual_root", node(_, Virtual)),
|
||||
internal_error("virtual node does not match incoming edge").
|
||||
|
||||
attr("virtual_on_incoming_edges", ProviderNode, Virtual)
|
||||
:- attr("virtual_on_edge", _, ProviderNode, Virtual).
|
||||
@@ -629,7 +636,8 @@ do_not_impose(EffectID, node(X, Package))
|
||||
virtual_condition_holds(_, PossibleProvider, Virtual),
|
||||
PossibleProvider != ProviderNode,
|
||||
explicitly_requested_root(PossibleProvider),
|
||||
not explicitly_requested_root(ProviderNode).
|
||||
not explicitly_requested_root(ProviderNode),
|
||||
internal_error("If a root can provide a virtual, it must be the provider").
|
||||
|
||||
% A package cannot be the actual provider for a virtual if it does not
|
||||
% fulfill the conditions to provide that virtual
|
||||
@@ -772,7 +780,8 @@ required_provider(Provider, Virtual)
|
||||
pkg_fact(Virtual, condition_effect(ConditionID, EffectID)),
|
||||
imposed_constraint(EffectID, "node", Provider).
|
||||
|
||||
:- provider(node(Y, Package), node(X, Virtual)), required_provider(Provider, Virtual), Package != Provider.
|
||||
:- provider(node(Y, Package), node(X, Virtual)), required_provider(Provider, Virtual), Package != Provider,
|
||||
internal_error("If a provider is required the concretizer must use it").
|
||||
|
||||
% TODO: the following choice rule allows the solver to add compiler
|
||||
% flags if their only source is from a requirement. This is overly-specific
|
||||
@@ -852,7 +861,8 @@ variant_defined(PackageNode, Name) :- variant_definition(PackageNode, Name, _).
|
||||
% for two or more variant definitions, this prefers the last one defined.
|
||||
:- node_has_variant(node(NodeID, Package), Name, SelectedVariantID),
|
||||
variant_definition(node(NodeID, Package), Name, VariantID),
|
||||
VariantID > SelectedVariantID.
|
||||
VariantID > SelectedVariantID,
|
||||
internal_error("If the solver picks a variant descriptor it must use that variant descriptor").
|
||||
|
||||
% B: Associating applicable package rules with nodes
|
||||
|
||||
@@ -969,6 +979,7 @@ error(100, "{0} variant '{1}' cannot have values '{2}' and '{3}' as they come fr
|
||||
|
||||
:- attr("variant_set", node(ID, Package), Variant, Value),
|
||||
not attr("variant_value", node(ID, Package), Variant, Value).
|
||||
internal_error("If a variant is set to a value it must have that value").
|
||||
|
||||
% The rules below allow us to prefer default values for variants
|
||||
% whenever possible. If a variant is set in a spec, or if it is
|
||||
@@ -979,7 +990,7 @@ variant_not_default(node(ID, Package), Variant, Value)
|
||||
% variants set explicitly on the CLI don't count as non-default
|
||||
not attr("variant_set", node(ID, Package), Variant, Value),
|
||||
% variant values forced by propagation don't count as non-default
|
||||
not propagate(node(ID, Package), variant_value(Variant, Value)),
|
||||
not propagate(node(ID, Package), variant_value(Variant, Value, _)),
|
||||
% variants set on externals that we could use don't count as non-default
|
||||
% this makes spack prefer to use an external over rebuilding with the
|
||||
% default configuration
|
||||
@@ -991,7 +1002,7 @@ variant_default_not_used(node(ID, Package), Variant, Value)
|
||||
:- variant_default_value(node(ID, Package), Variant, Value),
|
||||
node_has_variant(node(ID, Package), Variant, _),
|
||||
not attr("variant_value", node(ID, Package), Variant, Value),
|
||||
not propagate(node(ID, Package), variant_value(Variant, _)),
|
||||
not propagate(node(ID, Package), variant_value(Variant, _, _)),
|
||||
attr("node", node(ID, Package)).
|
||||
|
||||
% The variant is set in an external spec
|
||||
@@ -1036,10 +1047,14 @@ variant_single_value(PackageNode, Variant)
|
||||
% Propagation semantics
|
||||
%-----------------------------------------------------------------------------
|
||||
|
||||
non_default_propagation(variant_value(Name, Value)) :- attr("propagate", RootNode, variant_value(Name, Value)).
|
||||
|
||||
% Propagation roots have a corresponding attr("propagate", ...)
|
||||
propagate(RootNode, PropagatedAttribute) :- attr("propagate", RootNode, PropagatedAttribute).
|
||||
propagate(RootNode, PropagatedAttribute) :- attr("propagate", RootNode, PropagatedAttribute), not non_default_propagation(PropagatedAttribute).
|
||||
propagate(RootNode, PropagatedAttribute, EdgeTypes) :- attr("propagate", RootNode, PropagatedAttribute, EdgeTypes).
|
||||
|
||||
% Special case variants, to inject the source node in the propagated attribute
|
||||
propagate(RootNode, variant_value(Name, Value, RootNode)) :- attr("propagate", RootNode, variant_value(Name, Value)).
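As a rough illustration of where these facts come from (package and variant names are assumed, not taken from this changeset), a propagated variant on the command line is what ultimately shows up as an attr("propagate", ...) fact:

    # Minimal sketch: "++" marks a variant as propagated on the abstract spec;
    # the solver setup then emits an attr("propagate", ...) fact for it.
    from spack.spec import Spec

    root = Spec("hdf5 ++debug")
    print(root.variants["debug"].propagate)  # True -> pushed to descendants by the rules above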
|
||||
|
||||
% Propagate an attribute along edges to child nodes
|
||||
propagate(ChildNode, PropagatedAttribute) :-
|
||||
@@ -1061,21 +1076,53 @@ propagate(ChildNode, PropagatedAttribute, edge_types(DepType1, DepType2)) :-
|
||||
|
||||
% If a variant is propagated, and can be accepted, set its value
|
||||
attr("variant_selected", PackageNode, Variant, Value, VariantType, VariantID) :-
|
||||
propagate(PackageNode, variant_value(Variant, Value)),
|
||||
propagate(PackageNode, variant_value(Variant, Value, _)),
|
||||
node_has_variant(PackageNode, Variant, VariantID),
|
||||
variant_type(VariantID, VariantType),
|
||||
variant_possible_value(PackageNode, Variant, Value),
|
||||
not attr("variant_set", PackageNode, Variant).
|
||||
variant_possible_value(PackageNode, Variant, Value).
|
||||
|
||||
% If a variant is propagated, we cannot have extraneous values
|
||||
variant_is_propagated(PackageNode, Variant) :-
|
||||
attr("variant_value", PackageNode, Variant, Value),
|
||||
propagate(PackageNode, variant_value(Variant, Value)),
|
||||
propagate(PackageNode, variant_value(Variant, Value, _)),
|
||||
not attr("variant_set", PackageNode, Variant).
|
||||
|
||||
:- variant_is_propagated(PackageNode, Variant),
|
||||
attr("variant_selected", PackageNode, Variant, Value, _, _),
|
||||
not propagate(PackageNode, variant_value(Variant, Value)).
|
||||
not propagate(PackageNode, variant_value(Variant, Value, _)).
|
||||
|
||||
error(100, "{0} and {1} cannot both propagate variant '{2}' to the shared dependency: {3}",
|
||||
Package1, Package2, Variant, Dependency) :-
|
||||
% The variant is a single-valued variant
|
||||
variant_single_value(node(X, Package1), Variant),
|
||||
% Dependency is trying to propagate Variant with different values and is not the source package
|
||||
propagate(node(Z, Dependency), variant_value(Variant, Value1, node(X, Package1))),
|
||||
propagate(node(Z, Dependency), variant_value(Variant, Value2, node(Y, Package2))),
|
||||
% Package1 and Package2 and their values are different
|
||||
Package1 > Package2, Value1 != Value2,
|
||||
not propagate(node(Z, Dependency), variant_value(Variant, _, node(Z, Dependency))).
|
||||
|
||||
% Cannot propagate the same variant from two different packages if one is a dependency of the other
|
||||
error(100, "{0} and {1} cannot both propagate variant '{2}'", Package1, Package2, Variant) :-
|
||||
% The variant is a single-valued variant
|
||||
variant_single_value(node(X, Package1), Variant),
|
||||
% Package1 and Package2 and their values are different
|
||||
Package1 != Package2, Value1 != Value2,
|
||||
% Package2 is set to propagate the value from Package1
|
||||
propagate(node(Y, Package2), variant_value(Variant, Value2, node(X, Package2))),
|
||||
propagate(node(Y, Package2), variant_value(Variant, Value1, node(X, Package1))),
|
||||
variant_is_propagated(node(Y, Package2), Variant).
|
||||
|
||||
% Cannot propagate a variant if a different value was set for it in a dependency
|
||||
error(100, "Cannot propagate the variant '{0}' from the package: {1} because package: {2} is set to exclude it", Variant, Source, Package) :-
|
||||
% Package has a Variant and Source is propagating Variant
|
||||
attr("variant_set", node(X, Package), Variant, Value1),
|
||||
% The packages and values are different
|
||||
Source != Package, Value1 != Value2,
|
||||
% The variant is a single-valued variant
|
||||
variant_single_value(node(X, Package1), Variant),
|
||||
% A different value is being propagated from somewhere else
|
||||
propagate(node(X, Package), variant_value(Variant, Value2, node(Y, Source))).
|
||||
|
||||
%----
|
||||
% Flags
|
||||
@@ -1402,25 +1449,71 @@ attr("node_flag", PackageNode, NodeFlag) :- attr("node_flag_set", PackageNode, N
|
||||
|
||||
|
||||
%-----------------------------------------------------------------------------
|
||||
% Installed packages
|
||||
% Installed Packages
|
||||
%-----------------------------------------------------------------------------
|
||||
% the solver is free to choose at most one installed hash for each package
|
||||
{ attr("hash", node(ID, Package), Hash) : installed_hash(Package, Hash) } 1
|
||||
:- attr("node", node(ID, Package)), internal_error("Package must resolve to at most one hash").
|
||||
|
||||
#defined installed_hash/2.
|
||||
#defined abi_splice_conditions_hold/4.
|
||||
|
||||
% These are the previously concretized attributes of the installed package as
|
||||
% a hash. It has the general form:
|
||||
% hash_attr(Hash, Attribute, PackageName, Args*)
|
||||
#defined hash_attr/3.
|
||||
#defined hash_attr/4.
|
||||
#defined hash_attr/5.
|
||||
#defined hash_attr/6.
|
||||
#defined hash_attr/7.
|
||||
|
||||
{ attr("hash", node(ID, PackageName), Hash): installed_hash(PackageName, Hash) } 1 :-
|
||||
attr("node", node(ID, PackageName)),
|
||||
internal_error("Package must resolve to at most 1 hash").
|
||||
% you can't choose an installed hash for a dev spec
|
||||
:- attr("hash", PackageNode, Hash), attr("variant_value", PackageNode, "dev_path", _).
|
||||
|
||||
% You can't install a hash, if it is not installed
|
||||
:- attr("hash", node(ID, Package), Hash), not installed_hash(Package, Hash).
|
||||
% This should be redundant given the constraint above
|
||||
:- attr("node", PackageNode), 2 { attr("hash", PackageNode, Hash) }.
|
||||
|
||||
% if a hash is selected, we impose all the constraints that implies
|
||||
impose(Hash, PackageNode) :- attr("hash", PackageNode, Hash).
|
||||
% hash_attr facts carry exact versions, but can_splice conditions usually use node_version_satisfies
|
||||
hash_attr(Hash, "node_version_satisfies", PackageName, Constraint) :-
|
||||
hash_attr(Hash, "version", PackageName, Version),
|
||||
pkg_fact(PackageName, version_satisfies(Constraint, Version)).
|
||||
|
||||
% This recovers the exact semantics for hash reuse. hash and depends_on are where
|
||||
% splices are decided, and virtual_on_edge can result in name-changes, which is
|
||||
% why they are all treated separately.
|
||||
imposed_constraint(Hash, Attr, PackageName) :-
|
||||
hash_attr(Hash, Attr, PackageName).
|
||||
imposed_constraint(Hash, Attr, PackageName, A1) :-
|
||||
hash_attr(Hash, Attr, PackageName, A1), Attr != "hash".
|
||||
imposed_constraint(Hash, Attr, PackageName, Arg1, Arg2) :-
|
||||
hash_attr(Hash, Attr, PackageName, Arg1, Arg2),
|
||||
Attr != "depends_on",
|
||||
Attr != "virtual_on_edge".
|
||||
imposed_constraint(Hash, Attr, PackageName, A1, A2, A3) :-
|
||||
hash_attr(Hash, Attr, PackageName, A1, A2, A3).
|
||||
imposed_constraint(Hash, "hash", PackageName, Hash) :- installed_hash(PackageName, Hash).
|
||||
% Without splicing, we simply recover the exact semantics
|
||||
imposed_constraint(ParentHash, "hash", ChildName, ChildHash) :-
|
||||
hash_attr(ParentHash, "hash", ChildName, ChildHash),
|
||||
ChildHash != ParentHash,
|
||||
not abi_splice_conditions_hold(_, _, ChildName, ChildHash).
|
||||
|
||||
imposed_constraint(Hash, "depends_on", PackageName, DepName, Type) :-
|
||||
hash_attr(Hash, "depends_on", PackageName, DepName, Type),
|
||||
hash_attr(Hash, "hash", DepName, DepHash),
|
||||
not attr("splice_at_hash", _, _, DepName, DepHash).
|
||||
|
||||
imposed_constraint(Hash, "virtual_on_edge", PackageName, DepName, VirtName) :-
|
||||
hash_attr(Hash, "virtual_on_edge", PackageName, DepName, VirtName),
|
||||
not attr("splice_at_hash", _, _, DepName,_).
|
||||
|
||||
% Rules pertaining to attr("splice_at_hash") and abi_splice_conditions_hold will
|
||||
% be conditionally loaded from splices.lp
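A sketch of how that conditional load could look on the driver side (the paths are illustrative and the config key mirrors the tests added later in this changeset):

    import clingo
    import spack.config

    ctl = clingo.Control()
    ctl.load("lib/spack/spack/solver/concretize.lp")       # always loaded
    if spack.config.get("concretizer:splice:automatic", False):
        ctl.load("lib/spack/spack/solver/splices.lp")       # splice rules only when enabled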
|
||||
|
||||
impose(Hash, PackageNode) :- attr("hash", PackageNode, Hash), attr("node", PackageNode).
|
||||
|
||||
% If there is not a hash for a package, we build it.
|
||||
build(PackageNode) :- attr("node", PackageNode), not concrete(PackageNode).
|
||||
|
||||
% if we haven't selected a hash for a package, we'll be building it
|
||||
build(PackageNode) :- not attr("hash", PackageNode, _), attr("node", PackageNode).
|
||||
|
||||
% Minimizing builds is tricky. We want a minimizing criterion
|
||||
|
||||
@@ -1433,6 +1526,7 @@ build(PackageNode) :- not attr("hash", PackageNode, _), attr("node", PackageNode
|
||||
% criteria for built specs -- so that they take precedence over the otherwise
|
||||
% topmost-priority criterion to reuse what is installed.
|
||||
%
|
||||
|
||||
% The priority ranges are:
|
||||
% 1000+ Optimizations for concretization errors
|
||||
% 300 - 1000 Highest priority optimizations for valid solutions
|
||||
@@ -1458,12 +1552,10 @@ build_priority(PackageNode, 0) :- not build(PackageNode), attr("node", Package
|
||||
pkg_fact(Package, version_declared(Version, Weight, "installed")),
|
||||
not optimize_for_reuse().
|
||||
|
||||
#defined installed_hash/2.
|
||||
|
||||
% This statement, which is a hidden feature of clingo, let us avoid cycles in the DAG
|
||||
#edge (A, B) : depends_on(A, B).
|
||||
|
||||
|
||||
%-----------------------------------------------------------------
|
||||
% Optimization to avoid errors
|
||||
%-----------------------------------------------------------------
|
||||
|
@@ -44,6 +44,17 @@ def _id(thing: Any) -> Union[str, AspObject]:
|
||||
return f'"{str(thing)}"'
|
||||
|
||||
|
||||
class AspVar(AspObject):
|
||||
"""Represents a variable in an ASP rule, allows for conditionally generating
|
||||
rules"""
|
||||
|
||||
def __init__(self, name: str):
|
||||
self.name = name
|
||||
|
||||
def __str__(self) -> str:
|
||||
return str(self.name)
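A minimal sketch of the intent (the `fn` AspFunction builder name is assumed from this module): an AspVar renders unquoted, so it stays a real ASP variable in the generated rule instead of being turned into a string constant.

    from spack.solver.asp import AspVar, fn  # fn: AspFunctionBuilder instance (assumed name)

    # attr("node", node(ID, "zlib")) with ID left for the solver to bind
    atom = fn.attr("node", fn.node(AspVar("ID"), "zlib"))
    print(atom)  # -> attr("node", node(ID, "zlib"))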
|
||||
|
||||
|
||||
@lang.key_ordering
|
||||
class AspFunction(AspObject):
|
||||
"""A term in the ASP logic program"""
|
||||
@@ -88,6 +99,8 @@ def _argify(self, arg: Any) -> Any:
|
||||
return clingo().Number(arg)
|
||||
elif isinstance(arg, AspFunction):
|
||||
return clingo().Function(arg.name, [self._argify(x) for x in arg.args], positive=True)
|
||||
elif isinstance(arg, AspVar):
|
||||
return clingo().Variable(arg.name)
|
||||
return clingo().String(str(arg))
|
||||
|
||||
def symbol(self):
|
||||
|
@@ -15,7 +15,6 @@
|
||||
#show attr/4.
|
||||
#show attr/5.
|
||||
#show attr/6.
|
||||
|
||||
% names of optimization criteria
|
||||
#show opt_criterion/2.
|
||||
|
||||
|
lib/spack/spack/solver/profiler.py (new file, 111 lines)
@@ -0,0 +1,111 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""Profiling propagator for clingo solves."""
|
||||
|
||||
import re
|
||||
from typing import Any, Dict, List
|
||||
|
||||
import llnl.util.tty as tty
|
||||
|
||||
|
||||
class Data:
|
||||
"""Counters for propagations and undos of an atom."""
|
||||
|
||||
__slots__ = ["atom", "literal", "prop", "undo"]
|
||||
|
||||
# currently we use Any for clingo types because clingo has a bunch of import
|
||||
# wrappers around it that make typing difficult (see spack.solver.core for details)
|
||||
def __init__(self, atom: Any, literal: int, prop: int, undo: int):
|
||||
self.atom = atom
|
||||
self.literal = literal
|
||||
self.prop = prop
|
||||
self.undo = undo
|
||||
|
||||
|
||||
class AggregatedData:
|
||||
"""Aggregated data for a profile, constructed from ``Data``.
|
||||
|
||||
We coarsen from atom granularity to string keys when aggregating.
|
||||
"""
|
||||
|
||||
__slots__ = ["name", "prop", "undo"]
|
||||
|
||||
def __init__(self, name: str, prop: int, undo: int):
|
||||
self.name = name
|
||||
self.prop = prop
|
||||
self.undo = undo
|
||||
|
||||
|
||||
class ProfilePropagator:
|
||||
"""Profiling propagator for `spack solve --profile`.
|
||||
|
||||
Register this with the ``clingo.Control`` object to profile a solve.
|
||||
"""
|
||||
|
||||
_literal_to_atom: Dict
|
||||
_profile: Dict[int, Data]
|
||||
|
||||
def init(self, init) -> None:
|
||||
self._literal_to_atom = {}
|
||||
self._profile = {}
|
||||
for atom in init.symbolic_atoms:
|
||||
solver_literal = init.solver_literal(atom.literal)
|
||||
self._profile[solver_literal] = Data(atom, solver_literal, 0, 0)
|
||||
init.add_watch(solver_literal)
|
||||
|
||||
def propagate(self, ctl, changes: List[int]) -> bool:
|
||||
"""Record a propagation in the solve."""
|
||||
for literal in changes:
|
||||
data = self._profile[literal]
|
||||
data.prop += 1
|
||||
return True
|
||||
|
||||
def undo(self, solver_id: int, assign, undo: List[int]) -> None:
|
||||
"""Record an undo in the solve."""
|
||||
for literal in undo:
|
||||
data = self._profile[literal]
|
||||
data.undo += 1
|
||||
|
||||
def color_sym(self, string: str) -> str:
|
||||
"""Colorize a symbol for profile output"""
|
||||
string = re.sub(r"^(\w+)", r"@C{\1}", string)
|
||||
string = re.sub(r'("[^"]*")', r"@G{\1}", string)
|
||||
string = re.sub(r"([\(\)])", r"@b{\1}", string)
|
||||
return tty.color.colorize(string)
|
||||
|
||||
def key(self, atom) -> str:
|
||||
"""Convert an atom into an aggregate key for our profile.
|
||||
|
||||
Currently this compresses most things to their function name, and expands
|
||||
``attr("name", ...)`` to ``attr("name")`` so we can see which attributes affect
|
||||
the solve most.
|
||||
|
||||
"""
|
||||
sym = atom.symbol
|
||||
return f"attr({sym.arguments[0]})" if sym.name == "attr" else sym.name
|
||||
|
||||
def print_profile(self, n_atoms: int) -> None:
|
||||
"""Aggregate and print nicely formatted profile data."""
|
||||
aggregated = {}
|
||||
for data in self._profile.values():
|
||||
name = self.key(data.atom)
|
||||
if name not in aggregated:
|
||||
aggregated[name] = AggregatedData(name, data.prop, data.undo)
|
||||
else:
|
||||
agg = aggregated[name]
|
||||
agg.prop += data.prop
|
||||
agg.undo += data.undo
|
||||
|
||||
values = sorted(
|
||||
(x for x in aggregated.values() if x.prop), key=lambda x: x.prop, reverse=True
|
||||
)
|
||||
|
||||
# format the output nicely
|
||||
w = 10 # width for number fields
|
||||
print(tty.color.colorize(f" @*{{{'Prop':<{w}}{'Undo':<{w}}{'Symbol'}}}"))
|
||||
for a in values[:n_atoms]:
|
||||
print(f" {a.prop:<{w}}{a.undo:<{w}}{self.color_sym(a.name)}")
|
||||
if len(values) > n_atoms:
|
||||
print(" ...")
|
lib/spack/spack/solver/splices.lp (new file, 56 lines)
@@ -0,0 +1,56 @@
|
||||
% Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
% Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
%
|
||||
% SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
%=============================================================================
|
||||
% These rules are conditionally loaded to handle the synthesis of spliced
|
||||
% packages.
|
||||
% =============================================================================
|
||||
% Consider the concrete spec:
|
||||
% foo@2.72%gcc@11.4 arch=linux-ubuntu22.04-icelake build_system=autotools ^bar ...
|
||||
% It will emit the following facts for reuse (below is a subset)
|
||||
% installed_hash("foo", "xxxyyy")
|
||||
% hash_attr("xxxyyy", "hash", "foo", "xxxyyy")
|
||||
% hash_attr("xxxyyy", "version", "foo", "2.72")
|
||||
% hash_attr("xxxyyy", "node_os", "ubuntu22.04")
|
||||
% hash_attr("xxxyyy", "hash", "bar", "zzzqqq")
|
||||
% hash_attr("xxxyyy", "depends_on", "foo", "bar", "link")
|
||||
% Rules that derive abi_splice_conditions_hold will be generated from
|
||||
% use of the `can_splice` directive. They will have the following form:
|
||||
% can_splice("foo@1.0.0+a", when="@1.0.1+a", match_variants=["b"]) --->
|
||||
% abi_splice_conditions_hold(0, node(SID, "foo"), "foo", BaseHash) :-
|
||||
% installed_hash("foo", BaseHash),
|
||||
% attr("node", node(SID, SpliceName)),
|
||||
% attr("node_version_satisfies", node(SID, "foo"), "1.0.1"),
|
||||
% hash_attr("hash", "node_version_satisfies", "foo", "1.0.1"),
|
||||
% attr("variant_value", node(SID, "foo"), "a", "True"),
|
||||
% hash_attr("hash", "variant_value", "foo", "a", "True"),
|
||||
% attr("variant_value", node(SID, "foo"), "b", VariVar0),
|
||||
% hash_attr("hash", "variant_value", "foo", "b", VariVar0),
|
||||
|
||||
% If the splice is valid (i.e. abi_splice_conditions_hold is derived) in the
|
||||
% dependency of a concrete spec, the solver is free to choose whether to continue
|
||||
% with the exact hash semantics by simply imposing the child hash, or introducing
|
||||
% a spliced node as the dependency instead
|
||||
{ imposed_constraint(ParentHash, "hash", ChildName, ChildHash) } :-
|
||||
hash_attr(ParentHash, "hash", ChildName, ChildHash),
|
||||
abi_splice_conditions_hold(_, node(SID, SpliceName), ChildName, ChildHash).
|
||||
|
||||
attr("splice_at_hash", ParentNode, node(SID, SpliceName), ChildName, ChildHash) :-
|
||||
attr("hash", ParentNode, ParentHash),
|
||||
hash_attr(ParentHash, "hash", ChildName, ChildHash),
|
||||
abi_splice_conditions_hold(_, node(SID, SpliceName), ChildName, ChildHash),
|
||||
ParentHash != ChildHash,
|
||||
not imposed_constraint(ParentHash, "hash", ChildName, ChildHash).
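These rules only come into play when automatic splicing is enabled; the toggle is the same one the new tests use:

    import spack.config

    spack.config.set("concretizer:reuse", True)                  # splices start from installed hashes
    spack.config.set("concretizer:splice", {"automatic": True})  # allow the splice_at_hash choice above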
|
||||
|
||||
% Names and virtual providers may change when a dependency is spliced in
|
||||
imposed_constraint(Hash, "dependency_holds", ParentName, SpliceName, Type) :-
|
||||
hash_attr(Hash, "depends_on", ParentName, DepName, Type),
|
||||
hash_attr(Hash, "hash", DepName, DepHash),
|
||||
attr("splice_at_hash", node(ID, ParentName), node(SID, SpliceName), DepName, DepHash).
|
||||
|
||||
imposed_constraint(Hash, "virtual_on_edge", ParentName, SpliceName, VirtName) :-
|
||||
hash_attr(Hash, "virtual_on_edge", ParentName, DepName, VirtName),
|
||||
attr("splice_at_hash", node(ID, ParentName), node(SID, SpliceName), DepName, DepHash).
|
||||
|
@@ -59,7 +59,7 @@
|
||||
import re
|
||||
import socket
|
||||
import warnings
|
||||
from typing import Any, Callable, Dict, List, Match, Optional, Set, Tuple, Union
|
||||
from typing import Any, Callable, Dict, Iterable, List, Match, Optional, Set, Tuple, Union
|
||||
|
||||
import archspec.cpu
|
||||
|
||||
@@ -877,8 +877,9 @@ def constrain(self, other):
|
||||
# Next, if any flags in other propagate, we force them to propagate in our case
|
||||
shared = list(sorted(set(other[flag_type]) - extra_other))
|
||||
for x, y in _shared_subset_pair_iterate(shared, sorted(self[flag_type])):
|
||||
if x.propagate:
|
||||
y.propagate = True
|
||||
if y.propagate is True and x.propagate is False:
|
||||
changed = True
|
||||
y.propagate = False
|
||||
|
||||
# TODO: what happens if flag groups with a partial (but not complete)
|
||||
# intersection specify different behaviors for flag propagation?
|
||||
@@ -933,6 +934,7 @@ def _cmp_iter(self):
|
||||
def flags():
|
||||
for flag in v:
|
||||
yield flag
|
||||
yield flag.propagate
|
||||
|
||||
yield flags
|
||||
|
||||
@@ -963,10 +965,6 @@ def _sort_by_dep_types(dspec: DependencySpec):
|
||||
return dspec.depflag
|
||||
|
||||
|
||||
#: Enum for edge directions
|
||||
EdgeDirection = lang.enum(parent=0, child=1)
|
||||
|
||||
|
||||
@lang.lazy_lexicographic_ordering
|
||||
class _EdgeMap(collections.abc.Mapping):
|
||||
"""Represent a collection of edges (DependencySpec objects) in the DAG.
|
||||
@@ -980,26 +978,20 @@ class _EdgeMap(collections.abc.Mapping):
|
||||
|
||||
__slots__ = "edges", "store_by_child"
|
||||
|
||||
def __init__(self, store_by=EdgeDirection.child):
|
||||
# Sanitize input arguments
|
||||
msg = 'unexpected value for "store_by" argument'
|
||||
assert store_by in (EdgeDirection.child, EdgeDirection.parent), msg
|
||||
def __init__(self, store_by_child: bool = True) -> None:
|
||||
self.edges: Dict[str, List[DependencySpec]] = {}
|
||||
self.store_by_child = store_by_child
|
||||
|
||||
#: This dictionary maps a package name to a list of edges
|
||||
#: i.e. to a list of DependencySpec objects
|
||||
self.edges = {}
|
||||
self.store_by_child = store_by == EdgeDirection.child
|
||||
|
||||
def __getitem__(self, key):
|
||||
def __getitem__(self, key: str) -> List[DependencySpec]:
|
||||
return self.edges[key]
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.edges)
|
||||
|
||||
def __len__(self):
|
||||
def __len__(self) -> int:
|
||||
return len(self.edges)
|
||||
|
||||
def add(self, edge: DependencySpec):
|
||||
def add(self, edge: DependencySpec) -> None:
|
||||
key = edge.spec.name if self.store_by_child else edge.parent.name
|
||||
if key in self.edges:
|
||||
lst = self.edges[key]
|
||||
@@ -1008,8 +1000,8 @@ def add(self, edge: DependencySpec):
|
||||
else:
|
||||
self.edges[key] = [edge]
|
||||
|
||||
def __str__(self):
|
||||
return "{deps: %s}" % ", ".join(str(d) for d in sorted(self.values()))
|
||||
def __str__(self) -> str:
|
||||
return f"{{deps: {', '.join(str(d) for d in sorted(self.values()))}}}"
|
||||
|
||||
def _cmp_iter(self):
|
||||
for item in sorted(itertools.chain.from_iterable(self.edges.values())):
|
||||
@@ -1026,24 +1018,32 @@ def copy(self):
|
||||
|
||||
return clone
|
||||
|
||||
def select(self, parent=None, child=None, depflag: dt.DepFlag = dt.ALL):
|
||||
"""Select a list of edges and return them.
|
||||
def select(
|
||||
self,
|
||||
*,
|
||||
parent: Optional[str] = None,
|
||||
child: Optional[str] = None,
|
||||
depflag: dt.DepFlag = dt.ALL,
|
||||
virtuals: Optional[List[str]] = None,
|
||||
) -> List[DependencySpec]:
|
||||
"""Selects a list of edges and returns them.
|
||||
|
||||
If an edge:
|
||||
|
||||
- Has *any* of the dependency types passed as argument,
|
||||
- Matches the parent and/or child name, if passed
|
||||
- Matches the parent and/or child name
|
||||
- Provides *any* of the virtuals passed as argument
|
||||
|
||||
then it is selected.
|
||||
|
||||
The deptypes argument needs to be a flag, since the method won't
|
||||
convert it for performance reasons.
|
||||
|
||||
Args:
|
||||
parent (str): name of the parent package
|
||||
child (str): name of the child package
|
||||
parent: name of the parent package
|
||||
child: name of the child package
|
||||
depflag: allowed dependency types in flag form
|
||||
|
||||
Returns:
|
||||
List of DependencySpec objects
|
||||
virtuals: list of virtuals on the edge
|
||||
"""
|
||||
if not depflag:
|
||||
return []
|
||||
@@ -1062,6 +1062,10 @@ def select(self, parent=None, child=None, depflag: dt.DepFlag = dt.ALL):
|
||||
# Filter by allowed dependency types
|
||||
selected = (dep for dep in selected if not dep.depflag or (depflag & dep.depflag))
|
||||
|
||||
# Filter by virtuals
|
||||
if virtuals is not None:
|
||||
selected = (dep for dep in selected if any(v in dep.virtuals for v in virtuals))
|
||||
|
||||
return list(selected)
|
||||
|
||||
def clear(self):
|
||||
@@ -1470,20 +1474,14 @@ def __init__(
|
||||
self.architecture = None
|
||||
self.compiler = None
|
||||
self.compiler_flags = FlagMap(self)
|
||||
self._dependents = _EdgeMap(store_by=EdgeDirection.parent)
|
||||
self._dependencies = _EdgeMap(store_by=EdgeDirection.child)
|
||||
self._dependents = _EdgeMap(store_by_child=False)
|
||||
self._dependencies = _EdgeMap(store_by_child=True)
|
||||
self.namespace = None
|
||||
|
||||
# initial values for all spec hash types
|
||||
for h in ht.hashes:
|
||||
setattr(self, h.attr, None)
|
||||
|
||||
# hash of package.py at the time of concretization
|
||||
self._package_hash = None
|
||||
|
||||
# dictionary of source artifact hashes, set at concretization time
|
||||
self._artifact_hashes = None
|
||||
|
||||
# Python __hash__ is handled separately from the cached spec hashes
|
||||
self._dunder_hash = None
|
||||
|
||||
@@ -1597,7 +1595,7 @@ def _get_dependency(self, name):
|
||||
return deps[0]
|
||||
|
||||
def edges_from_dependents(
|
||||
self, name=None, depflag: dt.DepFlag = dt.ALL
|
||||
self, name=None, depflag: dt.DepFlag = dt.ALL, *, virtuals: Optional[List[str]] = None
|
||||
) -> List[DependencySpec]:
|
||||
"""Return a list of edges connecting this node in the DAG
|
||||
to parents.
|
||||
@@ -1605,20 +1603,25 @@ def edges_from_dependents(
|
||||
Args:
|
||||
name (str): filter dependents by package name
|
||||
depflag: allowed dependency types
|
||||
virtuals: allowed virtuals
|
||||
"""
|
||||
return [d for d in self._dependents.select(parent=name, depflag=depflag)]
|
||||
return [
|
||||
d for d in self._dependents.select(parent=name, depflag=depflag, virtuals=virtuals)
|
||||
]
|
||||
|
||||
def edges_to_dependencies(
|
||||
self, name=None, depflag: dt.DepFlag = dt.ALL
|
||||
self, name=None, depflag: dt.DepFlag = dt.ALL, *, virtuals: Optional[List[str]] = None
|
||||
) -> List[DependencySpec]:
|
||||
"""Return a list of edges connecting this node in the DAG
|
||||
to children.
|
||||
"""Returns a list of edges connecting this node in the DAG to children.
|
||||
|
||||
Args:
|
||||
name (str): filter dependencies by package name
|
||||
depflag: allowed dependency types
|
||||
virtuals: allowed virtuals
|
||||
"""
|
||||
return [d for d in self._dependencies.select(child=name, depflag=depflag)]
|
||||
return [
|
||||
d for d in self._dependencies.select(child=name, depflag=depflag, virtuals=virtuals)
|
||||
]
|
||||
|
||||
@property
|
||||
def edge_attributes(self) -> str:
|
||||
@@ -1641,17 +1644,24 @@ def edge_attributes(self) -> str:
|
||||
return f"[{result}]"
|
||||
|
||||
def dependencies(
|
||||
self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL
|
||||
self,
|
||||
name=None,
|
||||
deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL,
|
||||
*,
|
||||
virtuals: Optional[List[str]] = None,
|
||||
) -> List["Spec"]:
|
||||
"""Return a list of direct dependencies (nodes in the DAG).
|
||||
"""Returns a list of direct dependencies (nodes in the DAG)
|
||||
|
||||
Args:
|
||||
name (str): filter dependencies by package name
|
||||
name: filter dependencies by package name
|
||||
deptype: allowed dependency types
|
||||
virtuals: allowed virtuals
|
||||
"""
|
||||
if not isinstance(deptype, dt.DepFlag):
|
||||
deptype = dt.canonicalize(deptype)
|
||||
return [d.spec for d in self.edges_to_dependencies(name, depflag=deptype)]
|
||||
return [
|
||||
d.spec for d in self.edges_to_dependencies(name, depflag=deptype, virtuals=virtuals)
|
||||
]
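A small usage sketch of the new keyword (the spec and virtual names are assumed; the spec must be concrete for virtuals to be recorded on its edges):

    import spack.deptypes as dt
    from spack.spec import Spec

    spec = Spec("hdf5 +mpi").concretized()

    # only direct dependencies reached through an edge that provides "mpi"
    for dep in spec.dependencies(deptype=dt.LINK | dt.RUN, virtuals=["mpi"]):
        print(dep.name)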
|
||||
|
||||
def dependents(
|
||||
self, name=None, deptype: Union[dt.DepTypes, dt.DepFlag] = dt.ALL
|
||||
@@ -1974,6 +1984,10 @@ def spec_hash(self, hash):
|
||||
Arguments:
|
||||
hash (spack.hash_types.SpecHashDescriptor): type of hash to generate.
|
||||
"""
|
||||
# TODO: currently we strip build dependencies by default. Rethink
|
||||
# this when we move to using package hashing on all specs.
|
||||
if hash.override is not None:
|
||||
return hash.override(self)
|
||||
node_dict = self.to_node_dict(hash=hash)
|
||||
json_text = sjson.dump(node_dict)
|
||||
# This implements "frankenhashes", preserving the last 7 characters of the
|
||||
@@ -1983,7 +1997,7 @@ def spec_hash(self, hash):
|
||||
return out[:-7] + self.build_spec.spec_hash(hash)[-7:]
|
||||
return out
|
||||
|
||||
def _cached_hash(self, hash, length=None):
|
||||
def _cached_hash(self, hash, length=None, force=False):
|
||||
"""Helper function for storing a cached hash on the spec.
|
||||
|
||||
This will run spec_hash() with the deptype and package_hash
|
||||
@@ -1993,6 +2007,7 @@ def _cached_hash(self, hash, length=None):
|
||||
Arguments:
|
||||
hash (spack.hash_types.SpecHashDescriptor): type of hash to generate.
|
||||
length (int): length of hash prefix to return (default is full hash string)
|
||||
force (bool): cache the hash even if spec is not concrete (default False)
|
||||
"""
|
||||
if not hash.attr:
|
||||
return self.spec_hash(hash)[:length]
|
||||
@@ -2002,24 +2017,21 @@ def _cached_hash(self, hash, length=None):
|
||||
return hash_string[:length]
|
||||
else:
|
||||
hash_string = self.spec_hash(hash)
|
||||
if self.concrete:
|
||||
if force or self.concrete:
|
||||
setattr(self, hash.attr, hash_string)
|
||||
|
||||
return hash_string[:length]
|
||||
|
||||
def package_hash(self):
|
||||
"""Compute the hash of the contents of the package for this node"""
|
||||
if not self.concrete:
|
||||
raise ValueError("Spec is not concrete: " + str(self))
|
||||
|
||||
# Concrete specs with the old DAG hash did not have the package hash, so we do
|
||||
# not know what the package looked like at concretization time
|
||||
if not self._package_hash:
|
||||
if self.concrete and not self._package_hash:
|
||||
raise ValueError(
|
||||
"Cannot call package_hash() on concrete specs with the old dag_hash()"
|
||||
)
|
||||
|
||||
return self._package_hash
|
||||
return self._cached_hash(ht.package_hash)
|
||||
|
||||
def dag_hash(self, length=None):
|
||||
"""This is Spack's default hash, used to identify installations.
|
||||
@@ -2189,6 +2201,18 @@ def to_node_dict(self, hash=ht.dag_hash):
|
||||
if params:
|
||||
d["parameters"] = params
|
||||
|
||||
if params and not self.concrete:
|
||||
flag_names = [
|
||||
name
|
||||
for name, flags in self.compiler_flags.items()
|
||||
if any(x.propagate for x in flags)
|
||||
]
|
||||
d["propagate"] = sorted(
|
||||
itertools.chain(
|
||||
[v.name for v in self.variants.values() if v.propagate], flag_names
|
||||
)
|
||||
)
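Roughly, this means an abstract spec with propagated variants or flags now records their names in its node dict (a sketch; the package name is made up and the exact layout may differ):

    from spack.spec import Spec

    s = Spec("mypkg ++debug cflags==-O2")  # propagated variant and propagated flag
    d = s.to_node_dict()
    print(d.get("propagate"))              # e.g. ['cflags', 'debug']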
|
||||
|
||||
if self.external:
|
||||
d["external"] = syaml.syaml_dict(
|
||||
[
|
||||
@@ -2206,25 +2230,23 @@ def to_node_dict(self, hash=ht.dag_hash):
|
||||
if hasattr(variant, "_patches_in_order_of_appearance"):
|
||||
d["patches"] = variant._patches_in_order_of_appearance
|
||||
|
||||
if self._concrete and hash.package_hash:
|
||||
# We use the `_package_hash` attribute here instead of `self.package_hash()`
|
||||
# because `_package_hash` is *always* assigned at concretization time. If
|
||||
# the attribute is present, we should include it. If it isn't, we avoid
|
||||
# computing it because a) the package may no longer exist, or b) this is an
|
||||
# older spec and the `dag_hash` didn't include the package hash when the
|
||||
# spec was concretized.
|
||||
if hasattr(self, "_package_hash") and self._package_hash:
|
||||
d["package_hash"] = self._package_hash
|
||||
if (
|
||||
self._concrete
|
||||
and hash.package_hash
|
||||
and hasattr(self, "_package_hash")
|
||||
and self._package_hash
|
||||
):
|
||||
# We use the attribute here instead of `self.package_hash()` because this
|
||||
# should *always* be assigned at concretization time. We don't want to try
|
||||
# to compute a package hash for a concrete spec where a) the package might not
|
||||
# exist, or b) the `dag_hash` didn't include the package hash when the spec
|
||||
# was concretized.
|
||||
package_hash = self._package_hash
|
||||
|
||||
if self._artifact_hashes:
|
||||
for key, source_list in sorted(self._artifact_hashes.items()):
|
||||
# sources may be dictionaries (for archives/resources)
|
||||
def order(source):
|
||||
if isinstance(source, dict):
|
||||
return syaml.syaml_dict(sorted(source.items()))
|
||||
return source
|
||||
|
||||
d[key] = [order(source) for source in source_list]
|
||||
# Full hashes are in bytes
|
||||
if not isinstance(package_hash, str) and isinstance(package_hash, bytes):
|
||||
package_hash = package_hash.decode("utf-8")
|
||||
d["package_hash"] = package_hash
|
||||
|
||||
# Note: Relies on sorting dict by keys later in algorithm.
|
||||
deps = self._dependencies_dict(depflag=hash.depflag)
|
||||
@@ -2363,16 +2385,10 @@ def node_dict_with_hashes(self, hash=ht.dag_hash):
|
||||
spec is concrete, the full hash is added as well. If 'build' is in
|
||||
the hash_type, the build hash is also added."""
|
||||
node = self.to_node_dict(hash)
|
||||
# All specs have at least a DAG hash
|
||||
node[ht.dag_hash.name] = self.dag_hash()
|
||||
|
||||
# dag_hash is lazily computed -- but if we write a spec out, we want it
|
||||
# to be included. This is effectively the last chance we get to compute
|
||||
# it accurately.
|
||||
if self.concrete:
|
||||
# all specs have at least a DAG hash
|
||||
node[ht.dag_hash.name] = self.dag_hash()
|
||||
|
||||
else:
|
||||
if not self.concrete:
|
||||
node["concrete"] = False
|
||||
|
||||
# we can also give them other hash types if we want
|
||||
@@ -2812,7 +2828,7 @@ def ensure_no_deprecated(root):
|
||||
msg += " For each package listed, choose another spec\n"
|
||||
raise SpecDeprecatedError(msg)
|
||||
|
||||
def concretize(self, tests: Union[bool, List[str]] = False) -> None:
|
||||
def concretize(self, tests: Union[bool, Iterable[str]] = False) -> None:
|
||||
"""Concretize the current spec.
|
||||
|
||||
Args:
|
||||
@@ -2891,7 +2907,7 @@ def _mark_concrete(self, value=True):
|
||||
if (not value) and s.concrete and s.installed:
|
||||
continue
|
||||
elif not value:
|
||||
s.clear_cached_hashes()
|
||||
s.clear_caches()
|
||||
s._mark_root_concrete(value)
|
||||
|
||||
def _finalize_concretization(self):
|
||||
@@ -2923,15 +2939,12 @@ def _finalize_concretization(self):
|
||||
# We only assign package hash to not-yet-concrete specs, for which we know
|
||||
# we can compute the hash.
|
||||
if not spec.concrete:
|
||||
# package hash assignment has to happen before we mark concrete, so that
|
||||
# we know what was *already* concrete.
|
||||
# can't use self.package here b/c not concrete yet
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
|
||||
pkg = pkg_cls(spec)
|
||||
artifact_hashes = pkg.artifact_hashes()
|
||||
# we need force=True here because package hash assignment has to happen
|
||||
# before we mark concrete, so that we know what was *already* concrete.
|
||||
spec._cached_hash(ht.package_hash, force=True)
|
||||
|
||||
spec._package_hash = artifact_hashes.pop("package_hash")
|
||||
spec._artifact_hashes = artifact_hashes
|
||||
# keep this check here to ensure package hash is saved
|
||||
assert getattr(spec, ht.package_hash.attr)
|
||||
|
||||
# Mark everything in the spec as concrete
|
||||
self._mark_concrete()
|
||||
@@ -2943,7 +2956,7 @@ def _finalize_concretization(self):
|
||||
for spec in self.traverse():
|
||||
spec._cached_hash(ht.dag_hash)
|
||||
|
||||
def concretized(self, tests=False):
|
||||
def concretized(self, tests: Union[bool, Iterable[str]] = False) -> "spack.spec.Spec":
|
||||
"""This is a non-destructive version of concretize().
|
||||
|
||||
First clones, then returns a concrete version of this package
|
||||
@@ -3007,7 +3020,12 @@ def ensure_valid_variants(spec):
|
||||
pkg_variants = pkg_cls.variant_names()
|
||||
# reserved names are variants that may be set on any package
|
||||
# but are not necessarily recorded by the package's class
|
||||
not_existing = set(spec.variants) - (set(pkg_variants) | set(vt.reserved_names))
|
||||
propagate_variants = [name for name, variant in spec.variants.items() if variant.propagate]
|
||||
|
||||
not_existing = set(spec.variants) - (
|
||||
set(pkg_variants) | set(vt.reserved_names) | set(propagate_variants)
|
||||
)
|
||||
|
||||
if not_existing:
|
||||
raise vt.UnknownVariantError(
|
||||
f"No such variant {not_existing} for spec: '{spec}'", list(not_existing)
|
||||
@@ -3034,6 +3052,10 @@ def constrain(self, other, deps=True):
|
||||
raise spack.error.UnsatisfiableSpecError(self, other, "constrain a concrete spec")
|
||||
|
||||
other = self._autospec(other)
|
||||
if other.concrete and other.satisfies(self):
|
||||
self._dup(other)
|
||||
return True
|
||||
|
||||
if other.abstract_hash:
|
||||
if not self.abstract_hash or other.abstract_hash.startswith(self.abstract_hash):
|
||||
self.abstract_hash = other.abstract_hash
|
||||
@@ -3528,8 +3550,8 @@ def _dup(self, other, deps: Union[bool, dt.DepTypes, dt.DepFlag] = True, clearde
|
||||
self.architecture = other.architecture.copy() if other.architecture else None
|
||||
self.compiler = other.compiler.copy() if other.compiler else None
|
||||
if cleardeps:
|
||||
self._dependents = _EdgeMap(store_by=EdgeDirection.parent)
|
||||
self._dependencies = _EdgeMap(store_by=EdgeDirection.child)
|
||||
self._dependents = _EdgeMap(store_by_child=False)
|
||||
self._dependencies = _EdgeMap(store_by_child=True)
|
||||
self.compiler_flags = other.compiler_flags.copy()
|
||||
self.compiler_flags.spec = self
|
||||
self.variants = other.variants.copy()
|
||||
@@ -3567,8 +3589,6 @@ def _dup(self, other, deps: Union[bool, dt.DepTypes, dt.DepFlag] = True, clearde
|
||||
self._normal = other._normal
|
||||
for h in ht.hashes:
|
||||
setattr(self, h.attr, getattr(other, h.attr, None))
|
||||
self._package_hash = getattr(other, "_package_hash", None)
|
||||
self._artifact_hashes = getattr(other, "_artifact_hashes", None)
|
||||
else:
|
||||
self._dunder_hash = None
|
||||
# Note, we could use other._normal if we are copying all deps, but
|
||||
@@ -3576,8 +3596,6 @@ def _dup(self, other, deps: Union[bool, dt.DepTypes, dt.DepFlag] = True, clearde
|
||||
self._normal = False
|
||||
for h in ht.hashes:
|
||||
setattr(self, h.attr, None)
|
||||
self._package_hash = None
|
||||
self._artifact_hashes = None
|
||||
|
||||
return changed
|
||||
|
||||
@@ -4041,7 +4059,7 @@ def format_path(
|
||||
|
||||
def __str__(self):
|
||||
if self._concrete:
|
||||
return self.format("{name}{@version}{/hash:7}")
|
||||
return self.format("{name}{@version}{/hash}")
|
||||
|
||||
if not self._dependencies:
|
||||
return self.format()
|
||||
@@ -4238,11 +4256,7 @@ def _splice_detach_and_add_dependents(self, replacement, context):
|
||||
for ancestor in ancestors_in_context:
|
||||
# Only set it if it hasn't been spliced before
|
||||
ancestor._build_spec = ancestor._build_spec or ancestor.copy()
|
||||
|
||||
# reset all hashes *except* package and artifact hashes (since we are not
|
||||
# rebuilding the spec)
|
||||
ancestor.clear_cached_hashes(content_hashes=False)
|
||||
|
||||
ancestor.clear_caches(ignore=(ht.package_hash.attr,))
|
||||
for edge in ancestor.edges_to_dependencies(depflag=dt.BUILD):
|
||||
if edge.depflag & ~dt.BUILD:
|
||||
edge.depflag &= ~dt.BUILD
|
||||
@@ -4436,19 +4450,17 @@ def mask_build_deps(in_spec):
|
||||
|
||||
return spec
|
||||
|
||||
def clear_cached_hashes(self, content_hashes=True):
|
||||
def clear_caches(self, ignore=()):
|
||||
"""
|
||||
Clears all cached hashes in a Spec, while preserving other properties.
|
||||
"""
|
||||
for h in ht.hashes:
|
||||
if hasattr(self, h.attr):
|
||||
setattr(self, h.attr, None)
|
||||
|
||||
if content_hashes:
|
||||
self._package_hash = None
|
||||
self._artifact_hashes = None
|
||||
|
||||
self._dunder_hash = None
|
||||
if h.attr not in ignore:
|
||||
if hasattr(self, h.attr):
|
||||
setattr(self, h.attr, None)
|
||||
for attr in ("_dunder_hash", "_prefix"):
|
||||
if attr not in ignore:
|
||||
setattr(self, attr, None)
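For example, the splicing code above resets every cached hash except the package hash, since a splice rewires the DAG but does not change the package sources (usage sketch, package name assumed):

    import spack.hash_types as ht
    from spack.spec import Spec

    spec = Spec("zlib").concretized()
    spec.clear_caches(ignore=(ht.package_hash.attr,))  # drop dag_hash and friends, keep _package_hash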
|
||||
|
||||
def __hash__(self):
|
||||
# If the spec is concrete, we leverage the process hash and just use
|
||||
@@ -4524,8 +4536,69 @@ def substitute(self, vspec):
|
||||
# Set the item
|
||||
super().__setitem__(vspec.name, vspec)
|
||||
|
||||
def satisfies(self, other):
|
||||
return all(k in self and self[k].satisfies(other[k]) for k in other)
|
||||
def partition_variants(self):
|
||||
non_prop, prop = lang.stable_partition(self.values(), lambda x: not x.propagate)
|
||||
# Just return the names
|
||||
non_prop = [x.name for x in non_prop]
|
||||
prop = [x.name for x in prop]
|
||||
return non_prop, prop
|
||||
|
||||
def satisfies(self, other: "VariantMap") -> bool:
|
||||
if self.spec.concrete:
|
||||
return self._satisfies_when_self_concrete(other)
|
||||
return self._satisfies_when_self_abstract(other)
|
||||
|
||||
def _satisfies_when_self_concrete(self, other: "VariantMap") -> bool:
|
||||
non_propagating, propagating = other.partition_variants()
|
||||
result = all(
|
||||
name in self and self[name].satisfies(other[name]) for name in non_propagating
|
||||
)
|
||||
if not propagating:
|
||||
return result
|
||||
|
||||
for node in self.spec.traverse():
|
||||
if not all(
|
||||
node.variants[name].satisfies(other[name])
|
||||
for name in propagating
|
||||
if name in node.variants
|
||||
):
|
||||
return False
|
||||
return result
|
||||
|
||||
def _satisfies_when_self_abstract(self, other: "VariantMap") -> bool:
|
||||
other_non_propagating, other_propagating = other.partition_variants()
|
||||
self_non_propagating, self_propagating = self.partition_variants()
|
||||
|
||||
# First check variants without propagation set
|
||||
result = all(
|
||||
name in self_non_propagating
|
||||
and (self[name].propagate or self[name].satisfies(other[name]))
|
||||
for name in other_non_propagating
|
||||
)
|
||||
if result is False or (not other_propagating and not self_propagating):
|
||||
return result
|
||||
|
||||
# Check that self doesn't contradict variants propagated by other
|
||||
if other_propagating:
|
||||
for node in self.spec.traverse():
|
||||
if not all(
|
||||
node.variants[name].satisfies(other[name])
|
||||
for name in other_propagating
|
||||
if name in node.variants
|
||||
):
|
||||
return False
|
||||
|
||||
# Check that other doesn't contradict variants propagated by self
|
||||
if self_propagating:
|
||||
for node in other.spec.traverse():
|
||||
if not all(
|
||||
node.variants[name].satisfies(self[name])
|
||||
for name in self_propagating
|
||||
if name in node.variants
|
||||
):
|
||||
return False
|
||||
|
||||
return result
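Both code paths can be exercised directly; a rough sketch (names assumed, and the results depend on the concretized DAG):

    from spack.spec import Spec

    abstract = Spec("zlib ++shared")
    print(abstract.satisfies(Spec("zlib ++shared")))   # abstract vs. abstract path

    concrete = Spec("zlib +shared").concretized()
    print(concrete.satisfies(Spec("zlib ++shared")))   # concrete path also walks descendants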
|
||||
|
||||
def intersects(self, other):
|
||||
return all(self[k].intersects(other[k]) for k in other if k in self)
|
||||
@@ -4723,14 +4796,6 @@ def from_node_dict(cls, node):
|
||||
for h in ht.hashes:
|
||||
setattr(spec, h.attr, node.get(h.name, None))
|
||||
|
||||
# old and new-style package hash
|
||||
if "package_hash" in node:
|
||||
spec._package_hash = node["package_hash"]
|
||||
|
||||
# all source artifact hashes
|
||||
if "sources" in node:
|
||||
spec._artifact_hashes = syaml.syaml_dict([("sources", node["sources"])])
|
||||
|
||||
spec.name = name
|
||||
spec.namespace = node.get("namespace", None)
|
||||
|
||||
@@ -4746,13 +4811,17 @@ def from_node_dict(cls, node):
|
||||
else:
|
||||
spec.compiler = None
|
||||
|
||||
propagated_names = node.get("propagate", [])
|
||||
for name, values in node.get("parameters", {}).items():
|
||||
propagate = name in propagated_names
|
||||
if name in _valid_compiler_flags:
|
||||
spec.compiler_flags[name] = []
|
||||
for val in values:
|
||||
spec.compiler_flags.add_flag(name, val, False)
|
||||
spec.compiler_flags.add_flag(name, val, propagate)
|
||||
else:
|
||||
spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, values)
|
||||
spec.variants[name] = vt.MultiValuedVariant.from_node_dict(
|
||||
name, values, propagate=propagate
|
||||
)
|
||||
|
||||
spec.external_path = None
|
||||
spec.external_modules = None
|
||||
|
@@ -39,9 +39,6 @@
|
||||
DEFAULT_INSTALL_TREE_ROOT = os.path.join(spack.paths.opt_path, "spack")
|
||||
|
||||
|
||||
ConfigurationType = Union["spack.config.Configuration", "llnl.util.lang.Singleton"]
|
||||
|
||||
|
||||
def parse_install_tree(config_dict):
|
||||
"""Parse config settings and return values relevant to the store object.
|
||||
|
||||
@@ -207,7 +204,7 @@ def __reduce__(self):
|
||||
)
|
||||
|
||||
|
||||
def create(configuration: ConfigurationType) -> Store:
|
||||
def create(configuration: spack.config.Configuration) -> Store:
|
||||
"""Create a store from the configuration passed as input.
|
||||
|
||||
Args:
|
||||
@@ -240,7 +237,7 @@ def _create_global() -> Store:
|
||||
|
||||
|
||||
#: Singleton store instance
|
||||
STORE: Union[Store, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(_create_global)
|
||||
STORE: Store = llnl.util.lang.Singleton(_create_global) # type: ignore
|
||||
|
||||
|
||||
def reinitialize():
|
||||
|
@@ -17,7 +17,6 @@
|
||||
import multiprocessing
|
||||
import pickle
|
||||
import pydoc
|
||||
import sys
|
||||
from types import ModuleType
|
||||
|
||||
import spack.config
|
||||
@@ -27,9 +26,6 @@
|
||||
import spack.repo
|
||||
import spack.store
|
||||
|
||||
_SERIALIZE = sys.platform == "win32" or (sys.version_info >= (3, 8) and sys.platform == "darwin")
|
||||
|
||||
|
||||
patches = None
|
||||
|
||||
|
||||
@@ -56,7 +52,7 @@ def _restore_and_run(self, fn, test_state):
|
||||
fn()
|
||||
|
||||
def create(self):
|
||||
test_state = TestState()
|
||||
test_state = GlobalStateMarshaler()
|
||||
return multiprocessing.Process(target=self._restore_and_run, args=(self.fn, test_state))
|
||||
|
||||
|
||||
@@ -65,49 +61,56 @@ class PackageInstallContext:
|
||||
needs to be transmitted to a child process.
|
||||
"""
|
||||
|
||||
def __init__(self, pkg):
|
||||
if _SERIALIZE:
|
||||
def __init__(self, pkg, *, ctx=None):
|
||||
ctx = ctx or multiprocessing.get_context()
|
||||
self.serialize = ctx.get_start_method() != "fork"
|
||||
if self.serialize:
|
||||
self.serialized_pkg = serialize(pkg)
|
||||
self.global_state = GlobalStateMarshaler()
|
||||
self.serialized_env = serialize(spack.environment.active_environment())
|
||||
else:
|
||||
self.pkg = pkg
|
||||
self.global_state = None
|
||||
self.env = spack.environment.active_environment()
|
||||
self.spack_working_dir = spack.paths.spack_working_dir
|
||||
self.test_state = TestState()
|
||||
|
||||
def restore(self):
|
||||
self.test_state.restore()
|
||||
spack.paths.spack_working_dir = self.spack_working_dir
|
||||
env = pickle.load(self.serialized_env) if _SERIALIZE else self.env
|
||||
env = pickle.load(self.serialized_env) if self.serialize else self.env
|
||||
# Activating the environment modifies the global configuration, so globals have to
|
||||
# be restored afterward, in case other modifications were applied on top (e.g. from
|
||||
# command line)
|
||||
if env:
|
||||
spack.environment.activate(env)
|
||||
|
||||
if self.serialize:
|
||||
self.global_state.restore()
|
||||
|
||||
# Order of operation is important, since the package might be retrieved
|
||||
# from a repo defined within the environment configuration
|
||||
pkg = pickle.load(self.serialized_pkg) if _SERIALIZE else self.pkg
|
||||
pkg = pickle.load(self.serialized_pkg) if self.serialize else self.pkg
|
||||
return pkg
|
||||
|
||||
|
||||
class TestState:
|
||||
"""Spack tests may modify state that is normally read from disk in memory;
|
||||
this object is responsible for properly serializing that state to be
|
||||
applied to a subprocess. This isn't needed outside of a testing environment
|
||||
but this logic is designed to behave the same inside or outside of tests.
|
||||
class GlobalStateMarshaler:
|
||||
"""Class to serialize and restore global state for child processes.
|
||||
|
||||
Spack may modify state that is normally read from disk or command line in memory;
|
||||
this object is responsible for properly serializing that state to be applied to a subprocess.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
if _SERIALIZE:
|
||||
self.config = spack.config.CONFIG
|
||||
self.platform = spack.platforms.host
|
||||
self.test_patches = store_patches()
|
||||
self.store = spack.store.STORE
|
||||
self.config = spack.config.CONFIG.ensure_unwrapped()
|
||||
self.platform = spack.platforms.host
|
||||
self.test_patches = store_patches()
|
||||
self.store = spack.store.STORE
|
||||
|
||||
def restore(self):
|
||||
if _SERIALIZE:
|
||||
spack.config.CONFIG = self.config
|
||||
spack.repo.PATH = spack.repo.create(self.config)
|
||||
spack.platforms.host = self.platform
|
||||
spack.store.STORE = self.store
|
||||
self.test_patches.restore()
|
||||
spack.config.CONFIG = self.config
|
||||
spack.repo.PATH = spack.repo.create(self.config)
|
||||
spack.platforms.host = self.platform
|
||||
spack.store.STORE = self.store
|
||||
self.test_patches.restore()
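Whether anything gets pickled now depends on the start method of the multiprocessing context rather than on the platform; a quick sketch of the decision PackageInstallContext makes:

    import multiprocessing

    fork_ctx = multiprocessing.get_context("fork")    # typical Linux default
    spawn_ctx = multiprocessing.get_context("spawn")  # macOS/Windows default

    print(fork_ctx.get_start_method() != "fork")   # False -> keep live objects, no GlobalStateMarshaler
    print(spawn_ctx.get_start_method() != "fork")  # True  -> serialize package, env and global state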
|
||||
|
||||
|
||||
class TestPatches:
|
||||
|
lib/spack/spack/test/abi_splicing.py (new file, 247 lines)
@@ -0,0 +1,247 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
""" Test ABI-based splicing of dependencies """
|
||||
|
||||
from typing import List
|
||||
|
||||
import pytest
|
||||
|
||||
import spack.config
|
||||
import spack.deptypes as dt
|
||||
import spack.solver.asp
|
||||
from spack.installer import PackageInstaller
|
||||
from spack.spec import Spec
|
||||
|
||||
|
||||
class CacheManager:
|
||||
def __init__(self, specs: List[str]) -> None:
|
||||
self.req_specs = specs
|
||||
self.concr_specs: List[Spec]
|
||||
self.concr_specs = []
|
||||
|
||||
def __enter__(self):
|
||||
self.concr_specs = [Spec(s).concretized() for s in self.req_specs]
|
||||
for s in self.concr_specs:
|
||||
PackageInstaller([s.package], fake=True, explicit=True).install()
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
for s in self.concr_specs:
|
||||
s.package.do_uninstall()
|
||||
|
||||
|
||||
# MacOS and Windows only work if you pass this function pointer rather than a
|
||||
# closure
|
||||
def _mock_has_runtime_dependencies(_x):
|
||||
return True
|
||||
|
||||
|
||||
def _make_specs_non_buildable(specs: List[str]):
|
||||
output_config = {}
|
||||
for spec in specs:
|
||||
output_config[spec] = {"buildable": False}
|
||||
return output_config
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def splicing_setup(mutable_database, mock_packages, monkeypatch):
|
||||
spack.config.set("concretizer:reuse", True)
|
||||
monkeypatch.setattr(
|
||||
spack.solver.asp, "_has_runtime_dependencies", _mock_has_runtime_dependencies
|
||||
)
|
||||
|
||||
|
||||
def _enable_splicing():
|
||||
spack.config.set("concretizer:splice", {"automatic": True})
|
||||
|
||||
|
||||
def _has_build_dependency(spec: Spec, name: str):
|
||||
return any(s.name == name for s in spec.dependencies(None, dt.BUILD))
|
||||
|
||||
|
||||
def test_simple_reuse(splicing_setup):
|
||||
with CacheManager(["splice-z@1.0.0+compat"]):
|
||||
spack.config.set("packages", _make_specs_non_buildable(["splice-z"]))
|
||||
assert Spec("splice-z").concretized().satisfies(Spec("splice-z"))
|
||||
|
||||
|
||||
def test_simple_dep_reuse(splicing_setup):
|
||||
with CacheManager(["splice-z@1.0.0+compat"]):
|
||||
spack.config.set("packages", _make_specs_non_buildable(["splice-z"]))
|
||||
assert Spec("splice-h@1").concretized().satisfies(Spec("splice-h@1"))
|
||||
|
||||
|
||||
def test_splice_installed_hash(splicing_setup):
|
||||
cache = [
|
||||
"splice-t@1 ^splice-h@1.0.0+compat ^splice-z@1.0.0",
|
||||
"splice-h@1.0.2+compat ^splice-z@1.0.0",
|
||||
]
|
||||
with CacheManager(cache):
|
||||
packages_config = _make_specs_non_buildable(["splice-t", "splice-h"])
|
||||
spack.config.set("packages", packages_config)
|
||||
goal_spec = Spec("splice-t@1 ^splice-h@1.0.2+compat ^splice-z@1.0.0")
|
||||
with pytest.raises(Exception):
|
||||
goal_spec.concretized()
|
||||
_enable_splicing()
|
||||
assert goal_spec.concretized().satisfies(goal_spec)
|
||||
|
||||
|
||||
def test_splice_build_splice_node(splicing_setup):
|
||||
with CacheManager(["splice-t@1 ^splice-h@1.0.0+compat ^splice-z@1.0.0+compat"]):
|
||||
spack.config.set("packages", _make_specs_non_buildable(["splice-t"]))
|
||||
goal_spec = Spec("splice-t@1 ^splice-h@1.0.2+compat ^splice-z@1.0.0+compat")
|
||||
with pytest.raises(Exception):
|
||||
goal_spec.concretized()
|
||||
_enable_splicing()
|
||||
assert goal_spec.concretized().satisfies(goal_spec)
|
||||
|
||||
|
||||
def test_double_splice(splicing_setup):
|
||||
cache = [
|
||||
"splice-t@1 ^splice-h@1.0.0+compat ^splice-z@1.0.0+compat",
|
||||
"splice-h@1.0.2+compat ^splice-z@1.0.1+compat",
|
||||
"splice-z@1.0.2+compat",
|
||||
]
|
||||
with CacheManager(cache):
|
||||
freeze_builds_config = _make_specs_non_buildable(["splice-t", "splice-h", "splice-z"])
|
||||
spack.config.set("packages", freeze_builds_config)
|
||||
goal_spec = Spec("splice-t@1 ^splice-h@1.0.2+compat ^splice-z@1.0.2+compat")
|
||||
with pytest.raises(Exception):
|
||||
goal_spec.concretized()
|
||||
_enable_splicing()
|
||||
assert goal_spec.concretized().satisfies(goal_spec)
|
||||
|
||||
|
||||
# The next two tests are mirrors of one another
|
||||
def test_virtual_multi_splices_in(splicing_setup):
|
||||
cache = [
|
||||
"depends-on-virtual-with-abi ^virtual-abi-1",
|
||||
"depends-on-virtual-with-abi ^virtual-abi-2",
|
||||
]
|
||||
goal_specs = [
|
||||
"depends-on-virtual-with-abi ^virtual-abi-multi abi=one",
|
||||
"depends-on-virtual-with-abi ^virtual-abi-multi abi=two",
|
||||
]
|
||||
with CacheManager(cache):
|
||||
spack.config.set("packages", _make_specs_non_buildable(["depends-on-virtual-with-abi"]))
|
||||
for gs in goal_specs:
|
||||
with pytest.raises(Exception):
|
||||
Spec(gs).concretized()
|
||||
_enable_splicing()
|
||||
for gs in goal_specs:
|
||||
assert Spec(gs).concretized().satisfies(gs)
|
||||
|
||||
|
||||
def test_virtual_multi_can_be_spliced(splicing_setup):
|
||||
cache = [
|
||||
"depends-on-virtual-with-abi ^virtual-abi-multi abi=one",
|
||||
"depends-on-virtual-with-abi ^virtual-abi-multi abi=two",
|
||||
]
|
||||
goal_specs = [
|
||||
"depends-on-virtual-with-abi ^virtual-abi-1",
|
||||
"depends-on-virtual-with-abi ^virtual-abi-2",
|
||||
]
|
||||
with CacheManager(cache):
|
||||
spack.config.set("packages", _make_specs_non_buildable(["depends-on-virtual-with-abi"]))
|
||||
with pytest.raises(Exception):
|
||||
for gs in goal_specs:
|
||||
Spec(gs).concretized()
|
||||
_enable_splicing()
|
||||
for gs in goal_specs:
|
||||
assert Spec(gs).concretized().satisfies(gs)
|
||||
|
||||
|
||||
def test_manyvariant_star_matching_variant_splice(splicing_setup):
|
||||
cache = [
|
||||
# can_splice("manyvariants@1.0.0", when="@1.0.1", match_variants="*")
|
||||
"depends-on-manyvariants ^manyvariants@1.0.0+a+b c=v1 d=v2",
|
||||
"depends-on-manyvariants ^manyvariants@1.0.0~a~b c=v3 d=v3",
|
||||
]
|
||||
goal_specs = [
|
||||
Spec("depends-on-manyvariants ^manyvariants@1.0.1+a+b c=v1 d=v2"),
|
||||
Spec("depends-on-manyvariants ^manyvariants@1.0.1~a~b c=v3 d=v3"),
|
||||
]
|
||||
with CacheManager(cache):
|
||||
freeze_build_config = {"depends-on-manyvariants": {"buildable": False}}
|
||||
spack.config.set("packages", freeze_build_config)
|
||||
for goal in goal_specs:
|
||||
with pytest.raises(Exception):
|
||||
goal.concretized()
|
||||
_enable_splicing()
|
||||
for goal in goal_specs:
|
||||
assert goal.concretized().satisfies(goal)
|
||||
|
||||
|
||||
def test_manyvariant_limited_matching(splicing_setup):
|
||||
cache = [
|
||||
# can_splice("manyvariants@2.0.0+a~b", when="@2.0.1~a+b", match_variants=["c", "d"])
|
||||
"depends-on-manyvariants@2.0 ^manyvariants@2.0.0+a~b c=v3 d=v2",
|
||||
# can_splice("manyvariants@2.0.0 c=v1 d=v1", when="@2.0.1+a+b")
|
||||
"depends-on-manyvariants@2.0 ^manyvariants@2.0.0~a~b c=v1 d=v1",
|
||||
]
|
||||
goal_specs = [
|
||||
Spec("depends-on-manyvariants@2.0 ^manyvariants@2.0.1~a+b c=v3 d=v2"),
|
||||
Spec("depends-on-manyvariants@2.0 ^manyvariants@2.0.1+a+b c=v3 d=v3"),
|
||||
]
|
||||
with CacheManager(cache):
|
||||
freeze_build_config = {"depends-on-manyvariants": {"buildable": False}}
|
||||
spack.config.set("packages", freeze_build_config)
|
||||
for s in goal_specs:
|
||||
with pytest.raises(Exception):
|
||||
s.concretized()
|
||||
_enable_splicing()
|
||||
for s in goal_specs:
|
||||
assert s.concretized().satisfies(s)
|
||||
|
||||
|
||||
def test_external_splice_same_name(splicing_setup):
|
||||
cache = [
|
||||
"splice-h@1.0.0 ^splice-z@1.0.0+compat",
|
||||
"splice-t@1.0 ^splice-h@1.0.1 ^splice-z@1.0.1+compat",
|
||||
]
|
||||
packages_yaml = {
|
||||
"splice-z": {"externals": [{"spec": "splice-z@1.0.2+compat", "prefix": "/usr"}]}
|
||||
}
|
||||
goal_specs = [
|
||||
Spec("splice-h@1.0.0 ^splice-z@1.0.2"),
|
||||
Spec("splice-t@1.0 ^splice-h@1.0.1 ^splice-z@1.0.2"),
|
||||
]
|
||||
with CacheManager(cache):
|
||||
spack.config.set("packages", packages_yaml)
|
||||
_enable_splicing()
|
||||
for s in goal_specs:
|
||||
assert s.concretized().satisfies(s)
|
||||
|
||||
|
||||
def test_spliced_build_deps_only_in_build_spec(splicing_setup):
|
||||
cache = ["splice-t@1.0 ^splice-h@1.0.1 ^splice-z@1.0.0"]
|
||||
goal_spec = Spec("splice-t@1.0 ^splice-h@1.0.2 ^splice-z@1.0.0")
|
||||
|
||||
with CacheManager(cache):
|
||||
_enable_splicing()
|
||||
concr_goal = goal_spec.concretized()
|
||||
build_spec = concr_goal._build_spec
|
||||
# Spec has been spliced
|
||||
assert build_spec is not None
|
||||
# Build spec has spliced build dependencies
|
||||
assert _has_build_dependency(build_spec, "splice-h")
|
||||
assert _has_build_dependency(build_spec, "splice-z")
|
||||
# Spliced build dependencies are removed
|
||||
assert len(concr_goal.dependencies(None, dt.BUILD)) == 0
|
||||
|
||||
|
||||
def test_spliced_transitive_dependency(splicing_setup):
|
||||
cache = ["splice-depends-on-t@1.0 ^splice-h@1.0.1"]
|
||||
goal_spec = Spec("splice-depends-on-t^splice-h@1.0.2")
|
||||
|
||||
with CacheManager(cache):
|
||||
spack.config.set("packages", _make_specs_non_buildable(["splice-depends-on-t"]))
|
||||
_enable_splicing()
|
||||
concr_goal = goal_spec.concretized()
|
||||
# Spec has been spliced
|
||||
assert concr_goal._build_spec is not None
|
||||
assert concr_goal["splice-t"]._build_spec is not None
|
||||
assert concr_goal.satisfies(goal_spec)
|
||||
# Spliced build dependencies are removed
|
||||
assert len(concr_goal.dependencies(None, dt.BUILD)) == 0
|
@@ -15,6 +15,8 @@
|
||||
from llnl.util.filesystem import HeaderList, LibraryList
|
||||
|
||||
import spack.build_environment
|
||||
import spack.compiler
|
||||
import spack.compilers
|
||||
import spack.config
|
||||
import spack.deptypes as dt
|
||||
import spack.package_base
|
||||
|
@@ -199,7 +199,7 @@ def check_args(cc, args, expected):
|
||||
"""
|
||||
with set_env(SPACK_TEST_COMMAND="dump-args"):
|
||||
cc_modified_args = cc(*args, output=str).strip().split("\n")
|
||||
assert expected == cc_modified_args
|
||||
assert cc_modified_args == expected
|
||||
|
||||
|
||||
def check_args_contents(cc, args, must_contain, must_not_contain):
|
||||
@@ -272,6 +272,43 @@ def test_ld_mode(wrapper_environment):
|
||||
assert dump_mode(ld, ["foo.o", "bar.o", "baz.o", "-o", "foo", "-Wl,-rpath,foo"]) == "ld"
|
||||
|
||||
|
||||
def test_ld_unterminated_rpath(wrapper_environment):
|
||||
check_args(
|
||||
ld,
|
||||
["foo.o", "bar.o", "baz.o", "-o", "foo", "-rpath"],
|
||||
["ld", "--disable-new-dtags", "foo.o", "bar.o", "baz.o", "-o", "foo", "-rpath"],
|
||||
)
|
||||
|
||||
|
||||
def test_xlinker_unterminated_rpath(wrapper_environment):
|
||||
check_args(
|
||||
cc,
|
||||
["foo.o", "bar.o", "baz.o", "-o", "foo", "-Xlinker", "-rpath"],
|
||||
[real_cc]
|
||||
+ target_args
|
||||
+ [
|
||||
"-Wl,--disable-new-dtags",
|
||||
"foo.o",
|
||||
"bar.o",
|
||||
"baz.o",
|
||||
"-o",
|
||||
"foo",
|
||||
"-Xlinker",
|
||||
"-rpath",
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
def test_wl_unterminated_rpath(wrapper_environment):
|
||||
check_args(
|
||||
cc,
|
||||
["foo.o", "bar.o", "baz.o", "-o", "foo", "-Wl,-rpath"],
|
||||
[real_cc]
|
||||
+ target_args
|
||||
+ ["-Wl,--disable-new-dtags", "foo.o", "bar.o", "baz.o", "-o", "foo", "-Wl,-rpath"],
|
||||
)
|
||||
|
||||
|
||||
def test_ld_flags(wrapper_environment, wrapper_flags):
|
||||
check_args(
|
||||
ld,
|
||||
|
@@ -9,6 +9,7 @@
|
||||
import pathlib
|
||||
import shutil
|
||||
from argparse import Namespace
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -74,7 +75,7 @@ def setup_combined_multiple_env():
|
||||
env("create", "test1")
|
||||
test1 = ev.read("test1")
|
||||
with test1:
|
||||
add("zlib")
|
||||
add("mpich@1.0")
|
||||
test1.concretize()
|
||||
test1.write()
|
||||
|
||||
@@ -116,6 +117,99 @@ def check_viewdir_removal(viewdir):
|
||||
) == ["projections.yaml"]
|
||||
|
||||
|
||||
def test_env_track_nonexistant_path_fails(capfd):
|
||||
with pytest.raises(spack.main.SpackCommandError):
|
||||
env("track", "path/does/not/exist")
|
||||
|
||||
out, _ = capfd.readouterr()
|
||||
assert "doesn't contain an environment" in out
|
||||
|
||||
|
||||
def test_env_track_existing_env_fails(capfd):
|
||||
env("create", "track_test")
|
||||
|
||||
with pytest.raises(spack.main.SpackCommandError):
|
||||
env("track", "--name", "track_test", ev.environment_dir_from_name("track_test"))
|
||||
|
||||
out, _ = capfd.readouterr()
|
||||
assert "environment named track_test already exists" in out
|
||||
|
||||
|
||||
def test_env_track_valid(tmp_path):
    with fs.working_dir(str(tmp_path)):
        # create an independent environment
        env("create", "-d", ".")

        # test tracking an environment in known store
        env("track", "--name", "test1", ".")

        # test removing the tracked environment to ensure the independent one isn't deleted
        env("rm", "-y", "test1")

        assert os.path.isfile("spack.yaml")


def test_env_untrack_valid(tmp_path):
    with fs.working_dir(str(tmp_path)):
        # create an independent environment
        env("create", "-d", ".")

        # test tracking an environment in known store
        env("track", "--name", "test_untrack", ".")
        env("untrack", "--yes-to-all", "test_untrack")

        # check that the environment was successfully untracked
        out = env("ls")
        assert "test_untrack" not in out


def test_env_untrack_invalid_name():
|
||||
# test untracking an environment that doesn't exist
|
||||
env_name = "invalid_enviornment_untrack"
|
||||
|
||||
out = env("untrack", env_name)
|
||||
|
||||
assert f"Environment '{env_name}' does not exist" in out
|
||||
|
||||
|
||||
def test_env_untrack_when_active(tmp_path, capfd):
|
||||
env_name = "test_untrack_active"
|
||||
|
||||
with fs.working_dir(str(tmp_path)):
|
||||
# create an independent environment
|
||||
env("create", "-d", ".")
|
||||
|
||||
# test tracking an environment in known store
|
||||
env("track", "--name", env_name, ".")
|
||||
|
||||
active_env = ev.read(env_name)
|
||||
with active_env:
|
||||
with pytest.raises(spack.main.SpackCommandError):
|
||||
env("untrack", "--yes-to-all", env_name)
|
||||
|
||||
# check that environment could not be untracked while active
|
||||
out, _ = capfd.readouterr()
|
||||
assert f"'{env_name}' can't be untracked while activated" in out
|
||||
|
||||
env("untrack", "-f", env_name)
|
||||
out = env("ls")
|
||||
assert env_name not in out
|
||||
|
||||
|
||||
def test_env_untrack_managed(tmp_path, capfd):
|
||||
env_name = "test_untrack_managed"
|
||||
|
||||
    # create a managed environment
|
||||
env("create", env_name)
|
||||
|
||||
with pytest.raises(spack.main.SpackCommandError):
|
||||
env("untrack", env_name)
|
||||
|
||||
    # check that a managed environment cannot be untracked
|
||||
out, _ = capfd.readouterr()
|
||||
assert f"'{env_name}' is not a tracked env" in out
|
||||
|
||||
|
||||
def test_add():
|
||||
e = ev.create("test")
|
||||
e.add("mpileaks")
|
||||
@@ -127,6 +221,7 @@ def test_change_match_spec():
|
||||
|
||||
e = ev.read("test")
|
||||
with e:
|
||||
|
||||
add("mpileaks@2.1")
|
||||
add("mpileaks@2.2")
|
||||
|
||||
@@ -401,14 +496,17 @@ def test_env_install_single_spec(install_mockery, mock_fetch):
|
||||
|
||||
|
||||
@pytest.mark.parametrize("unify", [True, False, "when_possible"])
|
||||
def test_env_install_include_concrete_env(unify, install_mockery, mock_fetch):
|
||||
def test_env_install_include_concrete_env(unify, install_mockery, mock_fetch, mutable_config):
|
||||
test1, test2, combined = setup_combined_multiple_env()
|
||||
|
||||
combined.unify = unify
|
||||
if not unify:
|
||||
combined.manifest.set_default_view(False)
|
||||
|
||||
combined.add("mpileaks")
|
||||
combined.concretize()
|
||||
combined.write()
|
||||
|
||||
combined.unify = unify
|
||||
|
||||
with combined:
|
||||
install()
|
||||
|
||||
@@ -422,6 +520,14 @@ def test_env_install_include_concrete_env(unify, install_mockery, mock_fetch):
|
||||
assert test1_roots == combined_included_roots[test1.path]
|
||||
assert test2_roots == combined_included_roots[test2.path]
|
||||
|
||||
mpileaks = combined.specs_by_hash[combined.concretized_order[0]]
|
||||
if unify:
|
||||
assert mpileaks["mpi"].dag_hash() in test1_roots
|
||||
assert mpileaks["libelf"].dag_hash() in test2_roots
|
||||
else:
|
||||
        # check that unification did not happen by accident
|
||||
assert mpileaks["mpi"].dag_hash() not in test1_roots
|
||||
|
||||
|
||||
def test_env_roots_marked_explicit(install_mockery, mock_fetch):
|
||||
install = SpackCommand("install")
|
||||
@@ -676,7 +782,7 @@ def test_force_remove_included_env():
|
||||
rm_output = env("remove", "-f", "-y", "test")
|
||||
list_output = env("list")
|
||||
|
||||
assert '"test" is being used by environment "combined_env"' in rm_output
|
||||
assert "'test' is used by environment 'combined_env'" in rm_output
|
||||
assert "test" not in list_output
|
||||
|
||||
|
||||
@@ -1869,7 +1975,7 @@ def test_env_include_concrete_envs_lockfile():
|
||||
def test_env_include_concrete_add_env():
|
||||
test1, test2, combined = setup_combined_multiple_env()
|
||||
|
||||
# crete new env & crecretize
|
||||
# create new env & concretize
|
||||
env("create", "new")
|
||||
new_env = ev.read("new")
|
||||
with new_env:
|
||||
@@ -1921,6 +2027,116 @@ def test_env_include_concrete_remove_env():
|
||||
assert test2.path not in lockfile_as_dict["include_concrete"].keys()
|
||||
|
||||
|
||||
def configure_reuse(reuse_mode, combined_env) -> Optional[ev.Environment]:
|
||||
override_env = None
|
||||
_config: Dict[Any, Any] = {}
|
||||
if reuse_mode == "true":
|
||||
_config = {"concretizer": {"reuse": True}}
|
||||
elif reuse_mode == "from_environment":
|
||||
_config = {"concretizer": {"reuse": {"from": [{"type": "environment"}]}}}
|
||||
elif reuse_mode == "from_environment_test1":
|
||||
_config = {"concretizer": {"reuse": {"from": [{"type": "environment", "path": "test1"}]}}}
|
||||
elif reuse_mode == "from_environment_external_test":
|
||||
        # Create a new environment called external_test that enables the "debug"
        # variant of mpich. The default is "~debug"
|
||||
env("create", "external_test")
|
||||
override_env = ev.read("external_test")
|
||||
with override_env:
|
||||
add("mpich@1.0 +debug")
|
||||
override_env.concretize()
|
||||
override_env.write()
|
||||
|
||||
# Reuse from the environment that is not included.
|
||||
        # Specify the requirement for the debug variant. By default this would concretize to use
        # mpich@3.0, but with reuse from the "external_test" environment the
        # mpich@1.0 +debug version will be used.
|
||||
_config = {
|
||||
"concretizer": {"reuse": {"from": [{"type": "environment", "path": "external_test"}]}},
|
||||
"packages": {"mpich": {"require": ["+debug"]}},
|
||||
}
|
||||
elif reuse_mode == "from_environment_raise":
|
||||
_config = {
|
||||
"concretizer": {"reuse": {"from": [{"type": "environment", "path": "not-a-real-env"}]}}
|
||||
}
|
||||
    # Disable unification in these tests to avoid conflating reuse due to unification of an
    # included concrete spec with reuse due to the reuse configuration
|
||||
_config["concretizer"].update({"unify": False})
|
||||
|
||||
combined_env.manifest.configuration.update(_config)
|
||||
combined_env.manifest.changed = True
|
||||
combined_env.write()
|
||||
|
||||
return override_env
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"reuse_mode",
|
||||
[
|
||||
"true",
|
||||
"from_environment",
|
||||
"from_environment_test1",
|
||||
"from_environment_external_test",
|
||||
"from_environment_raise",
|
||||
],
|
||||
)
|
||||
def test_env_include_concrete_reuse(monkeypatch, reuse_mode):
|
||||
|
||||
# The mock packages do not use the gcc-runtime
|
||||
def mock_has_runtime_dependencies(*args, **kwargs):
|
||||
return True
|
||||
|
||||
monkeypatch.setattr(
|
||||
spack.solver.asp, "_has_runtime_dependencies", mock_has_runtime_dependencies
|
||||
)
|
||||
# The default mpi version is 3.x provided by mpich in the mock repo.
|
||||
    # This test verifies that, when concretizing with an included concrete
    # environment and "concretizer:reuse:true", the included
    # concrete spec overrides the default with mpi@1.0.
|
||||
test1, _, combined = setup_combined_multiple_env()
|
||||
|
||||
# Set the reuse mode for the environment
|
||||
override_env = configure_reuse(reuse_mode, combined)
|
||||
if override_env:
|
||||
        # If there is an override environment (i.e. testing reuse with
|
||||
# an external environment) update it here.
|
||||
test1 = override_env
|
||||
|
||||
# Capture the test1 specs included by combined
|
||||
test1_specs_by_hash = test1.specs_by_hash
|
||||
|
||||
try:
|
||||
# Add mpileaks to the combined environment
|
||||
with combined:
|
||||
add("mpileaks")
|
||||
combined.concretize()
|
||||
comb_specs_by_hash = combined.specs_by_hash
|
||||
|
||||
# create reference env with mpileaks that does not use reuse
|
||||
# This should concretize to the default version of mpich (3.0)
|
||||
env("create", "new")
|
||||
ref_env = ev.read("new")
|
||||
with ref_env:
|
||||
add("mpileaks")
|
||||
ref_env.concretize()
|
||||
ref_specs_by_hash = ref_env.specs_by_hash
|
||||
|
||||
# Ensure that the mpich used by the mpileaks is the mpich from the reused test environment
|
||||
comb_mpileaks_spec = [s for s in comb_specs_by_hash.values() if s.name == "mpileaks"]
|
||||
test1_mpich_spec = [s for s in test1_specs_by_hash.values() if s.name == "mpich"]
|
||||
assert len(comb_mpileaks_spec) == 1
|
||||
assert len(test1_mpich_spec) == 1
|
||||
assert comb_mpileaks_spec[0]["mpich"].dag_hash() == test1_mpich_spec[0].dag_hash()
|
||||
|
||||
        # None of the reference specs (using mpich@3) reuse specs from test1.
        # This tests that the reuse is not happening coincidentally
|
||||
assert not any([s in test1_specs_by_hash for s in ref_specs_by_hash])
|
||||
|
||||
        # Make sure the "raise" mode test actually raises
|
||||
assert "raise" not in reuse_mode
|
||||
except ev.SpackEnvironmentError:
|
||||
assert "raise" in reuse_mode
|
||||
|
||||
|
||||
@pytest.mark.parametrize("unify", [True, False, "when_possible"])
|
||||
def test_env_include_concrete_env_reconcretized(unify):
|
||||
"""Double check to make sure that concrete_specs for the local specs is empty
|
||||
@@ -4117,13 +4333,13 @@ def test_spack_package_ids_variable(tmpdir, mock_packages):
|
||||
# Include in Makefile and create target that depend on SPACK_PACKAGE_IDS
|
||||
with open(makefile_path, "w") as f:
|
||||
f.write(
|
||||
r"""
|
||||
"""
|
||||
all: post-install
|
||||
|
||||
include include.mk
|
||||
|
||||
example/post-install/%: example/install/%
|
||||
$(info post-install: $(HASH)) # noqa: W191,E101
|
||||
\t$(info post-install: $(HASH)) # noqa: W191,E101
|
||||
|
||||
post-install: $(addprefix example/post-install/,$(example/SPACK_PACKAGE_IDS))
|
||||
"""
|
||||
|
@@ -14,10 +14,13 @@
|
||||
import spack.cmd as cmd
|
||||
import spack.cmd.find
|
||||
import spack.environment as ev
|
||||
import spack.repo
|
||||
import spack.store
|
||||
import spack.user_environment as uenv
|
||||
from spack.main import SpackCommand
|
||||
from spack.spec import Spec
|
||||
from spack.test.conftest import create_test_repo
|
||||
from spack.test.utilities import SpackCommandArgs
|
||||
from spack.util.pattern import Bunch
|
||||
|
||||
find = SpackCommand("find")
|
||||
@@ -453,3 +456,140 @@ def test_environment_with_version_range_in_compiler_doesnt_fail(tmp_path):
|
||||
with test_environment:
|
||||
output = find()
|
||||
assert "zlib%gcc@12.1.0" in output
|
||||
|
||||
|
||||
_pkga = (
|
||||
"a0",
|
||||
"""\
|
||||
class A0(Package):
|
||||
version("1.2")
|
||||
version("1.1")
|
||||
|
||||
depends_on("b0")
|
||||
depends_on("c0")
|
||||
""",
|
||||
)
|
||||
|
||||
|
||||
_pkgb = (
|
||||
"b0",
|
||||
"""\
|
||||
class B0(Package):
|
||||
version("1.2")
|
||||
version("1.1")
|
||||
""",
|
||||
)
|
||||
|
||||
|
||||
_pkgc = (
|
||||
"c0",
|
||||
"""\
|
||||
class C0(Package):
|
||||
version("1.2")
|
||||
version("1.1")
|
||||
|
||||
tags = ["tag0", "tag1"]
|
||||
""",
|
||||
)
|
||||
|
||||
|
||||
_pkgd = (
|
||||
"d0",
|
||||
"""\
|
||||
class D0(Package):
|
||||
version("1.2")
|
||||
version("1.1")
|
||||
|
||||
depends_on("c0")
|
||||
depends_on("e0")
|
||||
""",
|
||||
)
|
||||
|
||||
|
||||
_pkge = (
|
||||
"e0",
|
||||
"""\
|
||||
class E0(Package):
|
||||
tags = ["tag1", "tag2"]
|
||||
|
||||
version("1.2")
|
||||
version("1.1")
|
||||
""",
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def _create_test_repo(tmpdir, mutable_config):
|
||||
r"""
|
||||
a0 d0
|
||||
/ \ / \
|
||||
b0 c0 e0
|
||||
"""
|
||||
yield create_test_repo(tmpdir, [_pkga, _pkgb, _pkgc, _pkgd, _pkge])
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def test_repo(_create_test_repo, monkeypatch, mock_stage):
|
||||
with spack.repo.use_repositories(_create_test_repo) as mock_repo_path:
|
||||
yield mock_repo_path
|
||||
|
||||
|
||||
def test_find_concretized_not_installed(
|
||||
mutable_mock_env_path, install_mockery, mock_fetch, test_repo, mock_archive
|
||||
):
|
||||
"""Test queries against installs of specs against fake repo.
|
||||
|
||||
Given A, B, C, D, E, create an environment and install A.
|
||||
Add and concretize (but do not install) D.
|
||||
Test a few queries after force uninstalling a dependency of A (but not
|
||||
A itself).
|
||||
"""
|
||||
add = SpackCommand("add")
|
||||
concretize = SpackCommand("concretize")
|
||||
uninstall = SpackCommand("uninstall")
|
||||
|
||||
def _query(_e, *args):
|
||||
return spack.cmd.find._find_query(SpackCommandArgs("find")(*args), _e)
|
||||
|
||||
def _nresults(_qresult):
|
||||
return len(_qresult[0]), len(_qresult[1])
|
||||
|
||||
env("create", "test")
|
||||
with ev.read("test") as e:
|
||||
install("--fake", "--add", "a0")
|
||||
|
||||
assert _nresults(_query(e)) == (3, 0)
|
||||
assert _nresults(_query(e, "--explicit")) == (1, 0)
|
||||
|
||||
add("d0")
|
||||
concretize("--reuse")
|
||||
|
||||
# At this point d0 should use existing c0, but d/e
|
||||
# are not installed in the env
|
||||
|
||||
# --explicit, --deprecated, --start-date, etc. are all
|
||||
# filters on records, and therefore don't apply to
|
||||
# concretized-but-not-installed results
|
||||
assert _nresults(_query(e, "--explicit")) == (1, 2)
|
||||
|
||||
assert _nresults(_query(e)) == (3, 2)
|
||||
assert _nresults(_query(e, "-c", "d0")) == (0, 1)
|
||||
|
||||
uninstall("-f", "-y", "b0")
|
||||
|
||||
# b0 is now missing (it is not installed, but has an
|
||||
# installed parent)
|
||||
|
||||
assert _nresults(_query(e)) == (2, 3)
|
||||
# b0 is "double-counted" here: it meets the --missing
|
||||
# criteria, and also now qualifies as a
|
||||
# concretized-but-not-installed spec
|
||||
assert _nresults(_query(e, "--missing")) == (3, 3)
|
||||
assert _nresults(_query(e, "--only-missing")) == (1, 3)
|
||||
|
||||
# Tags are not attached to install records, so they
|
||||
# can modify the concretized-but-not-installed results
|
||||
|
||||
assert _nresults(_query(e, "--tag=tag0")) == (1, 0)
|
||||
assert _nresults(_query(e, "--tag=tag1")) == (1, 1)
|
||||
assert _nresults(_query(e, "--tag=tag2")) == (0, 1)
|
||||
|
@@ -4,10 +4,17 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import pytest
|
||||
|
||||
import spack.config
|
||||
import spack.environment as ev
|
||||
import spack.error
|
||||
import spack.solver.asp as asp
|
||||
import spack.store
|
||||
from spack.cmd import (
|
||||
CommandNameError,
|
||||
PythonNameError,
|
||||
cmd_name,
|
||||
matching_specs_from_env,
|
||||
parse_specs,
|
||||
python_name,
|
||||
require_cmd_name,
|
||||
require_python_name,
|
||||
@@ -34,3 +41,99 @@ def test_require_cmd_name():
|
||||
with pytest.raises(CommandNameError):
|
||||
require_cmd_name("okey_dokey")
|
||||
require_cmd_name(cmd_name("okey_dokey"))
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"unify,spec_strs,error",
|
||||
[
|
||||
# single spec
|
||||
(True, ["zmpi"], None),
|
||||
(False, ["mpileaks"], None),
|
||||
# multiple specs, some from hash some from file
|
||||
(True, ["zmpi", "mpileaks^zmpi", "libelf"], None),
|
||||
(True, ["mpileaks^zmpi", "mpileaks^mpich", "libelf"], spack.error.SpecError),
|
||||
(False, ["mpileaks^zmpi", "mpileaks^mpich", "libelf"], None),
|
||||
],
|
||||
)
|
||||
def test_special_cases_concretization_parse_specs(
|
||||
unify, spec_strs, error, monkeypatch, mutable_config, mutable_database, tmpdir
|
||||
):
|
||||
"""Test that special cases in parse_specs(concretize=True) bypass solver"""
|
||||
|
||||
# monkeypatch to ensure we do not call the actual concretizer
|
||||
def _fail(*args, **kwargs):
|
||||
assert False
|
||||
|
||||
monkeypatch.setattr(asp.SpackSolverSetup, "setup", _fail)
|
||||
|
||||
spack.config.set("concretizer:unify", unify)
|
||||
|
||||
args = [f"/{spack.store.STORE.db.query(s)[0].dag_hash()}" for s in spec_strs]
|
||||
if len(args) > 1:
|
||||
# We convert the last one to a specfile input
|
||||
filename = tmpdir.join("spec.json")
|
||||
spec = parse_specs(args[-1], concretize=True)[0]
|
||||
with open(filename, "w") as f:
|
||||
spec.to_json(f)
|
||||
args[-1] = str(filename)
|
||||
|
||||
if error:
|
||||
with pytest.raises(error):
|
||||
parse_specs(args, concretize=True)
|
||||
else:
|
||||
# assertion error from monkeypatch above if test fails
|
||||
parse_specs(args, concretize=True)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"unify,spec_strs,error",
|
||||
[
|
||||
# single spec
|
||||
(True, ["zmpi"], None),
|
||||
(False, ["mpileaks"], None),
|
||||
# multiple specs, some from hash some from file
|
||||
(True, ["zmpi", "mpileaks^zmpi", "libelf"], None),
|
||||
(True, ["mpileaks^zmpi", "mpileaks^mpich", "libelf"], spack.error.SpecError),
|
||||
(False, ["mpileaks^zmpi", "mpileaks^mpich", "libelf"], None),
|
||||
],
|
||||
)
|
||||
def test_special_cases_concretization_matching_specs_from_env(
|
||||
unify,
|
||||
spec_strs,
|
||||
error,
|
||||
monkeypatch,
|
||||
mutable_config,
|
||||
mutable_database,
|
||||
tmpdir,
|
||||
mutable_mock_env_path,
|
||||
):
|
||||
"""Test that special cases in parse_specs(concretize=True) bypass solver"""
|
||||
|
||||
# monkeypatch to ensure we do not call the actual concretizer
|
||||
def _fail(*args, **kwargs):
|
||||
assert False
|
||||
|
||||
monkeypatch.setattr(asp.SpackSolverSetup, "setup", _fail)
|
||||
|
||||
spack.config.set("concretizer:unify", unify)
|
||||
|
||||
ev.create("test")
|
||||
env = ev.read("test")
|
||||
|
||||
args = [f"/{spack.store.STORE.db.query(s)[0].dag_hash()}" for s in spec_strs]
|
||||
if len(args) > 1:
|
||||
# We convert the last one to a specfile input
|
||||
filename = tmpdir.join("spec.json")
|
||||
spec = parse_specs(args[-1], concretize=True)[0]
|
||||
with open(filename, "w") as f:
|
||||
spec.to_json(f)
|
||||
args[-1] = str(filename)
|
||||
|
||||
with env:
|
||||
specs = parse_specs(args, concretize=False)
|
||||
if error:
|
||||
with pytest.raises(error):
|
||||
matching_specs_from_env(specs)
|
||||
else:
|
||||
# assertion error from monkeypatch above if test fails
|
||||
matching_specs_from_env(specs)
|
||||
|
@@ -779,7 +779,6 @@ def test_install_no_add_in_env(tmpdir, mock_fetch, install_mockery, mutable_mock
|
||||
# ^pkg-b
|
||||
# pkg-a
|
||||
# ^pkg-b
|
||||
|
||||
e = ev.create("test", with_view=False)
|
||||
e.add("mpileaks")
|
||||
e.add("libelf@0.8.10") # so env has both root and dep libelf specs
|
||||
@@ -787,14 +786,14 @@ def test_install_no_add_in_env(tmpdir, mock_fetch, install_mockery, mutable_mock
|
||||
e.add("pkg-a ~bvv")
|
||||
e.concretize()
|
||||
e.write()
|
||||
initial_concrete_specs = e.all_specs()
|
||||
env_specs = e.all_specs()
|
||||
|
||||
a_spec = None
|
||||
b_spec = None
|
||||
mpi_spec = None
|
||||
|
||||
# First find and remember some target concrete specs in the environment
|
||||
for e_spec in initial_concrete_specs:
|
||||
for e_spec in env_specs:
|
||||
if e_spec.satisfies(Spec("pkg-a ~bvv")):
|
||||
a_spec = e_spec
|
||||
elif e_spec.name == "pkg-b":
|
||||
@@ -816,7 +815,8 @@ def test_install_no_add_in_env(tmpdir, mock_fetch, install_mockery, mutable_mock
|
||||
with e:
|
||||
# Assert using --no-add with a spec not in the env fails
|
||||
inst_out = install("--no-add", "boost", fail_on_error=False, output=str)
|
||||
assert "You can add specs to the environment with 'spack add boost'" in inst_out
|
||||
|
||||
assert "You can add specs to the environment with 'spack add " in inst_out
|
||||
|
||||
# Without --add, ensure that two packages "a" get installed
|
||||
inst_out = install("pkg-a", output=str)
|
||||
@@ -828,18 +828,13 @@ def test_install_no_add_in_env(tmpdir, mock_fetch, install_mockery, mutable_mock
|
||||
install("dyninst")
|
||||
|
||||
find_output = find("-l", output=str)
|
||||
|
||||
assert "dyninst" in find_output
|
||||
assert "libdwarf" in find_output
|
||||
assert "libelf" in find_output
|
||||
assert "callpath" not in find_output
|
||||
|
||||
post_install_concrete_specs = e.all_specs()
|
||||
|
||||
for s in post_install_concrete_specs:
|
||||
assert (
|
||||
s in initial_concrete_specs
|
||||
), f"installed spec {s.format('{name}{@version}{/hash:7}')} not in original env"
|
||||
post_install_specs = e.all_specs()
|
||||
assert all([s in env_specs for s in post_install_specs])
|
||||
|
||||
# Make sure we can install a concrete dependency spec from a spec.json
|
||||
# file on disk, and the spec is installed but not added as a root
|
||||
@@ -911,7 +906,7 @@ def test_cdash_configure_warning(tmpdir, mock_fetch, install_mockery, capfd):
|
||||
specfile = "./spec.json"
|
||||
with open(specfile, "w") as f:
|
||||
f.write(spec.to_json())
|
||||
|
||||
print(spec.to_json())
|
||||
install("--log-file=cdash_reports", "--log-format=cdash", specfile)
|
||||
# Verify Configure.xml exists with expected contents.
|
||||
report_dir = tmpdir.join("cdash_reports")
|
||||
|
@@ -17,6 +17,7 @@
|
||||
import spack.version
|
||||
from spack.main import SpackCommand, SpackCommandError
|
||||
|
||||
config = SpackCommand("config")
|
||||
mirror = SpackCommand("mirror")
|
||||
env = SpackCommand("env")
|
||||
add = SpackCommand("add")
|
||||
@@ -181,20 +182,122 @@ def test_mirror_crud(mutable_config, capsys):
|
||||
output = mirror("remove", "mirror")
|
||||
assert "Removed mirror" in output
|
||||
|
||||
# Test S3 connection info id/key
|
||||
mirror(
|
||||
"add",
|
||||
"--s3-access-key-id",
|
||||
"foo",
|
||||
"--s3-access-key-secret",
|
||||
"bar",
|
||||
"mirror",
|
||||
"s3://spack-public",
|
||||
)
|
||||
# Test S3 connection info token as variable
|
||||
mirror("add", "--s3-access-token-variable", "aaaaaazzzzz", "mirror", "s3://spack-public")
|
||||
|
||||
output = mirror("remove", "mirror")
|
||||
assert "Removed mirror" in output
|
||||
|
||||
def do_add_set_seturl_access_pair(
|
||||
id_arg, secret_arg, mirror_name="mirror", mirror_url="s3://spack-public"
|
||||
):
|
||||
# Test S3 connection info id/key
|
||||
output = mirror("add", id_arg, "foo", secret_arg, "bar", mirror_name, mirror_url)
|
||||
if "variable" not in secret_arg:
|
||||
assert (
|
||||
f"Configuring mirror secrets as plain text with {secret_arg} is deprecated. "
|
||||
in output
|
||||
)
|
||||
|
||||
output = config("blame", "mirrors")
|
||||
assert all([x in output for x in ("foo", "bar", mirror_name, mirror_url)])
|
||||
# Mirror access_pair deprecation warning should not be in blame output
|
||||
assert "support for plain text secrets" not in output
|
||||
|
||||
output = mirror("set", id_arg, "foo_set", secret_arg, "bar_set", mirror_name)
|
||||
if "variable" not in secret_arg:
|
||||
assert "support for plain text secrets" in output
|
||||
output = config("blame", "mirrors")
|
||||
assert all([x in output for x in ("foo_set", "bar_set", mirror_name, mirror_url)])
|
||||
if "variable" not in secret_arg:
|
||||
output = mirror(
|
||||
"set", id_arg, "foo_set", secret_arg + "-variable", "bar_set_var", mirror_name
|
||||
)
|
||||
assert "support for plain text secrets" not in output
|
||||
output = config("blame", "mirrors")
|
||||
assert all(
|
||||
[x in output for x in ("foo_set", "bar_set_var", mirror_name, mirror_url)]
|
||||
)
|
||||
|
||||
output = mirror(
|
||||
"set-url",
|
||||
id_arg,
|
||||
"foo_set_url",
|
||||
secret_arg,
|
||||
"bar_set_url",
|
||||
"--push",
|
||||
mirror_name,
|
||||
mirror_url + "-push",
|
||||
)
|
||||
output = config("blame", "mirrors")
|
||||
assert all(
|
||||
[
|
||||
x in output
|
||||
for x in ("foo_set_url", "bar_set_url", mirror_name, mirror_url + "-push")
|
||||
]
|
||||
)
|
||||
|
||||
output = mirror("set", id_arg, "a", mirror_name)
|
||||
assert "No changes made to mirror" not in output
|
||||
|
||||
output = mirror("set", secret_arg, "b", mirror_name)
|
||||
assert "No changes made to mirror" not in output
|
||||
|
||||
output = mirror("set-url", id_arg, "c", mirror_name, mirror_url)
|
||||
assert "No changes made to mirror" not in output
|
||||
|
||||
output = mirror("set-url", secret_arg, "d", mirror_name, mirror_url)
|
||||
assert "No changes made to mirror" not in output
|
||||
|
||||
output = mirror("remove", mirror_name)
|
||||
assert "Removed mirror" in output
|
||||
|
||||
output = mirror("add", id_arg, "foo", mirror_name, mirror_url)
|
||||
assert "Expected both parts of the access pair to be specified. " in output
|
||||
|
||||
output = mirror("set-url", id_arg, "bar", mirror_name, mirror_url)
|
||||
assert "Expected both parts of the access pair to be specified. " in output
|
||||
|
||||
output = mirror("set", id_arg, "bar", mirror_name)
|
||||
assert "Expected both parts of the access pair to be specified. " in output
|
||||
|
||||
output = mirror("remove", mirror_name)
|
||||
assert "Removed mirror" in output
|
||||
|
||||
output = mirror("add", secret_arg, "bar", mirror_name, mirror_url)
|
||||
assert "Expected both parts of the access pair to be specified. " in output
|
||||
|
||||
output = mirror("set-url", secret_arg, "bar", mirror_name, mirror_url)
|
||||
assert "Expected both parts of the access pair to be specified. " in output
|
||||
|
||||
output = mirror("set", secret_arg, "bar", mirror_name)
|
||||
assert "Expected both parts of the access pair to be specified. " in output
|
||||
|
||||
output = mirror("remove", mirror_name)
|
||||
assert "Removed mirror" in output
|
||||
|
||||
output = mirror("list")
|
||||
assert "No mirrors configured" in output
|
||||
|
||||
do_add_set_seturl_access_pair("--s3-access-key-id", "--s3-access-key-secret")
|
||||
do_add_set_seturl_access_pair("--s3-access-key-id", "--s3-access-key-secret-variable")
|
||||
do_add_set_seturl_access_pair(
|
||||
"--s3-access-key-id-variable", "--s3-access-key-secret-variable"
|
||||
)
|
||||
with pytest.raises(
|
||||
spack.error.SpackError, match="Cannot add mirror with a variable id and text secret"
|
||||
):
|
||||
do_add_set_seturl_access_pair("--s3-access-key-id-variable", "--s3-access-key-secret")
|
||||
|
||||
# Test OCI connection info user/password
|
||||
do_add_set_seturl_access_pair("--oci-username", "--oci-password")
|
||||
do_add_set_seturl_access_pair("--oci-username", "--oci-password-variable")
|
||||
do_add_set_seturl_access_pair("--oci-username-variable", "--oci-password-variable")
|
||||
with pytest.raises(
|
||||
spack.error.SpackError, match="Cannot add mirror with a variable id and text secret"
|
||||
):
|
||||
do_add_set_seturl_access_pair("--s3-access-key-id-variable", "--s3-access-key-secret")
|
||||
|
||||
# Test S3 connection info with endpoint URL
|
||||
mirror(
|
||||
"add",
|
||||
@@ -218,6 +321,9 @@ def test_mirror_crud(mutable_config, capsys):
|
||||
output = mirror("remove", "mirror")
|
||||
assert "Removed mirror" in output
|
||||
|
||||
output = mirror("list")
|
||||
assert "No mirrors configured" in output
|
||||
|
||||
|
||||
def test_mirror_nonexisting(mutable_config):
|
||||
with pytest.raises(SpackCommandError):
|
||||
|
@@ -311,7 +311,20 @@ def test_pkg_grep(mock_packages, capfd):
|
||||
output, _ = capfd.readouterr()
|
||||
assert output.strip() == "\n".join(
|
||||
spack.repo.PATH.get_pkg_class(name).module.__file__
|
||||
for name in ["splice-a", "splice-h", "splice-t", "splice-vh", "splice-vt", "splice-z"]
|
||||
for name in [
|
||||
"depends-on-manyvariants",
|
||||
"manyvariants",
|
||||
"splice-a",
|
||||
"splice-depends-on-t",
|
||||
"splice-h",
|
||||
"splice-t",
|
||||
"splice-vh",
|
||||
"splice-vt",
|
||||
"splice-z",
|
||||
"virtual-abi-1",
|
||||
"virtual-abi-2",
|
||||
"virtual-abi-multi",
|
||||
]
|
||||
)
|
||||
|
||||
    # ensure that this string isn't found
|
||||
|
@@ -7,6 +7,7 @@
|
||||
|
||||
import pytest
|
||||
|
||||
import spack.config
|
||||
import spack.environment as ev
|
||||
import spack.error
|
||||
import spack.spec
|
||||
@@ -179,3 +180,43 @@ def test_spec_version_assigned_git_ref_as_version(name, version, error):
|
||||
else:
|
||||
output = spec(name + "@" + version)
|
||||
assert version in output
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"unify, spec_hash_args, match, error",
|
||||
[
|
||||
        # success cases with unify:true
|
||||
(True, ["mpileaks_mpich"], "mpich", None),
|
||||
(True, ["mpileaks_zmpi"], "zmpi", None),
|
||||
(True, ["mpileaks_mpich", "dyninst"], "mpich", None),
|
||||
(True, ["mpileaks_zmpi", "dyninst"], "zmpi", None),
|
||||
        # same success cases with unify:false
|
||||
(False, ["mpileaks_mpich"], "mpich", None),
|
||||
(False, ["mpileaks_zmpi"], "zmpi", None),
|
||||
(False, ["mpileaks_mpich", "dyninst"], "mpich", None),
|
||||
(False, ["mpileaks_zmpi", "dyninst"], "zmpi", None),
|
||||
        # error with unify:true, but success with unify:false
|
||||
(True, ["mpileaks_mpich", "mpileaks_zmpi"], "callpath, mpileaks", spack.error.SpecError),
|
||||
(False, ["mpileaks_mpich", "mpileaks_zmpi"], "zmpi", None),
|
||||
],
|
||||
)
|
||||
def test_spec_unification_from_cli(
|
||||
install_mockery, mutable_config, mutable_database, unify, spec_hash_args, match, error
|
||||
):
|
||||
"""Ensure specs grouped together on the CLI are concretized together when unify:true."""
|
||||
spack.config.set("concretizer:unify", unify)
|
||||
|
||||
db = spack.store.STORE.db
|
||||
spec_lookup = {
|
||||
"mpileaks_mpich": db.query_one("mpileaks ^mpich").dag_hash(),
|
||||
"mpileaks_zmpi": db.query_one("mpileaks ^zmpi").dag_hash(),
|
||||
"dyninst": db.query_one("dyninst").dag_hash(),
|
||||
}
|
||||
|
||||
hashes = [f"/{spec_lookup[name]}" for name in spec_hash_args]
|
||||
if error:
|
||||
with pytest.raises(error, match=match):
|
||||
output = spec(*hashes)
|
||||
else:
|
||||
output = spec(*hashes)
|
||||
assert match in output
|
||||
|
@@ -3,8 +3,10 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""Test basic behavior of compilers in Spack"""
|
||||
import json
|
||||
import os
|
||||
from copy import copy
|
||||
from typing import Optional
|
||||
|
||||
import pytest
|
||||
|
||||
@@ -17,6 +19,7 @@
|
||||
import spack.util.module_cmd
|
||||
from spack.compiler import Compiler
|
||||
from spack.util.executable import Executable, ProcessError
|
||||
from spack.util.file_cache import FileCache
|
||||
|
||||
|
||||
def test_multiple_conflicting_compiler_definitions(mutable_config):
|
||||
@@ -101,11 +104,14 @@ def verbose_flag(self):
|
||||
|
||||
|
||||
@pytest.mark.not_on_windows("Not supported on Windows (yet)")
|
||||
def test_implicit_rpaths(dirs_with_libfiles):
|
||||
def test_implicit_rpaths(dirs_with_libfiles, monkeypatch):
|
||||
lib_to_dirs, all_dirs = dirs_with_libfiles
|
||||
compiler = MockCompiler()
|
||||
compiler._compile_c_source_output = "ld " + " ".join(f"-L{d}" for d in all_dirs)
|
||||
retrieved_rpaths = compiler.implicit_rpaths()
|
||||
monkeypatch.setattr(
|
||||
MockCompiler,
|
||||
"_compile_dummy_c_source",
|
||||
lambda self: "ld " + " ".join(f"-L{d}" for d in all_dirs),
|
||||
)
|
||||
retrieved_rpaths = MockCompiler().implicit_rpaths()
|
||||
assert set(retrieved_rpaths) == set(lib_to_dirs["libstdc++"] + lib_to_dirs["libgfortran"])
|
||||
|
||||
|
||||
@@ -647,6 +653,7 @@ def test_raising_if_compiler_target_is_over_specific(config):
|
||||
|
||||
|
||||
@pytest.mark.not_on_windows("Not supported on Windows (yet)")
|
||||
@pytest.mark.enable_compiler_execution
|
||||
def test_compiler_get_real_version(working_env, monkeypatch, tmpdir):
|
||||
# Test variables
|
||||
test_version = "2.2.2"
|
||||
@@ -736,6 +743,7 @@ def test_get_compilers(config):
|
||||
) == [spack.compilers._compiler_from_config_entry(without_suffix)]
|
||||
|
||||
|
||||
@pytest.mark.enable_compiler_execution
|
||||
def test_compiler_get_real_version_fails(working_env, monkeypatch, tmpdir):
|
||||
# Test variables
|
||||
test_version = "2.2.2"
|
||||
@@ -784,15 +792,13 @@ def _call(*args, **kwargs):
|
||||
compilers = spack.compilers.get_compilers([compiler_dict])
|
||||
assert len(compilers) == 1
|
||||
compiler = compilers[0]
|
||||
try:
|
||||
_ = compiler.get_real_version()
|
||||
assert False
|
||||
except ProcessError:
|
||||
# Confirm environment does not change after failed call
|
||||
assert "SPACK_TEST_CMP_ON" not in os.environ
|
||||
assert compiler.get_real_version() == "unknown"
|
||||
# Confirm environment does not change after failed call
|
||||
assert "SPACK_TEST_CMP_ON" not in os.environ
|
||||
|
||||
|
||||
@pytest.mark.not_on_windows("Bash scripting unsupported on Windows (for now)")
|
||||
@pytest.mark.enable_compiler_execution
|
||||
def test_compiler_flags_use_real_version(working_env, monkeypatch, tmpdir):
|
||||
# Create compiler
|
||||
gcc = str(tmpdir.join("gcc"))
|
||||
@@ -895,3 +901,57 @@ def test_compiler_environment(working_env):
|
||||
)
|
||||
with compiler.compiler_environment():
|
||||
assert os.environ["TEST"] == "yes"
|
||||
|
||||
|
||||
class MockCompilerWithoutExecutables(MockCompiler):
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self._compile_dummy_c_source_count = 0
|
||||
self._get_real_version_count = 0
|
||||
|
||||
def _compile_dummy_c_source(self) -> Optional[str]:
|
||||
self._compile_dummy_c_source_count += 1
|
||||
return "gcc helloworld.c -o helloworld"
|
||||
|
||||
def get_real_version(self) -> str:
|
||||
self._get_real_version_count += 1
|
||||
return "1.0.0"
|
||||
|
||||
|
||||
def test_compiler_output_caching(tmp_path):
|
||||
"""Test that compiler output is cached on the filesystem."""
|
||||
    # The first call should trigger the cache to be updated.
|
||||
a = MockCompilerWithoutExecutables()
|
||||
cache = spack.compiler.FileCompilerCache(FileCache(str(tmp_path)))
|
||||
assert cache.get(a).c_compiler_output == "gcc helloworld.c -o helloworld"
|
||||
assert cache.get(a).real_version == "1.0.0"
|
||||
assert a._compile_dummy_c_source_count == 1
|
||||
assert a._get_real_version_count == 1
|
||||
|
||||
# The second call on an equivalent but distinct object should not trigger compiler calls.
|
||||
b = MockCompilerWithoutExecutables()
|
||||
cache = spack.compiler.FileCompilerCache(FileCache(str(tmp_path)))
|
||||
assert cache.get(b).c_compiler_output == "gcc helloworld.c -o helloworld"
|
||||
assert cache.get(b).real_version == "1.0.0"
|
||||
assert b._compile_dummy_c_source_count == 0
|
||||
assert b._get_real_version_count == 0
|
||||
|
||||
# Cache schema change should be handled gracefully.
|
||||
with open(cache.cache.cache_path(cache.name), "w") as f:
|
||||
for k in cache._data:
|
||||
cache._data[k] = "corrupted entry"
|
||||
f.write(json.dumps(cache._data))
|
||||
|
||||
c = MockCompilerWithoutExecutables()
|
||||
cache = spack.compiler.FileCompilerCache(FileCache(str(tmp_path)))
|
||||
assert cache.get(c).c_compiler_output == "gcc helloworld.c -o helloworld"
|
||||
assert cache.get(c).real_version == "1.0.0"
|
||||
|
||||
# Cache corruption should be handled gracefully.
|
||||
with open(cache.cache.cache_path(cache.name), "w") as f:
|
||||
f.write("corrupted cache")
|
||||
|
||||
d = MockCompilerWithoutExecutables()
|
||||
cache = spack.compiler.FileCompilerCache(FileCache(str(tmp_path)))
|
||||
assert cache.get(d).c_compiler_output == "gcc helloworld.c -o helloworld"
|
||||
assert cache.get(d).real_version == "1.0.0"
|
||||
|
@@ -14,6 +14,7 @@
|
||||
import llnl.util.lang
|
||||
|
||||
import spack.binary_distribution
|
||||
import spack.cmd
|
||||
import spack.compiler
|
||||
import spack.compilers
|
||||
import spack.concretize
|
||||
@@ -32,7 +33,6 @@
|
||||
import spack.store
|
||||
import spack.util.file_cache
|
||||
import spack.variant as vt
|
||||
from spack.concretize import find_spec
|
||||
from spack.installer import PackageInstaller
|
||||
from spack.spec import CompilerSpec, Spec
|
||||
from spack.version import Version, VersionList, ver
|
||||
@@ -540,21 +540,17 @@ def test_concretize_two_virtuals_with_dual_provider_and_a_conflict(self):
|
||||
@pytest.mark.parametrize(
|
||||
"spec_str,expected_propagation",
|
||||
[
|
||||
("hypre~~shared ^openblas+shared", [("hypre", "~shared"), ("openblas", "+shared")]),
|
||||
# Propagates past a node that doesn't have the variant
|
||||
("hypre~~shared ^openblas", [("hypre", "~shared"), ("openblas", "~shared")]),
|
||||
# Propagates from root node to all nodes
|
||||
(
|
||||
"ascent~~shared +adios2",
|
||||
[("ascent", "~shared"), ("adios2", "~shared"), ("bzip2", "~shared")],
|
||||
),
|
||||
# Propagates below a node that uses the other value explicitly
|
||||
# Propagate from a node that is not the root node
|
||||
(
|
||||
"ascent~~shared +adios2 ^adios2+shared",
|
||||
[("ascent", "~shared"), ("adios2", "+shared"), ("bzip2", "~shared")],
|
||||
),
|
||||
(
|
||||
"ascent++shared +adios2 ^adios2~shared",
|
||||
[("ascent", "+shared"), ("adios2", "~shared"), ("bzip2", "+shared")],
|
||||
"ascent +adios2 ^adios2~~shared",
|
||||
[("ascent", "+shared"), ("adios2", "~shared"), ("bzip2", "~shared")],
|
||||
),
|
||||
],
|
||||
)
|
||||
@@ -564,21 +560,109 @@ def test_concretize_propagate_disabled_variant(self, spec_str, expected_propagat
|
||||
for key, expected_satisfies in expected_propagation:
|
||||
spec[key].satisfies(expected_satisfies)
|
||||
|
||||
def test_concretize_propagated_variant_is_not_passed_to_dependent(self):
|
||||
"""Test a package variant value was passed from its parent."""
|
||||
spec = Spec("ascent~~shared +adios2 ^adios2+shared")
|
||||
def test_concretize_propagate_variant_not_dependencies(self):
|
||||
"""Test that when propagating a variant it is not propagated to dependencies that
|
||||
do not have that variant"""
|
||||
spec = Spec("quantum-espresso~~invino")
|
||||
spec.concretize()
|
||||
|
||||
assert spec.satisfies("^adios2+shared")
|
||||
assert spec.satisfies("^bzip2~shared")
|
||||
for dep in spec.traverse(root=False):
|
||||
assert "invino" not in dep.variants.keys()
|
||||
|
||||
def test_concretize_propagate_variant_exclude_dependency_fail(self):
|
||||
"""Tests that a propagating variant cannot be allowed to be excluded by any of
|
||||
the source package's dependencies"""
|
||||
spec = Spec("hypre ~~shared ^openblas +shared")
|
||||
with pytest.raises(spack.error.UnsatisfiableSpecError):
|
||||
spec.concretize()
|
||||
|
||||
def test_concretize_propagate_same_variant_from_direct_dep_fail(self):
|
||||
"""Test that when propagating a variant from the source package and a direct
|
||||
dependency also propagates the same variant with a different value. Raises error"""
|
||||
spec = Spec("ascent +adios2 ++shared ^adios2 ~~shared")
|
||||
with pytest.raises(spack.error.UnsatisfiableSpecError):
|
||||
spec.concretize()
|
||||
|
||||
def test_concretize_propagate_same_variant_in_dependency_fail(self):
|
||||
"""Test that when propagating a variant from the source package, none of it's
|
||||
dependencies can propagate that variant with a different value. Raises error."""
|
||||
spec = Spec("ascent +adios2 ++shared ^bzip2 ~~shared")
|
||||
with pytest.raises(spack.error.UnsatisfiableSpecError):
|
||||
spec.concretize()
|
||||
|
||||
def test_concretize_propagate_same_variant_virtual_dependency_fail(self):
|
||||
"""Test that when propagating a variant from the source package and a direct
|
||||
dependency (that is a virtual pkg) also propagates the same variant with a
|
||||
different value. Raises error"""
|
||||
spec = Spec("hypre ++shared ^openblas ~~shared")
|
||||
with pytest.raises(spack.error.UnsatisfiableSpecError):
|
||||
spec.concretize()
|
||||
|
||||
def test_concretize_propagate_same_variant_multiple_sources_diamond_dep_fail(self):
|
||||
"""Test that fails when propagating the same variant with different values from multiple
|
||||
sources that share a dependency"""
|
||||
spec = Spec("parent-foo-bar ^dependency-foo-bar++bar ^direct-dep-foo-bar~~bar")
|
||||
with pytest.raises(spack.error.UnsatisfiableSpecError):
|
||||
spec.concretize()
|
||||
|
||||
def test_concretize_propagate_specified_variant(self):
|
||||
"""Test that only the specified variant is propagated to the dependencies"""
|
||||
spec = Spec("parent-foo-bar ~~foo")
|
||||
spec.concretize()
|
||||
|
||||
assert spec.satisfies("~foo") and spec.satisfies("^dependency-foo-bar~foo")
|
||||
assert spec.satisfies("+bar") and not spec.satisfies("^dependency-foo-bar+bar")
|
||||
assert spec.satisfies("^dependency-foo-bar~foo")
|
||||
assert spec.satisfies("^second-dependency-foo-bar-fee~foo")
|
||||
assert spec.satisfies("^direct-dep-foo-bar~foo")
|
||||
|
||||
assert not spec.satisfies("^dependency-foo-bar+bar")
|
||||
assert not spec.satisfies("^second-dependency-foo-bar-fee+bar")
|
||||
assert not spec.satisfies("^direct-dep-foo-bar+bar")
|
||||
|
||||
def test_concretize_propagate_one_variant(self):
|
||||
"""Test that you can specify to propagate one variant and not all"""
|
||||
spec = Spec("parent-foo-bar ++bar ~foo")
|
||||
spec.concretize()
|
||||
|
||||
assert spec.satisfies("~foo") and not spec.satisfies("^dependency-foo-bar~foo")
|
||||
assert spec.satisfies("+bar") and spec.satisfies("^dependency-foo-bar+bar")
|
||||
|
||||
def test_concretize_propagate_through_first_level_deps(self):
|
||||
"""Test that boolean valued variants can be propagated past first level
|
||||
dependecies even if the first level dependency does have the variant"""
|
||||
spec = Spec("parent-foo-bar-fee ++fee")
|
||||
spec.concretize()
|
||||
|
||||
assert spec.satisfies("+fee") and not spec.satisfies("dependency-foo-bar+fee")
|
||||
assert spec.satisfies("^second-dependency-foo-bar-fee+fee")
|
||||
|
||||
def test_concretize_propagate_multiple_variants(self):
|
||||
"""Test that multiple boolean valued variants can be propagated from
|
||||
the same source package"""
|
||||
spec = Spec("parent-foo-bar-fee ~~foo ++bar")
|
||||
spec.concretize()
|
||||
|
||||
assert spec.satisfies("~foo") and spec.satisfies("+bar")
|
||||
assert spec.satisfies("^dependency-foo-bar ~foo +bar")
|
||||
assert spec.satisfies("^second-dependency-foo-bar-fee ~foo +bar")
|
||||
|
||||
def test_concretize_propagate_multiple_variants_mulitple_sources(self):
|
||||
"""Test the propagates multiple different variants for multiple sources
|
||||
in a diamond dependency"""
|
||||
spec = Spec("parent-foo-bar ^dependency-foo-bar++bar ^direct-dep-foo-bar~~foo")
|
||||
spec.concretize()
|
||||
|
||||
assert spec.satisfies("^second-dependency-foo-bar-fee+bar")
|
||||
assert spec.satisfies("^second-dependency-foo-bar-fee~foo")
|
||||
assert not spec.satisfies("^dependency-foo-bar~foo")
|
||||
assert not spec.satisfies("^direct-dep-foo-bar+bar")
|
||||
|
||||
def test_concretize_propagate_single_valued_variant(self):
|
||||
"""Test propagation for single valued variants"""
|
||||
spec = Spec("multivalue-variant libs==static")
|
||||
spec.concretize()
|
||||
|
||||
assert spec.satisfies("libs=static")
|
||||
assert spec.satisfies("^pkg-a libs=static")
|
||||
|
||||
def test_concretize_propagate_multivalue_variant(self):
|
||||
"""Test that multivalue variants are propagating the specified value(s)
|
||||
@@ -591,6 +675,46 @@ def test_concretize_propagate_multivalue_variant(self):
|
||||
assert not spec.satisfies("^pkg-a foo=bar")
|
||||
assert not spec.satisfies("^pkg-b foo=bar")
|
||||
|
||||
def test_concretize_propagate_multiple_multivalue_variant(self):
|
||||
"""Tests propagating the same mulitvalued variant from different sources allows
|
||||
the dependents to accept all propagated values"""
|
||||
spec = Spec("multivalue-variant foo==bar ^pkg-a foo==baz")
|
||||
spec.concretize()
|
||||
|
||||
assert spec.satisfies("multivalue-variant foo=bar")
|
||||
assert spec.satisfies("^pkg-a foo=bar,baz")
|
||||
assert spec.satisfies("^pkg-b foo=bar,baz")
|
||||
|
||||
def test_concretize_propagate_variant_not_in_source(self):
|
||||
"""Test that variant is still propagated even if the source pkg
|
||||
doesn't have the variant"""
|
||||
spec = Spec("callpath++debug")
|
||||
spec.concretize()
|
||||
|
||||
assert spec.satisfies("^mpich+debug")
|
||||
assert not spec.satisfies("callpath+debug")
|
||||
assert not spec.satisfies("^dyninst+debug")
|
||||
|
||||
def test_concretize_propagate_variant_multiple_deps_not_in_source(self):
|
||||
"""Test that a variant can be propagated to multiple dependencies
|
||||
when the variant is not in the source package"""
|
||||
spec = Spec("netlib-lapack++shared")
|
||||
spec.concretize()
|
||||
|
||||
assert spec.satisfies("^openblas+shared")
|
||||
assert spec.satisfies("^perl+shared")
|
||||
assert not spec.satisfies("netlib-lapack+shared")
|
||||
|
||||
def test_concretize_propagate_variant_second_level_dep_not_in_source(self):
|
||||
"""Test that a variant can be propagated past first level dependencies
|
||||
when the variant is not in the source package or any of the first level
|
||||
dependencies"""
|
||||
spec = Spec("parent-foo-bar ++fee")
|
||||
spec.concretize()
|
||||
|
||||
assert spec.satisfies("^second-dependency-foo-bar-fee +fee")
|
||||
assert not spec.satisfies("parent-foo-bar +fee")
|
||||
|
||||
def test_no_matching_compiler_specs(self, mock_low_high_config):
|
||||
# only relevant when not building compilers as needed
|
||||
with spack.concretize.enable_compiler_existence_check():
|
||||
@@ -673,39 +797,6 @@ def test_external_and_virtual(self, mutable_config):
|
||||
assert spec["externaltool"].compiler.satisfies("gcc")
|
||||
assert spec["stuff"].compiler.satisfies("gcc")
|
||||
|
||||
def test_find_spec_parents(self):
|
||||
"""Tests the spec finding logic used by concretization."""
|
||||
s = Spec.from_literal({"a +foo": {"b +foo": {"c": None, "d+foo": None}, "e +foo": None}})
|
||||
|
||||
assert "a" == find_spec(s["b"], lambda s: "+foo" in s).name
|
||||
|
||||
def test_find_spec_children(self):
|
||||
s = Spec.from_literal({"a": {"b +foo": {"c": None, "d+foo": None}, "e +foo": None}})
|
||||
|
||||
assert "d" == find_spec(s["b"], lambda s: "+foo" in s).name
|
||||
|
||||
s = Spec.from_literal({"a": {"b +foo": {"c+foo": None, "d": None}, "e +foo": None}})
|
||||
|
||||
assert "c" == find_spec(s["b"], lambda s: "+foo" in s).name
|
||||
|
||||
def test_find_spec_sibling(self):
|
||||
s = Spec.from_literal({"a": {"b +foo": {"c": None, "d": None}, "e +foo": None}})
|
||||
|
||||
assert "e" == find_spec(s["b"], lambda s: "+foo" in s).name
|
||||
assert "b" == find_spec(s["e"], lambda s: "+foo" in s).name
|
||||
|
||||
s = Spec.from_literal({"a": {"b +foo": {"c": None, "d": None}, "e": {"f +foo": None}}})
|
||||
|
||||
assert "f" == find_spec(s["b"], lambda s: "+foo" in s).name
|
||||
|
||||
def test_find_spec_self(self):
|
||||
s = Spec.from_literal({"a": {"b +foo": {"c": None, "d": None}, "e": None}})
|
||||
assert "b" == find_spec(s["b"], lambda s: "+foo" in s).name
|
||||
|
||||
def test_find_spec_none(self):
|
||||
s = Spec.from_literal({"a": {"b": {"c": None, "d": None}, "e": None}})
|
||||
assert find_spec(s["b"], lambda s: "+foo" in s) is None
|
||||
|
||||
def test_compiler_child(self):
|
||||
s = Spec("mpileaks%clang target=x86_64 ^dyninst%gcc")
|
||||
s.concretize()
|
||||
@@ -814,7 +905,7 @@ def test_regression_issue_7941(self):
|
||||
)
|
||||
def test_simultaneous_concretization_of_specs(self, abstract_specs):
|
||||
abstract_specs = [Spec(x) for x in abstract_specs]
|
||||
concrete_specs = spack.concretize.concretize_specs_together(*abstract_specs)
|
||||
concrete_specs = spack.concretize.concretize_specs_together(abstract_specs)
|
||||
|
||||
# Check there's only one configuration of each package in the DAG
|
||||
names = set(dep.name for spec in concrete_specs for dep in spec.traverse())
|
||||
@@ -2136,7 +2227,7 @@ def test_external_python_extension_find_unified_python(self):
|
||||
spack.config.set("packages", external_conf)
|
||||
|
||||
abstract_specs = [Spec(s) for s in ["py-extension1", "python"]]
|
||||
specs = spack.concretize.concretize_specs_together(*abstract_specs)
|
||||
specs = spack.concretize.concretize_specs_together(abstract_specs)
|
||||
assert specs[0]["python"] == specs[1]["python"]
|
||||
|
||||
@pytest.mark.regression("36190")
|
||||
@@ -2225,6 +2316,7 @@ def test_compiler_match_constraints_when_selected(self):
|
||||
|
||||
@pytest.mark.regression("36339")
|
||||
@pytest.mark.not_on_windows("Not supported on Windows")
|
||||
@pytest.mark.enable_compiler_execution
|
||||
def test_compiler_with_custom_non_numeric_version(self, mock_executable):
|
||||
"""Test that, when a compiler has a completely made up version, we can use its
|
||||
'real version' to detect targets and don't raise during concretization.
|
||||
@@ -2779,6 +2871,18 @@ def test_specifying_different_versions_build_deps(self):
|
||||
assert any(x.satisfies(hdf5_str) for x in result.specs)
|
||||
assert any(x.satisfies(pinned_str) for x in result.specs)
|
||||
|
||||
def test_solve_with_profile(self, capsys):
|
||||
"""For now, just ensure that the profiler runs."""
|
||||
solver = spack.solver.asp.Solver()
|
||||
solver.solve([Spec("hdf5")], profile=True)
|
||||
|
||||
out, _ = capsys.readouterr()
|
||||
assert "Profile:" in out
|
||||
assert "Symbol" in out
|
||||
assert "Prop" in out
|
||||
assert "Undo" in out
|
||||
assert "internal_error" in out # symbol is always in small solves
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"v_str,v_opts,checksummed",
|
||||
@@ -3106,3 +3210,20 @@ def test_reuse_prefers_standard_over_git_versions(
|
||||
test_spec = spack.spec.Spec("git-ref-package@2").concretized()
|
||||
assert git_spec.dag_hash() != test_spec.dag_hash()
|
||||
assert standard_spec.dag_hash() == test_spec.dag_hash()
|
||||
|
||||
|
||||
@pytest.mark.parametrize("unify", [True, "when_possible", False])
|
||||
def test_spec_unification(unify, mutable_config, mock_packages):
|
||||
spack.config.set("concretizer:unify", unify)
|
||||
a = "pkg-a"
|
||||
a_restricted = "pkg-a^pkg-b foo=baz"
|
||||
b = "pkg-b foo=none"
|
||||
|
||||
unrestricted = spack.cmd.parse_specs([a, b], concretize=True)
|
||||
a_concrete_unrestricted = [s for s in unrestricted if s.name == "pkg-a"][0]
|
||||
b_concrete_unrestricted = [s for s in unrestricted if s.name == "pkg-b"][0]
|
||||
assert (a_concrete_unrestricted["pkg-b"] == b_concrete_unrestricted) == (unify is not False)
|
||||
|
||||
maybe_fails = pytest.raises if unify is True else llnl.util.lang.nullcontext
|
||||
with maybe_fails(spack.solver.asp.UnsatisfiableSpecError):
|
||||
_ = spack.cmd.parse_specs([a_restricted, b], concretize=True)
|
@@ -8,6 +8,7 @@
|
||||
|
||||
import spack.config
|
||||
import spack.environment as ev
|
||||
import spack.paths
|
||||
import spack.repo
|
||||
import spack.util.spack_yaml as syaml
|
||||
from spack.spec import Spec
|
@@ -10,8 +10,10 @@
|
||||
import spack.config
|
||||
import spack.error
|
||||
import spack.package_base
|
||||
import spack.paths
|
||||
import spack.repo
|
||||
import spack.solver.asp
|
||||
import spack.store
|
||||
import spack.util.spack_yaml as syaml
|
||||
import spack.version
|
||||
from spack.installer import PackageInstaller
|
@@ -973,12 +973,26 @@ def _return_none(*args):
|
||||
return None
|
||||
|
||||
|
||||
def _compiler_output(self):
|
||||
return ""
|
||||
|
||||
|
||||
def _get_real_version(self):
|
||||
return str(self.version)
|
||||
|
||||
|
||||
@pytest.fixture(scope="function", autouse=True)
|
||||
def disable_compiler_execution(monkeypatch, request):
|
||||
"""Disable compiler execution to determine implicit link paths and libc flavor and version.
|
||||
To re-enable use `@pytest.mark.enable_compiler_execution`"""
|
||||
if "enable_compiler_execution" not in request.keywords:
|
||||
monkeypatch.setattr(spack.compiler.Compiler, "_compile_dummy_c_source", _return_none)
|
||||
monkeypatch.setattr(spack.compiler.Compiler, "_compile_dummy_c_source", _compiler_output)
|
||||
monkeypatch.setattr(spack.compiler.Compiler, "get_real_version", _get_real_version)
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def disable_compiler_output_cache(monkeypatch):
|
||||
monkeypatch.setattr(spack.compiler, "COMPILER_CACHE", spack.compiler.CompilerCache())
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
|
@@ -906,3 +906,18 @@ def test_only_roots_are_explicitly_installed(tmp_path, mock_packages, config, te
|
||||
assert callpath in temporary_store.db.query(explicit=False)
|
||||
env.install_specs([mpileaks], fake=True)
|
||||
assert temporary_store.db.query(explicit=True) == [mpileaks]
|
||||
|
||||
|
||||
def test_environment_from_name_or_dir(mock_packages, mutable_mock_env_path, tmp_path):
|
||||
test_env = ev.create("test")
|
||||
|
||||
name_env = ev.environment_from_name_or_dir(test_env.name)
|
||||
assert name_env.name == test_env.name
|
||||
assert name_env.path == test_env.path
|
||||
|
||||
dir_env = ev.environment_from_name_or_dir(test_env.path)
|
||||
assert dir_env.name == test_env.name
|
||||
assert dir_env.path == test_env.path
|
||||
|
||||
with pytest.raises(ev.SpackEnvironmentError, match="no such environment"):
|
||||
_ = ev.environment_from_name_or_dir("fake-env")
|
||||
|
@@ -410,7 +410,7 @@ def test_nosource_pkg_install(install_mockery, mock_fetch, mock_packages, capfd,
|
||||
assert "Installing dependency-install" in out[0]
|
||||
|
||||
# Make sure a warning for missing code is issued
|
||||
assert "Missing a hash for nosource" in out[1]
|
||||
assert "Missing a source id for nosource" in out[1]
|
||||
|
||||
|
||||
@pytest.mark.disable_clean_stage_check
|
||||
@@ -427,7 +427,7 @@ def test_nosource_bundle_pkg_install(
|
||||
assert "Installing dependency-install" in out[0]
|
||||
|
||||
# Make sure a warning for missing code is *not* issued
|
||||
assert "Missing a hash for nosource" not in out[1]
|
||||
assert "Missing a source id for nosource" not in out[1]
|
||||
|
||||
|
||||
def test_nosource_pkg_install_post_install(install_mockery, mock_fetch, mock_packages):
|
||||
|
@@ -644,13 +644,12 @@ def test_prepare_for_install_on_installed(install_mockery, monkeypatch):
|
||||
def test_installer_init_requests(install_mockery):
|
||||
"""Test of installer initial requests."""
|
||||
spec_name = "dependent-install"
|
||||
with spack.config.override("config:install_missing_compilers", True):
|
||||
installer = create_installer([spec_name], {})
|
||||
installer = create_installer([spec_name], {})
|
||||
|
||||
# There is only one explicit request in this case
|
||||
assert len(installer.build_requests) == 1
|
||||
request = installer.build_requests[0]
|
||||
assert request.pkg.name == spec_name
|
||||
# There is only one explicit request in this case
|
||||
assert len(installer.build_requests) == 1
|
||||
request = installer.build_requests[0]
|
||||
assert request.pkg.name == spec_name
|
||||
|
||||
|
||||
@pytest.mark.parametrize("transitive", [True, False])
|
||||
@@ -743,21 +742,20 @@ def _missing(*args, **kwargs):
|
||||
|
||||
# Set the configuration to ensure _requeue_with_build_spec_tasks actually
|
||||
# does something.
|
||||
with spack.config.override("config:install_missing_compilers", True):
|
||||
installer = create_installer(["depb"], {})
|
||||
installer._init_queue()
|
||||
request = installer.build_requests[0]
|
||||
task = create_build_task(request.pkg)
|
||||
installer = create_installer(["depb"], {})
|
||||
installer._init_queue()
|
||||
request = installer.build_requests[0]
|
||||
task = create_build_task(request.pkg)
|
||||
|
||||
# Drop one of the specs so its task is missing before _install_task
|
||||
popped_task = installer._pop_task()
|
||||
assert inst.package_id(popped_task.pkg.spec) not in installer.build_tasks
|
||||
# Drop one of the specs so its task is missing before _install_task
|
||||
popped_task = installer._pop_task()
|
||||
assert inst.package_id(popped_task.pkg.spec) not in installer.build_tasks
|
||||
|
||||
monkeypatch.setattr(task, "execute", _missing)
|
||||
installer._install_task(task, None)
|
||||
monkeypatch.setattr(task, "execute", _missing)
|
||||
installer._install_task(task, None)
|
||||
|
||||
# Ensure the dropped task/spec was added back by _install_task
|
||||
assert inst.package_id(popped_task.pkg.spec) in installer.build_tasks
|
||||
# Ensure the dropped task/spec was added back by _install_task
|
||||
assert inst.package_id(popped_task.pkg.spec) in installer.build_tasks
|
||||
|
||||
|
||||
def test_release_lock_write_n_exception(install_mockery, tmpdir, capsys):
|
||||
|
@@ -9,7 +9,7 @@
|
||||
|
||||
import pytest
|
||||
|
||||
from llnl.util.filesystem import HeaderList, LibraryList, find, find_headers, find_libraries
|
||||
from llnl.util.filesystem import HeaderList, LibraryList, find_headers, find_libraries
|
||||
|
||||
import spack.paths
|
||||
|
||||
@@ -324,33 +324,3 @@ def test_searching_order(search_fn, search_list, root, kwargs):
|
||||
|
||||
# List should be empty here
|
||||
assert len(rlist) == 0
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"root,search_list,kwargs,expected",
|
||||
[
|
||||
(
|
||||
search_dir,
|
||||
"*/*bar.tx?",
|
||||
{"recursive": False},
|
||||
[
|
||||
os.path.join(search_dir, os.path.join("a", "foobar.txt")),
|
||||
os.path.join(search_dir, os.path.join("b", "bar.txp")),
|
||||
os.path.join(search_dir, os.path.join("c", "bar.txt")),
|
||||
],
|
||||
),
|
||||
(
|
||||
search_dir,
|
||||
"*/*bar.tx?",
|
||||
{"recursive": True},
|
||||
[
|
||||
os.path.join(search_dir, os.path.join("a", "foobar.txt")),
|
||||
os.path.join(search_dir, os.path.join("b", "bar.txp")),
|
||||
os.path.join(search_dir, os.path.join("c", "bar.txt")),
|
||||
],
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_find_with_globbing(root, search_list, kwargs, expected):
|
||||
matches = find(root, search_list, **kwargs)
|
||||
assert sorted(matches) == sorted(expected)
|
||||
|
@@ -6,6 +6,7 @@
|
||||
"""Tests for ``llnl/util/filesystem.py``"""
|
||||
import filecmp
|
||||
import os
|
||||
import pathlib
|
||||
import shutil
|
||||
import stat
|
||||
import sys
|
||||
@@ -14,7 +15,8 @@
|
||||
import pytest
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
from llnl.util.symlink import islink, readlink, symlink
|
||||
import llnl.util.symlink
|
||||
from llnl.util.symlink import _windows_can_symlink, islink, readlink, symlink
|
||||
|
||||
import spack.paths
|
||||
|
||||
@@ -1035,3 +1037,215 @@ def test_windows_sfn(tmpdir):
|
||||
assert "d\\LONGER~1" in fs.windows_sfn(d)
|
||||
assert "d\\LONGER~2" in fs.windows_sfn(e)
|
||||
shutil.rmtree(tmpdir.join("d"))
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def dir_structure_with_things_to_find(tmpdir):
|
||||
"""
|
||||
<root>/
|
||||
dir_one/
|
||||
file_one
|
||||
dir_two/
|
||||
dir_three/
|
||||
dir_four/
|
||||
file_two
|
||||
file_three
|
||||
file_four
|
||||
"""
|
||||
dir_one = tmpdir.join("dir_one").ensure(dir=True)
|
||||
tmpdir.join("dir_two").ensure(dir=True)
|
||||
dir_three = tmpdir.join("dir_three").ensure(dir=True)
|
||||
dir_four = dir_three.join("dir_four").ensure(dir=True)
|
||||
|
||||
locations = {}
|
||||
locations["file_one"] = str(dir_one.join("file_one").ensure())
|
||||
locations["file_two"] = str(dir_four.join("file_two").ensure())
|
||||
locations["file_three"] = str(dir_three.join("file_three").ensure())
|
||||
locations["file_four"] = str(tmpdir.join("file_four").ensure())
|
||||
|
||||
return str(tmpdir), locations
|
||||
|
||||
|
||||
def test_find_path_glob_matches(dir_structure_with_things_to_find):
|
||||
root, locations = dir_structure_with_things_to_find
|
||||
# both file name and path match
|
||||
assert (
|
||||
fs.find(root, "file_two")
|
||||
== fs.find(root, "*/*/file_two")
|
||||
== fs.find(root, "dir_t*/*/*two")
|
||||
== [locations["file_two"]]
|
||||
)
|
||||
# ensure that * does not match directory separators
|
||||
assert fs.find(root, "dir*file_two") == []
|
||||
# ensure that file name matches after / are matched from the start of the file name
|
||||
assert fs.find(root, "*/ile_two") == []
|
||||
# file name matches exist, but not with these paths
|
||||
assert fs.find(root, "dir_one/*/*two") == fs.find(root, "*/*/*/*/file_two") == []
|
||||
|
||||
|
||||
def test_find_max_depth(dir_structure_with_things_to_find):
|
||||
root, locations = dir_structure_with_things_to_find
|
||||
|
||||
# Make sure the paths we use to verify are absolute
|
||||
assert os.path.isabs(locations["file_one"])
|
||||
|
||||
assert set(fs.find(root, "file_*", max_depth=0)) == {locations["file_four"]}
|
||||
assert set(fs.find(root, "file_*", max_depth=1)) == {
|
||||
locations["file_one"],
|
||||
locations["file_three"],
|
||||
locations["file_four"],
|
||||
}
|
||||
assert set(fs.find(root, "file_two", max_depth=2)) == {locations["file_two"]}
|
||||
assert not set(fs.find(root, "file_two", max_depth=1))
|
||||
assert set(fs.find(root, "file_two")) == {locations["file_two"]}
|
||||
assert set(fs.find(root, "file_*")) == set(locations.values())
|
||||
|
||||
|
||||
def test_find_max_depth_relative(dir_structure_with_things_to_find):
|
||||
"""find_max_depth should return absolute paths even if the provided path is relative."""
|
||||
root, locations = dir_structure_with_things_to_find
|
||||
with fs.working_dir(root):
|
||||
assert set(fs.find(".", "file_*", max_depth=0)) == {locations["file_four"]}
|
||||
assert set(fs.find(".", "file_two", max_depth=2)) == {locations["file_two"]}
|
||||
|
||||
|
||||
@pytest.mark.parametrize("recursive,max_depth", [(False, -1), (False, 1)])
|
||||
def test_max_depth_and_recursive_errors(tmpdir, recursive, max_depth):
|
||||
root = str(tmpdir)
|
||||
error_str = "cannot be set if recursive is False"
|
||||
with pytest.raises(ValueError, match=error_str):
|
||||
fs.find(root, ["some_file"], recursive=recursive, max_depth=max_depth)
|
||||
|
||||
with pytest.raises(ValueError, match=error_str):
|
||||
fs.find_libraries(["some_lib"], root, recursive=recursive, max_depth=max_depth)
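The error message asserted above implies a simple guard in the search entry points. A minimal sketch of such a check, using a hypothetical helper name rather than the actual llnl.util.filesystem code:

```python
def _validate_search_depth(recursive, max_depth):
    # Hypothetical guard: a depth limit only makes sense for recursive searches.
    # The message mirrors the one matched by the test above.
    if max_depth is not None and not recursive:
        raise ValueError("max_depth cannot be set if recursive is False")
    # A non-recursive search is equivalent to max_depth=0; otherwise keep the limit.
    return 0 if not recursive else max_depth
```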
|
||||
|
||||
|
||||
@pytest.fixture(params=[True, False])
|
||||
def complex_dir_structure(request, tmpdir):
|
||||
"""
|
||||
"lx-dy" means "level x, directory y"
|
||||
"lx-fy" means "level x, file y"
|
||||
"lx-sy" means "level x, symlink y"
|
||||
|
||||
<root>/
|
||||
l1-d1/
|
||||
l2-d1/
|
||||
l3-d2/
|
||||
l4-f1
|
||||
l3-d4/
|
||||
l4-f2
|
||||
l3-s1 -> l1-d2 # points to directory above l2-d1
|
||||
l3-s3 -> l1-d1 # cyclic link
|
||||
l1-d2/
|
||||
l2-d2/
|
||||
l3-f3
|
||||
l2-f1
|
||||
l2-s3 -> l2-d2
|
||||
l1-s3 -> l3-d4 # a link that "skips" a directory level
|
||||
l1-s4 -> l2-s3 # a link to a link to a dir
|
||||
"""
|
||||
use_junctions = request.param
|
||||
if sys.platform == "win32" and not use_junctions and not _windows_can_symlink():
|
||||
pytest.skip("This Windows instance is not configured with symlink support")
|
||||
elif sys.platform != "win32" and use_junctions:
|
||||
pytest.skip("Junctions are a Windows-only feature")
|
||||
|
||||
l1_d1 = tmpdir.join("l1-d1").ensure(dir=True)
|
||||
l2_d1 = l1_d1.join("l2-d1").ensure(dir=True)
|
||||
l3_d2 = l2_d1.join("l3-d2").ensure(dir=True)
|
||||
l3_d4 = l2_d1.join("l3-d4").ensure(dir=True)
|
||||
l1_d2 = tmpdir.join("l1-d2").ensure(dir=True)
|
||||
l2_d2 = l1_d2.join("l2-d2").ensure(dir=True)
|
||||
|
||||
if use_junctions:
|
||||
link_fn = llnl.util.symlink._windows_create_junction
|
||||
else:
|
||||
link_fn = os.symlink
|
||||
|
||||
link_fn(l1_d2, pathlib.Path(l2_d1) / "l3-s1")
|
||||
link_fn(l1_d1, pathlib.Path(l2_d1) / "l3-s3")
|
||||
link_fn(l3_d4, pathlib.Path(tmpdir) / "l1-s3")
|
||||
l2_s3 = pathlib.Path(l1_d2) / "l2-s3"
|
||||
link_fn(l2_d2, l2_s3)
|
||||
link_fn(l2_s3, pathlib.Path(tmpdir) / "l1-s4")
|
||||
|
||||
locations = {
|
||||
"l4-f1": str(l3_d2.join("l4-f1").ensure()),
|
||||
"l4-f2-full": str(l3_d4.join("l4-f2").ensure()),
|
||||
"l4-f2-link": str(pathlib.Path(tmpdir) / "l1-s3" / "l4-f2"),
|
||||
"l2-f1": str(l1_d2.join("l2-f1").ensure()),
|
||||
"l2-f1-link": str(pathlib.Path(tmpdir) / "l1-d1" / "l2-d1" / "l3-s1" / "l2-f1"),
|
||||
"l3-f3-full": str(l2_d2.join("l3-f3").ensure()),
|
||||
"l3-f3-link-l1": str(pathlib.Path(tmpdir) / "l1-s4" / "l3-f3"),
|
||||
}
|
||||
|
||||
return str(tmpdir), locations
|
||||
|
||||
|
||||
def test_find_max_depth_symlinks(complex_dir_structure):
    root, locations = complex_dir_structure
    root = pathlib.Path(root)
    assert set(fs.find(root, "l4-f1")) == {locations["l4-f1"]}
    assert set(fs.find(root / "l1-s3", "l4-f2", max_depth=0)) == {locations["l4-f2-link"]}
    assert set(fs.find(root / "l1-d1", "l2-f1")) == {locations["l2-f1-link"]}
    # The file is reachable both via a symlink and via its real directory; the link
    # path is searched first, and the directory is not searched again when it is
    # encountered a second time (via the non-link path) in the traversal
    assert set(fs.find(root, "l4-f2")) == {locations["l4-f2-link"]}
    # The file is reachable only via the directory, so the full file path should
    # be reported
    assert set(fs.find(root / "l1-d1", "l4-f2")) == {locations["l4-f2-full"]}
    # Check following links to links
    assert set(fs.find(root, "l3-f3")) == {locations["l3-f3-link-l1"]}
|
||||
|
||||
|
||||
def test_find_max_depth_multiple_and_repeated_entry_points(complex_dir_structure):
|
||||
root, locations = complex_dir_structure
|
||||
|
||||
fst = str(pathlib.Path(root) / "l1-d1" / "l2-d1")
|
||||
snd = str(pathlib.Path(root) / "l1-d2")
|
||||
nonexistent = str(pathlib.Path(root) / "nonexistent")
|
||||
|
||||
assert set(fs.find([fst, snd, fst, snd, nonexistent], ["l*-f*"], max_depth=1)) == {
|
||||
locations["l2-f1"],
|
||||
locations["l4-f1"],
|
||||
locations["l4-f2-full"],
|
||||
locations["l3-f3-full"],
|
||||
}
|
||||
|
||||
|
||||
def test_multiple_patterns(complex_dir_structure):
|
||||
root, _ = complex_dir_structure
|
||||
paths = fs.find(root, ["l2-f1", "l*-d*/l3-f3", "*-f*", "*/*-f*"])
|
||||
# There shouldn't be duplicate results with multiple, overlapping patterns
|
||||
assert len(set(paths)) == len(paths)
|
||||
# All files should be found
|
||||
filenames = [os.path.basename(p) for p in paths]
|
||||
assert set(filenames) == {"l2-f1", "l3-f3", "l4-f1", "l4-f2"}
|
||||
# They are ordered by first matching pattern (this is a bit of an implementation detail,
|
||||
# and we could decide to change the exact order in the future)
|
||||
assert filenames[0] == "l2-f1"
|
||||
assert filenames[1] == "l3-f3"
|
||||
|
||||
|
||||
def test_find_input_types(tmp_path: pathlib.Path):
|
||||
"""test that find only accepts sequences and instances of pathlib.Path and str for root, and
|
||||
only sequences and instances of str for patterns. In principle mypy catches these issues, but
|
||||
it is not enabled on all call-sites."""
|
||||
(tmp_path / "file.txt").write_text("")
|
||||
assert (
|
||||
fs.find(tmp_path, "file.txt")
|
||||
== fs.find(str(tmp_path), "file.txt")
|
||||
== fs.find([tmp_path, str(tmp_path)], "file.txt")
|
||||
== fs.find((tmp_path, str(tmp_path)), "file.txt")
|
||||
== fs.find(tmp_path, "file.txt")
|
||||
== fs.find(tmp_path, ["file.txt"])
|
||||
== fs.find(tmp_path, ("file.txt",))
|
||||
== [str(tmp_path / "file.txt")]
|
||||
)
|
||||
|
||||
with pytest.raises(TypeError):
|
||||
fs.find(tmp_path, pathlib.Path("file.txt")) # type: ignore
|
||||
|
||||
with pytest.raises(TypeError):
|
||||
fs.find(1, "file.txt") # type: ignore
|
||||
|
@@ -336,3 +336,56 @@ def test_grouped_exception_base_type():
|
||||
message = h.grouped_message(with_tracebacks=False)
|
||||
assert "catch-runtime-error" in message
|
||||
assert "catch-value-error" not in message
|
||||
|
||||
|
||||
def test_class_level_constant_value():
    """Tests that the Const descriptor does not allow overwriting the value from an instance"""

    class _SomeClass:
        CONST_VALUE = llnl.util.lang.Const(10)

    with pytest.raises(TypeError, match="not support assignment"):
        _SomeClass().CONST_VALUE = 11
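A descriptor with this behavior needs little more than a `__set__` that refuses writes. A minimal sketch matching the semantics checked above (not the actual llnl.util.lang.Const):

```python
class Const:
    """Descriptor sketch: expose a fixed value and reject assignment from instances."""

    def __init__(self, value):
        self.value = value

    def __get__(self, instance, owner):
        return self.value

    def __set__(self, instance, value):
        raise TypeError(f"the Const descriptor does not support assignment (got {value!r})")


class _Example:
    LIMIT = Const(10)

assert _Example().LIMIT == 10
```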
|
||||
|
||||
|
||||
def test_deprecated_property():
    """Tests the behavior of the DeprecatedProperty descriptor, which can be used when
    deprecating an attribute.
    """

    class _Deprecated(llnl.util.lang.DeprecatedProperty):
        def factory(self, instance, owner):
            return 46

    class _SomeClass:
        deprecated = _Deprecated("deprecated")

    # Default behavior is to just return the deprecated value
    s = _SomeClass()
    assert s.deprecated == 46

    # When setting error_lvl to 1 the attribute warns
    _SomeClass.deprecated.error_lvl = 1
    with pytest.warns(UserWarning):
        assert s.deprecated == 46

    # When setting error_lvl to 2 an exception is raised
    _SomeClass.deprecated.error_lvl = 2
    with pytest.raises(AttributeError):
        _ = s.deprecated
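The three `error_lvl` settings exercised here map naturally onto a descriptor whose `__get__` warns or raises before delegating to `factory`. A minimal sketch under that assumption, not the actual llnl.util.lang.DeprecatedProperty:

```python
import warnings

class DeprecatedProperty:
    """Descriptor sketch: error_lvl 0 returns the value, 1 warns first, 2 raises."""

    def __init__(self, name):
        self.name = name
        self.error_lvl = 0

    def __get__(self, instance, owner):
        if instance is None:
            return self  # class-level access exposes the descriptor, so error_lvl can be set
        if self.error_lvl == 1:
            warnings.warn(f"accessing the deprecated attribute '{self.name}'")
        elif self.error_lvl == 2:
            raise AttributeError(f"cannot access the deprecated attribute '{self.name}'")
        return self.factory(instance, owner)

    def factory(self, instance, owner):
        raise NotImplementedError("subclasses decide how the deprecated value is computed")
```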
|
||||
|
||||
|
||||
def test_fnmatch_multiple():
    named_patterns = {"a": "libf*o.so", "b": "libb*r.so"}
    regex = re.compile(llnl.util.lang.fnmatch_translate_multiple(named_patterns))

    a = regex.match("libfoo.so")
    assert a and a.group("a") == "libfoo.so"

    b = regex.match("libbar.so")
    assert b and b.group("b") == "libbar.so"

    assert not regex.match("libfoo.so.1")
    assert not regex.match("libbar.so.1")
    assert not regex.match("libfoo.solibbar.so")
    assert not regex.match("libbaz.so")
|
||||
|
@@ -329,9 +329,9 @@ def test_update_4():
|
||||
|
||||
|
||||
@pytest.mark.parametrize("direction", ["fetch", "push"])
|
||||
def test_update_connection_params(direction):
|
||||
def test_update_connection_params(direction, tmpdir, monkeypatch):
|
||||
"""Test whether new connection params expand the mirror config to a dict."""
|
||||
m = spack.mirror.Mirror("https://example.com")
|
||||
m = spack.mirror.Mirror("https://example.com", "example")
|
||||
|
||||
assert m.update(
|
||||
{
|
||||
@@ -354,12 +354,64 @@ def test_update_connection_params(direction):
|
||||
"endpoint_url": "https://example.com",
|
||||
},
|
||||
}
|
||||
|
||||
assert m.get_access_pair(direction) == ["username", "password"]
|
||||
assert m.get_access_pair(direction) == ("username", "password")
|
||||
assert m.get_access_token(direction) == "token"
|
||||
assert m.get_profile(direction) == "profile"
|
||||
assert m.get_endpoint_url(direction) == "https://example.com"
|
||||
|
||||
# Expand environment variables
|
||||
os.environ["_SPACK_TEST_PAIR_USERNAME"] = "expanded_username"
|
||||
os.environ["_SPACK_TEST_PAIR_PASSWORD"] = "expanded_password"
|
||||
os.environ["_SPACK_TEST_TOKEN"] = "expanded_token"
|
||||
|
||||
assert m.update(
|
||||
{
|
||||
"access_pair": {
|
||||
"id_variable": "_SPACK_TEST_PAIR_USERNAME",
|
||||
"secret_variable": "_SPACK_TEST_PAIR_PASSWORD",
|
||||
}
|
||||
},
|
||||
direction,
|
||||
)
|
||||
|
||||
assert m.to_dict() == {
|
||||
"url": "https://example.com",
|
||||
direction: {
|
||||
"url": "http://example.org",
|
||||
"access_pair": {
|
||||
"id_variable": "_SPACK_TEST_PAIR_USERNAME",
|
||||
"secret_variable": "_SPACK_TEST_PAIR_PASSWORD",
|
||||
},
|
||||
"access_token": "token",
|
||||
"profile": "profile",
|
||||
"endpoint_url": "https://example.com",
|
||||
},
|
||||
}
|
||||
|
||||
assert m.get_access_pair(direction) == ("expanded_username", "expanded_password")
|
||||
|
||||
assert m.update(
|
||||
{
|
||||
"access_pair": {"id": "username", "secret_variable": "_SPACK_TEST_PAIR_PASSWORD"},
|
||||
"access_token_variable": "_SPACK_TEST_TOKEN",
|
||||
},
|
||||
direction,
|
||||
)
|
||||
|
||||
assert m.to_dict() == {
|
||||
"url": "https://example.com",
|
||||
direction: {
|
||||
"url": "http://example.org",
|
||||
"access_pair": {"id": "username", "secret_variable": "_SPACK_TEST_PAIR_PASSWORD"},
|
||||
"access_token_variable": "_SPACK_TEST_TOKEN",
|
||||
"profile": "profile",
|
||||
"endpoint_url": "https://example.com",
|
||||
},
|
||||
}
|
||||
|
||||
assert m.get_access_pair(direction) == ("username", "expanded_password")
|
||||
assert m.get_access_token(direction) == "expanded_token"
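The assertions above describe how an access pair can mix literal values with the names of environment variables holding the credentials. The lookup they imply is roughly the following sketch, using a hypothetical helper name rather than the actual spack.mirror code:

```python
import os

def resolve_access_pair(access_pair):
    """Return (id, secret), expanding *_variable entries from the environment."""
    if isinstance(access_pair, (list, tuple)):  # legacy [id, secret] form
        return tuple(access_pair)
    ident = access_pair.get("id") or os.environ.get(access_pair.get("id_variable", ""))
    secret = access_pair.get("secret") or os.environ.get(access_pair.get("secret_variable", ""))
    return ident, secret

os.environ["_SPACK_TEST_PAIR_PASSWORD"] = "expanded_password"
assert resolve_access_pair(
    {"id": "username", "secret_variable": "_SPACK_TEST_PAIR_PASSWORD"}
) == ("username", "expanded_password")
```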
|
||||
|
||||
|
||||
def test_mirror_name_or_url_dir_parsing(tmp_path):
|
||||
curdir = tmp_path / "mirror"
|
||||
|
@@ -756,6 +756,48 @@ def test_spec_tree_respect_deptypes(self):
|
||||
out = s.tree(deptypes=("link", "run"))
|
||||
assert "version-test-pkg" not in out
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"query,expected_length,expected_satisfies",
|
||||
[
|
||||
({"virtuals": ["mpi"]}, 1, ["mpich", "mpi"]),
|
||||
({"depflag": dt.BUILD}, 2, ["mpich", "mpi", "callpath"]),
|
||||
({"depflag": dt.BUILD, "virtuals": ["mpi"]}, 1, ["mpich", "mpi"]),
|
||||
({"depflag": dt.LINK}, 2, ["mpich", "mpi", "callpath"]),
|
||||
({"depflag": dt.BUILD | dt.LINK}, 2, ["mpich", "mpi", "callpath"]),
|
||||
({"virtuals": ["lapack"]}, 0, []),
|
||||
],
|
||||
)
|
||||
def test_query_dependency_edges(
|
||||
self, default_mock_concretization, query, expected_length, expected_satisfies
|
||||
):
|
||||
"""Tests querying edges to dependencies on the following DAG:
|
||||
|
||||
[ ] mpileaks@=2.3
|
||||
[bl ] ^callpath@=1.0
|
||||
[bl ] ^dyninst@=8.2
|
||||
[bl ] ^libdwarf@=20130729
|
||||
[bl ] ^libelf@=0.8.13
|
||||
[bl ] ^mpich@=3.0.4
|
||||
"""
|
||||
mpileaks = default_mock_concretization("mpileaks")
|
||||
edges = mpileaks.edges_to_dependencies(**query)
|
||||
assert len(edges) == expected_length
|
||||
for constraint in expected_satisfies:
|
||||
assert any(x.spec.satisfies(constraint) for x in edges)
|
||||
|
||||
def test_query_dependents_edges(self, default_mock_concretization):
|
||||
"""Tests querying edges from dependents"""
|
||||
mpileaks = default_mock_concretization("mpileaks")
|
||||
mpich = mpileaks["mpich"]
|
||||
|
||||
# Recover the root with 2 different queries
|
||||
edges_of_link_type = mpich.edges_from_dependents(depflag=dt.LINK)
|
||||
edges_with_mpi = mpich.edges_from_dependents(virtuals=["mpi"])
|
||||
assert edges_with_mpi == edges_of_link_type
|
||||
|
||||
# Check a node depended upon by 2 parents
|
||||
assert len(mpileaks["libelf"].edges_from_dependents(depflag=dt.LINK)) == 2
|
||||
|
||||
|
||||
def test_tree_cover_nodes_reduce_deptype():
|
||||
"""Test that tree output with deptypes sticks to the sub-dag of interest, instead of looking
|
||||
|
@@ -15,7 +15,6 @@
|
||||
import spack.solver.asp
|
||||
import spack.spec
|
||||
import spack.store
|
||||
import spack.util.package_hash as ph
|
||||
import spack.variant
|
||||
import spack.version as vn
|
||||
from spack.error import SpecError, UnsatisfiableSpecError
|
||||
@@ -232,7 +231,7 @@ class TestSpecSemantics:
|
||||
("mpich+foo", "mpich foo=True", "mpich+foo"),
|
||||
("mpich++foo", "mpich foo=True", "mpich+foo"),
|
||||
("mpich foo=true", "mpich+foo", "mpich+foo"),
|
||||
("mpich foo==true", "mpich++foo", "mpich+foo"),
|
||||
("mpich foo==true", "mpich++foo", "mpich++foo"),
|
||||
("mpich~foo", "mpich foo=FALSE", "mpich~foo"),
|
||||
("mpich~~foo", "mpich foo=FALSE", "mpich~foo"),
|
||||
("mpich foo=False", "mpich~foo", "mpich~foo"),
|
||||
@@ -272,17 +271,17 @@ class TestSpecSemantics:
|
||||
("mpich+foo", "mpich", "mpich+foo"),
|
||||
("mpich~foo", "mpich", "mpich~foo"),
|
||||
("mpich foo=1", "mpich", "mpich foo=1"),
|
||||
("mpich", "mpich++foo", "mpich+foo"),
|
||||
("mpich", "mpich++foo", "mpich++foo"),
|
||||
("libelf+debug", "libelf+foo", "libelf+debug+foo"),
|
||||
("libelf+debug", "libelf+debug+foo", "libelf+debug+foo"),
|
||||
("libelf debug=2", "libelf foo=1", "libelf debug=2 foo=1"),
|
||||
("libelf debug=2", "libelf debug=2 foo=1", "libelf debug=2 foo=1"),
|
||||
("libelf+debug", "libelf~foo", "libelf+debug~foo"),
|
||||
("libelf+debug", "libelf+debug~foo", "libelf+debug~foo"),
|
||||
("libelf++debug", "libelf+debug+foo", "libelf++debug++foo"),
|
||||
("libelf debug==2", "libelf foo=1", "libelf debug==2 foo==1"),
|
||||
("libelf debug==2", "libelf debug=2 foo=1", "libelf debug==2 foo==1"),
|
||||
("libelf++debug", "libelf++debug~foo", "libelf++debug~~foo"),
|
||||
("libelf++debug", "libelf+debug+foo", "libelf+debug+foo"),
|
||||
("libelf debug==2", "libelf foo=1", "libelf debug==2 foo=1"),
|
||||
("libelf debug==2", "libelf debug=2 foo=1", "libelf debug=2 foo=1"),
|
||||
("libelf++debug", "libelf++debug~foo", "libelf++debug~foo"),
|
||||
("libelf foo=bar,baz", "libelf foo=*", "libelf foo=bar,baz"),
|
||||
("libelf foo=*", "libelf foo=bar,baz", "libelf foo=bar,baz"),
|
||||
(
|
||||
@@ -368,19 +367,24 @@ def test_abstract_specs_can_constrain_each_other(self, lhs, rhs, expected):
|
||||
'mpich cflags="-O3 -g"',
|
||||
'mpich cflags=="-O3"',
|
||||
'mpich cflags="-O3 -g"',
|
||||
'mpich cflags="-O3 -g"',
|
||||
[],
|
||||
[],
|
||||
),
|
||||
(
|
||||
'mpich cflags=="-O3 -g"',
|
||||
[("cflags", "-O3")],
|
||||
[("cflags", "-O3")],
|
||||
'mpich cflags=="-O3"',
|
||||
'mpich cflags=="-O3 -g"',
|
||||
'mpich cflags=="-O3 -g"',
|
||||
[("cflags", "-O3"), ("cflags", "-g")],
|
||||
[("cflags", "-O3"), ("cflags", "-g")],
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_constrain_compiler_flags(
|
||||
self, lhs, rhs, expected_lhs, expected_rhs, propagated_lhs, propagated_rhs
|
||||
):
|
||||
"""Constraining is asymmetric for compiler flags. Also note that
|
||||
Spec equality does not account for flag propagation, so the checks
|
||||
here are manual.
|
||||
"""
|
||||
"""Constraining is asymmetric for compiler flags."""
|
||||
lhs, rhs, expected_lhs, expected_rhs = (
|
||||
Spec(lhs),
|
||||
Spec(rhs),
|
||||
@@ -508,9 +512,6 @@ def test_constraining_abstract_specs_with_empty_intersection(self, lhs, rhs):
|
||||
("mpich", "mpich +foo"),
|
||||
("mpich", "mpich~foo"),
|
||||
("mpich", "mpich foo=1"),
|
||||
("mpich", "mpich++foo"),
|
||||
("mpich", "mpich~~foo"),
|
||||
("mpich", "mpich foo==1"),
|
||||
("multivalue-variant foo=bar", "multivalue-variant +foo"),
|
||||
("multivalue-variant foo=bar", "multivalue-variant ~foo"),
|
||||
("multivalue-variant fee=bar", "multivalue-variant fee=baz"),
|
||||
@@ -532,6 +533,58 @@ def test_concrete_specs_which_do_not_satisfy_abstract(
|
||||
with pytest.raises(UnsatisfiableSpecError):
|
||||
assert rhs.constrain(lhs)
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"lhs,rhs", [("mpich", "mpich++foo"), ("mpich", "mpich~~foo"), ("mpich", "mpich foo==1")]
|
||||
)
|
||||
def test_concrete_specs_which_satisfy_abstract(self, lhs, rhs, default_mock_concretization):
|
||||
lhs, rhs = default_mock_concretization(lhs), Spec(rhs)
|
||||
|
||||
assert lhs.intersects(rhs)
|
||||
assert rhs.intersects(lhs)
|
||||
assert lhs.satisfies(rhs)
|
||||
|
||||
s1 = lhs.copy()
|
||||
s1.constrain(rhs)
|
||||
assert s1 == lhs and s1.satisfies(lhs)
|
||||
|
||||
s2 = rhs.copy()
|
||||
s2.constrain(lhs)
|
||||
assert s2 == lhs and s2.satisfies(lhs)
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"lhs,rhs,expected,constrained",
|
||||
[
|
||||
# hdf5++mpi satisfies hdf5, and vice versa, because of the non-contradiction semantic
|
||||
("hdf5++mpi", "hdf5", True, "hdf5++mpi"),
|
||||
("hdf5", "hdf5++mpi", True, "hdf5++mpi"),
|
||||
# Same holds true for arbitrary propagated variants
|
||||
("hdf5++mpi", "hdf5++shared", True, "hdf5++mpi++shared"),
|
||||
# Here hdf5+mpi satisfies hdf5++mpi but not vice versa
|
||||
("hdf5++mpi", "hdf5+mpi", False, "hdf5+mpi"),
|
||||
("hdf5+mpi", "hdf5++mpi", True, "hdf5+mpi"),
|
||||
# Non-contradiction is violated
|
||||
("hdf5 ^foo~mpi", "hdf5++mpi", False, "hdf5++mpi ^foo~mpi"),
|
||||
("hdf5++mpi", "hdf5 ^foo~mpi", False, "hdf5++mpi ^foo~mpi"),
|
||||
],
|
||||
)
|
||||
def test_abstract_specs_with_propagation(self, lhs, rhs, expected, constrained):
|
||||
"""Tests (and documents) behavior of variant propagation on abstract specs.
|
||||
|
||||
Propagated variants do not comply with subset semantics, making it difficult to give
|
||||
precise definitions. Here we document the behavior that has been decided for the
|
||||
practical cases we face.
|
||||
"""
|
||||
lhs, rhs, constrained = Spec(lhs), Spec(rhs), Spec(constrained)
|
||||
assert lhs.satisfies(rhs) is expected
|
||||
|
||||
c = lhs.copy()
|
||||
c.constrain(rhs)
|
||||
assert c == constrained
|
||||
|
||||
c = rhs.copy()
|
||||
c.constrain(lhs)
|
||||
assert c == constrained
|
||||
|
||||
def test_satisfies_single_valued_variant(self):
|
||||
"""Tests that the case reported in
|
||||
https://github.com/spack/spack/pull/2386#issuecomment-282147639
|
||||
@@ -1641,27 +1694,20 @@ def test_spec_installed(default_mock_concretization, database):
|
||||
assert not spec.installed
|
||||
|
||||
|
||||
def test_cannot_call_dag_hash_on_abstract_spec():
|
||||
with pytest.raises(ValueError, match="Spec is not concrete"):
|
||||
Spec("pkg-a").package_hash()
|
||||
|
||||
|
||||
@pytest.mark.regression("30678")
|
||||
def test_call_dag_hash_on_old_dag_hash_spec(mock_packages, default_mock_concretization):
|
||||
# create a concrete spec
|
||||
a = default_mock_concretization("pkg-a")
|
||||
dag_hashes = {spec.name: spec.dag_hash() for spec in a.traverse()}
|
||||
|
||||
for spec in a.traverse():
|
||||
assert dag_hashes[spec.name] == spec.dag_hash()
|
||||
assert spec.package_hash() == ph.package_hash(spec)
|
||||
|
||||
# make it look like an old DAG hash spec with no package hash on the spec.
|
||||
for spec in a.traverse():
|
||||
assert spec.concrete
|
||||
spec._package_hash = None
|
||||
|
||||
for spec in a.traverse():
|
||||
assert dag_hashes[spec.name] == spec.dag_hash()
|
||||
|
||||
with pytest.raises(ValueError, match="Cannot call package_hash()"):
|
||||
spec.package_hash()
|
||||
|
||||
@@ -1717,8 +1763,8 @@ def test_package_hash_affects_dunder_and_dag_hash(mock_packages, default_mock_co
|
||||
assert a1.dag_hash() == a2.dag_hash()
|
||||
assert a1.process_hash() == a2.process_hash()
|
||||
|
||||
a1.clear_cached_hashes()
|
||||
a2.clear_cached_hashes()
|
||||
a1.clear_caches()
|
||||
a2.clear_caches()
|
||||
|
||||
# tweak the dag hash of one of these specs
|
||||
new_hash = "00000000000000000000000000000000"
|
||||
@@ -1912,3 +1958,24 @@ def test_old_format_strings_trigger_error(default_mock_concretization):
|
||||
s = Spec("pkg-a").concretized()
|
||||
with pytest.raises(SpecFormatStringError):
|
||||
s.format("${PACKAGE}-${VERSION}-${HASH}")
|
||||
|
||||
|
||||
@pytest.mark.regression("47362")
|
||||
@pytest.mark.parametrize(
|
||||
"lhs,rhs",
|
||||
[
|
||||
("hdf5 +mpi", "hdf5++mpi"),
|
||||
("hdf5 cflags==-g", "hdf5 cflags=-g"),
|
||||
("hdf5 +mpi ++shared", "hdf5+mpi +shared"),
|
||||
("hdf5 +mpi cflags==-g", "hdf5++mpi cflag=-g"),
|
||||
],
|
||||
)
|
||||
def test_equality_discriminate_on_propagation(lhs, rhs):
|
||||
"""Tests that == can discriminate abstract specs based on their 'propagation' status"""
|
||||
s, t = Spec(lhs), Spec(rhs)
|
||||
assert s != t
|
||||
assert len({s, t}) == 2
|
||||
|
||||
|
||||
def test_comparison_multivalued_variants():
|
||||
assert Spec("x=a") < Spec("x=a,b") < Spec("x==a,b") < Spec("x==a,b,c")
|
||||
|
@@ -16,6 +16,7 @@
|
||||
import io
|
||||
import json
|
||||
import os
|
||||
import pickle
|
||||
|
||||
import pytest
|
||||
import ruamel.yaml
|
||||
@@ -96,20 +97,6 @@ def test_invalid_json_spec(invalid_json, error_message):
|
||||
# Virtuals on edges
|
||||
"callpath",
|
||||
"mpileaks",
|
||||
# Various types of git versions
|
||||
# Ensure that we try to serialize all the things that might be in the node dict,
|
||||
# e.g., submodule callbacks can fail serialization if they're not fully resolved.
|
||||
"git-url-top-level@develop",
|
||||
"git-url-top-level@submodules",
|
||||
"git-url-top-level@submodules_callback",
|
||||
"git-url-top-level@3.4",
|
||||
"git-url-top-level@3.3",
|
||||
"git-url-top-level@3.2",
|
||||
"git-url-top-level@3.1",
|
||||
"git-url-top-level@3.0",
|
||||
# URL versions with checksums
|
||||
"git-url-top-level@2.3",
|
||||
"git-url-top-level@2.1",
|
||||
],
|
||||
)
|
||||
def test_roundtrip_concrete_specs(abstract_spec, default_mock_concretization):
|
||||
@@ -565,3 +552,26 @@ def test_anchorify_2():
|
||||
e: *id002
|
||||
"""
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"spec_str",
|
||||
[
|
||||
"hdf5 ++mpi",
|
||||
"hdf5 cflags==-g",
|
||||
"hdf5 foo==bar",
|
||||
"hdf5~~mpi++shared",
|
||||
"hdf5 cflags==-g foo==bar cxxflags==-O3",
|
||||
"hdf5 cflags=-g foo==bar cxxflags==-O3",
|
||||
],
|
||||
)
|
||||
def test_pickle_roundtrip_for_abstract_specs(spec_str):
|
||||
"""Tests that abstract specs correctly round trip when pickled.
|
||||
|
||||
This test compares both spec objects and their string representation, due to some
|
||||
inconsistencies in how `Spec.__eq__` is implemented.
|
||||
"""
|
||||
s = spack.spec.Spec(spec_str)
|
||||
t = pickle.loads(pickle.dumps(s))
|
||||
assert s == t
|
||||
assert str(s) == str(t)
|
||||
|
@@ -501,18 +501,20 @@ def test_find_required_file(tmpdir):
|
||||
|
||||
# First just find a single path
|
||||
results = spack.install_test.find_required_file(
|
||||
tmpdir.join("c"), filename, expected=1, recursive=True
|
||||
str(tmpdir.join("c")), filename, expected=1, recursive=True
|
||||
)
|
||||
assert isinstance(results, str)
|
||||
|
||||
# Ensure no file is found if we do not search that directory recursively
|
||||
with pytest.raises(spack.install_test.SkipTest, match="Expected 1"):
|
||||
spack.install_test.find_required_file(
|
||||
tmpdir.join("c"), filename, expected=1, recursive=False
|
||||
str(tmpdir.join("c")), filename, expected=1, recursive=False
|
||||
)
|
||||
|
||||
# Now make sure we get all of the files
|
||||
results = spack.install_test.find_required_file(tmpdir, filename, expected=3, recursive=True)
|
||||
results = spack.install_test.find_required_file(
|
||||
str(tmpdir), filename, expected=3, recursive=True
|
||||
)
|
||||
assert isinstance(results, list) and len(results) == 3
|
||||
|
||||
|
||||
|
@@ -19,27 +19,29 @@
|
||||
datadir = os.path.join(spack.paths.test_path, "data", "unparse")
|
||||
|
||||
|
||||
def canonical_source_equal_sans_name(spec1, spec2):
|
||||
def compare_sans_name(eq, spec1, spec2):
|
||||
content1 = ph.canonical_source(spec1)
|
||||
content1 = content1.replace(spack.repo.PATH.get_pkg_class(spec1.name).__name__, "TestPackage")
|
||||
content2 = ph.canonical_source(spec2)
|
||||
content2 = content2.replace(spack.repo.PATH.get_pkg_class(spec2.name).__name__, "TestPackage")
|
||||
|
||||
return content1 == content2
|
||||
if eq:
|
||||
assert content1 == content2
|
||||
else:
|
||||
assert content1 != content2
|
||||
|
||||
|
||||
def package_hash_equal_sans_name(spec1, spec2):
|
||||
def compare_hash_sans_name(eq, spec1, spec2):
|
||||
content1 = ph.canonical_source(spec1)
|
||||
pkg_cls1 = spack.repo.PATH.get_pkg_class(spec1.name)
|
||||
content1 = content1.replace(pkg_cls1.__name__, "TestPackage")
|
||||
hash1 = ph.package_hash(spec1, source=content1)
|
||||
hash1 = pkg_cls1(spec1).content_hash(content=content1)
|
||||
|
||||
content2 = ph.canonical_source(spec2)
|
||||
pkg_cls2 = spack.repo.PATH.get_pkg_class(spec2.name)
|
||||
content2 = content2.replace(pkg_cls2.__name__, "TestPackage")
|
||||
hash2 = ph.package_hash(spec2, source=content2)
|
||||
hash2 = pkg_cls2(spec2).content_hash(content=content2)
|
||||
|
||||
return hash1 == hash2
|
||||
assert (hash1 == hash2) == eq
|
||||
|
||||
|
||||
def test_hash(mock_packages, config):
|
||||
@@ -55,11 +57,11 @@ def test_different_variants(mock_packages, config):
|
||||
def test_all_same_but_name(mock_packages, config):
|
||||
spec1 = Spec("hash-test1@=1.2")
|
||||
spec2 = Spec("hash-test2@=1.2")
|
||||
assert canonical_source_equal_sans_name(spec1, spec2)
|
||||
compare_sans_name(True, spec1, spec2)
|
||||
|
||||
spec1 = Spec("hash-test1@=1.2 +varianty")
|
||||
spec2 = Spec("hash-test2@=1.2 +varianty")
|
||||
assert canonical_source_equal_sans_name(spec1, spec2)
|
||||
compare_sans_name(True, spec1, spec2)
|
||||
|
||||
|
||||
def test_all_same_but_archive_hash(mock_packages, config):
|
||||
@@ -68,63 +70,60 @@ def test_all_same_but_archive_hash(mock_packages, config):
|
||||
"""
|
||||
spec1 = Spec("hash-test1@=1.3")
|
||||
spec2 = Spec("hash-test2@=1.3")
|
||||
assert canonical_source_equal_sans_name(spec1, spec2)
|
||||
compare_sans_name(True, spec1, spec2)
|
||||
|
||||
|
||||
def test_all_same_but_patch_contents(mock_packages, config):
|
||||
spec1 = Spec("hash-test1@=1.1")
|
||||
spec2 = Spec("hash-test2@=1.1")
|
||||
assert canonical_source_equal_sans_name(spec1, spec2)
|
||||
compare_sans_name(True, spec1, spec2)
|
||||
|
||||
|
||||
def test_all_same_but_patches_to_apply(mock_packages, config):
|
||||
spec1 = Spec("hash-test1@=1.4")
|
||||
spec2 = Spec("hash-test2@=1.4")
|
||||
assert canonical_source_equal_sans_name(spec1, spec2)
|
||||
compare_sans_name(True, spec1, spec2)
|
||||
|
||||
|
||||
def test_all_same_but_install(mock_packages, config):
|
||||
spec1 = Spec("hash-test1@=1.5")
|
||||
spec2 = Spec("hash-test2@=1.5")
|
||||
assert not canonical_source_equal_sans_name(spec1, spec2)
|
||||
compare_sans_name(False, spec1, spec2)
|
||||
|
||||
|
||||
def test_package_hash_all_same_but_patch_contents_different(mock_packages, config):
|
||||
def test_content_hash_all_same_but_patch_contents(mock_packages, config):
|
||||
spec1 = Spec("hash-test1@1.1").concretized()
|
||||
spec2 = Spec("hash-test2@1.1").concretized()
|
||||
|
||||
assert package_hash_equal_sans_name(spec1, spec2)
|
||||
assert spec1.dag_hash() != spec2.dag_hash()
|
||||
assert spec1.to_node_dict()["patches"] != spec2.to_node_dict()["patches"]
|
||||
compare_hash_sans_name(False, spec1, spec2)
|
||||
|
||||
|
||||
def test_package_hash_not_concretized(mock_packages, config):
|
||||
"""Check that ``package_hash()`` works on abstract specs."""
|
||||
# these are different due to patches but not package hash
|
||||
def test_content_hash_not_concretized(mock_packages, config):
|
||||
"""Check that Package.content_hash() works on abstract specs."""
|
||||
# these are different due to the package hash
|
||||
spec1 = Spec("hash-test1@=1.1")
|
||||
spec2 = Spec("hash-test2@=1.3")
|
||||
assert package_hash_equal_sans_name(spec1, spec2)
|
||||
compare_hash_sans_name(False, spec1, spec2)
|
||||
|
||||
# at v1.1 these are actually the same package when @when's are removed
|
||||
# and the name isn't considered
|
||||
spec1 = Spec("hash-test1@=1.1")
|
||||
spec2 = Spec("hash-test2@=1.1")
|
||||
assert package_hash_equal_sans_name(spec1, spec2)
|
||||
compare_hash_sans_name(True, spec1, spec2)
|
||||
|
||||
# these end up being different b/c without a version, we can't eliminate much of the
|
||||
# package.py when canonicalizing source.
|
||||
# these end up being different b/c we can't eliminate much of the package.py
|
||||
# without a version.
|
||||
spec1 = Spec("hash-test1")
|
||||
spec2 = Spec("hash-test2")
|
||||
assert not package_hash_equal_sans_name(spec1, spec2)
|
||||
compare_hash_sans_name(False, spec1, spec2)
|
||||
|
||||
|
||||
def test_package_hash_different_variants(mock_packages, config):
|
||||
def test_content_hash_different_variants(mock_packages, config):
|
||||
spec1 = Spec("hash-test1@1.2 +variantx").concretized()
|
||||
spec2 = Spec("hash-test2@1.2 ~variantx").concretized()
|
||||
assert package_hash_equal_sans_name(spec1, spec2)
|
||||
compare_hash_sans_name(True, spec1, spec2)
|
||||
|
||||
|
||||
def test_package_hash_cannot_get_details_from_ast(mock_packages, config):
|
||||
def test_content_hash_cannot_get_details_from_ast(mock_packages, config):
|
||||
"""Packages hash-test1 and hash-test3 would be considered the same
|
||||
except that hash-test3 conditionally executes a phase based on
|
||||
a "when" directive that Spack cannot evaluate by examining the
|
||||
@@ -136,36 +135,18 @@ def test_package_hash_cannot_get_details_from_ast(mock_packages, config):
|
||||
"""
|
||||
spec3 = Spec("hash-test1@1.7").concretized()
|
||||
spec4 = Spec("hash-test3@1.7").concretized()
|
||||
assert not package_hash_equal_sans_name(spec3, spec4)
|
||||
compare_hash_sans_name(False, spec3, spec4)
|
||||
|
||||
|
||||
def test_package_hash_all_same_but_archive_hash(mock_packages, config):
|
||||
def test_content_hash_all_same_but_archive_hash(mock_packages, config):
|
||||
spec1 = Spec("hash-test1@1.3").concretized()
|
||||
spec2 = Spec("hash-test2@1.3").concretized()
|
||||
|
||||
assert package_hash_equal_sans_name(spec1, spec2)
|
||||
|
||||
# the sources for these two packages will not be the same b/c their archive hashes differ
|
||||
assert spec1.to_node_dict()["sources"] != spec2.to_node_dict()["sources"]
|
||||
compare_hash_sans_name(False, spec1, spec2)
|
||||
|
||||
|
||||
def test_package_hash_all_same_but_resources(mock_packages, config):
|
||||
spec1 = Spec("hash-test1@1.7").concretized()
|
||||
spec2 = Spec("hash-test1@1.8").concretized()
|
||||
|
||||
# these should be the same
|
||||
assert canonical_source_equal_sans_name(spec1, spec2)
|
||||
assert package_hash_equal_sans_name(spec1, spec2)
|
||||
|
||||
# but 1.7 has a resource that affects the hash
|
||||
assert spec1.to_node_dict()["sources"] != spec2.to_node_dict()["sources"]
|
||||
|
||||
assert spec1.dag_hash() != spec2.dag_hash()
|
||||
|
||||
|
||||
def test_package_hash_parse_dynamic_function_call(mock_packages, config):
|
||||
def test_content_hash_parse_dynamic_function_call(mock_packages, config):
|
||||
spec = Spec("hash-test4").concretized()
|
||||
ph.package_hash(spec)
|
||||
spec.package.content_hash()
|
||||
|
||||
|
||||
many_strings = '''\
|
||||
|
lib/spack/spack/test/utilities.py (new file, 32 lines)
@@ -0,0 +1,32 @@
|
||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
"""Non-fixture utilities for test code. Must be imported.
|
||||
"""
|
||||
from spack.main import make_argument_parser
|
||||
|
||||
|
||||
class SpackCommandArgs:
|
||||
"""Use this to get an Args object like what is passed into
|
||||
a command.
|
||||
|
||||
Useful for emulating args in unit tests that want to check
|
||||
helper functions in Spack commands. Ensures that you get all
|
||||
the default arg values established by the parser.
|
||||
|
||||
Example usage::
|
||||
|
||||
install_args = SpackCommandArgs("install")("-v", "mpich")
|
||||
"""
|
||||
|
||||
def __init__(self, command_name):
|
||||
self.parser = make_argument_parser()
|
||||
self.command_name = command_name
|
||||
|
||||
def __call__(self, *argv, **kwargs):
|
||||
self.parser.add_command(self.command_name)
|
||||
prepend = kwargs["global_args"] if "global_args" in kwargs else []
|
||||
args, unknown = self.parser.parse_known_args(prepend + [self.command_name] + list(argv))
|
||||
return args
|
@@ -762,7 +762,7 @@ def test_disjoint_set_fluent_methods():
|
||||
@pytest.mark.regression("32694")
|
||||
@pytest.mark.parametrize("other", [True, False])
|
||||
def test_conditional_value_comparable_to_bool(other):
|
||||
value = spack.variant.Value("98", when="@1.0")
|
||||
value = spack.variant.ConditionalValue("98", when=Spec("@1.0"))
|
||||
comparison = value == other
|
||||
assert comparison is False
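The equality behavior checked here boils down to an `__eq__` that never equates a conditional value with a plain boolean. A minimal sketch of that idea, not the actual spack.variant.ConditionalValue:

```python
class ConditionalValueSketch:
    """A variant value that is only active under a 'when' condition."""

    def __init__(self, value, when=None):
        self.value = value
        self.when = when

    def __eq__(self, other):
        if isinstance(other, bool):
            # Comparing against True/False must never succeed (regression 32694)
            return False
        return isinstance(other, ConditionalValueSketch) and self.value == other.value

    def __hash__(self):
        return hash(self.value)

assert (ConditionalValueSketch("98") == True) is False  # noqa: E712
assert (ConditionalValueSketch("98") == False) is False  # noqa: E712
```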
|
||||
|
||||
|
Some files were not shown because too many files have changed in this diff.