# Compare commits

**147 commits** · `features/c` ... `v0.19.1`

The author and date columns of the commit table were not captured; the commit SHAs, in order:

```text
5e0d210734 e3d4531663 9e8e72592d 2d9fa60f53 f3149a6c35 403ba23632 d62c10ff76
3aa24e5b13 c7200b4327 5b02b7003a f83972ddc4 fffca98a02 390112fc76 2f3f4ad4da
0f9e07321f 7593b18626 e964a396c9 8d45404b5b 7055061635 5e9799db4a 4258fbbed3
db8fcbbee4 d33c990278 59dd405626 dbbf7dc969 8a71aa874f 0766f63182 380fedb7bc
33cc47f6d3 5935f9c8a0 a86911246a cd94827c5f bb8b4f9979 fc7a16e77e e633e57297
7b74fab12f 005c7cd353 0f54a63dfd f11778bb02 3437926cde d25375da55 0b302034df
b9f69a8dfa c3e9aeeed0 277234c044 0077a25639 6a3e20023e f92987b11f 61f198e8af
4d90d663a3 7a7e9eb04f 3ea4b53bf6 ad0d908d8d 9a793fe01b 6dd3c78924 5b080d63fb
ea8e3c27a4 30ffd6d33e c1aec72f60 cfd0dc6d89 60b3d32072 5142ebdd57 6b782e6d7e
168bced888 489de38890 2a20520cc8 ae6213b193 bb1cd430c0 36877abd02 62db008e42
b10d75b1c6 078767946c 9ca7165ef0 d1d668a9d5 284c3a3fd8 ec89c47aee 49114ffff7
05fd39477e e53a19a08d a8470a7efe 0f26d4402e 4d28a64661 4a5e68816b 97fe7ad32b
052bf6b9df 80f5939a94 bca8b52a8d e4c2d1afc6 89976af732 a079722b1c f332ac6d21
e4218595de c9561c5a0e f099a68e65 54abc7fb7e 23eb2dc9d6 1a3415619e 84a3d32aa3
69d4637671 3693622edf b3b675157c 6241cdb27b 28d669cb39 c0170a675b 28a77c2821
01a5788517 cc84ab1e92 e0e20e3e79 9d08feb63e 8be6378688 ec05543054 a30b60f9a6
8fb8381b6f 1dcb5d1fa7 96b8240ea6 47df88404a 476e647c94 c3851704a2 1eb35d0378
74c3fbdf87 492525fda5 9dcd4fac15 2ab974f530 e045dabb3a f8e4ad5209 4b84cd8af5
fce7bf179f f3db624b86 22c2f3fe89 52cc798948 258edf7dac d4b45605c8 8b4b26fcbd
f07f75a47b f286a7fa9a fffc4c4846 27e1d28c0b e550f48b17 0f32f7d0e9 5558940ce6
c9fcb8aadc 3346c0918b 71d480515b c0ed5612ab d79cba1a77 34724cae87 9becc82dfc
```

## Changed files
### `.github/workflows/audit.yaml` (2 changes)

```diff
@@ -25,7 +25,7 @@ jobs:
       python-version: ${{inputs.python_version}}
   - name: Install Python packages
     run: |
-      pip install --upgrade pip six setuptools pytest codecov 'coverage[toml]<=6.2'
+      pip install --upgrade pip six setuptools pytest codecov coverage[toml]
   - name: Package audits (with coverage)
     if: ${{ inputs.with_coverage == 'true' }}
     run: |
```
### `.github/workflows/build-containers.yml` (8 changes)

```diff
@@ -80,16 +80,16 @@ jobs:
       fi

   - name: Upload Dockerfile
-    uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
+    uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
     with:
       name: dockerfiles
       path: dockerfiles

   - name: Set up QEMU
-    uses: docker/setup-qemu-action@8b122486cedac8393e77aa9734c3528886e4a1a8 # @v1
+    uses: docker/setup-qemu-action@e81a89b1732b9c48d79cd809d8d81d79c4647a18 # @v1

   - name: Set up Docker Buildx
-    uses: docker/setup-buildx-action@c74574e6c82eeedc46366be1b0d287eff9085eb6 # @v1
+    uses: docker/setup-buildx-action@8c0edbc76e98fa90f69d9a2c020dcb50019dc325 # @v1

   - name: Log in to GitHub Container Registry
     uses: docker/login-action@f4ef78c080cd8ba55a85445d5b36e214a81df20a # @v1
@@ -106,7 +106,7 @@ jobs:
       password: ${{ secrets.DOCKERHUB_TOKEN }}

   - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-    uses: docker/build-push-action@c84f38281176d4c9cdb1626ffafcd6b3911b5d94 # @v2
+    uses: docker/build-push-action@c56af957549030174b10d6867f20e78cfd7debc5 # @v2
     with:
       context: dockerfiles/${{ matrix.dockerfile[0] }}
       platforms: ${{ matrix.dockerfile[1] }}
```
### `.github/workflows/ci.yaml` (2 changes)

```diff
@@ -46,7 +46,7 @@ jobs:
     with:
       fetch-depth: 0
   # For pull requests it's not necessary to checkout the code
-  - uses: dorny/paths-filter@b2feaf19c27470162a626bd6fa8438ae5b263721
+  - uses: dorny/paths-filter@4512585405083f25c027a35db413c2b3b9006d50
     id: filter
     with:
       # See https://github.com/dorny/paths-filter/issues/56 for the syntax used below
```
### `.github/workflows/unit_tests.yaml` (17 changes)

```diff
@@ -11,10 +11,10 @@ concurrency:
 jobs:
   # Run unit tests with different configurations on linux
   ubuntu:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-20.04
     strategy:
       matrix:
-        python-version: ['2.7', '3.6', '3.7', '3.8', '3.9', '3.10']
+        python-version: ['2.7', '3.6', '3.7', '3.8', '3.9', '3.10', '3.11']
         concretizer: ['clingo']
         on_develop:
         - ${{ github.ref == 'refs/heads/develop' }}
@@ -22,7 +22,7 @@ jobs:
         - python-version: 2.7
           concretizer: original
          on_develop: ${{ github.ref == 'refs/heads/develop' }}
-        - python-version: '3.10'
+        - python-version: '3.11'
          concretizer: original
          on_develop: ${{ github.ref == 'refs/heads/develop' }}
        exclude:
@@ -35,6 +35,9 @@ jobs:
        - python-version: '3.9'
          concretizer: 'clingo'
          on_develop: false
+        - python-version: '3.10'
+          concretizer: 'clingo'
+          on_develop: false

     steps:
     - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
@@ -86,7 +89,7 @@ jobs:
       SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
       SPACK_TEST_PARALLEL: 2
       COVERAGE: true
-      UNIT_TEST_COVERAGE: ${{ (matrix.python-version == '3.10') }}
+      UNIT_TEST_COVERAGE: ${{ (matrix.python-version == '3.11') }}
     run: |
       share/spack/qa/run-unit-tests
   - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
@@ -101,7 +104,7 @@ jobs:
       fetch-depth: 0
   - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
     with:
-      python-version: '3.10'
+      python-version: '3.11'
   - name: Install System packages
     run: |
       sudo apt-get -y update
@@ -109,7 +112,7 @@ jobs:
       sudo apt-get install -y coreutils kcov csh zsh tcsh fish dash bash
   - name: Install Python packages
     run: |
-      pip install --upgrade pip six setuptools pytest codecov coverage[toml]==6.2 pytest-xdist
+      pip install --upgrade pip six setuptools pytest codecov coverage[toml] pytest-xdist
   - name: Setup git configuration
     run: |
       # Need this for the git tests to succeed.
@@ -158,7 +161,7 @@ jobs:
       fetch-depth: 0
   - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
     with:
-      python-version: '3.10'
+      python-version: '3.11'
   - name: Install System packages
     run: |
       sudo apt-get -y update
```
### `.github/workflows/valid-style.yml` (6 changes)

```diff
@@ -21,7 +21,7 @@ jobs:
   - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
   - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
     with:
-      python-version: '3.10'
+      python-version: '3.11'
       cache: 'pip'
   - name: Install Python Packages
     run: |
@@ -40,7 +40,7 @@ jobs:
       fetch-depth: 0
   - uses: actions/setup-python@13ae5bb136fac2878aff31522b9efb785519f984 # @v2
     with:
-      python-version: '3.10'
+      python-version: '3.11'
       cache: 'pip'
   - name: Install Python packages
     run: |
@@ -57,4 +57,4 @@ jobs:
     uses: ./.github/workflows/audit.yaml
     with:
       with_coverage: ${{ inputs.with_coverage }}
-      python_version: '3.10'
+      python_version: '3.11'
```
### `.github/workflows/windows_python.yml` (4 changes)

```diff
@@ -109,11 +109,11 @@ jobs:
       echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
     env:
       ProgressPreference: SilentlyContinue
-  - uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
+  - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
     with:
       name: Windows Spack Installer Bundle
       path: ${{ env.installer_root }}\pkg\Spack.exe
-  - uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
+  - uses: actions/upload-artifact@83fd05a356d7e2593de66fc9913b3002723633cb
     with:
       name: Windows Spack Installer
       path: ${{ env.installer_root}}\pkg\Spack.msi
```
### `CHANGELOG.md` (299 changes)

The release notes below were added at the top of the changelog (`@@ -1,16 +1,309 @@`):

# v0.19.1 (2023-02-07)

### Spack Bugfixes

* `buildcache create`: make "file exists" less verbose (#35019)
* `spack mirror create`: don't change paths to urls (#34992)
* Improve error message for requirements (#33988)
* `uninstall`: fix accidental cubic complexity (#34005)
* `scons`: fix signature for `install_args` (#34481)
* Fix `combine_phase_logs` text encoding issues (#34657)
* Use a module-like object to propagate changes in the MRO when setting the build environment (#34059)
* `PackageBase` should not define builder legacy attributes (#33942)
* Forward lookup of the "run_tests" attribute (#34531)
* Bugfix for timers (#33917, #33900)
* Fix path handling in prefix inspections (#35318)
* Fix libtool filter for Fujitsu compilers (#34916)
* Bugfix for duplicate rpath errors on macOS when creating build caches (#34375)
* `FileCache`: delete the new cache file on exception (#34623)
* Propagate exceptions from the Spack python console (#34547)
* Tests: fix a bug/typo in a `config_values.py` fixture (#33886)
* Various CI fixes (#33953, #34560, #34828)
* Docs: remove monitors and analyzers, fix typos (#34358, #33926)
* Bump release version for the tutorial command (#33859)


# v0.19.0 (2022-11-11)

`v0.19.0` is a major feature release.

## Major features in this release

1. **Package requirements**

   Spack's traditional [package preferences](https://spack.readthedocs.io/en/latest/build_settings.html#package-preferences) are soft, but we've added hard requirements to `packages.yaml` and `spack.yaml` (#32528, #32369). Package requirements use the same syntax as specs:

   ```yaml
   packages:
     libfabric:
       require: "@1.13.2"
     mpich:
       require:
       - one_of: ["+cuda", "+rocm"]
   ```

   More details in [the docs](https://spack.readthedocs.io/en/latest/build_settings.html#package-requirements).
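   Requirements can also be phrased as alternatives. A minimal sketch, assuming the `any_of` form described in the v0.19 requirements docs (the `openmpi` constraints here are illustrative only, not from this page):

   ```yaml
   packages:
     openmpi:
       require:
       - any_of: ["@4.1:", "%gcc"]
   ```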
2. **Environment UI Improvements**

   * Fewer surprising modifications to `spack.yaml` (#33711):

     * `spack install` in an environment will no longer add to the `specs:` list; you'll need to either use `spack add <spec>` or `spack install --add <spec>`.

     * Similarly, `spack uninstall` will not remove from your environment's `specs:` list; you'll need to use `spack remove` or `spack uninstall --remove`.

     This will make it easier to manage an environment, as there is clear separation between the stack to be installed (`spack.yaml`/`spack.lock`) and which parts of it should be installed (`spack install` / `spack uninstall`).

   * `concretizer:unify:true` is now the default mode for new environments (#31787)

     We see more users creating `unify:true` environments now. Users who need `unify:false` can add it to their environment to get the old behavior, which concretizes every spec in the environment independently (a minimal example appears at the end of this item).

   * Include environment configuration from URLs (#29026, [docs](https://spack.readthedocs.io/en/latest/environments.html#included-configurations))

     You can now include configuration in your environment directly from a URL:

     ```yaml
     spack:
       include:
       - https://github.com/path/to/raw/config/compilers.yaml
     ```
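   As referenced above, a minimal `spack.yaml` opting back out of unification could look like this sketch (the empty `specs:` list is a placeholder):

   ```yaml
   spack:
     specs: []
     concretizer:
       unify: false
   ```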
3. **Multiple Build Systems**

   An increasing number of packages in the ecosystem need the ability to support multiple build systems (#30738, [docs](https://spack.readthedocs.io/en/latest/packaging_guide.html#multiple-build-systems)), either across versions, across platforms, or within the same version of the software. This has been hard to support through multiple inheritance, as methods from different build system superclasses would conflict. `package.py` files can now define separate builder classes with installation logic for different build systems, e.g.:

   ```python
   class ArpackNg(CMakePackage, AutotoolsPackage):

       build_system(
           conditional("cmake", when="@0.64:"),
           conditional("autotools", when="@:0.63"),
           default="cmake",
       )

   class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
       def cmake_args(self):
           pass

   class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder):
       def configure_args(self):
           pass
   ```

4. **Compiler and variant propagation**

   Currently, compiler flags and variants are inconsistent: compiler flags set for a package are inherited by its dependencies, while variants are not. We should have these be consistent by allowing for inheritance to be enabled or disabled for both variants and compiler flags.

   Example syntax:

   - `package ++variant`: enabled variant that will be propagated to dependencies
   - `package +variant`: enabled variant that will NOT be propagated to dependencies
   - `package ~~variant`: disabled variant that will be propagated to dependencies
   - `package ~variant`: disabled variant that will NOT be propagated to dependencies
   - `package cflags==-g`: `cflags` will be propagated to dependencies
   - `package cflags=-g`: `cflags` will NOT be propagated to dependencies

   Syntax for non-boolean variants is similar to compiler flags. More in the docs for [variants](https://spack.readthedocs.io/en/latest/basic_usage.html#variants) and [compiler flags](https://spack.readthedocs.io/en/latest/basic_usage.html#compiler-flags).
5. **Enhancements to git version specifiers**

   * `v0.18.0` added the ability to use git commits as versions. You can now use the `git.` prefix to specify git tags or branches as versions. All of these are valid git versions in `v0.19` (#31200):

     ```console
     foo@abcdef1234abcdef1234abcdef1234abcdef1234      # raw commit
     foo@git.abcdef1234abcdef1234abcdef1234abcdef1234  # commit with git prefix
     foo@git.develop                                   # the develop branch
     foo@git.0.19                                      # use the 0.19 tag
     ```

   * `v0.19` also gives you more control over how Spack interprets git versions, in case Spack cannot detect the version from the git repository. You can suffix a git version with `=<version>` to force Spack to concretize it as a particular version (#30998, #31914, #32257):

     ```console
     # use mybranch, but treat it as version 3.2 for version comparison
     foo@git.mybranch=3.2

     # use the given commit, but treat it as develop for version comparison
     foo@git.abcdef1234abcdef1234abcdef1234abcdef1234=develop
     ```

     More in [the docs](https://spack.readthedocs.io/en/latest/basic_usage.html#version-specifier)

6. **Changes to Cray EX Support**

   Cray machines have historically had their own "platform" within Spack, because we needed to go through the module system to leverage compilers and MPI installations on these machines. The Cray EX programming environment now provides standalone `craycc` executables and proper `mpicc` wrappers, so Spack can treat EX machines like Linux with extra packages (#29392).

   We expect this to greatly reduce bugs, as external packages and compilers can now be used by prefix instead of through modules. We will also no longer be subject to reproducibility issues when modules change from Cray PE release to release and from site to site. This also simplifies dealing with the underlying Linux OS on Cray systems, as Spack will properly model the machine's OS as either SuSE or RHEL.

7. **Improvements to tests and testing in CI**

   * `spack ci generate --tests` will generate a `.gitlab-ci.yml` file that not only does builds but also runs tests for built packages (#27877). Public GitHub pipelines now also run tests in CI.

   * `spack test run --explicit` will only run tests for packages that are explicitly installed, instead of all packages.

8. **Experimental binding link model**

   You can add a new option to `config.yaml` to make Spack embed absolute paths to needed shared libraries in ELF executables and shared libraries on Linux (#31948, [docs](https://spack.readthedocs.io/en/latest/config_yaml.html#shared-linking-bind)):

   ```yaml
   config:
     shared_linking:
       type: rpath
       bind: true
   ```

   This can improve launch time at scale for parallel applications, and it can make installations less susceptible to environment variables like `LD_LIBRARY_PATH`, especially when dealing with external libraries that use `RUNPATH`. You can think of this as a faster, even higher-precedence version of `RPATH`.

## Other new features of note

* `spack spec` prints dependencies more legibly. Dependencies in the output now appear at the *earliest* level of indentation possible (#33406)
* You can override `package.py` attributes like `url` directly in `packages.yaml` (#33275, [docs](https://spack.readthedocs.io/en/latest/build_settings.html#assigning-package-attributes)); see the sketch after this list
* There are a number of new architecture-related format strings you can use in Spack configuration files to specify paths (#29810, [docs](https://spack.readthedocs.io/en/latest/configuration.html#config-file-variables))
* Spack now supports bootstrapping Clingo on Windows (#33400)
* There is now support for an `RPATH`-like library model on Windows (#31930)
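A minimal sketch of the `packages.yaml` attribute override mentioned above (the `mpileaks` URL mirrors the example used later in this page and is illustrative only):

```yaml
packages:
  mpileaks:
    url: http://www.somewhereelse.com/mpileaks-1.0.tar.gz
```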

## Performance Improvements

* Major performance improvements for installation from binary caches (#27610, #33628, #33636, #33608, #33590, #33496)
* Test suite can now be parallelized using `xdist` (used in GitHub Actions) (#32361)
* Reduce lock contention for parallel builds in environments (#31643)

## New binary caches and stacks

* We now build nearly all of E4S with `oneapi` in our buildcache (#31781, #31804, #31803, #31840, #31991, #32117, #32107, #32239)
* Added 3 new machine learning-centric stacks to binary cache: `x86_64_v3`, CUDA, ROCm (#31592, #33463)

## Removals and Deprecations

* Support for Python 3.5 is dropped (#31908). Only Python 2.7 and 3.6+ are officially supported.

* This is the last Spack release that will support Python 2 (#32615). Spack `v0.19` will emit a deprecation warning if you run it with Python 2, and Python 2 support will soon be removed from the `develop` branch.

* `LD_LIBRARY_PATH` is no longer set by default by `spack load` or module loads.

  Setting `LD_LIBRARY_PATH` in Spack environments/modules can cause binaries from outside of Spack to crash, and Spack's own builds use `RPATH` and do not need `LD_LIBRARY_PATH` set in order to run. If you still want the old behavior, you can run these commands to configure Spack to set `LD_LIBRARY_PATH` (the resulting configuration is sketched after this list):

  ```console
  spack config add modules:prefix_inspections:lib64:[LD_LIBRARY_PATH]
  spack config add modules:prefix_inspections:lib:[LD_LIBRARY_PATH]
  ```

* The `spack:concretization:[together|separately]` option has been removed after being deprecated in `v0.18`. Use `concretizer:unify:[true|false]`.
* `config:module_roots` is no longer supported after being deprecated in `v0.18`. Use configuration in module sets instead (#28659, [docs](https://spack.readthedocs.io/en/latest/module_file_support.html)).
* `spack activate` and `spack deactivate` are no longer supported, having been deprecated in `v0.18`. Use an environment with a view instead of activating/deactivating ([docs](https://spack.readthedocs.io/en/latest/environments.html#configuration-in-spack-yaml)).
* The old YAML format for buildcaches is now deprecated (#33707). If you are using an old buildcache with YAML metadata you will need to regenerate it with JSON metadata.
* `spack bootstrap trust` and `spack bootstrap untrust` are deprecated in favor of `spack bootstrap enable` and `spack bootstrap disable` and will be removed in `v0.20` (#33600).
* The `graviton2` architecture has been renamed to `neoverse_n1`, and `graviton3` is now `neoverse_v1`. Buildcaches using the old architecture names will need to be rebuilt.
* The terms `blacklist` and `whitelist` have been replaced with `include` and `exclude` in all configuration files (#31569). You can use `spack config update` to automatically fix your configuration files.
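The two `spack config add` commands in the `LD_LIBRARY_PATH` item above would yield modules configuration roughly like this sketch (assuming the default configuration scope):

```yaml
modules:
  prefix_inspections:
    lib64: [LD_LIBRARY_PATH]
    lib: [LD_LIBRARY_PATH]
```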

## Notable Bugfixes

* Permission setting on installation now handles effective uid properly (#19980)
* `buildable:true` for an MPI implementation now overrides `buildable:false` for `mpi` (#18269)
* Improved error messages when attempting to use an unconfigured compiler (#32084)
* Do not punish explicitly requested compiler mismatches in the solver (#30074)
* `spack stage`: add missing `--fresh` and `--reuse` (#31626)
* Fixes for adding build system executables like `cmake` to package scope (#31739)
* Bugfix for binary relocation with aliased strings produced by newer `binutils` (#32253)

## Spack community stats

* 6,751 total packages, 335 new since `v0.18.0`
  * 141 new Python packages
  * 89 new R packages
* 303 people contributed to this release
  * 287 committers to packages
  * 57 committers to core


# v0.18.1 (2022-07-19)

### Spack Bugfixes
* Fix several bugs related to bootstrapping (#30834, #31042, #31180)
* Fix a regression that was causing spec hashes to differ between Python 2 and Python 3 (#31092)
* Fixed compiler flags for oneAPI and DPC++ (#30856)
* Fixed several issues related to concretization (#31142, #31153, #31170, #31226)
* Improved support for Cray manifest file and `spack external find` (#31144, #31201, #31173, #31186)
* Assign a version to openSUSE Tumbleweed according to the GLIBC version in the system (#19895)
* Improved Dockerfile generation for `spack containerize` (#29741, #31321)
* Fixed a few bugs related to concurrent execution of commands (#31509, #31493, #31477)

### Package updates
* WarpX: add v22.06, fixed libs property (#30866, #31102)
### Release support table (file name not captured)

```diff
@@ -10,8 +10,8 @@ For more on Spack's release structure, see

 | Version | Supported          |
 | ------- | ------------------ |
 | develop | :white_check_mark: |
-| 0.17.x  | :white_check_mark: |
-| 0.16.x  | :white_check_mark: |
+| 0.19.x  | :white_check_mark: |
+| 0.18.x  | :white_check_mark: |

 ## Reporting a Vulnerability
```
### `bin/spack` (48 changes)

```diff
@@ -49,52 +49,8 @@ spack_prefix = os.path.dirname(os.path.dirname(spack_file))
 spack_lib_path = os.path.join(spack_prefix, "lib", "spack")
 sys.path.insert(0, spack_lib_path)

-# Add external libs
-spack_external_libs = os.path.join(spack_lib_path, "external")
-
-if sys.version_info[:2] <= (2, 7):
-    sys.path.insert(0, os.path.join(spack_external_libs, "py2"))
-
-sys.path.insert(0, spack_external_libs)
-
-# Here we delete ruamel.yaml in case it has been already imported from site
-# (see #9206 for a broader description of the issue).
-#
-# Briefly: ruamel.yaml produces a .pth file when installed with pip that
-# makes the site installed package the preferred one, even though sys.path
-# is modified to point to another version of ruamel.yaml.
-if "ruamel.yaml" in sys.modules:
-    del sys.modules["ruamel.yaml"]
-
-if "ruamel" in sys.modules:
-    del sys.modules["ruamel"]
-
-# The following code is here to avoid failures when updating
-# the develop version, due to spurious argparse.pyc files remaining
-# in the libs/spack/external directory, see:
-# https://github.com/spack/spack/pull/25376
-# TODO: Remove in v0.18.0 or later
-try:
-    import argparse
-except ImportError:
-    argparse_pyc = os.path.join(spack_external_libs, "argparse.pyc")
-    if not os.path.exists(argparse_pyc):
-        raise
-    try:
-        os.remove(argparse_pyc)
-        import argparse  # noqa: F401
-    except Exception:
-        msg = (
-            "The file\n\n\t{0}\n\nis corrupted and cannot be deleted by Spack. "
-            "Either delete it manually or ask some administrator to "
-            "delete it for you."
-        )
-        print(msg.format(argparse_pyc))
-        sys.exit(1)
-
-
-import spack.main  # noqa: E402
+from spack_installable.main import main  # noqa: E402

 # Once we've set up the system path, run the spack main method
 if __name__ == "__main__":
-    sys.exit(spack.main.main())
+    sys.exit(main())
```
### Concretizer defaults (file name not captured)

```diff
@@ -33,4 +33,4 @@ concretizer:
   # environments can always be activated. When "false" perform concretization separately
   # on each root spec, allowing different versions and variants of the same package in
   # an environment.
-  unify: false
+  unify: true
```
### Removed documentation: "Analyze" (entire file deleted, `@@ -1,162 +0,0 @@`)

The deleted file documented the now-removed `spack analyze` command:

```rst
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _analyze:

=======
Analyze
=======

The analyze command is a front-end to various tools that let us analyze
package installations. Each analyzer is a module for a different kind
of analysis that can be done on a package installation, including (but not
limited to) binary, log, or text analysis. Thus, the analyze command group
allows you to take an existing package install, choose an analyzer,
and extract some output for the package using it.

-----------------
Analyzer Metadata
-----------------

For all analyzers, we write to an ``analyzers`` folder in ``~/.spack``, or the
value that you specify in your spack config at ``config:analyzers_dir``.
For example, here we see the results of running an analysis on zlib:

.. code-block:: console

   $ tree ~/.spack/analyzers/
   └── linux-ubuntu20.04-skylake
       └── gcc-9.3.0
           └── zlib-1.2.11-sl7m27mzkbejtkrajigj3a3m37ygv4u2
               ├── environment_variables
               │   └── spack-analyzer-environment-variables.json
               ├── install_files
               │   └── spack-analyzer-install-files.json
               └── libabigail
                   └── spack-analyzer-libabigail-libz.so.1.2.11.xml

This means that you can always find analyzer output in this folder, and it
is organized with the same logic as the package install it was run for.
If you want to customize this top level folder, simply provide the ``--path``
argument to ``spack analyze run``. The nested organization will be maintained
within your custom root.

-----------------
Listing Analyzers
-----------------

If you aren't familiar with Spack's analyzers, you can quickly list those that
are available:

.. code-block:: console

   $ spack analyze list-analyzers
   install_files         : install file listing read from install_manifest.json
   environment_variables : environment variables parsed from spack-build-env.txt
   config_args           : config args loaded from spack-configure-args.txt
   libabigail            : Application Binary Interface (ABI) features for objects

In the above, the first three are fairly simple - parsing metadata files from
a package install directory to save.

-------------------
Analyzing a Package
-------------------

The analyze command, akin to install, will accept a package spec to perform
an analysis for. The package must be installed. Let's walk through an example
with zlib. We first ask to analyze it. However, since we have more than one
install, we are asked to disambiguate:

.. code-block:: console

   $ spack analyze run zlib
   ==> Error: zlib matches multiple packages.
     Matching packages:
       fz2bs56 zlib@1.2.11%gcc@7.5.0 arch=linux-ubuntu18.04-skylake
       sl7m27m zlib@1.2.11%gcc@9.3.0 arch=linux-ubuntu20.04-skylake
     Use a more specific spec.

We can then specify the spec version that we want to analyze:

.. code-block:: console

   $ spack analyze run zlib/fz2bs56

If you don't provide any specific analyzer names, by default all analyzers
(shown in the ``list-analyzers`` subcommand list) will be run. If an analyzer does not
have any result, it will be skipped. For example, here is a result running for
zlib:

.. code-block:: console

   $ ls ~/.spack/analyzers/linux-ubuntu20.04-skylake/gcc-9.3.0/zlib-1.2.11-sl7m27mzkbejtkrajigj3a3m37ygv4u2/
   spack-analyzer-environment-variables.json
   spack-analyzer-install-files.json
   spack-analyzer-libabigail-libz.so.1.2.11.xml

If you want to run a specific analyzer, ask for it with ``--analyzer``. Here we run
spack analyze on libabigail (already installed) *using* the libabigail analyzer:

.. code-block:: console

   $ spack analyze run --analyzer abigail libabigail

.. _analyze_monitoring:

----------------------
Monitoring An Analysis
----------------------

For any kind of analysis, you can use a
`spack monitor <https://github.com/spack/spack-monitor>`_ ("Spackmon")
server to upload the same run metadata to. You can follow the instructions in the
`spack monitor documentation <https://spack-monitor.readthedocs.org>`_
to first create a server along with a username and token for yourself.
You can then use this guide to interact with the server.

You should first export your spack monitor token and username to the environment:

.. code-block:: console

   $ export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438
   $ export SPACKMON_USER=spacky

By default, the host for your server is expected to be at ``http://127.0.0.1``
with a prefix of ``ms1``, and if this is the case, you can simply add the
``--monitor`` flag to the install command:

.. code-block:: console

   $ spack analyze run --monitor wget

If you need to customize the host or the prefix, you can do that as well:

.. code-block:: console

   $ spack analyze run --monitor --monitor-prefix monitor --monitor-host https://monitor-service.io wget

If your server doesn't have authentication, you can skip it:

.. code-block:: console

   $ spack analyze run --monitor --monitor-disable-auth wget

Regardless of your choice, when you run analyze on an installed package (whether
it was installed with ``--monitor`` or not), you'll see the results generating as they did
before, and a message that the monitor server was pinged:

.. code-block:: console

   $ spack analyze --monitor wget
   ...
   ==> Sending result for wget bin/wget to monitor.
```
### User documentation: version specifiers and extensions (file name not captured)

```diff
@@ -1114,21 +1114,21 @@ set of arbitrary versions, such as ``@1.0,1.5,1.7`` (``1.0``, ``1.5``,
 or ``1.7``). When you supply such a specifier to ``spack install``,
 it constrains the set of versions that Spack will install.

 For packages with a ``git`` attribute, ``git`` references
 may be specified instead of a numerical version i.e. branches, tags
 and commits. Spack will stage and build based off the ``git``
 reference provided. Acceptable syntaxes for this are:

 .. code-block:: sh

    # branches and tags
    foo@git.develop  # use the develop branch
    foo@git.0.19     # use the 0.19 tag

    # commit hashes
    foo@abcdef1234abcdef1234abcdef1234abcdef1234  # 40 character hashes are automatically treated as git commits
    foo@git.abcdef1234abcdef1234abcdef1234abcdef1234

 Spack versions from git reference either have an associated version supplied by the user,
 or infer a relationship to known versions from the structure of the git repository. If an
 associated version is supplied by the user, Spack treats the git version as equivalent to that
@@ -1244,8 +1244,8 @@ For example, for the ``stackstart`` variant:

 .. code-block:: sh

-   mpileaks stackstart=4   # variant will be propagated to dependencies
-   mpileaks stackstart==4  # only mpileaks will have this variant value
+   mpileaks stackstart==4  # variant will be propagated to dependencies
+   mpileaks stackstart=4   # only mpileaks will have this variant value

 ^^^^^^^^^^^^^^
 Compiler Flags
@@ -1672,9 +1672,13 @@ own install prefix. However, certain packages are typically installed
 `Python <https://www.python.org>`_ packages are typically installed in the
 ``$prefix/lib/python-2.7/site-packages`` directory.

-Spack has support for this type of installation as well. In Spack,
-a package that can live inside the prefix of another package is called
-an *extension*. Suppose you have Python installed like so:
+In Spack, installation prefixes are immutable, so this type of installation
+is not directly supported. However, it is possible to create views that
+allow you to merge install prefixes of multiple packages into a single new prefix.
+Views are a convenient way to get a more traditional filesystem structure.
+Using *extensions*, you can ensure that Python packages always share the
+same prefix in the view as Python itself. Suppose you have
+Python installed like so:

 .. code-block:: console

@@ -1712,8 +1716,6 @@ You can find extensions for your Python installation like this:
    py-ipython@2.3.1    py-pygments@2.0.1   py-setuptools@11.3.1
    py-matplotlib@1.4.2 py-pyparsing@2.0.3  py-six@1.9.0

-==> None activated.
-
 The extensions are a subset of what's returned by ``spack list``, and
 they are packages like any other. They are installed into their own
 prefixes, and you can see this with ``spack find --paths``:
```
```diff
@@ -1741,32 +1743,72 @@ directly when you run ``python``:
    ImportError: No module named numpy
    >>>

-^^^^^^^^^^^^^^^^
-Using Extensions
-^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Using Extensions in Environments
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

-There are four ways to get ``numpy`` working in Python. The first is
-to use :ref:`shell-support`. You can simply ``load`` the extension,
-and it will be added to the ``PYTHONPATH`` in your current shell:
+The recommended way of working with extensions such as ``py-numpy``
+above is through :ref:`Environments <environments>`. For example,
+the following creates an environment in the current working directory
+with a filesystem view in the ``./view`` directory:

 .. code-block:: console

-   $ spack load python
-   $ spack load py-numpy
+   $ spack env create --with-view view --dir .
+   $ spack -e . add py-numpy
+   $ spack -e . concretize
+   $ spack -e . install

+We recommend environments for two reasons. Firstly, environments
+can be activated (requires :ref:`shell-support`):
+
+.. code-block:: console
+
+   $ spack env activate .
+
+which sets all the right environment variables such as ``PATH`` and
+``PYTHONPATH``. This ensures that
+
+.. code-block:: console
+
+   $ python
+   >>> import numpy
+
+works. Secondly, even without shell support, the view ensures
+that Python can locate its extensions:
+
+.. code-block:: console
+
+   $ ./view/bin/python
+   >>> import numpy
+
+See :ref:`environments` for a more in-depth description of Spack
+environments and customizations to views.
+
+^^^^^^^^^^^^^^^^^^^^
+Using ``spack load``
+^^^^^^^^^^^^^^^^^^^^
+
+A more traditional way of using Spack and extensions is ``spack load``
+(requires :ref:`shell-support`). This will add the extension to ``PYTHONPATH``
+in your current shell, and Python itself will be available in the ``PATH``:
+
+.. code-block:: console
+
+   $ spack load py-numpy
+   $ python
+   >>> import numpy

 Now ``import numpy`` will succeed for as long as you keep your current
 session open.
+The loaded packages can be checked using ``spack find --loaded``

 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 Loading Extensions via Modules
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

-Instead of using Spack's environment modification capabilities through
-the ``spack load`` command, you can load numpy through your
-environment modules (using ``environment-modules`` or ``lmod``). This
-will also add the extension to the ``PYTHONPATH`` in your current
-shell.
+Apart from ``spack env activate`` and ``spack load``, you can load numpy
+through your environment modules (using ``environment-modules`` or
+``lmod``). This will also add the extension to the ``PYTHONPATH`` in
+your current shell.

 .. code-block:: console

@@ -1776,130 +1818,6 @@ If you do not know the name of the specific numpy module you wish to
 load, you can use the ``spack module tcl|lmod loads`` command to get
 the name of the module from the Spack spec.
```

The remainder of this hunk deletes the sections on activating extensions (everything between the paragraph above and the surviving "Filesystem requirements" heading). The removed text:

```rst
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Activating Extensions in a View
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Another way to use extensions is to create a view, which merges the
python installation along with the extensions into a single prefix.
See :ref:`configuring_environment_views` for a more in-depth description
of views.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Activating Extensions Globally
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

As an alternative to creating a merged prefix with Python and its extensions,
and prior to support for views, Spack has provided a means to install the
extension into the Spack installation prefix for the extendee. This has
typically been useful since extendable packages typically search their own
installation path for addons by default.

Global activations are performed with the ``spack activate`` command:

.. _cmd-spack-activate:

^^^^^^^^^^^^^^^^^^
``spack activate``
^^^^^^^^^^^^^^^^^^

.. code-block:: console

   $ spack activate py-numpy
   ==> Activated extension py-setuptools@11.3.1%gcc@4.4.7 arch=linux-debian7-x86_64-3c74eb69 for python@2.7.8%gcc@4.4.7.
   ==> Activated extension py-nose@1.3.4%gcc@4.4.7 arch=linux-debian7-x86_64-5f70f816 for python@2.7.8%gcc@4.4.7.
   ==> Activated extension py-numpy@1.9.1%gcc@4.4.7 arch=linux-debian7-x86_64-66733244 for python@2.7.8%gcc@4.4.7.

Several things have happened here. The user requested that
``py-numpy`` be activated in the ``python`` installation it was built
with. Spack knows that ``py-numpy`` depends on ``py-nose`` and
``py-setuptools``, so it activated those packages first. Finally,
once all dependencies were activated in the ``python`` installation,
``py-numpy`` was activated as well.

If we run ``spack extensions`` again, we now see the three new
packages listed as activated:

.. code-block:: console

   $ spack extensions python
   ==> python@2.7.8%gcc@4.4.7 arch=linux-debian7-x86_64-703c7a96
   ==> 36 extensions:
   geos          py-ipython     py-pexpect    py-pyside            py-sip
   py-basemap    py-libxml2     py-pil        py-pytz              py-six
   py-biopython  py-mako        py-pmw        py-rpy2              py-sympy
   py-cython     py-matplotlib  py-pychecker  py-scientificpython  py-virtualenv
   py-dateutil   py-mpi4py      py-pygments   py-scikit-learn
   py-epydoc     py-mx          py-pylint     py-scipy
   py-gnuplot    py-nose        py-pyparsing  py-setuptools
   py-h5py       py-numpy       py-pyqt       py-shiboken

   ==> 12 installed:
   -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
   py-dateutil@2.4.0    py-nose@1.3.4       py-pyside@1.2.2
   py-dateutil@2.4.0    py-numpy@1.9.1      py-pytz@2014.10
   py-ipython@2.3.1     py-pygments@2.0.1   py-setuptools@11.3.1
   py-matplotlib@1.4.2  py-pyparsing@2.0.3  py-six@1.9.0

   ==> 3 currently activated:
   -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
   py-nose@1.3.4  py-numpy@1.9.1  py-setuptools@11.3.1

Now, when a user runs python, ``numpy`` will be available for import
*without* the user having to explicitly load it. ``python@2.7.8`` now
acts like a system Python installation with ``numpy`` installed inside
of it.

Spack accomplishes this by symbolically linking the *entire* prefix of
the ``py-numpy`` package into the prefix of the ``python`` package. To the
python interpreter, it looks like ``numpy`` is installed in the
``site-packages`` directory.

The only limitation of global activation is that you can only have a *single*
version of an extension activated at a time. This is because multiple
versions of the same extension would conflict if symbolically linked
into the same prefix. Users who want a different version of a package
can still get it by using environment modules or views, but they will have to
explicitly load their preferred version.

^^^^^^^^^^^^^^^^^^^^^^^^^^
``spack activate --force``
^^^^^^^^^^^^^^^^^^^^^^^^^^

If, for some reason, you want to activate a package *without* its
dependencies, you can use ``spack activate --force``:

.. code-block:: console

   $ spack activate --force py-numpy
   ==> Activated extension py-numpy@1.9.1%gcc@4.4.7 arch=linux-debian7-x86_64-66733244 for python@2.7.8%gcc@4.4.7.

.. _cmd-spack-deactivate:

^^^^^^^^^^^^^^^^^^^^
``spack deactivate``
^^^^^^^^^^^^^^^^^^^^

We've seen how activating an extension can be used to set up a default
version of a Python module. Obviously, you may want to change that at
some point. ``spack deactivate`` is the command for this. There are
several variants:

* ``spack deactivate <extension>`` will deactivate a single
  extension. If another activated extension depends on this one,
  Spack will warn you and exit with an error.
* ``spack deactivate --force <extension>`` deactivates an extension
  regardless of packages that depend on it.
* ``spack deactivate --all <extension>`` deactivates an extension and
  all of its dependencies. Use ``--force`` to disregard dependents.
* ``spack deactivate --all <extendee>`` deactivates *all* activated
  extensions of a package. For example, to deactivate *all* python
  extensions, use:

  .. code-block:: console

     $ spack deactivate --all python
```

The surviving context at the end of the hunk is the heading of the next section:

```rst
-----------------------
Filesystem requirements
-----------------------
```
### Build settings documentation: preferences and requirements (file name not captured)

```diff
@@ -302,88 +302,31 @@ microarchitectures considered during the solve are constrained to be compatible
 host Spack is currently running on. For instance, if this option is set to ``true``, a
 user cannot concretize for ``target=icelake`` while running on an Haswell node.

-.. _package-preferences:
-
--------------------
-Package Preferences
--------------------
-
-Spack can be configured to prefer certain compilers, package
-versions, dependencies, and variants during concretization.
-The preferred configuration can be controlled via the
-``~/.spack/packages.yaml`` file for user configurations, or the
-``etc/spack/packages.yaml`` site configuration.
-
-Here's an example ``packages.yaml`` file that sets preferred packages:
-
-.. code-block:: yaml
-
-   packages:
-     opencv:
-       compiler: [gcc@4.9]
-       variants: +debug
-     gperftools:
-       version: [2.2, 2.4, 2.3]
-     all:
-       compiler: [gcc@4.4.7, 'gcc@4.6:', intel, clang, pgi]
-       target: [sandybridge]
-       providers:
-         mpi: [mvapich2, mpich, openmpi]
-
-At a high level, this example is specifying how packages should be
-concretized. The opencv package should prefer using GCC 4.9 and
-be built with debug options. The gperftools package should prefer version
-2.2 over 2.4. Every package on the system should prefer mvapich2 for
-its MPI and GCC 4.4.7 (except for opencv, which overrides this by preferring GCC 4.9).
-These options are used to fill in implicit defaults. Any of them can be overwritten
-on the command line if explicitly requested.
-
-Each ``packages.yaml`` file begins with the string ``packages:`` and
-package names are specified on the next level. The special string ``all``
-applies settings to *all* packages. Underneath each package name is one
-or more components: ``compiler``, ``variants``, ``version``,
-``providers``, and ``target``. Each component has an ordered list of
-spec ``constraints``, with earlier entries in the list being preferred
-over later entries.
-
-Sometimes a package installation may have constraints that forbid
-the first concretization rule, in which case Spack will use the first
-legal concretization rule. Going back to the example, if a user
-requests gperftools 2.3 or later, then Spack will install version 2.4
-as the 2.4 version of gperftools is preferred over 2.3.
-
-An explicit concretization rule in the preferred section will always
-take preference over unlisted concretizations. In the above example,
-xlc isn't listed in the compiler list. Every listed compiler from
-gcc to pgi will thus be preferred over the xlc compiler.
-
-The syntax for the ``provider`` section differs slightly from other
-concretization rules. A provider lists a value that packages may
-``depend_on`` (e.g, MPI) and a list of rules for fulfilling that
-dependency.
-
 .. _package-requirements:

 --------------------
 Package Requirements
 --------------------

-You can use the configuration to force the concretizer to choose
-specific properties for packages when building them. Like preferences,
-these are only applied when the package is required by some other
-request (e.g. if the package is needed as a dependency of a
-request to ``spack install``).
+Spack can be configured to always use certain compilers, package
+versions, and variants during concretization through package
+requirements.

-An example of where this is useful is if you have a package that
-is normally built as a dependency but only under certain circumstances
-(e.g. only when a variant on a dependent is active): you can make
-sure that it always builds the way you want it to; this distinguishes
-package configuration requirements from constraints that you add to
-``spack install`` or to environments (in those cases, the associated
-packages are always built).
+Package requirements are useful when you find yourself repeatedly
+specifying the same constraints on the command line, and wish that
+Spack respects these constraints whether you mention them explicitly
+or not. Another use case is specifying constraints that should apply
+to all root specs in an environment, without having to repeat the
+constraint everywhere.

-The following is an example of how to enforce package properties in
-``packages.yaml``:
+Apart from that, requirements config is more flexible than constraints
+on the command line, because it can specify constraints on packages
+*when they occur* as a dependency. In contrast, on the command line it
+is not possible to specify constraints on dependencies while also keeping
+those dependencies optional.
+
+The package requirements configuration is specified in ``packages.yaml``
+keyed by package name:

 .. code-block:: yaml
```
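The yaml block that followed here falls outside the captured hunks. A minimal sketch consistent with the requirements syntax shown in the changelog portion of this page (the `libfabric`/`mpich` constraints are illustrative):

```yaml
packages:
  libfabric:
    require: "@1.13.2"
  mpich:
    require:
    - one_of: ["+cuda", "+rocm"]
```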
```diff
@@ -452,15 +395,15 @@ under ``all`` are disregarded. For example, with a configuration like this:
       cmake:
         require: '%gcc'

-Spack requires ``cmake`` to use ``gcc`` and all other nodes (including cmake dependencies)
-to use ``clang``.
+Spack requires ``cmake`` to use ``gcc`` and all other nodes (including ``cmake``
+dependencies) to use ``clang``.

 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 Setting requirements on virtual specs
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

-A requirement on a virtual spec applies whenever that virtual is present in the DAG. This
-can be useful for fixing which virtual provider you want to use:
+A requirement on a virtual spec applies whenever that virtual is present in the DAG.
+This can be useful for fixing which virtual provider you want to use:

 .. code-block:: yaml

@@ -470,8 +413,8 @@ can be useful for fixing which virtual provider you want to use:

 With the configuration above the only allowed ``mpi`` provider is ``mvapich2 %gcc``.

-Requirements on the virtual spec and on the specific provider are both applied, if present. For
-instance with a configuration like:
+Requirements on the virtual spec and on the specific provider are both applied, if
+present. For instance with a configuration like:

 .. code-block:: yaml
```
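The yaml blocks referenced in these hunks also fall outside the captured diff. Based on the surrounding prose ("the only allowed ``mpi`` provider is ``mvapich2 %gcc``"), a plausible sketch of the elided virtual-spec requirement is:

```yaml
packages:
  mpi:
    require: "mvapich2 %gcc"
```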
@@ -483,6 +426,66 @@ instance with a configuration like:
|
||||
|
||||
you will use ``mvapich2~cuda %gcc`` as an ``mpi`` provider.
|
||||
|
||||
.. _package-preferences:
|
||||
|
||||
-------------------
|
||||
Package Preferences
|
||||
-------------------
|
||||
|
||||
In some cases package requirements can be too strong, and package
|
||||
preferences are the better option. Package preferences do not impose
|
||||
constraints on packages for particular versions or variants values,
|
||||
they rather only set defaults -- the concretizer is free to change
|
||||
them if it must due to other constraints. Also note that package
|
||||
preferences are of lower priority than reuse of already installed
|
||||
packages.
|
||||
|
||||
Here's an example ``packages.yaml`` file that sets preferred packages:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
packages:
|
||||
opencv:
|
||||
compiler: [gcc@4.9]
|
||||
variants: +debug
|
||||
gperftools:
|
||||
version: [2.2, 2.4, 2.3]
|
||||
all:
|
||||
compiler: [gcc@4.4.7, 'gcc@4.6:', intel, clang, pgi]
|
||||
target: [sandybridge]
|
||||
providers:
|
||||
mpi: [mvapich2, mpich, openmpi]
|
||||
|
||||
At a high level, this example is specifying how packages are preferably
|
||||
concretized. The opencv package should prefer using GCC 4.9 and
|
||||
be built with debug options. The gperftools package should prefer version
|
||||
2.2 over 2.4. Every package on the system should prefer mvapich2 for
|
||||
its MPI and GCC 4.4.7 (except for opencv, which overrides this by preferring GCC 4.9).
|
||||
These options are used to fill in implicit defaults. Any of them can be overwritten
|
||||
on the command line if explicitly requested.
|
||||
|
||||
Package preferences accept the follow keys or components under
|
||||
the specific package (or ``all``) section: ``compiler``, ``variants``,
|
||||
``version``, ``providers``, and ``target``. Each component has an
|
||||
ordered list of spec ``constraints``, with earlier entries in the
|
||||
list being preferred over later entries.
|
||||
|
||||
Sometimes a package installation may have constraints that forbid
|
||||
the first concretization rule, in which case Spack will use the first
|
||||
legal concretization rule. Going back to the example, if a user
|
||||
requests gperftools 2.3 or later, then Spack will install version 2.4
|
||||
as the 2.4 version of gperftools is preferred over 2.3.
|
||||
|
||||
An explicit concretization rule in the preferred section will always
take preference over unlisted concretizations. In the above example,
xlc isn't listed in the compiler list, so every listed compiler from
gcc to pgi will be preferred over xlc.

The syntax for the ``providers`` section differs slightly from other
concretization rules. A provider lists a value that packages may
``depends_on`` (e.g., MPI) and a list of rules for fulfilling that
dependency.

.. _package_permissions:

-------------------
Package Permissions
-------------------

@@ -531,3 +534,25 @@ directories inside the install prefix. This will ensure that even
manually placed files within the install prefix are owned by the
assigned group. If no group is assigned, Spack falls back to the
OS default behavior.
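
A minimal sketch of the corresponding configuration, assuming the
``permissions`` block accepts ``read``, ``write`` and ``group`` keys:

.. code-block:: yaml

   packages:
     all:
       permissions:
         write: group
         group: spackusers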

----------------------------
Assigning Package Attributes
----------------------------

You can assign class-level attributes in the configuration:

.. code-block:: yaml

   packages:
     mpileaks:
       # Override existing attributes
       url: http://www.somewhereelse.com/mpileaks-1.0.tar.gz
       # ... or add new ones
       x: 1

Attributes set this way will be accessible to any method executed
in the package.py file (e.g. the ``install()`` method). Values for these
attributes may be any value parseable by YAML.

These can only be applied to specific packages, not ``all`` or
virtual packages.
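
For illustration, a hypothetical sketch of how such an attribute might be
read inside ``package.py`` (the class body is abbreviated and the ``print``
call is only for demonstration):

.. code-block:: python

   class Mpileaks(Package):
       def install(self, spec, prefix):
           # `url` was overridden and `x` was added by the packages.yaml
           # above; both are visible as class-level attributes.
           print(self.x)  # -> 1
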
|
@@ -724,10 +724,9 @@ extends vs. depends_on

This is very similar to the naming dilemma above, with a slight twist.
As mentioned in the :ref:`Packaging Guide <packaging_extensions>`,
``extends`` and ``depends_on`` are very similar, but ``extends`` ensures
that the extension and extendee share the same prefix in views.
This allows the user to import a Python module without
having to add that module to ``PYTHONPATH``.

When deciding between ``extends`` and ``depends_on``, the best rule of
@@ -735,7 +734,7 @@ thumb is to check the installation prefix. If Python libraries are
installed to ``<prefix>/lib/pythonX.Y/site-packages``, then you
should use ``extends``. If Python libraries are installed elsewhere
or the only files that get installed reside in ``<prefix>/bin``, then
don't use ``extends``.
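
A hypothetical sketch contrasting the two (package names are made up for
illustration):

.. code-block:: python

   class PyExample(PythonPackage):
       # Installs into <prefix>/lib/pythonX.Y/site-packages, so it should
       # share a prefix with Python in views:
       extends("python")


   class ExampleTool(Package):
       # Only installs binaries into <prefix>/bin; python is merely needed
       # to build and run, so a plain dependency suffices:
       depends_on("python", type=("build", "run"))
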
^^^^^^^^^^^^^^^^^^^^^
|
||||
Alternatives to Spack
|
||||
|
@@ -193,10 +193,10 @@ Build system dependencies

As an extension of the R ecosystem, your package will obviously depend
on R to build and run. Normally, we would use ``depends_on`` to express
this, but for R packages, we use ``extends``. This implies a special
dependency on R, which is used to set environment variables such as
``R_LIBS`` uniformly. Since every R package needs this, the ``RPackage``
base class contains:

.. code-block:: python

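   # Hedged reconstruction of the elided snippet; the RPackage base class
   # is believed to declare its relationship to R roughly as:
   extends("r")
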
|
@@ -405,6 +405,17 @@ Spack understands several special variables. These are:

* ``$user``: name of the current user
* ``$user_cache_path``: user cache directory (``~/.spack`` unless
  :ref:`overridden <local-config-overrides>`)
* ``$architecture``: the architecture triple of the current host, as
  detected by Spack.
* ``$arch``: alias for ``$architecture``.
* ``$platform``: the platform of the current host, as detected by Spack.
* ``$operating_system``: the operating system of the current host, as
  detected by the ``distro`` python module.
* ``$os``: alias for ``$operating_system``.
* ``$target``: the ISA target for the current host, as detected by
  ArchSpec. E.g. ``skylake`` or ``neoverse-n1``.
* ``$target_family``: the target family for the current host, as
  detected by ArchSpec. E.g. ``x86_64`` or ``aarch64``.

Note that, as with shell variables, you can write these as ``$varname``
or with braces to distinguish the variable from surrounding characters:
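
A hedged example of both spellings in a configuration file (the choice of
config key is an assumption):

.. code-block:: yaml

   config:
     # $user expands on its own; braces separate ${platform} from the text
     # that follows it:
     source_cache: ~/cache/$user/${platform}-sources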
|
@@ -253,27 +253,6 @@ to update them.
|
||||
multiple runs of ``spack style`` just to re-compute line numbers and
|
||||
makes it much easier to fix errors directly off of the CI output.
|
||||
|
||||
.. warning::
|
||||
|
||||
Flake8 and ``pep8-naming`` require a number of dependencies in order
|
||||
to run. If you installed ``py-flake8`` and ``py-pep8-naming``, the
|
||||
easiest way to ensure the right packages are on your ``PYTHONPATH`` is
|
||||
to run::
|
||||
|
||||
spack activate py-flake8
|
||||
spack activate pep8-naming
|
||||
|
||||
so that all of the dependencies are symlinked to a central
|
||||
location. If you see an error message like:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
Traceback (most recent call last):
|
||||
File: "/usr/bin/flake8", line 5, in <module>
|
||||
from pkg_resources import load_entry_point
|
||||
ImportError: No module named pkg_resources
|
||||
|
||||
that means Flake8 couldn't find setuptools in your ``PYTHONPATH``.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
Documentation Tests
|
||||
@@ -309,13 +288,9 @@ All of these can be installed with Spack, e.g.
|
||||
|
||||
.. code-block:: console

   $ spack load py-sphinx py-sphinx-rtd-theme py-sphinxcontrib-programoutput

so that all of the dependencies are added to PYTHONPATH. If you see an error message
like:

.. code-block:: console
|
||||
|
@@ -233,8 +233,8 @@ packages will be listed as roots of the Environment.

All of the Spack commands that act on the list of installed specs are
Environment-sensitive in this way, including ``install``,
``uninstall``, ``find``, ``extensions``, and more. In the
:ref:`environment-configuration` section we will discuss
Environment-sensitive commands further.

^^^^^^^^^^^^^^^^^^^^^
|
||||
@@ -519,27 +519,33 @@ available from the yaml file.
^^^^^^^^^^^^^^^^^^^
Spec concretization
^^^^^^^^^^^^^^^^^^^

An environment can be concretized in three different modes, and the behavior active under
any environment is determined by the ``concretizer:unify`` configuration option.

The *default* mode is to unify all specs:

.. code-block:: yaml

   spack:
     specs:
     - hdf5~mpi
     - hdf5+mpi
     - zlib@1.2.8
     concretizer:
       unify: true

This means that any package in the environment corresponds to a single concrete spec. In
the above example, when ``hdf5`` depends, directly or transitively, on ``zlib``, it is
required to take ``zlib@1.2.8`` instead of a newer version. This mode of concretization is
particularly useful when environment views are used: if every package occurs in
only one flavor, it is usually possible to merge all install directories into a view.

A downside of unified concretization is that it can be overly strict. For example, a
concretization error would happen when both ``hdf5+mpi`` and ``hdf5~mpi`` are specified
in an environment.

The second mode is to *unify when possible*: this makes concretization of root specs
more independent. Instead of requiring dependencies to be reused across different root
specs, reuse is only maximized:

.. code-block:: yaml

@@ -551,26 +557,27 @@ i.e. trying to maximize reuse of dependencies across different specs:

   concretizer:
     unify: when_possible

This means that both ``hdf5`` installations will use ``zlib@1.2.8`` as a dependency even
if newer versions of that library are available.

The third mode of operation is to concretize root specs entirely independently, by
disabling unified concretization:

.. code-block:: yaml

   spack:
     specs:
     - hdf5~mpi
     - hdf5+mpi
     - zlib@1.2.8
     concretizer:
       unify: false

In this example ``hdf5`` is concretized separately, and does not consider ``zlib@1.2.8``
as a constraint or preference. Instead, it will take the latest possible version.

The last two concretization options are typically useful for system administrators and
user support groups providing a large software stack for their HPC center.

.. note::
|
||||
|
||||
@@ -581,10 +588,10 @@ environment and have a single view of it in the filesystem.

.. admonition:: Re-concretization of user specs

   When using *unified* concretization (when possible), the entire set of specs will be
   re-concretized after any addition of new user specs, to ensure that
   the environment remains consistent / minimal. When instead unified concretization is
   disabled, only the new specs will be concretized after any addition.

^^^^^^^^^^^^^
|
||||
Spec Matrices
|
||||
|
@@ -44,7 +44,7 @@ A build matrix showing which packages are working on which systems is shown below
|
||||
yum install -y epel-release
|
||||
yum update -y
|
||||
yum --enablerepo epel groupinstall -y "Development Tools"
|
||||
yum --enablerepo epel install -y curl findutils gcc-c++ gcc gcc-gfortran git gnupg2 hostname iproute redhat-lsb-core make patch python3 python3-pip python3-setuptools unzip
|
||||
python3 -m pip install boto3
|
||||
|
||||
.. tab-item:: macOS Brew
|
||||
|
@@ -56,7 +56,6 @@ or refer to the full manual below.
|
||||
basic_usage
|
||||
Tutorial: Spack 101 <https://spack-tutorial.readthedocs.io>
|
||||
replace_conda_homebrew
|
||||
known_issues
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
@@ -68,7 +67,6 @@ or refer to the full manual below.
|
||||
build_settings
|
||||
environments
|
||||
containers
|
||||
monitoring
|
||||
mirrors
|
||||
module_file_support
|
||||
repositories
|
||||
@@ -79,12 +77,6 @@ or refer to the full manual below.
|
||||
extensions
|
||||
pipelines
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:caption: Research
|
||||
|
||||
analyze
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:caption: Contributing
|
||||
|
@@ -1,40 +0,0 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
============
|
||||
Known Issues
|
||||
============
|
||||
|
||||
This is a list of known issues in Spack. It provides ways of getting around these
|
||||
problems if you encounter them.
|
||||
|
||||
------------------------------------------------
|
||||
Spack does not seem to respect ``packages.yaml``
|
||||
------------------------------------------------
|
||||
|
||||
.. note::
|
||||
|
||||
This issue is **resolved** as of v0.19.0.dev0 commit
|
||||
`8281a0c5feabfc4fe180846d6fe95cfe53420bc5`, through the introduction of package
|
||||
requirements. See :ref:`package-requirements`.
|
||||
|
||||
A common problem in Spack v0.18.0 up to v0.19.0.dev0 is that package, compiler and target
|
||||
preferences specified in ``packages.yaml`` do not seem to be respected. Spack picks the
|
||||
"wrong" compilers and their versions, package versions and variants, and
|
||||
micro-architectures.
|
||||
|
||||
This is however not a bug. In order to reduce the number of builds of the same
|
||||
packages, the concretizer values reuse of installed packages higher than preferences
|
||||
set in ``packages.yaml``. Note that ``packages.yaml`` specifies only preferences, not
|
||||
hard constraints.
|
||||
|
||||
There are multiple workarounds:
|
||||
|
||||
1. Disable reuse during concretization: ``spack install --fresh <spec>`` when installing
|
||||
from the command line, or ``spack concretize --fresh --force`` when using
|
||||
environments.
|
||||
2. Turn preferences into constraints, by moving them to the input spec. For example,
|
||||
use ``spack spec zlib%gcc@12`` when you want to force GCC 12 even if ``zlib`` was
|
||||
already installed with GCC 10.
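
A hedged illustration of both workarounds on the command line (the spec is
just an example):

.. code-block:: console

   # 1. concretize ignoring reuse of already installed packages
   $ spack install --fresh zlib%gcc@12

   # 2. make the preference a hard constraint on the input spec
   $ spack spec zlib%gcc@12
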
@@ -1,265 +0,0 @@
|
||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
|
||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
.. _monitoring:
|
||||
|
||||
==========
|
||||
Monitoring
|
||||
==========
|
||||
|
||||
You can use a `spack monitor <https://github.com/spack/spack-monitor>`_ "Spackmon"
|
||||
server to store a database of your packages, builds, and associated metadata
|
||||
for provenance, research, or some other kind of development. You should
|
||||
follow the instructions in the `spack monitor documentation <https://spack-monitor.readthedocs.org>`_
|
||||
to first create a server along with a username and token for yourself.
|
||||
You can then use this guide to interact with the server.
|
||||
|
||||
-------------------
|
||||
Analysis Monitoring
|
||||
-------------------
|
||||
|
||||
To read about how to monitor an analysis (meaning you want to send analysis results
|
||||
to a server) see :ref:`analyze_monitoring`.
|
||||
|
||||
---------------------
|
||||
Monitoring An Install
|
||||
---------------------
|
||||
|
||||
Since an install is typically when you build packages, we logically want
|
||||
to tell spack to monitor during this step. Let's start with an example
|
||||
where we want to monitor the install of hdf5. Unless you have disabled authentication
|
||||
for the server, we first want to export our spack monitor token and username to the environment:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438
|
||||
$ export SPACKMON_USER=spacky
|
||||
|
||||
|
||||
By default, the host for your server is expected to be at ``http://127.0.0.1``
|
||||
with a prefix of ``ms1``, and if this is the case, you can simply add the
|
||||
``--monitor`` flag to the install command:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack install --monitor hdf5
|
||||
|
||||
|
||||
If you need to customize the host or the prefix, you can do that as well:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack install --monitor --monitor-prefix monitor --monitor-host https://monitor-service.io hdf5
|
||||
|
||||
|
||||
As a precaution, the spack client exits early if you have not provided
authentication credentials. For example, if you run the command above without
exporting your username or token, you'll see:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
==> Error: You are required to export SPACKMON_TOKEN and SPACKMON_USER
|
||||
|
||||
This extra check is to ensure that we don't start any builds,
|
||||
and then discover that you forgot to export your token. However, if
|
||||
your monitoring server has authentication disabled, you can tell this to
|
||||
the client to skip this step:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack install --monitor --monitor-disable-auth hdf5
|
||||
|
||||
If the service is not running, you'll cleanly exit early - the install will
|
||||
not continue if you've asked it to monitor and there is no service.
|
||||
For example, here is what you'll see if the monitoring service is not running:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
[Errno 111] Connection refused
|
||||
|
||||
|
||||
If you want to continue builds (and stop monitoring) you can set the ``--monitor-keep-going``
|
||||
flag.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack install --monitor --monitor-keep-going hdf5
|
||||
|
||||
This could mean that if a request fails, you only have partial or no data
|
||||
added to your monitoring database. This setting will not be applied to the
|
||||
first request to check if the server is running, but to subsequent requests.
|
||||
If you don't have a monitor server running and you want to build, simply
|
||||
don't provide the ``--monitor`` flag! Finally, if you want to provide one or
|
||||
more tags to your build, you can do:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
# Add one tag, "pizza"
|
||||
$ spack install --monitor --monitor-tags pizza hdf5
|
||||
|
||||
# Add two tags, "pizza" and "pasta"
|
||||
$ spack install --monitor --monitor-tags pizza,pasta hdf5
|
||||
|
||||
|
||||
----------------------------
|
||||
Monitoring with Containerize
|
||||
----------------------------
|
||||
|
||||
The same argument group is available to add to a containerize command.
|
||||
|
||||
^^^^^^
|
||||
Docker
|
||||
^^^^^^
|
||||
|
||||
To add monitoring to a Docker container recipe generation using the defaults,
|
||||
and assuming a monitor server running on localhost, you would
|
||||
start with a spack.yaml in your present working directory:
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
spack:
|
||||
specs:
|
||||
- samtools
|
||||
|
||||
And then do:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
# preview first
|
||||
spack containerize --monitor
|
||||
|
||||
# and then write to a Dockerfile
|
||||
spack containerize --monitor > Dockerfile
|
||||
|
||||
|
||||
The install command will be edited to include commands for enabling monitoring.
|
||||
However, getting secrets into the container for your monitor server is something
|
||||
that should be done carefully. Specifically you should:
|
||||
|
||||
- Never try to define secrets as ENV, ARG, or using ``--build-arg``
|
||||
- Do not try to get the secret into the container via a "temporary" file that you remove (it in fact will still exist in a layer)
|
||||
|
||||
Instead, it's recommended to use buildkit `as explained here <https://pythonspeed.com/articles/docker-build-secrets/>`_.
|
||||
You'll need to again export environment variables for your spack monitor server:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438
|
||||
$ export SPACKMON_USER=spacky
|
||||
|
||||
And then use buildkit along with your build and identifying the name of the secret:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ DOCKER_BUILDKIT=1 docker build --secret id=st,env=SPACKMON_TOKEN --secret id=su,env=SPACKMON_USER -t spack/container .
|
||||
|
||||
The secrets are expected to come from your environment, and then will be temporarily mounted and available
|
||||
at ``/run/secrets/<name>``. If you forget to supply them (and authentication is required) the build
|
||||
will fail. If you need to build on your host (and interact with a spack monitor at localhost) you'll
|
||||
need to tell Docker to use the host network:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ DOCKER_BUILDKIT=1 docker build --network="host" --secret id=st,env=SPACKMON_TOKEN --secret id=su,env=SPACKMON_USER -t spack/container .
|
||||
|
||||
|
||||
^^^^^^^^^^^
|
||||
Singularity
|
||||
^^^^^^^^^^^
|
||||
|
||||
To add monitoring to a Singularity container build, the spack.yaml needs to
|
||||
be modified slightly to specify wanting a different format:
|
||||
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
spack:
|
||||
specs:
|
||||
- samtools
|
||||
container:
|
||||
format: singularity
|
||||
|
||||
|
||||
Again, generate the recipe:
|
||||
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
# preview first
|
||||
$ spack containerize --monitor
|
||||
|
||||
# then write to a Singularity recipe
|
||||
$ spack containerize --monitor > Singularity
|
||||
|
||||
|
||||
Singularity doesn't have a direct way to define secrets at build time, so we have
|
||||
to do a bit of manual work: add a file, source the secrets in it, and remove it.
|
||||
Since Singularity doesn't have layers like Docker, deleting a file will truly
|
||||
remove it from the container and history. So let's say we have this file,
|
||||
``secrets.sh``:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
# secrets.sh
|
||||
export SPACKMON_USER=spack
|
||||
export SPACKMON_TOKEN=50445263afd8f67e59bd79bff597836ee6c05438
|
||||
|
||||
|
||||
We would then generate the Singularity recipe, and add a files section,
|
||||
a source of that file at the start of ``%post``, and **importantly**
|
||||
a removal of the file at the end of that same section.
|
||||
|
||||
.. code-block::
|
||||
|
||||
Bootstrap: docker
|
||||
From: spack/ubuntu-bionic:latest
|
||||
Stage: build
|
||||
|
||||
%files
|
||||
secrets.sh /opt/secrets.sh
|
||||
|
||||
%post
|
||||
. /opt/secrets.sh
|
||||
|
||||
# spack install commands are here
|
||||
...
|
||||
|
||||
# Don't forget to remove here!
|
||||
rm /opt/secrets.sh
|
||||
|
||||
|
||||
You can then build the container as you normally would.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ sudo singularity build container.sif Singularity
|
||||
|
||||
|
||||
------------------
|
||||
Monitoring Offline
|
||||
------------------
|
||||
|
||||
In the case that you want to save monitor results to your filesystem
|
||||
and then upload them later (perhaps you are in an environment where you don't
|
||||
have credentials or it isn't safe to use them) you can use the ``--monitor-save-local``
|
||||
flag.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack install --monitor --monitor-save-local hdf5
|
||||
|
||||
This will save results in a subfolder, "monitor" in your designated spack
|
||||
reports folder, which defaults to ``$HOME/.spack/reports/monitor``. When
|
||||
you are ready to upload them to a spack monitor server:
|
||||
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack monitor upload ~/.spack/reports/monitor
|
||||
|
||||
|
||||
You can choose the root directory of results as shown above, or a specific
|
||||
subdirectory. The command accepts other arguments to specify configuration
|
||||
for the monitor.
|
@@ -2634,9 +2634,12 @@ extendable package:
|
||||
extends('python')
|
||||
...
|
||||

This accomplishes a few things. Firstly, the Python package can set special
variables such as ``PYTHONPATH`` for all extensions when the run or build
environment is set up. Secondly, filesystem views can ensure that extensions
are put in the same prefix as their extendee. This ensures that Python in
a view can always locate its Python packages, even without environment
variables set.

A package can only extend one other package at a time. To support packages
|
||||
that may extend one of a list of other packages, Spack supports multiple
|
||||
@@ -2684,9 +2687,8 @@ variant(s) are selected. This may be accomplished with conditional
|
||||
...
|
||||
|
||||
Sometimes, certain files in one package will conflict with those in
another, which means they cannot both be used in a view at the
same time. In this case, you can tell Spack to ignore those files:

.. code-block:: python
|
||||
|
||||
@@ -2698,7 +2700,7 @@ when it does the activation:
|
||||
...
|
||||
|
||||
The code above will prevent everything in the ``$prefix/bin/`` directory
from being linked in a view.
|
||||
|
||||
.. note::
|
||||
|
||||
@@ -2722,67 +2724,6 @@ extensions; as a consequence python extension packages (those inheriting from
|
||||
``PythonPackage``) likewise override ``add_files_to_view`` in order to rewrite
|
||||
shebang lines which point to the Python interpreter.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Activation & deactivation
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Adding an extension to a view is referred to as an activation. If the view is
|
||||
maintained in the Spack installation prefix of the extendee this is called a
|
||||
global activation. Activations may involve updating some centralized state
|
||||
that is maintained by the extendee package, so there can be additional work
|
||||
for adding extensions compared with non-extension packages.
|
||||
|
||||
Spack's ``Package`` class has default ``activate`` and ``deactivate``
|
||||
implementations that handle symbolically linking extensions' prefixes
|
||||
into a specified view. Extendable packages can override these methods
|
||||
to add custom activate/deactivate logic of their own. For example,
|
||||
the ``activate`` and ``deactivate`` methods in the Python class handle
|
||||
symbolic linking of extensions, but they also handle details surrounding
|
||||
Python's ``.pth`` files, and other aspects of Python packaging.
|
||||
|
||||
Spack's extensions mechanism is designed to be extensible, so that
|
||||
other packages (like Ruby, R, Perl, etc.) can provide their own
|
||||
custom extension management logic, as they may not handle modules the
|
||||
same way that Python does.
|
||||
|
||||
Let's look at Python's activate function:
|
||||
|
||||
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
|
||||
:pyobject: Python.activate
|
||||
:linenos:
|
||||
|
||||
This function is called on the *extendee* (Python). It first calls
|
||||
``activate`` in the superclass, which handles symlinking the
|
||||
extension package's prefix into the specified view. It then does
|
||||
some special handling of the ``easy-install.pth`` file, part of
|
||||
Python's setuptools.
|
||||
|
||||
Deactivate behaves similarly to activate, but it unlinks files:
|
||||
|
||||
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
|
||||
:pyobject: Python.deactivate
|
||||
:linenos:
|
||||
|
||||
Both of these methods call some custom functions in the Python
|
||||
package. See the source for Spack's Python package for details.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
Activation arguments
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
You may have noticed that the ``activate`` function defined above
|
||||
takes keyword arguments. These are the keyword arguments from
|
||||
``extends()``, and they are passed to both activate and deactivate.
|
||||
|
||||
This capability allows an extension to customize its own activation by
|
||||
passing arguments to the extendee. Extendees can likewise implement
|
||||
custom ``activate()`` and ``deactivate()`` functions to suit their
|
||||
needs.
|
||||
|
||||
The only keyword argument supported by default is the ``ignore``
|
||||
argument, which can take a regex, list of regexes, or a predicate to
|
||||
determine which files *not* to symlink during activation.
|
||||
|
||||
.. _virtual-dependencies:
|
||||
|
||||
--------------------
|
||||
@@ -3584,7 +3525,7 @@ will likely contain some overriding of default builder methods:
|
||||
def cmake_args(self):
|
||||
pass
|
||||
|
||||
class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder):
|
||||
def configure_args(self):
|
||||
pass
|
||||
|
||||
@@ -4406,16 +4347,9 @@ In addition to invoking the right compiler, the compiler wrappers add
|
||||
flags to the compile line so that dependencies can be easily found.
|
||||
These flags are added for each dependency, if they exist:
|
||||

* Compile-time library search paths: ``-L$dep_prefix/lib``, ``-L$dep_prefix/lib64``
* Runtime library search paths (RPATHs): ``$rpath_flag$dep_prefix/lib``, ``$rpath_flag$dep_prefix/lib64``
* Include search paths: ``-I$dep_prefix/include``

An example of this would be the ``libdwarf`` build, which has one
|
||||
dependency: ``libelf``. Every call to ``cc`` in the ``libdwarf``
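
A hedged illustration of the effect (paths are hypothetical and the exact
wrapper output may differ):

.. code-block:: console

   # inside such a build, a call like
   $ cc -c dwarf_init.c
   # is roughly equivalent to the underlying invocation
   $ gcc -c dwarf_init.c -I/opt/spack/libelf/include \
         -L/opt/spack/libelf/lib -Wl,-rpath,/opt/spack/libelf/lib
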
@@ -1,5 +1,5 @@
|
||||
Name, Supported Versions, Notes, Requirement Reason
|
||||
Python, 2.7/3.6-3.11, , Interpreter for Spack
|
||||
C/C++ Compilers, , , Building software
|
||||
make, , , Build software
|
||||
patch, , , Build software
|
||||
@@ -11,6 +11,7 @@ bzip2, , , Compress/Decompress archives
|
||||
xz, , , Compress/Decompress archives
|
||||
zstd, , Optional, Compress/Decompress archives
|
||||
file, , , Create/Use Buildcaches
|
||||
lsb-release, , , Linux: identify operating system version
|
||||
gnupg2, , , Sign/Verify Buildcaches
|
||||
git, , , Manage Software Repositories
|
||||
svn, , Optional, Manage Software Repositories
|
||||
|
|
lib/spack/external/__init__.py (vendored)
@@ -18,7 +18,7 @@
|
||||
|
||||
* Homepage: https://pypi.python.org/pypi/archspec
|
||||
* Usage: Labeling, comparison and detection of microarchitectures
|
||||
* Version: 0.2.0 (commit 77640e572725ad97f18e63a04857155752ace045)
|
||||
|
||||
argparse
|
||||
--------
|
||||
|
lib/spack/external/archspec/cpu/detect.py (vendored)
@@ -132,9 +132,15 @@ def sysctl(*args):
|
||||
"model name": sysctl("-n", "machdep.cpu.brand_string"),
|
||||
}
|
||||
else:
|
||||
        model = "unknown"
        model_str = sysctl("-n", "machdep.cpu.brand_string").lower()
        if "m2" in model_str:
            model = "m2"
        elif "m1" in model_str:
            model = "m1"
        elif "apple" in model_str:
            model = "m1"
|
||||
|
||||
info = {
|
||||
"vendor_id": "Apple",
|
||||
"flags": [],
|
||||
@@ -322,14 +328,26 @@ def compatibility_check_for_aarch64(info, target):
|
||||
features = set(info.get("Features", "").split())
|
||||
vendor = info.get("CPU implementer", "generic")
|
||||
|
||||
# At the moment it's not clear how to detect compatibility with
|
||||
# a specific version of the architecture
|
||||
if target.vendor == "generic" and target.name != "aarch64":
|
||||
return False
|
||||
|
||||
arch_root = TARGETS[basename]
|
||||
    arch_root_and_vendor = arch_root == target.family and target.vendor in (
        vendor,
        "generic",
    )

    # On macOS it seems impossible to get all the CPU features
    # with sysctl info, but for ARM we can get the exact model
    if platform.system() == "Darwin":
        model_key = info.get("model", basename)
        model = TARGETS[model_key]
        return arch_root_and_vendor and (target == model or target in model.ancestors)

    return arch_root_and_vendor and target.features.issubset(features)
|
||||
|
||||
|
||||
@compatibility_check(architecture_family="riscv64")
|
||||
def compatibility_check_for_riscv64(info, target):
|
||||
|
@@ -85,7 +85,7 @@
|
||||
"intel": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "x86-64",
|
||||
"name": "pentium4",
|
||||
"flags": "-march={name} -mtune=generic"
|
||||
}
|
||||
],
|
||||
@@ -2093,8 +2093,163 @@
|
||||
]
|
||||
}
|
||||
},
|
||||
"thunderx2": {
|
||||
"armv8.1a": {
|
||||
"from": ["aarch64"],
|
||||
"vendor": "generic",
|
||||
"features": [],
|
||||
"compilers": {
|
||||
"gcc": [
|
||||
{
|
||||
"versions": "5:",
|
||||
"flags": "-march=armv8.1-a -mtune=generic"
|
||||
}
|
||||
],
|
||||
"clang": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march=armv8.1-a -mtune=generic"
|
||||
}
|
||||
],
|
||||
"apple-clang": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march=armv8.1-a -mtune=generic"
|
||||
}
|
||||
],
|
||||
"arm": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march=armv8.1-a -mtune=generic"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"armv8.2a": {
|
||||
"from": ["armv8.1a"],
|
||||
"vendor": "generic",
|
||||
"features": [],
|
||||
"compilers": {
|
||||
"gcc": [
|
||||
{
|
||||
"versions": "6:",
|
||||
"flags": "-march=armv8.2-a -mtune=generic"
|
||||
}
|
||||
],
|
||||
"clang": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march=armv8.2-a -mtune=generic"
|
||||
}
|
||||
],
|
||||
"apple-clang": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march=armv8.2-a -mtune=generic"
|
||||
}
|
||||
],
|
||||
"arm": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march=armv8.2-a -mtune=generic"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"armv8.3a": {
|
||||
"from": ["armv8.2a"],
|
||||
"vendor": "generic",
|
||||
"features": [],
|
||||
"compilers": {
|
||||
"gcc": [
|
||||
{
|
||||
"versions": "6:",
|
||||
"flags": "-march=armv8.3-a -mtune=generic"
|
||||
}
|
||||
],
|
||||
"clang": [
|
||||
{
|
||||
"versions": "6:",
|
||||
"flags": "-march=armv8.3-a -mtune=generic"
|
||||
}
|
||||
],
|
||||
"apple-clang": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march=armv8.3-a -mtune=generic"
|
||||
}
|
||||
],
|
||||
"arm": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march=armv8.3-a -mtune=generic"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"armv8.4a": {
|
||||
"from": ["armv8.3a"],
|
||||
"vendor": "generic",
|
||||
"features": [],
|
||||
"compilers": {
|
||||
"gcc": [
|
||||
{
|
||||
"versions": "8:",
|
||||
"flags": "-march=armv8.4-a -mtune=generic"
|
||||
}
|
||||
],
|
||||
"clang": [
|
||||
{
|
||||
"versions": "8:",
|
||||
"flags": "-march=armv8.4-a -mtune=generic"
|
||||
}
|
||||
],
|
||||
"apple-clang": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march=armv8.4-a -mtune=generic"
|
||||
}
|
||||
],
|
||||
"arm": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march=armv8.4-a -mtune=generic"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"armv8.5a": {
|
||||
"from": ["armv8.4a"],
|
||||
"vendor": "generic",
|
||||
"features": [],
|
||||
"compilers": {
|
||||
"gcc": [
|
||||
{
|
||||
"versions": "9:",
|
||||
"flags": "-march=armv8.5-a -mtune=generic"
|
||||
}
|
||||
],
|
||||
"clang": [
|
||||
{
|
||||
"versions": "11:",
|
||||
"flags": "-march=armv8.5-a -mtune=generic"
|
||||
}
|
||||
],
|
||||
"apple-clang": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march=armv8.5-a -mtune=generic"
|
||||
}
|
||||
],
|
||||
"arm": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march=armv8.5-a -mtune=generic"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"thunderx2": {
|
||||
"from": ["armv8.1a"],
|
||||
"vendor": "Cavium",
|
||||
"features": [
|
||||
"fp",
|
||||
@@ -2141,7 +2296,7 @@
|
||||
}
|
||||
},
|
||||
"a64fx": {
|
||||
"from": ["aarch64"],
|
||||
"from": ["armv8.2a"],
|
||||
"vendor": "Fujitsu",
|
||||
"features": [
|
||||
"fp",
|
||||
@@ -2209,7 +2364,7 @@
|
||||
]
|
||||
}
|
||||
},
|
||||
"graviton": {
|
||||
"cortex_a72": {
|
||||
"from": ["aarch64"],
|
||||
"vendor": "ARM",
|
||||
"features": [
|
||||
@@ -2235,19 +2390,19 @@
|
||||
},
|
||||
{
|
||||
"versions": "6:",
|
||||
"flags" : "-march=armv8-a+crc+crypto -mtune=cortex-a72"
|
||||
"flags" : "-mcpu=cortex-a72"
|
||||
}
|
||||
],
|
||||
"clang" : [
|
||||
{
|
||||
"versions": "3.9:",
|
||||
"flags" : "-march=armv8-a+crc+crypto"
|
||||
"flags" : "-mcpu=cortex-a72"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"graviton2": {
|
||||
"from": ["graviton"],
|
||||
"neoverse_n1": {
|
||||
"from": ["cortex_a72", "armv8.2a"],
|
||||
"vendor": "ARM",
|
||||
"features": [
|
||||
"fp",
|
||||
@@ -2296,7 +2451,7 @@
|
||||
},
|
||||
{
|
||||
"versions": "9.0:",
|
||||
"flags" : "-march=armv8.2-a+fp16+rcpc+dotprod+crypto -mtune=neoverse-n1"
|
||||
"flags" : "-mcpu=neoverse-n1"
|
||||
}
|
||||
],
|
||||
"clang" : [
|
||||
@@ -2307,6 +2462,10 @@
|
||||
{
|
||||
"versions": "5:",
|
||||
"flags" : "-march=armv8.2-a+fp16+rcpc+dotprod+crypto"
|
||||
},
|
||||
{
|
||||
"versions": "10:",
|
||||
"flags" : "-mcpu=neoverse-n1"
|
||||
}
|
||||
],
|
||||
"arm" : [
|
||||
@@ -2317,11 +2476,11 @@
|
||||
]
|
||||
}
|
||||
},
|
||||
"graviton3": {
|
||||
"from": ["graviton2"],
|
||||
"neoverse_v1": {
|
||||
"from": ["neoverse_n1", "armv8.4a"],
|
||||
"vendor": "ARM",
|
||||
"features": [
|
||||
"fp",
|
||||
"fp",
|
||||
"asimd",
|
||||
"evtstrm",
|
||||
"aes",
|
||||
@@ -2384,11 +2543,11 @@
|
||||
},
|
||||
{
|
||||
"versions": "9.0:9.9",
|
||||
"flags" : "-march=armv8.4-a+crypto+rcpc+sha3+sm4+sve+rng+nodotprod -mtune=neoverse-v1"
|
||||
"flags" : "-mcpu=neoverse-v1"
|
||||
},
|
||||
{
|
||||
"versions": "10.0:",
|
||||
"flags" : "-march=armv8.4-a+crypto+rcpc+sha3+sm4+sve+rng+ssbs+i8mm+bf16+nodotprod -mtune=neoverse-v1"
|
||||
"flags" : "-mcpu=neoverse-v1"
|
||||
}
|
||||
|
||||
],
|
||||
@@ -2404,6 +2563,10 @@
|
||||
{
|
||||
"versions": "11:",
|
||||
"flags" : "-march=armv8.4-a+sve+ssbs+fp16+bf16+crypto+i8mm+rng"
|
||||
},
|
||||
{
|
||||
"versions": "12:",
|
||||
"flags" : "-mcpu=neoverse-v1"
|
||||
}
|
||||
],
|
||||
"arm" : [
|
||||
@@ -2419,7 +2582,7 @@
|
||||
}
|
||||
},
|
||||
"m1": {
|
||||
"from": ["aarch64"],
|
||||
"from": ["armv8.4a"],
|
||||
"vendor": "Apple",
|
||||
"features": [
|
||||
"fp",
|
||||
@@ -2484,6 +2647,76 @@
|
||||
]
|
||||
}
|
||||
},
|
||||
"m2": {
|
||||
"from": ["m1", "armv8.5a"],
|
||||
"vendor": "Apple",
|
||||
"features": [
|
||||
"fp",
|
||||
"asimd",
|
||||
"evtstrm",
|
||||
"aes",
|
||||
"pmull",
|
||||
"sha1",
|
||||
"sha2",
|
||||
"crc32",
|
||||
"atomics",
|
||||
"fphp",
|
||||
"asimdhp",
|
||||
"cpuid",
|
||||
"asimdrdm",
|
||||
"jscvt",
|
||||
"fcma",
|
||||
"lrcpc",
|
||||
"dcpop",
|
||||
"sha3",
|
||||
"asimddp",
|
||||
"sha512",
|
||||
"asimdfhm",
|
||||
"dit",
|
||||
"uscat",
|
||||
"ilrcpc",
|
||||
"flagm",
|
||||
"ssbs",
|
||||
"sb",
|
||||
"paca",
|
||||
"pacg",
|
||||
"dcpodp",
|
||||
"flagm2",
|
||||
"frint",
|
||||
"ecv",
|
||||
"bf16",
|
||||
"i8mm",
|
||||
"bti"
|
||||
],
|
||||
"compilers": {
|
||||
"gcc": [
|
||||
{
|
||||
"versions": "8.0:",
|
||||
"flags" : "-march=armv8.5-a -mtune=generic"
|
||||
}
|
||||
],
|
||||
"clang" : [
|
||||
{
|
||||
"versions": "9.0:12.0",
|
||||
"flags" : "-march=armv8.5-a"
|
||||
},
|
||||
{
|
||||
"versions": "13.0:",
|
||||
"flags" : "-mcpu=apple-m1"
|
||||
}
|
||||
],
|
||||
"apple-clang": [
|
||||
{
|
||||
"versions": "11.0:12.5",
|
||||
"flags" : "-march=armv8.5-a"
|
||||
},
|
||||
{
|
||||
"versions": "13.0:",
|
||||
"flags" : "-mcpu=vortex"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"arm": {
|
||||
"from": [],
|
||||
"vendor": "generic",
|
||||
|
@@ -3,11 +3,20 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
__version__ = "0.19.1"
|
||||
spack_version = __version__
|
||||
|
||||
|
||||
def __try_int(v):
    try:
        return int(v)
    except ValueError:
        return v
|
||||
|
||||
|
||||
#: (major, minor, micro, dev release) tuple
|
||||
spack_version_info = tuple([__try_int(v) for v in __version__.split(".")])
|
||||
|
||||
|
||||
__all__ = ["spack_version_info", "spack_version"]
|
||||
__version__ = spack_version
|
||||
|
@@ -288,7 +288,7 @@ def _check_build_test_callbacks(pkgs, error_cls):
|
||||
errors = []
|
||||
for pkg_name in pkgs:
|
||||
pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
|
||||
test_callbacks = getattr(pkg_cls, "build_time_test_callbacks", None)
|
||||
|
||||
if test_callbacks and "test" in test_callbacks:
|
||||
msg = '{0} package contains "test" method in ' "build_time_test_callbacks"
|
||||
|
@@ -7,6 +7,7 @@
|
||||
import collections
|
||||
import hashlib
|
||||
import json
|
||||
import multiprocessing.pool
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
@@ -45,6 +46,7 @@
|
||||
from spack.relocate import utf8_paths_to_single_binary_regex
|
||||
from spack.spec import Spec
|
||||
from spack.stage import Stage
|
||||
from spack.util.executable import which
|
||||
|
||||
_build_cache_relative_path = "build_cache"
|
||||
_build_cache_keys_relative_path = "_pgp"
|
||||
@@ -72,6 +74,10 @@ def __init__(self, errors):
|
||||
super(FetchCacheError, self).__init__(self.message)
|
||||
|
||||
|
||||
class ListMirrorSpecsError(spack.error.SpackError):
|
||||
"""Raised when unable to retrieve list of specs from the mirror"""
|
||||
|
||||
|
||||
class BinaryCacheIndex(object):
|
||||
"""
|
||||
The BinaryCacheIndex tracks what specs are available on (usually remote)
|
||||
@@ -287,10 +293,12 @@ def update_spec(self, spec, found_list):
|
||||
cur_entry["spec"] = new_entry["spec"]
|
||||
break
|
||||
else:
|
||||
current_list.append(
|
||||
{
|
||||
"mirror_url": new_entry["mirror_url"],
|
||||
"spec": new_entry["spec"],
|
||||
}
|
||||
)
|
||||
|
||||
def update(self, with_cooldown=False):
|
||||
"""Make sure local cache of buildcache index files is up to date.
|
||||
@@ -548,9 +556,9 @@ class NoOverwriteException(spack.error.SpackError):
|
||||
"""
|
||||
|
||||
def __init__(self, file_path):
|
||||
super(NoOverwriteException, self).__init__(
|
||||
'"{}" exists in buildcache. Use --force flag to overwrite.'.format(file_path)
|
||||
)
|
||||
|
||||
|
||||
class NoGpgException(spack.error.SpackError):
|
||||
@@ -881,37 +889,52 @@ def sign_specfile(key, force, specfile_path):
|
||||
spack.util.gpg.sign(key, specfile_path, signed_specfile_path, clearsign=True)
|
||||
|
||||
|
||||
def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_dir, concurrency):
    """Read all the specs listed in the provided list, using the given thread parallelism,
    generate the index, and push it to the mirror.

    Args:
        file_list (list(str)): List of urls or file paths pointing at spec files to read
        read_method: A function taking a single argument, either a url or a file path,
            and which reads the spec file at that location, and returns the spec.
        cache_prefix (str): prefix of the build cache on s3 where index should be pushed.
        db: A spack database used for adding specs and then writing the index.
        temp_dir (str): Location to write index.json and hash for pushing
        concurrency (int): Number of parallel processes to use when fetching

    Return:
        None
    """

    def _fetch_spec_from_mirror(spec_url):
        spec_file_contents = read_method(spec_url)

        if spec_file_contents:
            # Need full spec.json name or this gets confused with index.json.
            if spec_url.endswith(".json.sig"):
                specfile_json = Spec.extract_json_from_clearsig(spec_file_contents)
                return Spec.from_dict(specfile_json)
            if spec_url.endswith(".json"):
                return Spec.from_json(spec_file_contents)
            if spec_url.endswith(".yaml"):
                return Spec.from_yaml(spec_file_contents)

    tp = multiprocessing.pool.ThreadPool(processes=concurrency)
    try:
        fetched_specs = tp.map(
            llnl.util.lang.star(_fetch_spec_from_mirror), [(f,) for f in file_list]
        )
    finally:
        tp.terminate()
        tp.join()

    for fetched_spec in fetched_specs:
        db.add(fetched_spec, None)
        db.mark(fetched_spec, "in_buildcache", True)

# Now generate the index, compute its hash, and push the two files to
|
||||
# the mirror.
|
||||
index_json_path = os.path.join(temp_dir, "index.json")
|
||||
with open(index_json_path, "w") as f:
|
||||
db._write_to_file(f)
|
||||
|
||||
@@ -921,7 +944,7 @@ def _read_specs_and_push_index(file_list, cache_prefix, db, db_root_dir):
|
||||
index_hash = compute_hash(index_string)
|
||||
|
||||
# Write the hash out to a local file
|
||||
index_hash_path = os.path.join(temp_dir, "index.json.hash")
|
||||
with open(index_hash_path, "w") as f:
|
||||
f.write(index_hash)
|
||||
|
||||
@@ -942,31 +965,142 @@ def _read_specs_and_push_index(file_list, cache_prefix, db, db_root_dir):
|
||||
)
|
||||
|
||||
|
||||
def _specs_from_cache_aws_cli(cache_prefix):
|
||||
"""Use aws cli to sync all the specs into a local temporary directory.
|
||||
|
||||
Args:
|
||||
cache_prefix (str): prefix of the build cache on s3
|
||||
|
||||
Return:
|
||||
List of the local file paths and a function that can read each one from the file system.
|
||||
"""
|
||||
read_fn = None
|
||||
file_list = None
|
||||
aws = which("aws")
|
||||
|
||||
def file_read_method(file_path):
|
||||
with open(file_path) as fd:
|
||||
return fd.read()
|
||||
|
||||
tmpspecsdir = tempfile.mkdtemp()
|
||||
sync_command_args = [
|
||||
"s3",
|
||||
"sync",
|
||||
"--exclude",
|
||||
"*",
|
||||
"--include",
|
||||
"*.spec.json.sig",
|
||||
"--include",
|
||||
"*.spec.json",
|
||||
"--include",
|
||||
"*.spec.yaml",
|
||||
cache_prefix,
|
||||
tmpspecsdir,
|
||||
]
|
||||
|
||||
try:
|
||||
tty.debug(
|
||||
"Using aws s3 sync to download specs from {0} to {1}".format(cache_prefix, tmpspecsdir)
|
||||
)
|
||||
aws(*sync_command_args, output=os.devnull, error=os.devnull)
|
||||
file_list = fsys.find(tmpspecsdir, ["*.spec.json.sig", "*.spec.json", "*.spec.yaml"])
|
||||
read_fn = file_read_method
|
||||
except Exception:
|
||||
tty.warn("Failed to use aws s3 sync to retrieve specs, falling back to parallel fetch")
|
||||
shutil.rmtree(tmpspecsdir)
|
||||
|
||||
return file_list, read_fn
|
||||
|
||||
|
||||
def _specs_from_cache_fallback(cache_prefix):
|
||||
"""Use spack.util.web module to get a list of all the specs at the remote url.
|
||||
|
||||
Args:
|
||||
cache_prefix (str): Base url of mirror (location of spec files)
|
||||
|
||||
Return:
|
||||
The list of complete spec file urls and a function that can read each one from its
|
||||
remote location (also using the spack.util.web module).
|
||||
"""
|
||||
read_fn = None
|
||||
file_list = None
|
||||
|
||||
def url_read_method(url):
|
||||
contents = None
|
||||
try:
|
||||
_, _, spec_file = web_util.read_from_url(url)
|
||||
contents = codecs.getreader("utf-8")(spec_file).read()
|
||||
except (URLError, web_util.SpackWebError) as url_err:
|
||||
tty.error("Error reading specfile: {0}".format(url))
|
||||
tty.error(url_err)
|
||||
return contents
|
||||
|
||||
try:
|
||||
file_list = [
|
||||
url_util.join(cache_prefix, entry)
|
||||
for entry in web_util.list_url(cache_prefix)
|
||||
if entry.endswith(".yaml")
|
||||
or entry.endswith("spec.json")
|
||||
or entry.endswith("spec.json.sig")
|
||||
]
|
||||
read_fn = url_read_method
|
||||
except KeyError as inst:
|
||||
msg = "No packages at {0}: {1}".format(cache_prefix, inst)
|
||||
tty.warn(msg)
|
||||
return
|
||||
except Exception as err:
|
||||
# If we got some kind of S3 (access denied or other connection
|
||||
# error), the first non boto-specific class in the exception
|
||||
# hierarchy is Exception. Just print a warning and return
|
||||
msg = "Encountered problem listing packages at {0}: {1}".format(cache_prefix, err)
|
||||
tty.warn(msg)
|
||||
|
||||
return file_list, read_fn
|
||||
|
||||
|
||||
def _spec_files_from_cache(cache_prefix):
|
||||
"""Get a list of all the spec files in the mirror and a function to
|
||||
read them.
|
||||
|
||||
Args:
|
||||
cache_prefix (str): Base url of mirror (location of spec files)
|
||||
|
||||
Return:
|
||||
A tuple where the first item is a list of absolute file paths or
|
||||
urls pointing to the specs that should be read from the mirror,
|
||||
and the second item is a function taking a url or file path and
|
||||
returning the spec read from that location.
|
||||
"""
|
||||
callbacks = []
|
||||
if cache_prefix.startswith("s3"):
|
||||
callbacks.append(_specs_from_cache_aws_cli)
|
||||
|
||||
callbacks.append(_specs_from_cache_fallback)
|
||||
|
||||
for specs_from_cache_fn in callbacks:
|
||||
file_list, read_fn = specs_from_cache_fn(cache_prefix)
|
||||
if file_list:
|
||||
return file_list, read_fn
|
||||
|
||||
raise ListMirrorSpecsError("Failed to get list of specs from {0}".format(cache_prefix))
|
||||
|
||||
|
||||
def generate_package_index(cache_prefix, concurrency=32):
|
||||
"""Create or replace the build cache index on the given mirror. The
|
||||
buildcache index contains an entry for each binary package under the
|
||||
cache_prefix.
|
||||
|
||||
Args:
|
||||
cache_prefix(str): Base url of binary mirror.
|
||||
concurrency: (int): The desired threading concurrency to use when
|
||||
fetching the spec files from the mirror.
|
||||
|
||||
Return:
|
||||
None
|
||||
"""
|
||||
try:
|
||||
file_list, read_fn = _spec_files_from_cache(cache_prefix)
|
||||
except ListMirrorSpecsError as err:
|
||||
tty.error("Unabled to generate package index, {0}".format(err))
|
||||
return
|
||||
|
||||
if any(x.endswith(".yaml") for x in file_list):
|
||||
@@ -989,7 +1123,7 @@ def generate_package_index(cache_prefix):
|
||||
)
|
||||
|
||||
try:
|
||||
_read_specs_and_push_index(file_list, cache_prefix, db, db_root_dir)
|
||||
_read_specs_and_push_index(file_list, read_fn, cache_prefix, db, db_root_dir, concurrency)
|
||||
except Exception as err:
|
||||
msg = "Encountered problem pushing package index to {0}: {1}".format(cache_prefix, err)
|
||||
tty.warn(msg)
|
||||
@@ -1600,13 +1734,19 @@ def relocate_package(spec, allow_root):
|
||||
install_path = spack.hooks.sbang.sbang_install_path()
|
||||
prefix_to_prefix_text[old_sbang_install_path] = install_path
|
||||
|
||||
# First match specific prefix paths. Possibly the *local* install prefix
|
||||
# of some dependency is in an upstream, so we cannot assume the original
|
||||
# spack store root can be mapped uniformly to the new spack store root.
|
||||
for orig_prefix, hash in prefix_to_hash.items():
|
||||
prefix_to_prefix_text[orig_prefix] = hash_to_prefix.get(hash, None)
|
||||
prefix_to_prefix_bin[orig_prefix] = hash_to_prefix.get(hash, None)
|
||||
|
||||
# Only then add the generic fallback of install prefix -> install prefix.
|
||||
prefix_to_prefix_text[old_prefix] = new_prefix
|
||||
prefix_to_prefix_bin[old_prefix] = new_prefix
|
||||
prefix_to_prefix_text[old_layout_root] = new_layout_root
|
||||
prefix_to_prefix_bin[old_layout_root] = new_layout_root
|
||||
|
||||
|
||||
# This is vestigial code for the *old* location of sbang. Previously,
|
||||
# sbang was a bash script, and it lived in the spack prefix. It is
|
||||
# now a POSIX script that lives in the install prefix. Old packages
|
||||
|
@@ -978,22 +978,9 @@ def add_modifications_for_dep(dep):
|
||||
if set_package_py_globals:
|
||||
set_module_variables_for_package(dpkg)
|
||||
|
||||
# Allow dependencies to modify the module
|
||||
current_module = ModuleChangePropagator(spec.package)
|
||||
dpkg.setup_dependent_package(current_module, spec)
|
||||
current_module.propagate_changes_to_mro()
|
||||
|
||||
if context == "build":
|
||||
builder = spack.builder.create(dpkg)
|
||||
@@ -1437,3 +1424,51 @@ def write_log_summary(out, log_type, log, last=None):
|
||||
# If no errors are found but warnings are, display warnings
|
||||
out.write("\n%s found in %s log:\n" % (plural(nwar, "warning"), log_type))
|
||||
out.write(make_log_context(warnings))
|
||||
|
||||
|
||||
class ModuleChangePropagator(object):
|
||||
"""Wrapper class to accept changes to a package.py Python module, and propagate them in the
|
||||
MRO of the package.
|
||||
|
||||
It is mainly used as a substitute of the ``package.py`` module, when calling the
|
||||
"setup_dependent_package" function during build environment setup.
|
||||
"""
|
||||
|
||||
_PROTECTED_NAMES = ("package", "current_module", "modules_in_mro", "_set_attributes")
|
||||
|
||||
def __init__(self, package):
|
||||
self._set_self_attributes("package", package)
|
||||
self._set_self_attributes("current_module", package.module)
|
||||
|
||||
#: Modules for the classes in the MRO up to PackageBase
|
||||
modules_in_mro = []
|
||||
for cls in inspect.getmro(type(package)):
|
||||
module = cls.module
|
||||
|
||||
if module == self.current_module:
|
||||
continue
|
||||
|
||||
if module == spack.package_base:
|
||||
break
|
||||
|
||||
modules_in_mro.append(module)
|
||||
self._set_self_attributes("modules_in_mro", modules_in_mro)
|
||||
self._set_self_attributes("_set_attributes", {})
|
||||
|
||||
def _set_self_attributes(self, key, value):
|
||||
super(ModuleChangePropagator, self).__setattr__(key, value)
|
||||
|
||||
def __getattr__(self, item):
|
||||
return getattr(self.current_module, item)
|
||||
|
||||
def __setattr__(self, key, value):
|
||||
if key in ModuleChangePropagator._PROTECTED_NAMES:
|
||||
msg = 'Cannot set attribute "{}" in ModuleMonkeyPatcher'.format(key)
|
||||
return AttributeError(msg)
|
||||
|
||||
setattr(self.current_module, key, value)
|
||||
self._set_attributes[key] = value
|
||||
|
||||
def propagate_changes_to_mro(self):
|
||||
for module_in_mro in self.modules_in_mro:
|
||||
module_in_mro.__dict__.update(self._set_attributes)
|
||||
|
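The core trick in the class above is intercepting attribute writes while forwarding reads. A reduced, standalone sketch of the same pattern (the `Recorder` class is hypothetical, not Spack code):

```python
class Recorder(object):
    """Forward attribute reads/writes to a target object and record each
    write so it can be replayed on other objects later."""

    def __init__(self, target):
        # Bypass our own __setattr__ while initializing internal state.
        super(Recorder, self).__setattr__("target", target)
        super(Recorder, self).__setattr__("recorded", {})

    def __getattr__(self, item):
        # Only called when normal lookup fails, so internal state is safe.
        return getattr(self.target, item)

    def __setattr__(self, key, value):
        setattr(self.target, key, value)
        self.recorded[key] = value  # item assignment, so no recursion
```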
@@ -7,7 +7,7 @@
 import os.path
 import stat
 import subprocess
-from typing import List  # novm
+from typing import List  # novm # noqa: F401

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
@@ -427,15 +427,15 @@ def _do_patch_libtool(self):
             x.filter(regex="-nostdlib", repl="", string=True)
             rehead = r"/\S*/"
             for o in [
-                "fjhpctag.o",
-                "fjcrt0.o",
-                "fjlang08.o",
-                "fjomp.o",
-                "crti.o",
-                "crtbeginS.o",
-                "crtendS.o",
+                r"fjhpctag\.o",
+                r"fjcrt0\.o",
+                r"fjlang08\.o",
+                r"fjomp\.o",
+                r"crti\.o",
+                r"crtbeginS\.o",
+                r"crtendS\.o",
             ]:
-                x.filter(regex=(rehead + o), repl="", string=True)
+                x.filter(regex=(rehead + o), repl="")
         elif self.pkg.compiler.name == "dpcpp":
             # Hack to filter out spurious predep_objects when building with Intel dpcpp
             # (see https://github.com/spack/spack/issues/32863):
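The raw-string escaping matters because, once `string=True` is dropped and the pattern is treated as a real regex, an unescaped `.` matches any character. A quick standalone check:

```python
import re

# Unescaped dot: false positive on a file that is not fjcrt0.o.
assert re.search(r"/\S*/fjcrt0.o", "/lib/fjcrt0Xo")

# Escaped dot: matches only the literal object file name.
assert re.search(r"/\S*/fjcrt0\.o", "/lib/fjcrt0Xo") is None
assert re.search(r"/\S*/fjcrt0\.o", "/lib/fjcrt0.o")
```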
@@ -205,13 +205,7 @@ def initconfig_hardware_entries(self):
             entries.append(cmake_cache_path("CUDA_TOOLKIT_ROOT_DIR", cudatoolkitdir))
             cudacompiler = "${CUDA_TOOLKIT_ROOT_DIR}/bin/nvcc"
             entries.append(cmake_cache_path("CMAKE_CUDA_COMPILER", cudacompiler))

-            if spec.satisfies("^mpi"):
-                entries.append(cmake_cache_path("CMAKE_CUDA_HOST_COMPILER", "${MPI_CXX_COMPILER}"))
-            else:
-                entries.append(
-                    cmake_cache_path("CMAKE_CUDA_HOST_COMPILER", "${CMAKE_CXX_COMPILER}")
-                )
+            entries.append(cmake_cache_path("CMAKE_CUDA_HOST_COMPILER", "${CMAKE_CXX_COMPILER}"))

         return entries

@@ -142,15 +142,17 @@ def std_args(pkg):
         default_library = "shared"

         args = [
-            "--prefix={0}".format(pkg.prefix),
+            "-Dprefix={0}".format(pkg.prefix),
             # If we do not specify libdir explicitly, Meson chooses something
             # like lib/x86_64-linux-gnu, which causes problems when trying to
             # find libraries and pkg-config files.
             # See https://github.com/mesonbuild/meson/issues/2197
-            "--libdir={0}".format(pkg.prefix.lib),
+            "-Dlibdir={0}".format(pkg.prefix.lib),
             "-Dbuildtype={0}".format(build_type),
             "-Dstrip={0}".format(strip),
             "-Ddefault_library={0}".format(default_library),
+            # Do not automatically download and install dependencies
+            "-Dwrap_mode=nodownload",
         ]

         return args
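With this change every option goes through Meson's uniform `-Doption=value` mechanism instead of mixing `--` and `-D` styles. A sketch of the resulting invocation for a hypothetical prefix (paths invented for illustration):

```python
args = [
    "-Dprefix=/opt/example/pkg",      # hypothetical install prefix
    "-Dlibdir=/opt/example/pkg/lib",  # explicit libdir avoids lib/x86_64-linux-gnu
    "-Dbuildtype=release",
    "-Dstrip=true",
    "-Ddefault_library=shared",
    "-Dwrap_mode=nodownload",         # never fetch subproject wraps
]
print("meson setup builddir . " + " ".join(args))
```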
@@ -15,6 +15,7 @@
 import spack.builder
 import spack.multimethod
 import spack.package_base
+import spack.spec
 from spack.directives import build_system, depends_on, extends
 from spack.error import NoHeadersError, NoLibrariesError, SpecError
 from spack.version import Version
@@ -218,6 +219,27 @@ def list_url(cls):
             name = cls.pypi.split("/")[0]
             return "https://pypi.org/simple/" + name + "/"

+    def update_external_dependencies(self):
+        """
+        Ensure all external python packages have a python dependency.
+
+        If another package in the DAG depends on python, we use that
+        python for the dependency of the external. If not, we assume
+        that the external PythonPackage is installed into the same
+        directory as the python it depends on.
+        """
+        # TODO: Include this in the solve, rather than instantiating post-concretization
+        if "python" not in self.spec:
+            if "python" in self.spec.root:
+                python = self.spec.root["python"]
+            else:
+                python = spack.spec.Spec("python")
+                repo = spack.repo.path.repo_for_pkg(python)
+                python.namespace = repo.namespace
+                python._mark_concrete()
+                python.external_path = self.prefix
+            self.spec.add_dependency_edge(python, ("build", "link", "run"))
+
     @property
     def headers(self):
         """Discover header files in platlib."""
@@ -21,8 +21,6 @@ class SConsPackage(spack.package_base.PackageBase):
     #: build-system class we are using
     build_system_class = "SConsPackage"

-    #: Callback names for build-time test
-    build_time_test_callbacks = ["build_test"]
     #: Legacy buildsystem attribute used to deserialize and install old specs
     legacy_buildsystem = "scons"

@@ -48,27 +46,33 @@ class SConsBuilder(BaseBuilder):
     phases = ("build", "install")

     #: Names associated with package methods in the old build-system format
-    legacy_methods = ("build_args", "install_args", "build_test")
+    legacy_methods = ("build_test",)
+
+    #: Same as legacy_methods, but the signature is different
+    legacy_long_methods = ("build_args", "install_args")

     #: Names associated with package attributes in the old build-system format
-    legacy_attributes = ()
+    legacy_attributes = ("build_time_test_callbacks",)

-    def build_args(self):
+    #: Callback names for build-time test
+    build_time_test_callbacks = ["build_test"]
+
+    def build_args(self, spec, prefix):
         """Arguments to pass to build."""
         return []

     def build(self, pkg, spec, prefix):
         """Build the package."""
-        args = self.build_args()
+        args = self.build_args(spec, prefix)
         inspect.getmodule(self.pkg).scons(*args)

-    def install_args(self):
+    def install_args(self, spec, prefix):
         """Arguments to pass to install."""
         return []

     def install(self, pkg, spec, prefix):
         """Install the package."""
-        args = self.install_args()
+        args = self.install_args(spec, prefix)

         inspect.getmodule(self.pkg).scons("install", *args)

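Under the new builder API, packages override the long-form, two-argument signatures. A sketch of a hypothetical package using them (the `Example` class and its flags are invented for illustration):

```python
class Example(SConsPackage):
    """Hypothetical package showing the new two-argument hooks."""

    def build_args(self, spec, prefix):
        # The concrete spec and the install prefix are now passed in
        # explicitly instead of being read off the package instance.
        return ["PREFIX={0}".format(prefix)]

    def install_args(self, spec, prefix):
        return ["PREFIX={0}".format(prefix)]
```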
@@ -6,7 +6,7 @@
 import copy
 import functools
 import inspect
-from typing import List, Optional, Tuple
+from typing import List, Optional, Tuple  # noqa: F401

 import six

@@ -127,7 +127,12 @@ def __init__(self, wrapped_pkg_object, root_builder):
         wrapper_cls = type(self)
         bases = (package_cls, wrapper_cls)
         new_cls_name = package_cls.__name__ + "Wrapper"
-        new_cls = type(new_cls_name, bases, {})
+        # Forward attributes that might be monkey patched later
+        new_cls = type(
+            new_cls_name,
+            bases,
+            {"run_tests": property(lambda x: x.wrapped_package_object.run_tests)},
+        )
         new_cls.__module__ = package_cls.__module__
         self.__class__ = new_cls
         self.__dict__.update(wrapped_pkg_object.__dict__)
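The three-argument `type()` call builds the wrapper class with a forwarding property, so a later monkey-patch of `run_tests` on the wrapped package stays visible through the wrapper. A standalone sketch of the same pattern (toy classes, not Spack code):

```python
class Wrapped(object):
    run_tests = False

wrapped = Wrapped()

# Build a class whose `run_tests` always reads through to the wrapped object.
Forwarder = type(
    "WrappedForwarder",
    (object,),
    {"run_tests": property(lambda self: self.wrapped_package_object.run_tests)},
)
proxy = Forwarder()
proxy.wrapped_package_object = wrapped

wrapped.run_tests = True   # monkey-patch the wrapped object...
print(proxy.run_tests)     # ...and the proxy sees True
```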
@@ -1167,7 +1167,14 @@ def generate_gitlab_ci_yaml(
         "after_script",
     ]

-    service_job_retries = {"max": 2, "when": ["runner_system_failure", "stuck_or_timeout_failure"]}
+    service_job_retries = {
+        "max": 2,
+        "when": [
+            "runner_system_failure",
+            "stuck_or_timeout_failure",
+            "script_failure",
+        ],
+    }

     if job_id > 0:
         if temp_storage_url_prefix:
@@ -1762,9 +1769,9 @@ def reproduce_ci_job(url, work_dir):
     download_and_extract_artifacts(url, work_dir)

     lock_file = fs.find(work_dir, "spack.lock")[0]
-    concrete_env_dir = os.path.dirname(lock_file)
+    repro_lock_dir = os.path.dirname(lock_file)

-    tty.debug("Concrete environment directory: {0}".format(concrete_env_dir))
+    tty.debug("Found lock file in: {0}".format(repro_lock_dir))

     yaml_files = fs.find(work_dir, ["*.yaml", "*.yml"])

@@ -1787,6 +1794,21 @@ def reproduce_ci_job(url, work_dir):
         if pipeline_yaml:
             tty.debug("\n{0} is likely your pipeline file".format(yf))

+    relative_concrete_env_dir = pipeline_yaml["variables"]["SPACK_CONCRETE_ENV_DIR"]
+    tty.debug("Relative environment path used by cloud job: {0}".format(relative_concrete_env_dir))
+
+    # Using the relative concrete environment path found in the generated
+    # pipeline variable above, copy the spack environment files so they'll
+    # be found in the same location as when the job ran in the cloud.
+    concrete_env_dir = os.path.join(work_dir, relative_concrete_env_dir)
+    if not os.path.isdir(concrete_env_dir):
+        fs.mkdirp(concrete_env_dir)
+    copy_lock_path = os.path.join(concrete_env_dir, "spack.lock")
+    orig_yaml_path = os.path.join(repro_lock_dir, "spack.yaml")
+    copy_yaml_path = os.path.join(concrete_env_dir, "spack.yaml")
+    shutil.copyfile(lock_file, copy_lock_path)
+    shutil.copyfile(orig_yaml_path, copy_yaml_path)
+
     # Find the install script in the unzipped artifacts and make it executable
     install_script = fs.find(work_dir, "install.sh")[0]
     st = os.stat(install_script)
@@ -1842,6 +1864,7 @@ def reproduce_ci_job(url, work_dir):
     if repro_details:
         mount_as_dir = repro_details["ci_project_dir"]
         mounted_repro_dir = os.path.join(mount_as_dir, rel_repro_dir)
+        mounted_env_dir = os.path.join(mount_as_dir, relative_concrete_env_dir)

     # We will also try to clone spack from your local checkout and
     # reproduce the state present during the CI build, and put that into
@@ -1925,7 +1948,7 @@ def reproduce_ci_job(url, work_dir):
     inst_list.append("  $ source {0}/share/spack/setup-env.sh\n".format(spack_root))
     inst_list.append(
         "  $ spack env activate --without-view {0}\n\n".format(
-            mounted_repro_dir if job_image else repro_dir
+            mounted_env_dir if job_image else repro_dir
         )
     )
     inst_list.append("  - Run the install script\n\n")
@@ -30,6 +30,7 @@
 import spack.paths
 import spack.spec
 import spack.store
+import spack.traverse as traverse
 import spack.user_environment as uenv
 import spack.util.spack_json as sjson
 import spack.util.string
@@ -464,11 +465,12 @@ def format_list(specs):
     # create the final, formatted versions of all specs
     formatted = []
     for spec in specs:
-        formatted.append((fmt(spec), spec))
         if deps:
-            for depth, dep in spec.traverse(root=False, depth=True):
-                formatted.append((fmt(dep, depth), dep))
+            for depth, dep in traverse.traverse_tree([spec], depth_first=False):
+                formatted.append((fmt(dep.spec, depth), dep.spec))
             formatted.append(("", None))  # mark newlines
+        else:
+            formatted.append((fmt(spec), spec))

     # unless any of these are set, we can just colify and be done.
     if not any((deps, paths)):
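`traverse_tree([spec], depth_first=False)` walks the DAG breadth-first (root included at depth 0) while reporting each node's depth. A generic, self-contained sketch of that traversal shape, using a toy adjacency dict rather than Spack's API:

```python
from collections import deque

def bfs_with_depth(graph, root):
    # Yield (depth, node) pairs level by level, visiting each node once.
    seen = {root}
    queue = deque([(0, root)])
    while queue:
        depth, node = queue.popleft()
        yield depth, node
        for child in graph.get(node, ()):
            if child not in seen:
                seen.add(child)
                queue.append((depth + 1, child))

demo = {"a": ["b", "c"], "b": ["d"], "c": ["d"]}
print(list(bfs_with_depth(demo, "a")))  # [(0,'a'), (1,'b'), (1,'c'), (2,'d')]
```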
@@ -1,53 +0,0 @@
-# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-import llnl.util.tty as tty
-
-import spack.cmd
-import spack.cmd.common.arguments as arguments
-import spack.environment as ev
-from spack.filesystem_view import YamlFilesystemView
-
-description = "activate a package extension"
-section = "extensions"
-level = "long"
-
-
-def setup_parser(subparser):
-    subparser.add_argument(
-        "-f", "--force", action="store_true", help="activate without first activating dependencies"
-    )
-    subparser.add_argument("-v", "--view", metavar="VIEW", type=str, help="the view to operate on")
-    arguments.add_common_arguments(subparser, ["installed_spec"])
-
-
-def activate(parser, args):
-
-    tty.warn(
-        "spack activate is deprecated in favor of environments and will be removed in v0.19.0"
-    )
-
-    specs = spack.cmd.parse_specs(args.spec)
-    if len(specs) != 1:
-        tty.die("activate requires one spec. %d given." % len(specs))
-
-    spec = spack.cmd.disambiguate_spec(specs[0], ev.active_environment())
-    if not spec.package.is_extension:
-        tty.die("%s is not an extension." % spec.name)
-
-    if args.view:
-        target = args.view
-    else:
-        target = spec.package.extendee_spec.prefix
-
-    view = YamlFilesystemView(target, spack.store.layout)
-
-    if spec.package.is_activated(view):
-        tty.msg("Package %s is already activated." % specs[0].short_spec)
-        return
-
-    # TODO: refactor FilesystemView.add_extension and use that here (so there
-    # aren't two ways of activating extensions)
-    spec.package.do_activate(view, with_dependencies=not args.force)
@@ -52,6 +52,7 @@

 CLINGO_JSON = "$spack/share/spack/bootstrap/github-actions-v0.4/clingo.json"
+GNUPG_JSON = "$spack/share/spack/bootstrap/github-actions-v0.4/gnupg.json"
 PATCHELF_JSON = "$spack/share/spack/bootstrap/github-actions-v0.4/patchelf.json"

 # Metadata for a generated source mirror
 SOURCE_METADATA = {
@@ -443,6 +444,7 @@ def write_metadata(subdir, metadata):
     abs_directory, rel_directory = write_metadata(subdir="binaries", metadata=BINARY_METADATA)
     shutil.copy(spack.util.path.canonicalize_path(CLINGO_JSON), abs_directory)
+    shutil.copy(spack.util.path.canonicalize_path(GNUPG_JSON), abs_directory)
     shutil.copy(spack.util.path.canonicalize_path(PATCHELF_JSON), abs_directory)
     instructions += cmd.format("local-binaries", rel_directory)
     print(instructions)

@@ -531,7 +531,6 @@ def ci_rebuild(args):
     slash_hash = "/{}".format(job_spec.dag_hash())
-    deps_install_args = install_args
     root_install_args = install_args + [
         "--no-add",
         "--keep-stage",
         "--only=package",
         "--use-buildcache=package:never,dependencies:only",
@@ -1,96 +0,0 @@
-# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-import llnl.util.tty as tty
-
-import spack.cmd
-import spack.cmd.common.arguments as arguments
-import spack.environment as ev
-import spack.graph
-import spack.store
-from spack.filesystem_view import YamlFilesystemView
-
-description = "deactivate a package extension"
-section = "extensions"
-level = "long"
-
-
-def setup_parser(subparser):
-    subparser.add_argument(
-        "-f",
-        "--force",
-        action="store_true",
-        help="run deactivation even if spec is NOT currently activated",
-    )
-    subparser.add_argument("-v", "--view", metavar="VIEW", type=str, help="the view to operate on")
-    subparser.add_argument(
-        "-a",
-        "--all",
-        action="store_true",
-        help="deactivate all extensions of an extendable package, or "
-        "deactivate an extension AND its dependencies",
-    )
-    arguments.add_common_arguments(subparser, ["installed_spec"])
-
-
-def deactivate(parser, args):
-
-    tty.warn(
-        "spack deactivate is deprecated in favor of environments and will be removed in v0.19.0"
-    )
-
-    specs = spack.cmd.parse_specs(args.spec)
-    if len(specs) != 1:
-        tty.die("deactivate requires one spec. %d given." % len(specs))
-
-    env = ev.active_environment()
-    spec = spack.cmd.disambiguate_spec(specs[0], env)
-    pkg = spec.package
-
-    if args.view:
-        target = args.view
-    elif pkg.is_extension:
-        target = pkg.extendee_spec.prefix
-    elif pkg.extendable:
-        target = spec.prefix
-
-    view = YamlFilesystemView(target, spack.store.layout)
-
-    if args.all:
-        if pkg.extendable:
-            tty.msg("Deactivating all extensions of %s" % pkg.spec.short_spec)
-            ext_pkgs = spack.store.db.activated_extensions_for(spec, view.extensions_layout)
-
-            for ext_pkg in ext_pkgs:
-                ext_pkg.spec.normalize()
-                if ext_pkg.is_activated(view):
-                    ext_pkg.do_deactivate(view, force=True)
-
-        elif pkg.is_extension:
-            if not args.force and not spec.package.is_activated(view):
-                tty.die("%s is not activated." % pkg.spec.short_spec)
-
-            tty.msg("Deactivating %s and all dependencies." % pkg.spec.short_spec)
-
-            nodes_in_topological_order = spack.graph.topological_sort(spec)
-            for espec in reversed(nodes_in_topological_order):
-                epkg = espec.package
-                if epkg.extends(pkg.extendee_spec):
-                    if epkg.is_activated(view) or args.force:
-                        epkg.do_deactivate(view, force=args.force)
-
-        else:
-            tty.die("spack deactivate --all requires an extendable package or an extension.")
-
-    else:
-        if not pkg.is_extension:
-            tty.die(
-                "spack deactivate requires an extension.", "Did you mean 'spack deactivate --all'?"
-            )
-
-        if not args.force and not spec.package.is_activated(view):
-            tty.die("Package %s is not activated." % spec.short_spec)
-
-        spec.package.do_deactivate(view, force=args.force)
@@ -14,7 +14,6 @@
 import spack.environment as ev
 import spack.repo
 import spack.store
-from spack.filesystem_view import YamlFilesystemView

 description = "list extensions for package"
 section = "extensions"
@@ -38,10 +37,9 @@ def setup_parser(subparser):
         "--show",
         action="store",
         default="all",
-        choices=("packages", "installed", "activated", "all"),
+        choices=("packages", "installed", "all"),
         help="show only part of output",
     )
-    subparser.add_argument("-v", "--view", metavar="VIEW", type=str, help="the view to operate on")

     subparser.add_argument(
         "spec",
@@ -91,13 +89,6 @@ def extensions(parser, args):
         tty.msg("%d extensions:" % len(extensions))
         colify(ext.name for ext in extensions)

-    if args.view:
-        target = args.view
-    else:
-        target = spec.prefix
-
-    view = YamlFilesystemView(target, spack.store.layout)
-
     if args.show in ("installed", "all"):
         # List specs of installed extensions.
         installed = [s.spec for s in spack.store.db.installed_extensions_for(spec)]
@@ -109,14 +100,3 @@ def extensions(parser, args):
         else:
             tty.msg("%d installed:" % len(installed))
             cmd.display_specs(installed, args)

-    if args.show in ("activated", "all"):
-        # List specs of activated extensions.
-        activated = view.extensions_layout.extension_map(spec)
-        if args.show == "all":
-            print()
-        if not activated:
-            tty.msg("None activated.")
-        else:
-            tty.msg("%d activated:" % len(activated))
-            cmd.display_specs(activated.values(), args)
@@ -242,8 +242,8 @@ def print_tests(pkg):
     # So the presence of a callback in Spack does not necessarily correspond
     # to the actual presence of build-time tests for a package.
     for callbacks, phase in [
-        (pkg.build_time_test_callbacks, "Build"),
-        (pkg.install_time_test_callbacks, "Install"),
+        (getattr(pkg, "build_time_test_callbacks", None), "Build"),
+        (getattr(pkg, "install_time_test_callbacks", None), "Install"),
     ]:
         color.cprint("")
         color.cprint(section_title("Available {0} Phase Test Methods:".format(phase)))
@@ -193,14 +193,22 @@ def setup_parser(subparser):
         default=False,
         help="(with environment) only install already concretized specs",
     )
-    subparser.add_argument(
-        "--no-add",
+
+    updateenv_group = subparser.add_mutually_exclusive_group()
+    updateenv_group.add_argument(
+        "--add",
         action="store_true",
         default=False,
-        help="""(with environment) partially install an environment, limiting
-to concrete specs in the environment matching the arguments.
-Non-roots remain installed implicitly.""",
+        help="""(with environment) add spec to the environment as a root.""",
+    )
+    updateenv_group.add_argument(
+        "--no-add",
+        action="store_false",
+        dest="add",
+        help="""(with environment) do not add spec to the environment as a
+root (the default behavior).""",
     )

     subparser.add_argument(
         "-f",
         "--file",
@@ -289,11 +297,12 @@ def install_specs_inside_environment(specs, install_kwargs, cli_args):
         # the matches. Getting to this point means there were either
         # no matches or exactly one match.

-        if not m_spec and cli_args.no_add:
+        if not m_spec and not cli_args.add:
             msg = (
-                "You asked to install {0} without adding it (--no-add), but no such spec "
-                "exists in environment"
-            ).format(abstract.name)
+                "Cannot install '{0}' because it is not in the current environment."
+                " You can add it to the environment with 'spack add {0}', or as part"
+                " of the install command with 'spack install --add {0}'"
+            ).format(str(abstract))
             tty.die(msg)

         if not m_spec:
@@ -303,14 +312,16 @@ def install_specs_inside_environment(specs, install_kwargs, cli_args):

         tty.debug("exactly one match for {0} in env -> {1}".format(m_spec.name, m_spec.dag_hash()))

-        if m_spec in env.roots() or cli_args.no_add:
-            # either the single match is a root spec (and --no-add is
-            # the default for roots) or --no-add was stated explicitly
+        if m_spec in env.roots() or not cli_args.add:
+            # either the single match is a root spec (in which case
+            # the spec is not added to the env again), or the user did
+            # not specify --add (in which case it is assumed we are
+            # installing already-concretized specs in the env)
             tty.debug("just install {0}".format(m_spec.name))
             specs_to_install.append(m_spec)
         else:
             # the single match is not a root (i.e. it's a dependency),
-            # and --no-add was not specified, so we'll add it as a
+            # and --add was specified, so we'll add it as a
             # root before installing
             tty.debug("add {0} then install it".format(m_spec.name))
             specs_to_add.append((abstract, concrete))
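Because `--add` and `--no-add` share one destination in a mutually exclusive group, the parser ends up with a single boolean `args.add`. A self-contained sketch of the same argparse pattern:

```python
import argparse

parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group()
group.add_argument("--add", action="store_true", default=False, dest="add")
group.add_argument("--no-add", action="store_false", dest="add")

print(parser.parse_args([]).add)            # False (the default)
print(parser.parse_args(["--add"]).add)     # True
print(parser.parse_args(["--no-add"]).add)  # False, stated explicitly
# Passing both flags at once is rejected by the group with a usage error.
```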
@@ -9,6 +9,7 @@
 import llnl.util.tty as tty

+import spack.builder
 import spack.cmd
 import spack.cmd.common.arguments as arguments
 import spack.environment as ev
@@ -134,6 +135,7 @@ def location(parser, args):
         # Either concretize or filter from already concretized environment
         spec = spack.cmd.matching_spec_from_env(spec)
         pkg = spec.package
+        builder = spack.builder.create(pkg)

         if args.stage_dir:
             print(pkg.stage.path)
@@ -141,10 +143,10 @@ def location(parser, args):

         if args.build_dir:
             # Out of source builds have build_directory defined
-            if hasattr(pkg, "build_directory"):
+            if hasattr(builder, "build_directory"):
                 # build_directory can be either absolute or relative to the stage path
                 # in either case os.path.join makes it absolute
-                print(os.path.normpath(os.path.join(pkg.stage.path, pkg.build_directory)))
+                print(os.path.normpath(os.path.join(pkg.stage.path, builder.build_directory)))
                 return

         # Otherwise assume in-source builds
@@ -9,6 +9,7 @@
 import llnl.util.tty as tty
 import llnl.util.tty.colify as colify

+import spack.caches
 import spack.cmd
 import spack.cmd.common.arguments as arguments
 import spack.concretize
@@ -356,12 +357,9 @@ def versions_per_spec(args):
     return num_versions


-def create_mirror_for_individual_specs(mirror_specs, directory_hint, skip_unstable_versions):
-    local_push_url = local_mirror_url_from_user(directory_hint)
-    present, mirrored, error = spack.mirror.create(
-        local_push_url, mirror_specs, skip_unstable_versions
-    )
-    tty.msg("Summary for mirror in {}".format(local_push_url))
+def create_mirror_for_individual_specs(mirror_specs, path, skip_unstable_versions):
+    present, mirrored, error = spack.mirror.create(path, mirror_specs, skip_unstable_versions)
+    tty.msg("Summary for mirror in {}".format(path))
     process_mirror_stats(present, mirrored, error)


@@ -379,21 +377,6 @@ def process_mirror_stats(present, mirrored, error):
         sys.exit(1)


-def local_mirror_url_from_user(directory_hint):
-    """Return a file:// url pointing to the local mirror to be used.
-
-    Args:
-        directory_hint (str or None): directory where to create the mirror. If None,
-            defaults to "config:source_cache".
-    """
-    mirror_directory = spack.util.path.canonicalize_path(
-        directory_hint or spack.config.get("config:source_cache")
-    )
-    tmp_mirror = spack.mirror.Mirror(mirror_directory)
-    local_url = url_util.format(tmp_mirror.push_url)
-    return local_url
-
-
 def mirror_create(args):
     """Create a directory to be used as a spack mirror, and fill it with
     package archives.
@@ -424,9 +407,12 @@ def mirror_create(args):
             "The option '--all' already implies mirroring all versions for each package.",
         )

+    # When no directory is provided, the source dir is used
+    path = args.directory or spack.caches.fetch_cache_location()
+
     if args.all and not ev.active_environment():
         create_mirror_for_all_specs(
-            directory_hint=args.directory,
+            path=path,
             skip_unstable_versions=args.skip_unstable_versions,
             selection_fn=not_excluded_fn(args),
         )
@@ -434,7 +420,7 @@ def mirror_create(args):

     if args.all and ev.active_environment():
         create_mirror_for_all_specs_inside_environment(
-            directory_hint=args.directory,
+            path=path,
             skip_unstable_versions=args.skip_unstable_versions,
             selection_fn=not_excluded_fn(args),
         )
@@ -443,16 +429,15 @@ def mirror_create(args):
     mirror_specs = concrete_specs_from_user(args)
     create_mirror_for_individual_specs(
         mirror_specs,
-        directory_hint=args.directory,
+        path=path,
         skip_unstable_versions=args.skip_unstable_versions,
     )


-def create_mirror_for_all_specs(directory_hint, skip_unstable_versions, selection_fn):
+def create_mirror_for_all_specs(path, skip_unstable_versions, selection_fn):
     mirror_specs = all_specs_with_all_versions(selection_fn=selection_fn)
-    local_push_url = local_mirror_url_from_user(directory_hint=directory_hint)
     mirror_cache, mirror_stats = spack.mirror.mirror_cache_and_stats(
-        local_push_url, skip_unstable_versions=skip_unstable_versions
+        path, skip_unstable_versions=skip_unstable_versions
     )
     for candidate in mirror_specs:
         pkg_cls = spack.repo.path.get_pkg_class(candidate.name)
@@ -462,13 +447,11 @@ def create_mirror_for_all_specs(path, skip_unstable_versions, selection_fn):
     process_mirror_stats(*mirror_stats.stats())


-def create_mirror_for_all_specs_inside_environment(
-    directory_hint, skip_unstable_versions, selection_fn
-):
+def create_mirror_for_all_specs_inside_environment(path, skip_unstable_versions, selection_fn):
     mirror_specs = concrete_specs_from_environment(selection_fn=selection_fn)
     create_mirror_for_individual_specs(
         mirror_specs,
-        directory_hint=directory_hint,
+        path=path,
         skip_unstable_versions=skip_unstable_versions,
     )

@@ -127,8 +127,10 @@ def python_interpreter(args):
             console.runsource(startup.read(), startup_file, "exec")

     if args.python_command:
+        propagate_exceptions_from(console)
         console.runsource(args.python_command)
     elif args.python_args:
+        propagate_exceptions_from(console)
         sys.argv = args.python_args
         with open(args.python_args[0]) as file:
             console.runsource(file.read(), args.python_args[0], "exec")
@@ -149,3 +151,18 @@ def python_interpreter(args):
                 platform.machine(),
             )
         )
+
+
+def propagate_exceptions_from(console):
+    """Set sys.excepthook to let uncaught exceptions return 1 to the shell.
+
+    Args:
+        console (code.InteractiveConsole): the console that needs a change in sys.excepthook
+    """
+    console.push("import sys")
+    console.push("_wrapped_hook = sys.excepthook")
+    console.push("def _hook(exc_type, exc_value, exc_tb):")
+    console.push("    _wrapped_hook(exc_type, exc_value, exc_tb)")
+    console.push("    sys.exit(1)")
+    console.push("")
+    console.push("sys.excepthook = _hook")
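Outside of an `InteractiveConsole`, the same hook-wrapping idea looks like this (a sketch of the technique, not the spack command's own code):

```python
import sys

_wrapped_hook = sys.excepthook

def _hook(exc_type, exc_value, exc_tb):
    # Delegate to the original hook (which prints the traceback),
    # then force a nonzero exit status for the calling shell.
    _wrapped_hook(exc_type, exc_value, exc_tb)
    sys.exit(1)

sys.excepthook = _hook
```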
@@ -11,6 +11,7 @@
 import llnl.util.tty as tty
 from llnl.util.filesystem import working_dir

+import spack
 import spack.cmd.common.arguments as arguments
 import spack.config
 import spack.paths
@@ -24,7 +25,7 @@


 # tutorial configuration parameters
-tutorial_branch = "releases/v0.18"
+tutorial_branch = "releases/v%s" % ".".join(str(v) for v in spack.spack_version_info[:2])
 tutorial_mirror = "file:///mirror"
 tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")

@@ -5,7 +5,6 @@

 from __future__ import print_function

-import itertools
 import sys

 from llnl.util import tty
@@ -18,6 +17,7 @@
 import spack.package_base
 import spack.repo
 import spack.store
+import spack.traverse as traverse
 from spack.database import InstallStatuses

 description = "remove installed packages"
@@ -61,6 +61,13 @@ def setup_parser(subparser):
         dest="force",
         help="remove regardless of whether other packages or environments depend on this one",
     )
+    subparser.add_argument(
+        "--remove",
+        action="store_true",
+        dest="remove",
+        help="if in an environment, then the spec should also be removed from "
+        "the environment description",
+    )
     arguments.add_common_arguments(
         subparser, ["recurse_dependents", "yes_to_all", "installed_specs"]
     )
@@ -134,31 +141,43 @@ def installed_dependents(specs, env):
         env (spack.environment.Environment or None): the active environment, or None

     Returns:
-        tuple: two mappings: one from specs to their dependent environments in the
-        active environment (or global scope if there is no environment), and one from
-        specs to their dependents in *inactive* environments (empty if there is no
-        environment)
+        tuple: two mappings: one from specs to their dependent installs in the
+        active environment, and one from specs to dependent installs outside of
+        the active environment.
+
+        Every installed dependent spec is listed once.
+
+        If there is no currently active environment, the first mapping will be
+        empty.
     """
     active_dpts = {}
-    inactive_dpts = {}
+    outside_dpts = {}

     env_hashes = set(env.all_hashes()) if env else set()

-    all_specs_in_db = spack.store.db.query()
+    # Ensure we stop traversal at input specs.
+    visited = set(s.dag_hash() for s in specs)

     for spec in specs:
-        installed = [x for x in all_specs_in_db if spec in x]
+        for dpt in traverse.traverse_nodes(
+            spec.dependents(deptype="all"),
+            direction="parents",
+            visited=visited,
+            deptype="all",
+            root=True,
+            key=lambda s: s.dag_hash(),
+        ):
+            hash = dpt.dag_hash()
+            # Ensure that all the specs we get are installed
+            record = spack.store.db.query_local_by_spec_hash(hash)
+            if record is None or not record.installed:
+                continue
+            if hash in env_hashes:
+                active_dpts.setdefault(spec, set()).add(dpt)
+            else:
+                outside_dpts.setdefault(spec, set()).add(dpt)

-        # separate installed dependents into dpts in this environment and
-        # dpts that are outside this environment
-        for dpt in installed:
-            if dpt not in specs:
-                if not env or dpt.dag_hash() in env_hashes:
-                    active_dpts.setdefault(spec, set()).add(dpt)
-                else:
-                    inactive_dpts.setdefault(spec, set()).add(dpt)
-
-    return active_dpts, inactive_dpts
+    return active_dpts, outside_dpts


 def dependent_environments(specs):
@@ -236,7 +255,7 @@ def is_ready(dag_hash):
         if force:
             return True

-        _, record = spack.store.db.query_by_spec_hash(dag_hash)
+        record = spack.store.db.query_local_by_spec_hash(dag_hash)
         if not record.ref_count:
             return True

@@ -262,31 +281,65 @@ def is_ready(dag_hash):


 def get_uninstall_list(args, specs, env):
-    # Gets the list of installed specs that match the ones give via cli
+    """Returns uninstall_list and remove_list: these may overlap (some things
+    may be both uninstalled and removed from the current environment).
+
+    It is assumed we are in an environment if --remove is specified (this
+    method raises an exception otherwise).
+
+    uninstall_list is topologically sorted: dependents come before
+    dependencies (so if a user uninstalls specs in the order provided,
+    the dependents will always be uninstalled first).
+    """
+    if args.remove and not env:
+        raise ValueError("Can only use --remove when in an environment")
+
+    # Gets the list of installed specs that match the ones given via cli
     # args.all takes care of the case where '-a' is given in the cli
-    uninstall_list = find_matching_specs(env, specs, args.all, args.force, args.origin)
+    base_uninstall_specs = set(find_matching_specs(env, specs, args.all, args.force))

     # Takes care of '-R'
-    active_dpts, inactive_dpts = installed_dependents(uninstall_list, env)
+    active_dpts, outside_dpts = installed_dependents(base_uninstall_specs, env)
+    # It will be useful to track the unified set of specs with dependents, as
+    # well as to separately track specs in the current env with dependents
+    spec_to_dpts = {}
+    for spec, dpts in active_dpts.items():
+        spec_to_dpts[spec] = list(dpts)
+    for spec, dpts in outside_dpts.items():
+        if spec in spec_to_dpts:
+            spec_to_dpts[spec].extend(dpts)
+        else:
+            spec_to_dpts[spec] = list(dpts)

-    # if we are in the global scope, we complain if you try to remove a
-    # spec that's in an environment. If we're in an environment, we'll
-    # just *remove* it from the environment, so we ignore this
-    # error when *in* an environment
-    spec_envs = dependent_environments(uninstall_list)
-    spec_envs = inactive_dependent_environments(spec_envs)
+    all_uninstall_specs = set(base_uninstall_specs)
+    if args.dependents:
+        for spec, lst in active_dpts.items():
+            all_uninstall_specs.update(lst)
+        for spec, lst in outside_dpts.items():
+            all_uninstall_specs.update(lst)

-    # Process spec_dependents and update uninstall_list
-    has_error = not args.force and (
-        (active_dpts and not args.dependents)  # dependents in the current env
-        or (not env and spec_envs)  # there are environments that need specs
+    # For each spec that we intend to uninstall, this tracks the set of
+    # environments outside the current active environment which depend on the
+    # spec. There may be environments not managed directly with Spack: such
+    # environments would not be included here.
+    spec_to_other_envs = inactive_dependent_environments(
+        dependent_environments(all_uninstall_specs)
+    )
+
+    has_error = not args.force and (
+        # There are dependents in the current env and we didn't ask to remove
+        # dependents
+        (spec_to_dpts and not args.dependents)
+        # An environment different than the current env (if any) depends on
+        # one or more of the specs to be uninstalled. There may also be
+        # packages in those envs which depend on the base set of packages
+        # to uninstall, but this covers that scenario.
+        or (not args.remove and spec_to_other_envs)
     )

-    # say why each problem spec is needed
     if has_error:
-        specs = set(active_dpts)
-        if not env:
-            specs.update(set(spec_envs))  # environments depend on this
+        # say why each problem spec is needed
+        specs = set(spec_to_dpts)
+        specs.update(set(spec_to_other_envs))  # environments depend on this

         for i, spec in enumerate(sorted(specs)):
             # space out blocks of reasons
@@ -296,66 +349,86 @@ def get_uninstall_list(args, specs, env):
             spec_format = "{name}{@version}{%compiler}{/hash:7}"
             tty.info("Will not uninstall %s" % spec.cformat(spec_format), format="*r")

-            dependents = active_dpts.get(spec)
-            if dependents:
+            dependents = spec_to_dpts.get(spec)
+            if dependents and not args.dependents:
                 print("The following packages depend on it:")
                 spack.cmd.display_specs(dependents, **display_args)

-            if not env:
-                envs = spec_envs.get(spec)
-                if envs:
-                    print("It is used by the following environments:")
-                    colify([e.name for e in envs], indent=4)
+            envs = spec_to_other_envs.get(spec)
+            if envs:
+                if env:
+                    env_context_qualifier = " other"
+                else:
+                    env_context_qualifier = ""
+                print("It is used by the following{0} environments:".format(env_context_qualifier))
+                colify([e.name for e in envs], indent=4)

         msgs = []
-        if active_dpts:
+        if spec_to_dpts and not args.dependents:
             msgs.append("use `spack uninstall --dependents` to remove dependents too")
-        if spec_envs:
+        if spec_to_other_envs:
             msgs.append("use `spack env remove` to remove from environments")
         print()
         tty.die("There are still dependents.", *msgs)

-    elif args.dependents:
-        for spec, lst in active_dpts.items():
-            uninstall_list.extend(lst)
-        uninstall_list = list(set(uninstall_list))
+    # If we are in an environment, this will track specs in this environment
+    # which should only be removed from the environment rather than uninstalled
+    remove_only = set()
+    if args.remove and not args.force:
+        remove_only.update(spec_to_other_envs)
+    if remove_only:
+        tty.info(
+            "The following specs will be removed but not uninstalled because"
+            " they are also used by another environment: {speclist}".format(
+                speclist=", ".join(x.name for x in remove_only)
+            )
+        )

-    # only force-remove (don't completely uninstall) specs that still
-    # have external dependent envs or pkgs
-    removes = set(inactive_dpts)
-    if env:
-        removes.update(spec_envs)
+    # Compute the set of specs that should be removed from the current env.
+    # This may overlap (some specs may be uninstalled and also removed from
+    # the current environment).
+    if args.remove:
+        remove_specs = set(base_uninstall_specs)
+        if args.dependents:
+            # Any spec matched from the cli, or dependent of, should be removed
+            # from the environment
+            for spec, lst in active_dpts.items():
+                remove_specs.update(lst)
+    else:
+        remove_specs = set()

-    # remove anything in removes from the uninstall list
-    uninstall_list = set(uninstall_list) - removes
+    all_uninstall_specs -= remove_only
+    # Inefficient topological sort: uninstall dependents before dependencies
+    all_uninstall_specs = sorted(
+        all_uninstall_specs, key=lambda x: sum(1 for i in x.traverse()), reverse=True
+    )

-    return uninstall_list, removes
+    return list(all_uninstall_specs), list(remove_specs)


 def uninstall_specs(args, specs):
     env = ev.active_environment()

     uninstall_list, remove_list = get_uninstall_list(args, specs, env)
-    anything_to_do = set(uninstall_list).union(set(remove_list))

-    if not anything_to_do:
+    if not uninstall_list:
         tty.warn("There are no packages to uninstall.")
         return

     if not args.yes_to_all:
-        confirm_removal(anything_to_do)
-
-    if env:
-        # Remove all the specs that are supposed to be uninstalled or just
-        # removed.
-        with env.write_transaction():
-            for spec in itertools.chain(remove_list, uninstall_list):
-                _remove_from_env(spec, env)
-            env.write()
+        confirm_removal(uninstall_list)

     # Uninstall everything on the list
     do_uninstall(env, uninstall_list, args.force)

+    if env:
+        with env.write_transaction():
+            for spec in remove_list:
+                _remove_from_env(spec, env)
+            env.write()
+
+        env.regenerate_views()
+

 def confirm_removal(specs):
     """Display the list of specs to be removed and ask for confirmation.
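The "inefficient topological sort" above works because a dependent's DAG strictly contains the DAGs of everything it depends on, so sorting by reachable-node count in descending order puts dependents ahead of their dependencies. A toy illustration (hypothetical specs; `traverse()` replaced by a precomputed reachability set):

```python
# Each toy spec maps to the set of nodes reachable from it (itself included).
dag = {
    "app":  {"app", "lib", "zlib"},
    "lib":  {"lib", "zlib"},
    "zlib": {"zlib"},
}

# Sorting by reachable-node count, descending, yields dependents first.
order = sorted(dag, key=lambda s: len(dag[s]), reverse=True)
print(order)  # ['app', 'lib', 'zlib']
```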
@@ -49,12 +49,26 @@
     "clang": "llvm+clang",
     "oneapi": "intel-oneapi-compilers",
     "rocmcc": "llvm-amdgpu",
+    "intel@2020:": "intel-oneapi-compilers-classic",
 }

+# TODO: generating this from the previous dict causes docs errors
+package_name_to_compiler_name = {
+    "llvm": "clang",
+    "intel-oneapi-compilers": "oneapi",
+    "llvm-amdgpu": "rocmcc",
+    "intel-oneapi-compilers-classic": "intel",
+}
+

 def pkg_spec_for_compiler(cspec):
     """Return the spec of the package that provides the compiler."""
-    spec_str = "%s@%s" % (_compiler_to_pkg.get(cspec.name, cspec.name), cspec.versions)
+    for spec, package in _compiler_to_pkg.items():
+        if cspec.satisfies(spec):
+            spec_str = "%s@%s" % (package, cspec.versions)
+            break
+    else:
+        spec_str = str(cspec)
     return spack.spec.Spec(spec_str)

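The rewrite relies on Python's `for`/`else`: the `else` branch runs only when the loop finishes without hitting `break`, i.e. when no mapping key matched. A minimal demonstration of the construct (the mapping and the naive `satisfies` stand-in are hypothetical):

```python
mapping = {"intel@2020:": "intel-oneapi-compilers-classic", "clang": "llvm+clang"}

def lookup(name, satisfies):
    for key, package in mapping.items():
        if satisfies(name, key):
            result = package
            break
    else:  # no break happened: fall back to the name itself
        result = name
    return result

naive = lambda n, k: n == k.split("@")[0]  # crude stand-in for Spec.satisfies
print(lookup("gcc", naive))    # 'gcc' (fallback via the else branch)
print(lookup("clang", naive))  # 'llvm+clang' (matched, loop broke)
```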
@@ -26,7 +26,7 @@
 import socket
 import sys
 import time
-from typing import Dict  # novm
+from typing import Dict  # novm # noqa: F401

 import six

@@ -53,7 +53,6 @@
     InconsistentInstallDirectoryError,
 )
 from spack.error import SpackError
-from spack.filesystem_view import YamlFilesystemView
 from spack.util.crypto import bit_length
 from spack.version import Version

@@ -726,6 +725,15 @@ def query_by_spec_hash(self, hash_key, data=None):
                 return True, db._data[hash_key]
         return False, None

+    def query_local_by_spec_hash(self, hash_key):
+        """Get a spec by hash in the local database.
+
+        Return:
+            (InstallRecord or None): InstallRecord when installed
+            locally, otherwise None."""
+        with self.read_transaction():
+            return self._data.get(hash_key, None)
+
     def _assign_dependencies(self, hash_key, installs, data):
         # Add dependencies from other records in the install DB to
         # form a full spec.
@@ -1379,23 +1387,6 @@ def installed_extensions_for(self, extendee_spec):
             if spec.package.extends(extendee_spec):
                 yield spec.package

-    @_autospec
-    def activated_extensions_for(self, extendee_spec, extensions_layout=None):
-        """
-        Return the specs of all packages that extend
-        the given spec
-        """
-        if extensions_layout is None:
-            view = YamlFilesystemView(extendee_spec.prefix, spack.store.layout)
-            extensions_layout = view.extensions_layout
-        for spec in self.query():
-            try:
-                extensions_layout.check_activated(extendee_spec, spec)
-                yield spec.package
-            except spack.directory_layout.NoSuchExtensionError:
-                continue
-            # TODO: conditional way to do this instead of catching exceptions
-
     def _get_by_hash_local(self, dag_hash, default=None, installed=any):
         # hash is a full hash and is in the data somewhere
         if dag_hash in self._data:
@@ -468,14 +468,7 @@ def _execute_depends_on(pkg):

 @directive(("extendees", "dependencies"))
 def extends(spec, type=("build", "run"), **kwargs):
-    """Same as depends_on, but allows symlinking into dependency's
-    prefix tree.
-
-    This is for Python and other language modules where the module
-    needs to be installed into the prefix of the Python installation.
-    Spack handles this by installing modules into their own prefix,
-    but allowing ONE module version to be symlinked into a parent
-    Python install at a time, using ``spack activate``.
+    """Same as depends_on, but also adds this package to the extendee list.

     keyword arguments can be passed to extends() so that extension
     packages can pass parameters to the extendee's extension
@@ -10,10 +10,8 @@
 import re
 import shutil
 import sys
-import tempfile
-from contextlib import contextmanager

 import ruamel.yaml as yaml
 import six

 import llnl.util.filesystem as fs
@@ -102,7 +100,7 @@ def __init__(self, root, **kwargs):

     @property
     def hidden_file_regexes(self):
-        return (re.escape(self.metadata_dir),)
+        return ("^{0}$".format(re.escape(self.metadata_dir)),)

     def relative_path_for_spec(self, spec):
         _check_concrete(spec)
@@ -389,205 +387,6 @@ def remove_install_directory(self, spec, deprecated=False):
             path = os.path.dirname(path)


-class ExtensionsLayout(object):
-    """A directory layout is used to associate unique paths with specs for
-    package extensions.
-
-    Keeps track of which extensions are activated for what package.
-    Depending on the use case, this can mean globally activated extensions
-    directly in the installation folder - or extensions activated in
-    filesystem views.
-    """
-
-    def __init__(self, view, **kwargs):
-        self.view = view
-
-    def add_extension(self, spec, ext_spec):
-        """Add to the list of currently installed extensions."""
-        raise NotImplementedError()
-
-    def check_activated(self, spec, ext_spec):
-        """Ensure that ext_spec can be removed from spec.
-
-        If not, raise NoSuchExtensionError.
-        """
-        raise NotImplementedError()
-
-    def check_extension_conflict(self, spec, ext_spec):
-        """Ensure that ext_spec can be activated in spec.
-
-        If not, raise ExtensionAlreadyInstalledError or
-        ExtensionConflictError.
-        """
-        raise NotImplementedError()
-
-    def extension_map(self, spec):
-        """Get a dict of currently installed extension packages for a spec.
-
-        Dict maps { name : extension_spec }
-        Modifying dict does not affect internals of this layout.
-        """
-        raise NotImplementedError()
-
-    def extendee_target_directory(self, extendee):
-        """Specify to which full path extendee should link all files
-        from extensions."""
-        raise NotImplementedError
-
-    def remove_extension(self, spec, ext_spec):
-        """Remove from the list of currently installed extensions."""
-        raise NotImplementedError()
-
-
-class YamlViewExtensionsLayout(ExtensionsLayout):
-    """Maintain extensions within a view."""
-
-    def __init__(self, view, layout):
-        """layout is the corresponding YamlDirectoryLayout object for which
-        we implement extensions.
-        """
-        super(YamlViewExtensionsLayout, self).__init__(view)
-        self.layout = layout
-        self.extension_file_name = "extensions.yaml"
-
-        # Cache of already written/read extension maps.
-        self._extension_maps = {}
-
-    def add_extension(self, spec, ext_spec):
-        _check_concrete(spec)
-        _check_concrete(ext_spec)
-
-        # Check whether it's already installed or if it's a conflict.
-        exts = self._extension_map(spec)
-        self.check_extension_conflict(spec, ext_spec)
-
-        # do the actual adding.
-        exts[ext_spec.name] = ext_spec
-        self._write_extensions(spec, exts)
-
-    def check_extension_conflict(self, spec, ext_spec):
-        exts = self._extension_map(spec)
-        if ext_spec.name in exts:
-            installed_spec = exts[ext_spec.name]
-            if ext_spec.dag_hash() == installed_spec.dag_hash():
-                raise ExtensionAlreadyInstalledError(spec, ext_spec)
-            else:
-                raise ExtensionConflictError(spec, ext_spec, installed_spec)
-
-    def check_activated(self, spec, ext_spec):
-        exts = self._extension_map(spec)
-        if (ext_spec.name not in exts) or (ext_spec != exts[ext_spec.name]):
-            raise NoSuchExtensionError(spec, ext_spec)
-
-    def extension_file_path(self, spec):
-        """Gets full path to an installed package's extension file, which
-        keeps track of all the extensions for that package which have been
-        added to this view.
-        """
-        _check_concrete(spec)
-        normalize_path = lambda p: (os.path.abspath(p).rstrip(os.path.sep))
-
-        view_prefix = self.view.get_projection_for_spec(spec)
-        if normalize_path(spec.prefix) == normalize_path(view_prefix):
-            # For backwards compatibility, when the view is the extended
-            # package's installation directory, do not include the spec name
-            # as a subdirectory.
-            components = [view_prefix, self.layout.metadata_dir, self.extension_file_name]
-        else:
-            components = [
-                view_prefix,
-                self.layout.metadata_dir,
-                spec.name,
-                self.extension_file_name,
-            ]
-
-        return os.path.join(*components)
-
-    def extension_map(self, spec):
-        """Defensive copying version of _extension_map() for external API."""
-        _check_concrete(spec)
-        return self._extension_map(spec).copy()
-
-    def remove_extension(self, spec, ext_spec):
-        _check_concrete(spec)
-        _check_concrete(ext_spec)
-
-        # Make sure it's installed before removing.
-        exts = self._extension_map(spec)
-        self.check_activated(spec, ext_spec)
-
-        # do the actual removing.
-        del exts[ext_spec.name]
-        self._write_extensions(spec, exts)
-
-    def _extension_map(self, spec):
-        """Get a dict<name -> spec> for all extensions currently
-        installed for this package."""
-        _check_concrete(spec)
-
-        if spec not in self._extension_maps:
-            path = self.extension_file_path(spec)
-            if not os.path.exists(path):
-                self._extension_maps[spec] = {}
-
-            else:
-                by_hash = self.layout.specs_by_hash()
-                exts = {}
-                with open(path) as ext_file:
-                    yaml_file = yaml.load(ext_file)
-                    for entry in yaml_file["extensions"]:
-                        name = next(iter(entry))
-                        dag_hash = entry[name]["hash"]
-                        prefix = entry[name]["path"]
-
-                        if dag_hash not in by_hash:
-                            raise InvalidExtensionSpecError(
-                                "Spec %s not found in %s" % (dag_hash, prefix)
-                            )
-
-                        ext_spec = by_hash[dag_hash]
-                        if prefix != ext_spec.prefix:
-                            raise InvalidExtensionSpecError(
-                                "Prefix %s does not match spec hash %s: %s"
-                                % (prefix, dag_hash, ext_spec)
-                            )
-
-                        exts[ext_spec.name] = ext_spec
-                self._extension_maps[spec] = exts
-
-        return self._extension_maps[spec]
-
-    def _write_extensions(self, spec, extensions):
-        path = self.extension_file_path(spec)
-
-        if not extensions:
-            # Remove the empty extensions file
-            os.remove(path)
-            return
-
-        # Create a temp file in the same directory as the actual file.
-        dirname, basename = os.path.split(path)
-        fs.mkdirp(dirname)
-
-        tmp = tempfile.NamedTemporaryFile(prefix=basename, dir=dirname, delete=False)
-
-        # write tmp file
-        with tmp:
-            yaml.dump(
-                {
-                    "extensions": [
-                        {ext.name: {"hash": ext.dag_hash(), "path": str(ext.prefix)}}
-                        for ext in sorted(extensions.values())
-                    ]
-                },
-                tmp,
-                default_flow_style=False,
-                encoding="utf-8",
-            )
-
-        # Atomic update by moving tmpfile on top of old one.
-        fs.rename(tmp.name, path)
-
-
 class DirectoryLayoutError(SpackError):
     """Superclass for directory layout errors."""

@@ -644,13 +443,3 @@ def __init__(self, spec, ext_spec, conflict):
             "%s cannot be installed in %s because it conflicts with %s"
             % (ext_spec.short_spec, spec.short_spec, conflict.short_spec)
         )

-
-class NoSuchExtensionError(DirectoryLayoutError):
-    """Raised when an extension isn't there on deactivate."""
-
-    def __init__(self, spec, ext_spec):
-        super(NoSuchExtensionError, self).__init__(
-            "%s cannot be removed from %s because it's not activated."
-            % (ext_spec.short_spec, spec.short_spec)
-        )
@@ -786,17 +786,12 @@ def _read_manifest(self, f, raw_yaml=None):
             )
         else:
             self.views = {}

-        # Retrieve the current concretization strategy
         configuration = config_dict(self.yaml)

-        # Let `concretization` overrule `concretize:unify` config for now,
-        # but use a translation table to have internally a representation
-        # as if we were using the new configuration
-        translation = {"separately": False, "together": True}
-        try:
-            self.unify = translation[configuration["concretization"]]
-        except KeyError:
-            self.unify = spack.config.get("concretizer:unify", False)
+        # Retrieve unification scheme for the concretizer
+        self.unify = spack.config.get("concretizer:unify", False)

         # Retrieve dev-build packages:
         self.dev_specs = configuration.get("develop", {})
@@ -1322,30 +1317,25 @@ def _concretize_together(self, tests=False):
         if user_specs_did_not_change:
             return []

-        # Check that user specs don't have duplicate packages
-        counter = collections.defaultdict(int)
-        for user_spec in self.user_specs:
-            counter[user_spec.name] += 1
-
-        duplicates = []
-        for name, count in counter.items():
-            if count > 1:
-                duplicates.append(name)
-
-        if duplicates:
-            msg = (
-                "environment that are configured to concretize specs"
-                " together cannot contain more than one spec for each"
-                " package [{0}]".format(", ".join(duplicates))
-            )
-            raise SpackEnvironmentError(msg)
-
         # Proceed with concretization
         self.concretized_user_specs = []
         self.concretized_order = []
         self.specs_by_hash = {}

-        concrete_specs = spack.concretize.concretize_specs_together(*self.user_specs, tests=tests)
+        try:
+            concrete_specs = spack.concretize.concretize_specs_together(
+                *self.user_specs, tests=tests
+            )
+        except spack.error.UnsatisfiableSpecError as e:
+            # "Enhance" the error message for multiple root specs, suggest a less strict
+            # form of concretization.
+            if len(self.user_specs) > 1:
+                e.message += (
+                    ". Consider setting `concretizer:unify` to `when_possible` "
+                    "or `false` to relax the concretizer strictness."
+                )
+            raise
+
         concretized_specs = [x for x in zip(self.user_specs, concrete_specs)]
         for abstract, concrete in concretized_specs:
             self._add_concrete_spec(abstract, concrete)
@@ -37,10 +37,6 @@
 import spack.store
 import spack.util.spack_json as s_json
 import spack.util.spack_yaml as s_yaml
-from spack.directory_layout import (
-    ExtensionAlreadyInstalledError,
-    YamlViewExtensionsLayout,
-)
 from spack.error import SpackError

 __all__ = ["FilesystemView", "YamlFilesystemView"]
@@ -166,9 +162,6 @@ def add_specs(self, *specs, **kwargs):
         """
         Add given specs to view.

         The supplied specs might be standalone packages or extensions of
         other packages.

-        Should accept `with_dependencies` as keyword argument (default
-        True) to indicate wether or not dependencies should be activated as
-        well.
@@ -176,13 +169,7 @@ def add_specs(self, *specs, **kwargs):
         Should except an `exclude` keyword argument containing a list of
         regexps that filter out matching spec names.

-        This method should make use of `activate_{extension,standalone}`.
         """
         raise NotImplementedError

-    def add_extension(self, spec):
-        """
-        Add (link) an extension in this view. Does not add dependencies.
-        This method should make use of `activate_standalone`.
-        """
-        raise NotImplementedError

@@ -202,9 +189,6 @@ def remove_specs(self, *specs, **kwargs):
         """
         Removes given specs from view.

         The supplied spec might be a standalone package or an extension of
         another package.

-        Should accept `with_dependencies` as keyword argument (default
-        True) to indicate wether or not dependencies should be deactivated
-        as well.
@@ -216,13 +200,7 @@ def remove_specs(self, *specs, **kwargs):
         Should except an `exclude` keyword argument containing a list of
         regexps that filter out matching spec names.

-        This method should make use of `deactivate_{extension,standalone}`.
         """
         raise NotImplementedError

-    def remove_extension(self, spec):
-        """
-        Remove (unlink) an extension from this view.
-        This method should make use of `deactivate_standalone`.
-        """
-        raise NotImplementedError
@@ -296,8 +274,6 @@ def __init__(self, root, layout, **kwargs):
             msg += " which does not match projections passed manually."
             raise ConflictingProjectionsError(msg)

-        self.extensions_layout = YamlViewExtensionsLayout(self, layout)
-
         self._croot = colorize_root(self._root) + " "

     def write_projections(self):
@@ -332,38 +308,10 @@ def add_specs(self, *specs, **kwargs):
             self.print_conflict(v, s)
             return

-        extensions = set(filter(lambda s: s.package.is_extension, specs))
-        standalones = specs - extensions
-
-        set(map(self._check_no_ext_conflicts, extensions))
-        # fail on first error, otherwise link extensions as well
-        if all(map(self.add_standalone, standalones)):
-            all(map(self.add_extension, extensions))
-
-    def add_extension(self, spec):
-        if not spec.package.is_extension:
-            tty.error(self._croot + "Package %s is not an extension." % spec.name)
-            return False
-
-        if spec.external:
-            tty.warn(self._croot + "Skipping external package: %s" % colorize_spec(spec))
-            return True
-
-        if not spec.package.is_activated(self):
-            spec.package.do_activate(self, verbose=self.verbose, with_dependencies=False)
-
-        # make sure the meta folder is linked as well (this is not done by the
-        # extension-activation mechnism)
-        if not self.check_added(spec):
-            self.link_meta_folder(spec)
-
-        return True
+        for s in specs:
+            self.add_standalone(s)

     def add_standalone(self, spec):
         if spec.package.is_extension:
             tty.error(self._croot + "Package %s is an extension." % spec.name)
             return False

         if spec.external:
             tty.warn(self._croot + "Skipping external package: %s" % colorize_spec(spec))
             return True
@@ -372,19 +320,6 @@ def add_standalone(self, spec):
             tty.warn(self._croot + "Skipping already linked package: %s" % colorize_spec(spec))
             return True

-        if spec.package.extendable:
-            # Check for globally activated extensions in the extendee that
-            # we're looking at.
-            activated = [p.spec for p in spack.store.db.activated_extensions_for(spec)]
-            if activated:
-                tty.error(
-                    "Globally activated extensions cannot be used in "
-                    "conjunction with filesystem views. "
-                    "Please deactivate the following specs: "
-                )
-                spack.cmd.display_specs(activated, flags=True, variants=True, long=False)
-                return False
-
         self.merge(spec)

         self.link_meta_folder(spec)
@@ -533,27 +468,10 @@ def remove_specs(self, *specs, **kwargs):

         # Remove the packages from the view
         for spec in to_deactivate_sorted:
-            if spec.package.is_extension:
-                self.remove_extension(spec, with_dependents=with_dependents)
-            else:
-                self.remove_standalone(spec)
+            self.remove_standalone(spec)

         self._purge_empty_directories()

-    def remove_extension(self, spec, with_dependents=True):
-        """
-        Remove (unlink) an extension from this view.
-        """
-        if not self.check_added(spec):
-            tty.warn(self._croot + "Skipping package not linked in view: %s" % spec.name)
-            return
-
-        if spec.package.is_activated(self):
-            spec.package.do_deactivate(
-                self, verbose=self.verbose, remove_dependents=with_dependents
-            )
-        self.unlink_meta_folder(spec)
-
     def remove_standalone(self, spec):
         """
         Remove (unlink) a standalone package from this view.
@@ -575,8 +493,8 @@ def get_projection_for_spec(self, spec):
         Relies on the ordering of projections to avoid ambiguity.
         """
         spec = spack.spec.Spec(spec)
         # Extensions are placed by their extendee, not by their own spec
         locator_spec = spec

         if spec.package.extendee_spec:
             locator_spec = spec.package.extendee_spec
@@ -712,18 +630,6 @@ def unlink_meta_folder(self, spec):
         assert os.path.exists(path)
         shutil.rmtree(path)

-    def _check_no_ext_conflicts(self, spec):
-        """
-        Check that there is no extension conflict for specs.
-        """
-        extendee = spec.package.extendee_spec
-        try:
-            self.extensions_layout.check_extension_conflict(extendee, spec)
-        except ExtensionAlreadyInstalledError:
-            # we print the warning here because later on the order in which
-            # packages get activated is not clear (set-sorting)
-            tty.warn(self._croot + "Skipping already activated package: %s" % spec.name)


 class SimpleFilesystemView(FilesystemView):
     """A simple and partial implementation of FilesystemView focused on
@@ -842,14 +748,13 @@ def get_projection_for_spec(self, spec):
         Relies on the ordering of projections to avoid ambiguity.
         """
         spec = spack.spec.Spec(spec)
-        # Extensions are placed by their extendee, not by their own spec
-        locator_spec = spec
-        if spec.package.extendee_spec:
-            locator_spec = spec.package.extendee_spec
-
-        proj = spack.projections.get_projection(self.projections, locator_spec)
+        if spec.package.extendee_spec:
+            spec = spec.package.extendee_spec
+
+        proj = spack.projections.get_projection(self.projections, spec)
         if proj:
-            return os.path.join(self._root, locator_spec.format(proj))
+            return os.path.join(self._root, spec.format(proj))
         return self._root

@@ -1,20 +0,0 @@
-# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-import spack
-from spack.filesystem_view import YamlFilesystemView
-
-
-def pre_uninstall(spec):
-    pkg = spec.package
-    assert spec.concrete
-
-    if pkg.is_extension:
-        target = pkg.extendee_spec.prefix
-        view = YamlFilesystemView(target, spack.store.layout)
-
-        if pkg.is_activated(view):
-            # deactivate globally
-            pkg.do_deactivate(force=True)
@@ -56,9 +56,9 @@
 import spack.store
 import spack.util.executable
 import spack.util.path
+import spack.util.timer as timer
 from spack.util.environment import EnvironmentModifications, dump_environment
 from spack.util.executable import which
-from spack.util.timer import Timer

 #: Counter to support unique spec sequencing that is used to ensure packages
 #: with the same priority are (initially) processed in the order in which they
@@ -304,9 +304,9 @@ def _install_from_cache(pkg, cache_only, explicit, unsigned=False):
         bool: ``True`` if the package was extracted from binary cache,
         ``False`` otherwise
     """
-    timer = Timer()
+    t = timer.Timer()
     installed_from_cache = _try_install_from_binary_cache(
-        pkg, explicit, unsigned=unsigned, timer=timer
+        pkg, explicit, unsigned=unsigned, timer=t
     )
     pkg_id = package_id(pkg)
     if not installed_from_cache:
@@ -316,14 +316,14 @@ def _install_from_cache(pkg, cache_only, explicit, unsigned=False):

         tty.msg("{0}: installing from source".format(pre))
         return False
-    timer.stop()
+    t.stop()
     tty.debug("Successfully extracted {0} from binary cache".format(pkg_id))
     _print_timer(
         pre=_log_prefix(pkg.name),
         pkg_id=pkg_id,
-        fetch=timer.phases.get("search", 0) + timer.phases.get("fetch", 0),
-        build=timer.phases.get("install", 0),
-        total=timer.total,
+        fetch=t.duration("search") + t.duration("fetch"),
+        build=t.duration("install"),
+        total=t.duration(),
     )
     _print_installed_pkg(pkg.spec.prefix)
     spack.hooks.post_install(pkg.spec)
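These hunks replace the old `Timer.phases` bookkeeping with a start/stop interface queried through `duration(name)`, and the `timer.NULL_TIMER` default seen below presumably supplies no-op methods so call sites can drop their `if timer:` guards. A minimal sketch of a timer with that shape, as an illustration only and not the actual `spack.util.timer` implementation:

import time


class SketchTimer(object):
    def __init__(self):
        self._created = time.time()
        self._start = {}
        self._elapsed = {}

    def start(self, name):
        self._start[name] = time.time()

    def stop(self, name=None):
        if name is None:
            self._elapsed["_total"] = time.time() - self._created
        else:
            self._elapsed[name] = self._elapsed.get(name, 0.0) + time.time() - self._start.pop(name)

    def duration(self, name=None):
        if name is None:
            return self._elapsed.get("_total", time.time() - self._created)
        # Phases that never ran report zero, so callers can add
        # duration("search") + duration("fetch") without guards.
        return self._elapsed.get(name, 0.0)


t = SketchTimer()
t.start("fetch")
t.stop("fetch")
print(t.duration("fetch") + t.duration("search"))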
@@ -372,7 +372,7 @@ def _process_external_package(pkg, explicit):


 def _process_binary_cache_tarball(
-    pkg, binary_spec, explicit, unsigned, mirrors_for_spec=None, timer=None
+    pkg, binary_spec, explicit, unsigned, mirrors_for_spec=None, timer=timer.NULL_TIMER
 ):
     """
     Process the binary cache tarball.
@@ -391,11 +391,11 @@ def _process_binary_cache_tarball(
         bool: ``True`` if the package was extracted from binary cache,
         else ``False``
     """
+    timer.start("fetch")
     download_result = binary_distribution.download_tarball(
         binary_spec, unsigned, mirrors_for_spec=mirrors_for_spec
     )
-    if timer:
-        timer.phase("fetch")
+    timer.stop("fetch")
     # see #10063 : install from source if tarball doesn't exist
     if download_result is None:
         tty.msg("{0} exists in binary cache but with different hash".format(pkg.name))
@@ -405,6 +405,7 @@ def _process_binary_cache_tarball(
     tty.msg("Extracting {0} from binary cache".format(pkg_id))

     # don't print long padded paths while extracting/relocating binaries
+    timer.start("install")
     with spack.util.path.filter_padding():
         binary_distribution.extract_tarball(
             binary_spec, download_result, allow_root=False, unsigned=unsigned, force=False
@@ -412,12 +413,11 @@ def _process_binary_cache_tarball(

     pkg.installed_from_binary_cache = True
     spack.store.db.add(pkg.spec, spack.store.layout, explicit=explicit)
-    if timer:
-        timer.phase("install")
+    timer.stop("install")
     return True


-def _try_install_from_binary_cache(pkg, explicit, unsigned=False, timer=None):
+def _try_install_from_binary_cache(pkg, explicit, unsigned=False, timer=timer.NULL_TIMER):
     """
     Try to extract the package from binary cache.

@@ -430,10 +430,10 @@ def _try_install_from_binary_cache(pkg, explicit, unsigned=False, timer=None):
     """
     pkg_id = package_id(pkg)
     tty.debug("Searching for binary cache of {0}".format(pkg_id))
-    matches = binary_distribution.get_mirrors_for_spec(pkg.spec)
-
-    if timer:
-        timer.phase("search")
+    timer.start("search")
+    matches = binary_distribution.get_mirrors_for_spec(pkg.spec)
+    timer.stop("search")

     if not matches:
         return False
@@ -462,11 +462,10 @@ def combine_phase_logs(phase_log_files, log_path):
         phase_log_files (list): a list or iterator of logs to combine
         log_path (str): the path to combine them to
     """
-    with open(log_path, "w") as log_file:
+    with open(log_path, "wb") as log_file:
         for phase_log_file in phase_log_files:
-            with open(phase_log_file, "r") as phase_log:
-                log_file.write(phase_log.read())
+            with open(phase_log_file, "rb") as phase_log:
+                shutil.copyfileobj(phase_log, log_file)

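Opening the logs in binary mode sidesteps decode errors on build output that is not valid UTF-8, and `shutil.copyfileobj` streams each log in chunks instead of reading it whole into memory. The same pattern in isolation (file names are illustrative):

import shutil


def concat_files(sources, destination):
    # Binary mode: build logs may contain arbitrary bytes that would break
    # a text-mode read under some locales.
    with open(destination, "wb") as out:
        for source in sources:
            with open(source, "rb") as src:
                shutil.copyfileobj(src, out)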
def dump_packages(spec, path):
@@ -803,8 +802,34 @@ def _add_bootstrap_compilers(self, compiler, architecture, pkgs, request, all_deps):
         """
         packages = _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs)
         for (comp_pkg, is_compiler) in packages:
-            if package_id(comp_pkg) not in self.build_tasks:
+            pkgid = package_id(comp_pkg)
+            if pkgid not in self.build_tasks:
                 self._add_init_task(comp_pkg, request, is_compiler, all_deps)
+            elif is_compiler:
+                # ensure it's queued as a compiler
+                self._modify_existing_task(pkgid, "compiler", True)
+
+    def _modify_existing_task(self, pkgid, attr, value):
+        """
+        Update a task in-place to modify its behavior.
+
+        Currently used to update the ``compiler`` field on tasks
+        that were originally created as a dependency of a compiler,
+        but are compilers in their own right.
+
+        For example, ``intel-oneapi-compilers-classic`` depends on
+        ``intel-oneapi-compilers``, which can cause the latter to be
+        queued first as a non-compiler, and only later as a compiler.
+        """
+        for i, tup in enumerate(self.build_pq):
+            key, task = tup
+            if task.pkg_id == pkgid:
+                tty.debug(
+                    "Modifying task for {0} to treat it as a compiler".format(pkgid),
+                    level=2,
+                )
+                setattr(task, attr, value)
+                self.build_pq[i] = (key, task)

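`_modify_existing_task` rewrites one entry of the `build_pq` heap in place; because only the task payload changes while the priority key is untouched, the heap invariant is preserved and no re-heapify is needed. A toy illustration with `heapq` (the task class and its fields are invented for the example):

import heapq


class Task(object):
    def __init__(self, pkg_id, compiler=False):
        self.pkg_id = pkg_id
        self.compiler = compiler


build_pq = [(20, Task("gcc")), (10, Task("zlib"))]
heapq.heapify(build_pq)  # keys are distinct, so tasks are never compared


def modify_existing_task(pq, pkg_id, attr, value):
    # Mutating the payload cannot reorder the heap: the key component of
    # every tuple is left untouched.
    for i, (key, task) in enumerate(pq):
        if task.pkg_id == pkg_id:
            setattr(task, attr, value)
            pq[i] = (key, task)


modify_existing_task(build_pq, "gcc", "compiler", True)
assert build_pq[0][1].pkg_id == "zlib"  # heap order is unchanged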
     def _add_init_task(self, pkg, request, is_compiler, all_deps):
         """
@@ -1215,6 +1240,12 @@ def _add_tasks(self, request, all_deps):
         fail_fast = request.install_args.get("fail_fast")
         self.fail_fast = self.fail_fast or fail_fast

+    def _add_compiler_package_to_config(self, pkg):
+        compiler_search_prefix = getattr(pkg, "compiler_search_prefix", pkg.spec.prefix)
+        spack.compilers.add_compilers_to_config(
+            spack.compilers.find_compilers([compiler_search_prefix])
+        )
+
     def _install_task(self, task):
         """
         Perform the installation of the requested spec and/or dependency
@@ -1240,9 +1271,7 @@ def _install_task(self, task):
         if use_cache and _install_from_cache(pkg, cache_only, explicit, unsigned):
             self._update_installed(task)
             if task.compiler:
-                spack.compilers.add_compilers_to_config(
-                    spack.compilers.find_compilers([pkg.spec.prefix])
-                )
+                self._add_compiler_package_to_config(pkg)
             return

         pkg.run_tests = tests is True or tests and pkg.name in tests
@@ -1270,9 +1299,7 @@ def _install_task(self, task):

             # If a compiler, ensure it is added to the configuration
             if task.compiler:
-                spack.compilers.add_compilers_to_config(
-                    spack.compilers.find_compilers([pkg.spec.prefix])
-                )
+                self._add_compiler_package_to_config(pkg)
         except spack.build_environment.StopPhase as e:
             # A StopPhase exception means that do_install was asked to
             # stop early from clients, and is not an error at this point
@@ -1691,9 +1718,7 @@ def install(self):

                 # It's an already installed compiler, add it to the config
                 if task.compiler:
-                    spack.compilers.add_compilers_to_config(
-                        spack.compilers.find_compilers([pkg.spec.prefix])
-                    )
+                    self._add_compiler_package_to_config(pkg)

             else:
                 # At this point we've failed to get a write or a read
@@ -1747,6 +1772,16 @@ def install(self):
                 spack.hooks.on_install_cancel(task.request.pkg.spec)
                 raise

+            except binary_distribution.NoChecksumException as exc:
+                if not task.cache_only:
+                    # Checking hash on downloaded binary failed.
+                    err = "Failed to install {0} from binary cache due to {1}:"
+                    err += " Requeueing to install from source."
+                    tty.error(err.format(pkg.name, str(exc)))
+                    task.use_cache = False
+                    self._requeue_task(task)
+                    continue
+
             except (Exception, SystemExit) as exc:
                 self._update_failed(task, True, exc)
                 spack.hooks.on_install_failure(task.request.pkg.spec)
@@ -1870,7 +1905,7 @@ def __init__(self, pkg, install_args):
         self.env_mods = install_args.get("env_modifications", EnvironmentModifications())

         # timer for build phases
-        self.timer = Timer()
+        self.timer = timer.Timer()

         # If we are using a padded path, filter the output to compress padded paths
         # The real log still has full-length paths.
@@ -1925,8 +1960,8 @@ def run(self):
             pre=self.pre,
             pkg_id=self.pkg_id,
             fetch=self.pkg._fetch_time,
-            build=self.timer.total - self.pkg._fetch_time,
-            total=self.timer.total,
+            build=self.timer.duration() - self.pkg._fetch_time,
+            total=self.timer.duration(),
         )
         _print_installed_pkg(self.pkg.prefix)

@@ -1999,6 +2034,7 @@ def _real_install(self):
         )

         with log_contextmanager as logger:
+            # Redirect stdout and stderr to daemon pipe
             with logger.force_echo():
                 inner_debug_level = tty.debug_level()
                 tty.set_debug(debug_level)
@@ -2006,12 +2042,11 @@ def _real_install(self):
                     tty.msg(msg.format(self.pre, phase_fn.name))
                     tty.set_debug(inner_debug_level)

-                    # Redirect stdout and stderr to daemon pipe
-                    self.timer.phase(phase_fn.name)
-
                     # Catch any errors to report to logging
+                    self.timer.start(phase_fn.name)
                     phase_fn.execute()
                     spack.hooks.on_phase_success(pkg, phase_fn.name, log_file)
+                    self.timer.stop(phase_fn.name)

         except BaseException:
             combine_phase_logs(pkg.phase_log_files, pkg.log_path)
@@ -343,17 +343,21 @@ def add_command(self, cmd_name):
             self._remove_action(self._actions[-1])
             self.subparsers = self.add_subparsers(metavar="COMMAND", dest="command")

-        # each command module implements a parser() function, to which we
-        # pass its subparser for setup.
-        module = spack.cmd.get_module(cmd_name)
+        if cmd_name not in self.subparsers._name_parser_map:
+            # each command module implements a parser() function, to which we
+            # pass its subparser for setup.
+            module = spack.cmd.get_module(cmd_name)

-        # build a list of aliases
-        alias_list = [k for k, v in aliases.items() if v == cmd_name]
+            # build a list of aliases
+            alias_list = [k for k, v in aliases.items() if v == cmd_name]

-        subparser = self.subparsers.add_parser(
-            cmd_name, aliases=alias_list, help=module.description, description=module.description
-        )
-        module.setup_parser(subparser)
+            subparser = self.subparsers.add_parser(
+                cmd_name,
+                aliases=alias_list,
+                help=module.description,
+                description=module.description,
+            )
+            module.setup_parser(subparser)

         # return the callable function for the command
         return spack.cmd.get_command(cmd_name)
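The rework guards registration behind argparse's private `_name_parser_map`, so building a command's subparser becomes idempotent even when `add_command` is reached twice for the same name. A reduced sketch of that guard (the command table and setup function are invented):

import argparse

parser = argparse.ArgumentParser(prog="spack")
subparsers = parser.add_subparsers(metavar="COMMAND", dest="command")


def add_command(cmd_name, setup_fn, description=""):
    # _name_parser_map is private argparse state; the diff above relies on
    # it to detect commands that were already registered.
    if cmd_name not in subparsers._name_parser_map:
        subparser = subparsers.add_parser(cmd_name, help=description, description=description)
        setup_fn(subparser)


def setup_install(sp):
    sp.add_argument("specs", nargs="*")


add_command("install", setup_install)
add_command("install", setup_install)  # second call is a no-op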
@@ -26,9 +26,6 @@
 def configuration(module_set_name):
     config_path = "modules:%s:lmod" % module_set_name
     config = spack.config.get(config_path, {})
-    if not config and module_set_name == "default":
-        # return old format for backward compatibility
-        return spack.config.get("modules:lmod", {})
     return config


@@ -184,22 +181,10 @@ def provides(self):
         # If it is in the list of supported compilers family -> compiler
         if self.spec.name in spack.compilers.supported_compilers():
             provides["compiler"] = spack.spec.CompilerSpec(str(self.spec))
-        # Special case for llvm
-        if self.spec.name == "llvm":
-            provides["compiler"] = spack.spec.CompilerSpec(str(self.spec))
-            provides["compiler"].name = "clang"
-        # Special case for llvm-amdgpu
-        if self.spec.name == "llvm-amdgpu":
-            provides["compiler"] = spack.spec.CompilerSpec(str(self.spec))
-            provides["compiler"].name = "rocmcc"
-        # Special case for oneapi
-        if self.spec.name == "intel-oneapi-compilers":
-            provides["compiler"] = spack.spec.CompilerSpec(str(self.spec))
-            provides["compiler"].name = "oneapi"
-        # Special case for oneapi classic
-        if self.spec.name == "intel-oneapi-compilers-classic":
-            provides["compiler"] = spack.spec.CompilerSpec(str(self.spec))
-            provides["compiler"].name = "intel"
+        elif self.spec.name in spack.compilers.package_name_to_compiler_name:
+            # If it is the package for a supported compiler, but of a different name
+            cname = spack.compilers.package_name_to_compiler_name[self.spec.name]
+            provides["compiler"] = spack.spec.CompilerSpec("%s@%s" % (cname, self.spec.version))

         # All the other tokens in the hierarchy must be virtual dependencies
         for x in self.hierarchy_tokens:
@@ -23,9 +23,6 @@
 def configuration(module_set_name):
     config_path = "modules:%s:tcl" % module_set_name
     config = spack.config.get(config_path, {})
-    if not config and module_set_name == "default":
-        # return old format for backward compatibility
-        return spack.config.get("modules:tcl", {})
     return config


@@ -27,7 +27,16 @@
 import traceback
 import types
 import warnings
-from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Type  # novm
+from typing import (  # novm # noqa: F401
+    Any,
+    Callable,
+    Dict,
+    Iterable,
+    List,
+    Optional,
+    Tuple,
+    Type,
+)

 import six

@@ -531,10 +540,6 @@ class PackageBase(six.with_metaclass(PackageMeta, WindowsRPathMeta, PackageViewM
     # These are default values for instance variables.
     #

-    #: A list or set of build time test functions to be called when tests
-    #: are executed or 'None' if there are no such test functions.
-    build_time_test_callbacks = None  # type: Optional[List[str]]
-
     #: By default, packages are not virtual
     #: Virtual packages override this attribute
     virtual = False
@@ -543,10 +548,6 @@ class PackageBase(six.with_metaclass(PackageMeta, WindowsRPathMeta, PackageViewM
     #: those that do not can be used to install a set of other Spack packages.
     has_code = True

-    #: A list or set of install time test functions to be called when tests
-    #: are executed or 'None' if there are no such test functions.
-    install_time_test_callbacks = None  # type: Optional[List[str]]
-
     #: By default we build in parallel. Subclasses can override this.
     parallel = True

@@ -919,6 +920,12 @@ def url_for_version(self, version):
         """
         return self._implement_all_urls_for_version(version)[0]

+    def update_external_dependencies(self):
+        """
+        Method to override in package classes to handle external dependencies
+        """
+        pass
+
     def all_urls_for_version(self, version):
         """Return all URLs derived from version_urls(), url, urls, and
         list_url (if it contains a version) in a package in that order.
|
||||
s = self.extendee_spec
|
||||
return s and spec.satisfies(s)
|
||||
|
||||
def is_activated(self, view):
|
||||
"""Return True if package is activated."""
|
||||
if not self.is_extension:
|
||||
raise ValueError("is_activated called on package that is not an extension.")
|
||||
if self.extendee_spec.installed_upstream:
|
||||
# If this extends an upstream package, it cannot be activated for
|
||||
# it. This bypasses construction of the extension map, which can
|
||||
# can fail when run in the context of a downstream Spack instance
|
||||
return False
|
||||
extensions_layout = view.extensions_layout
|
||||
exts = extensions_layout.extension_map(self.extendee_spec)
|
||||
return (self.name in exts) and (exts[self.name] == self.spec)
|
||||
|
||||
def provides(self, vpkg_name):
|
||||
"""
|
||||
True if this package provides a virtual package with the specified name
|
||||
@@ -2319,30 +2313,6 @@ def do_deprecate(self, deprecator, link_fn):
         """Deprecate this package in favor of deprecator spec"""
         spec = self.spec

-        # Check whether package to deprecate has active extensions
-        if self.extendable:
-            view = spack.filesystem_view.YamlFilesystemView(spec.prefix, spack.store.layout)
-            active_exts = view.extensions_layout.extension_map(spec).values()
-            if active_exts:
-                short = spec.format("{name}/{hash:7}")
-                m = "Spec %s has active extensions\n" % short
-                for active in active_exts:
-                    m += " %s\n" % active.format("{name}/{hash:7}")
-                m += "Deactivate extensions before deprecating %s" % short
-                tty.die(m)
-
-        # Check whether package to deprecate is an active extension
-        if self.is_extension:
-            extendee = self.extendee_spec
-            view = spack.filesystem_view.YamlFilesystemView(extendee.prefix, spack.store.layout)
-
-            if self.is_activated(view):
-                short = spec.format("{name}/{hash:7}")
-                short_ext = extendee.format("{name}/{hash:7}")
-                msg = "Spec %s is an active extension of %s\n" % (short, short_ext)
-                msg += "Deactivate %s to be able to deprecate it" % short
-                tty.die(msg)
-
         # Install deprecator if it isn't installed already
         if not spack.store.db.query(deprecator):
             deprecator.package.do_install()
@@ -2372,155 +2342,6 @@ def _check_extendable(self):
         if not self.extendable:
             raise ValueError("Package %s is not extendable!" % self.name)

-    def _sanity_check_extension(self):
-        if not self.is_extension:
-            raise ActivationError("This package is not an extension.")
-
-        extendee_package = self.extendee_spec.package
-        extendee_package._check_extendable()
-
-        if not self.extendee_spec.installed:
-            raise ActivationError("Can only (de)activate extensions for installed packages.")
-        if not self.spec.installed:
-            raise ActivationError("Extensions must first be installed.")
-        if self.extendee_spec.name not in self.extendees:
-            raise ActivationError("%s does not extend %s!" % (self.name, self.extendee.name))
-
-    def do_activate(self, view=None, with_dependencies=True, verbose=True):
-        """Called on an extension to invoke the extendee's activate method.
-
-        Commands should call this routine, and should not call
-        activate() directly.
-        """
-        if verbose:
-            tty.msg(
-                "Activating extension {0} for {1}".format(
-                    self.spec.cshort_spec, self.extendee_spec.cshort_spec
-                )
-            )
-
-        self._sanity_check_extension()
-        if not view:
-            view = YamlFilesystemView(self.extendee_spec.prefix, spack.store.layout)
-
-        extensions_layout = view.extensions_layout
-
-        try:
-            extensions_layout.check_extension_conflict(self.extendee_spec, self.spec)
-        except spack.directory_layout.ExtensionAlreadyInstalledError as e:
-            # already installed, let caller know
-            tty.msg(e.message)
-            return
-
-        # Activate any package dependencies that are also extensions.
-        if with_dependencies:
-            for spec in self.dependency_activations():
-                if not spec.package.is_activated(view):
-                    spec.package.do_activate(
-                        view, with_dependencies=with_dependencies, verbose=verbose
-                    )
-
-        self.extendee_spec.package.activate(self, view, **self.extendee_args)
-
-        extensions_layout.add_extension(self.extendee_spec, self.spec)
-
-        if verbose:
-            tty.debug(
-                "Activated extension {0} for {1}".format(
-                    self.spec.cshort_spec, self.extendee_spec.cshort_spec
-                )
-            )
-
-    def dependency_activations(self):
-        return (
-            spec
-            for spec in self.spec.traverse(root=False, deptype="run")
-            if spec.package.extends(self.extendee_spec)
-        )
-
-    def activate(self, extension, view, **kwargs):
-        """
-        Add the extension to the specified view.
-
-        Package authors can override this function to maintain some
-        centralized state related to the set of activated extensions
-        for a package.
-
-        Spack internals (commands, hooks, etc.) should call
-        do_activate() method so that proper checks are always executed.
-        """
-        view.merge(extension.spec, ignore=kwargs.get("ignore", None))
-
-    def do_deactivate(self, view=None, **kwargs):
-        """Remove this extension package from the specified view. Called
-        on the extension to invoke extendee's deactivate() method.
-
-        `remove_dependents=True` deactivates extensions depending on this
-        package instead of raising an error.
-        """
-        self._sanity_check_extension()
-        force = kwargs.get("force", False)
-        verbose = kwargs.get("verbose", True)
-        remove_dependents = kwargs.get("remove_dependents", False)
-
-        if verbose:
-            tty.msg(
-                "Deactivating extension {0} for {1}".format(
-                    self.spec.cshort_spec, self.extendee_spec.cshort_spec
-                )
-            )
-
-        if not view:
-            view = YamlFilesystemView(self.extendee_spec.prefix, spack.store.layout)
-        extensions_layout = view.extensions_layout
-
-        # Allow a force deactivate to happen. This can unlink
-        # spurious files if something was corrupted.
-        if not force:
-            extensions_layout.check_activated(self.extendee_spec, self.spec)
-
-            activated = extensions_layout.extension_map(self.extendee_spec)
-            for name, aspec in activated.items():
-                if aspec == self.spec:
-                    continue
-                for dep in aspec.traverse(deptype="run"):
-                    if self.spec == dep:
-                        if remove_dependents:
-                            aspec.package.do_deactivate(**kwargs)
-                        else:
-                            msg = (
-                                "Cannot deactivate {0} because {1} is "
-                                "activated and depends on it"
-                            )
-                            raise ActivationError(
-                                msg.format(self.spec.cshort_spec, aspec.cshort_spec)
-                            )
-
-        self.extendee_spec.package.deactivate(self, view, **self.extendee_args)
-
-        # redundant activation check -- makes SURE the spec is not
-        # still activated even if something was wrong above.
-        if self.is_activated(view):
-            extensions_layout.remove_extension(self.extendee_spec, self.spec)
-
-        if verbose:
-            tty.debug(
-                "Deactivated extension {0} for {1}".format(
-                    self.spec.cshort_spec, self.extendee_spec.cshort_spec
-                )
-            )
-
-    def deactivate(self, extension, view, **kwargs):
-        """
-        Remove all extension files from the specified view.
-
-        Package authors can override this method to support other
-        extension mechanisms. Spack internals (commands, hooks, etc.)
-        should call do_deactivate() method so that proper checks are
-        always executed.
-        """
-        view.unmerge(extension.spec, ignore=kwargs.get("ignore", None))
-
     def view(self):
         """Create a view with the prefix of this package as the root.
         Extensions added to this view will modify the installation prefix of
@@ -195,23 +195,23 @@ def _package(maybe_abstract_spec):

 def is_spec_buildable(spec):
     """Return true if the spec is configured as buildable"""

     allpkgs = spack.config.get("packages")
     all_buildable = allpkgs.get("all", {}).get("buildable", True)
+    so_far = all_buildable  # the default "so far"

     def _package(s):
         pkg_cls = spack.repo.path.get_pkg_class(s.name)
         return pkg_cls(s)

-    # Get the list of names for which all_buildable is overridden
-    reverse = [
-        name
-        for name, entry in allpkgs.items()
-        if entry.get("buildable", all_buildable) != all_buildable
-    ]
-    # Does this spec override all_buildable
-    spec_reversed = spec.name in reverse or any(_package(spec).provides(name) for name in reverse)
-    return not all_buildable if spec_reversed else all_buildable
+    # check whether any providers for this package override the default
+    if any(
+        _package(spec).provides(name) and entry.get("buildable", so_far) != so_far
+        for name, entry in allpkgs.items()
+    ):
+        so_far = not so_far
+
+    spec_buildable = allpkgs.get(spec.name, {}).get("buildable", so_far)
+    return spec_buildable

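The rewritten logic starts from `all:buildable` and flips that default at most once, namely when any entry that this spec provides overrides it; the spec's own entry then has the last word. A simplified, self-contained model of the same decision (the provider relation is stubbed out with a plain set, unlike the real code, which asks package classes via `provides()`):

def is_buildable(spec_name, provides, allpkgs):
    # provides: names this spec provides, including itself;
    # allpkgs: the `packages` configuration as a plain dict.
    so_far = allpkgs.get("all", {}).get("buildable", True)
    if any(
        name in provides and entry.get("buildable", so_far) != so_far
        for name, entry in allpkgs.items()
    ):
        so_far = not so_far
    return allpkgs.get(spec_name, {}).get("buildable", so_far)


packages = {"all": {"buildable": False}, "mpi": {"buildable": True}}
assert is_buildable("openmpi", {"openmpi", "mpi"}, packages) is True
assert is_buildable("zlib", {"zlib"}, packages) is False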
def get_package_dir_permissions(spec):
@@ -3,6 +3,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import collections
+import itertools
 import multiprocessing.pool
 import os
 import re
@@ -61,23 +62,30 @@ def __init__(self, file_path, old_len, new_len):


 class BinaryTextReplaceError(spack.error.SpackError):
-    def __init__(self, old_path, new_path):
-        """Raised when the new install path is longer than the
-        old one, so binary text replacement cannot occur.
-
-        Args:
-            old_path (str): original path to be substituted
-            new_path (str): candidate path for substitution
-        """
-
-        msg = "New path longer than old path: binary text"
-        msg += " replacement not possible."
-        err_msg = "The new path %s" % new_path
-        err_msg += " is longer than the old path %s.\n" % old_path
-        err_msg += "Text replacement in binaries will not work.\n"
-        err_msg += "Create buildcache from an install path "
-        err_msg += "longer than new path."
-        super(BinaryTextReplaceError, self).__init__(msg, err_msg)
+    def __init__(self, msg):
+        msg += (
+            " To fix this, compile with more padding "
+            "(config:install_tree:padded_length), or install to a shorter prefix."
+        )
+        super(BinaryTextReplaceError, self).__init__(msg)
+
+
+class CannotGrowString(BinaryTextReplaceError):
+    def __init__(self, old, new):
+        msg = "Cannot replace {!r} with {!r} because the new prefix is longer.".format(old, new)
+        super(CannotGrowString, self).__init__(msg)
+
+
+class CannotShrinkCString(BinaryTextReplaceError):
+    def __init__(self, old, new, full_old_string):
+        # Just interpolate binary string to not risk issues with invalid
+        # unicode, which would be really bad user experience: error in error.
+        # We have no clue if we actually deal with a real C-string nor what
+        # encoding it has.
+        msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format(
+            old, new, full_old_string
+        )
+        super(CannotShrinkCString, self).__init__(msg)


 @memoized
@memoized
|
||||
@@ -289,17 +297,24 @@ def modify_macho_object(cur_path, rpaths, deps, idpath, paths_to_paths):
|
||||
if idpath:
|
||||
new_idpath = paths_to_paths.get(idpath, None)
|
||||
if new_idpath and not idpath == new_idpath:
|
||||
args += ["-id", new_idpath]
|
||||
args += [("-id", new_idpath)]
|
||||
|
||||
for dep in deps:
|
||||
new_dep = paths_to_paths.get(dep)
|
||||
if new_dep and dep != new_dep:
|
||||
args += ["-change", dep, new_dep]
|
||||
args += [("-change", dep, new_dep)]
|
||||
|
||||
new_rpaths = []
|
||||
for orig_rpath in rpaths:
|
||||
new_rpath = paths_to_paths.get(orig_rpath)
|
||||
if new_rpath and not orig_rpath == new_rpath:
|
||||
args += ["-rpath", orig_rpath, new_rpath]
|
||||
args_to_add = ("-rpath", orig_rpath, new_rpath)
|
||||
if args_to_add not in args and new_rpath not in new_rpaths:
|
||||
args += [args_to_add]
|
||||
new_rpaths.append(new_rpath)
|
||||
|
||||
# Deduplicate and flatten
|
||||
args = list(itertools.chain.from_iterable(llnl.util.lang.dedupe(args)))
|
||||
if args:
|
||||
args.append(str(cur_path))
|
||||
install_name_tool = executable.Executable("install_name_tool")
|
||||
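Grouping each `install_name_tool` option with its operands into a tuple lets the code deduplicate whole option groups before flattening; deduplicating the flat argv instead could tear apart a `-change old new` pair. A sketch with the standard library standing in for `llnl.util.lang.dedupe` (`dict.fromkeys` preserves insertion order on Python 3.7+; the paths are made up):

import itertools

args = [
    ("-change", "/old/libz.dylib", "/new/libz.dylib"),
    ("-rpath", "/old/lib", "/new/lib"),
    ("-change", "/old/libz.dylib", "/new/libz.dylib"),  # duplicate group
]

# Deduplicate complete option groups, then flatten to the final argv.
argv = list(itertools.chain.from_iterable(dict.fromkeys(args)))
print(argv)
# ['-change', '/old/libz.dylib', '/new/libz.dylib', '-rpath', '/old/lib', '/new/lib']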
@@ -451,43 +466,116 @@ def _replace_prefix_text(filename, compiled_prefixes):
             f.truncate()


-def _replace_prefix_bin(filename, byte_prefixes):
-    """Replace all the occurrences of the old install prefix with a
-    new install prefix in binary files.
-
-    The new install prefix is prefixed with ``b'/'`` until the
-    lengths of the prefixes are the same.
-
-    Args:
-        filename (str): target binary file
-        byte_prefixes (OrderedDict): OrderedDictionary where the keys are
-            binary strings of the old prefixes and the values are the new
-            binary prefixes
-    """
-    all_prefixes = re.compile(b"|".join(re.escape(prefix) for prefix in byte_prefixes.keys()))
-
-    def padded_replacement(old):
-        new = byte_prefixes[old]
-        pad = len(old) - len(new)
-        if pad < 0:
-            raise BinaryTextReplaceError(old, new)
-        return new + b"/" * pad
-
-    with open(filename, "rb+") as f:
-        # Register what replacement string to put on what offsets in the file.
-        replacements_at_offset = [
-            (padded_replacement(m.group(0)), m.start())
-            for m in re.finditer(all_prefixes, f.read())
-        ]
-
-        # Apply the replacements
-        for replacement, offset in replacements_at_offset:
-            f.seek(offset)
-            f.write(replacement)
+def apply_binary_replacements(f, prefix_to_prefix, suffix_safety_size=7):
+    """
+    Given a file opened in rb+ mode, apply the string replacements as
+    specified by an ordered dictionary of prefix to prefix mappings. This
+    method takes special care of null-terminated C-strings. C-string constants
+    are problematic because compilers and linkers optimize readonly strings for
+    space by aliasing those that share a common suffix (only suffix since all
+    of them are null terminated). See https://github.com/spack/spack/pull/31739
+    and https://github.com/spack/spack/pull/32253 for details. Our logic matches
+    the original prefix with a ``suffix_safety_size + 1`` lookahead for null bytes.
+    If no null terminator is found, we simply pad with leading /, assuming that
+    it's a long C-string; the full C-string after replacement has a large suffix
+    in common with its original value.
+    If there *is* a null terminator we can do the same as long as the replacement
+    has a sufficiently long common suffix with the original prefix.
+    As a last resort when the replacement does not have a long enough common suffix,
+    we can try to shorten the string, but this only works if the new length is
+    sufficiently short (typically the case when going from large padding -> normal path)
+    If the replacement string is longer, or all of the above fails, we error out.
+
+    Arguments:
+        f: file opened in rb+ mode
+        prefix_to_prefix (OrderedDict): OrderedDictionary where the keys are
+            bytes representing the old prefixes and the values are the new
+        suffix_safety_size (int): in case of null terminated strings, what size
+            of the suffix should remain to avoid aliasing issues?
+    """
+    assert suffix_safety_size >= 0
+    assert f.tell() == 0
+
+    # Look for exact matches of our paths, and also look if there's a null terminator
+    # soon after (this covers the case where we search for /abc but match /abc/ with
+    # a trailing dir separator).
+    regex = re.compile(
+        b"("
+        + b"|".join(re.escape(p) for p in prefix_to_prefix.keys())
+        + b")([^\0]{0,%d}\0)?" % suffix_safety_size
+    )
+
+    # We *could* read binary data in chunks to avoid loading all in memory,
+    # but it's nasty to deal with matches across boundaries, so let's stick to
+    # something simple.
+
+    for match in regex.finditer(f.read()):
+        # The matching prefix (old) and its replacement (new)
+        old = match.group(1)
+        new = prefix_to_prefix[old]
+
+        # Did we find a trailing null within a N + 1 bytes window after the prefix?
+        null_terminated = match.end(0) > match.end(1)
+
+        # Suffix string length, excluding the null byte
+        # Only makes sense if null_terminated
+        suffix_strlen = match.end(0) - match.end(1) - 1
+
+        # How many bytes are we shrinking our string?
+        bytes_shorter = len(old) - len(new)
+
+        # We can't make strings larger.
+        if bytes_shorter < 0:
+            raise CannotGrowString(old, new)
+
+        # If we don't know whether this is a null terminated C-string (we're looking
+        # only N + 1 bytes ahead), or if it is and we have a common suffix, we can
+        # simply pad with leading dir separators.
+        elif (
+            not null_terminated
+            or suffix_strlen >= suffix_safety_size  # == is enough, but let's be defensive
+            or old[-suffix_safety_size + suffix_strlen :]
+            == new[-suffix_safety_size + suffix_strlen :]
+        ):
+            replacement = b"/" * bytes_shorter + new
+
+        # If it *was* null terminated, all that matters is that we can leave N bytes
+        # of old suffix in place. Note that > is required since we also insert an
+        # additional null terminator.
+        elif bytes_shorter > suffix_safety_size:
+            replacement = new + match.group(2)  # includes the trailing null
+
+        # Otherwise... we can't :(
+        else:
+            raise CannotShrinkCString(old, new, match.group()[:-1])
+
+        f.seek(match.start())
+        f.write(replacement)
+
+
+def _replace_prefix_bin(filename, prefix_to_prefix):
+    """Replace all the occurrences of the old prefix with a new prefix in binary
+    files. See :func:`~spack.relocate.apply_binary_replacements` for details.
+
+    Args:
+        filename (str): target binary file
+        prefix_to_prefix (OrderedDict): ordered dictionary where the keys are
+            bytes representing the old prefixes and the values are the new
+            prefixes (all bytes utf-8 encoded)
+    """
+    with open(filename, "rb+") as f:
+        apply_binary_replacements(f, prefix_to_prefix)
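A usage sketch of the padding behavior on an in-memory buffer, assuming the new `apply_binary_replacements` above is in scope: the shorter replacement is left-padded with `/` so every byte offset after the match is preserved, which is what keeps the patched binary loadable (`io.BytesIO` stands in for a real file; the paths are made up):

import collections
import io

buf = io.BytesIO(b"prefix=/spack/padding/opt/zlib\0rest")
prefix_to_prefix = collections.OrderedDict([(b"/spack/padding/opt/zlib", b"/opt/zlib")])

apply_binary_replacements(buf, prefix_to_prefix)
# The C-string keeps its length and its null terminator; the new prefix is
# left-padded with b"/" so trailing data stays at the same offsets.
print(buf.getvalue())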
-def relocate_macho_binaries(
-    path_names, old_layout_root, new_layout_root, prefix_to_prefix, rel, old_prefix, new_prefix
-):
+def relocate_macho_binaries(
+    path_names,
+    old_layout_root,
+    new_layout_root,
+    prefix_to_prefix,
+    rel,
+    old_prefix,
+    new_prefix,
+):
     """
     Use macholib python package to get the rpaths, dependent libraries
@@ -1366,32 +1366,32 @@ def get_pkg_class(self, pkg_name):
             spack.config.get("packages").get(pkg_name, {}).get("package_attributes", {})
         )

-        overidden_attrs = getattr(cls, "overidden_attrs", {})
+        overridden_attrs = getattr(cls, "overridden_attrs", {})
         attrs_exclusively_from_config = getattr(cls, "attrs_exclusively_from_config", [])
         # Clear any prior changes to class attributes in case the config has
         # since changed
-        for key, val in overidden_attrs.items():
+        for key, val in overridden_attrs.items():
             setattr(cls, key, val)
         for key in attrs_exclusively_from_config:
             delattr(cls, key)

-        # Keep track of every class attribute that is overidden by the config:
+        # Keep track of every class attribute that is overridden by the config:
         # if the config changes between calls to this method, we make sure to
         # restore the original config values (in case the new config no longer
         # sets attributes that it used to)
-        new_overidden_attrs = {}
+        new_overridden_attrs = {}
         new_attrs_exclusively_from_config = set()
         for key, val in new_cfg_settings.items():
             if hasattr(cls, key):
-                new_overidden_attrs[key] = getattr(cls, key)
+                new_overridden_attrs[key] = getattr(cls, key)
             else:
                 new_attrs_exclusively_from_config.add(key)

             setattr(cls, key, val)
-        if new_overidden_attrs:
-            setattr(cls, "overidden_attrs", dict(new_overidden_attrs))
-        elif hasattr(cls, "overidden_attrs"):
-            delattr(cls, "overidden_attrs")
+        if new_overridden_attrs:
+            setattr(cls, "overridden_attrs", dict(new_overridden_attrs))
+        elif hasattr(cls, "overridden_attrs"):
+            delattr(cls, "overridden_attrs")
         if new_attrs_exclusively_from_config:
             setattr(cls, "attrs_exclusively_from_config", new_attrs_exclusively_from_config)
         elif hasattr(cls, "attrs_exclusively_from_config"):
@@ -8,32 +8,12 @@
 .. literalinclude:: _spack_root/lib/spack/spack/schema/env.py
    :lines: 36-
 """
-import warnings
-
 from llnl.util.lang import union_dicts

 import spack.schema.merged
 import spack.schema.packages
 import spack.schema.projections

-warned_about_concretization = False
-
-
-def deprecate_concretization(instance, props):
-    global warned_about_concretization
-    if warned_about_concretization:
-        return None
-    # Deprecate `spack:concretization` in favor of `spack:concretizer:unify`.
-    concretization_to_unify = {"together": "true", "separately": "false"}
-    concretization = instance["concretization"]
-    unify = concretization_to_unify[concretization]
-
-    return (
-        "concretization:{} is deprecated and will be removed in Spack 0.19 in favor of "
-        "the new concretizer:unify:{} config option.".format(concretization, unify)
-    )
-

 #: legal first keys in the schema
 keys = ("spack", "env")

@@ -76,11 +56,6 @@ def deprecate_concretization(instance, props):
         "type": "object",
         "default": {},
         "additionalProperties": False,
-        "deprecatedProperties": {
-            "properties": ["concretization"],
-            "message": deprecate_concretization,
-            "error": False,
-        },
         "properties": union_dicts(
             # merged configuration scope schemas
             spack.schema.merged.properties,
@@ -148,11 +123,6 @@ def deprecate_concretization(instance, props):
                 },
             ]
         },
-        "concretization": {
-            "type": "string",
-            "enum": ["together", "separately"],
-            "default": "separately",
-        },
     },
 ),
 }
@@ -169,31 +139,6 @@ def update(data):

     Returns:
         True if data was changed, False otherwise
     """
-    updated = False
-    if "include" in data:
-        msg = "included configuration files should be updated manually" " [files={0}]"
-        warnings.warn(msg.format(", ".join(data["include"])))
-
-    # Spack 0.19 drops support for `spack:concretization` in favor of
-    # `spack:concretizer:unify`. Here we provide an upgrade path that changes the former
-    # into the latter, or warns when there's an ambiguity. Note that Spack 0.17 is not
-    # forward compatible with `spack:concretizer:unify`.
-    if "concretization" in data:
-        has_unify = "unify" in data.get("concretizer", {})
-        to_unify = {"together": True, "separately": False}
-        unify = to_unify[data["concretization"]]
-
-        if has_unify and data["concretizer"]["unify"] != unify:
-            warnings.warn(
-                "The following configuration conflicts: "
-                "`spack:concretization:{}` and `spack:concretizer:unify:{}`"
-                ". Please update manually.".format(
-                    data["concretization"], data["concretizer"]["unify"]
-                )
-            )
-        else:
-            data.update({"concretizer": {"unify": unify}})
-            data.pop("concretization")
-        updated = True
-
-    return updated
+    # There are not currently any deprecated attributes in this section
+    # that have not been removed
+    return False
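The removed `update()` body was the 0.18-era upgrade path; before it was dropped it rewrote manifests as sketched below on plain dicts (a worked illustration of the transformation, not the original code):

data = {"concretization": "together"}

to_unify = {"together": True, "separately": False}
unify = to_unify[data.pop("concretization")]
data.setdefault("concretizer", {})["unify"] = unify

assert data == {"concretizer": {"unify": True}}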
@@ -8,8 +8,6 @@
 .. literalinclude:: _spack_root/lib/spack/spack/schema/modules.py
    :lines: 13-
 """
-import warnings
-
 import spack.schema.environment
 import spack.schema.projections

@@ -26,9 +24,7 @@
 )

 #: Matches a valid name for a module set
-valid_module_set_name = (
-    r"^(?!arch_folder$|lmod$|roots$|enable$|prefix_inspections$|" r"tcl$|use_view$)\w[\w-]*$"
-)
+valid_module_set_name = r"^(?!prefix_inspections$)\w[\w-]*$"

 #: Matches an anonymous spec, i.e. a spec without a root name
 anonymous_spec_regex = r"^[\^@%+~]"
@@ -156,15 +152,6 @@
 }


-def deprecation_msg_default_module_set(instance, props):
-    return (
-        'Top-level properties "{0}" in module config are ignored as of Spack v0.18. '
-        'They should be set on the "default" module set. Run\n\n'
-        "\t$ spack config update modules\n\n"
-        "to update the file to the new format".format('", "'.join(instance))
-    )
-
-
 # Properties for inclusion into other schemas (requires definitions)
 properties = {
     "modules": {
@@ -187,13 +174,6 @@ def deprecation_msg_default_module_set(instance, props):
                     "additionalProperties": False,
                     "properties": module_config_properties,
                 },
-                # Deprecated top-level keys (ignored in 0.18 with a warning)
-                "^(arch_folder|lmod|roots|enable|tcl|use_view)$": {},
             },
-            "deprecatedProperties": {
-                "properties": ["arch_folder", "lmod", "roots", "enable", "tcl", "use_view"],
-                "message": deprecation_msg_default_module_set,
-                "error": False,
-            },
         }
     }
@@ -249,39 +229,6 @@ def update_keys(data, key_translations):
     return changed


-def update_default_module_set(data):
-    """Update module configuration to move top-level keys inside default module set.
-
-    This change was introduced in v0.18 (see 99083f1706 or #28659).
-    """
-    changed = False
-
-    deprecated_top_level_keys = ("arch_folder", "lmod", "roots", "enable", "tcl", "use_view")
-
-    # Don't update when we already have a default module set
-    if "default" in data:
-        if any(key in data for key in deprecated_top_level_keys):
-            warnings.warn(
-                'Did not move top-level module properties into "default" '
-                'module set, because the "default" module set is already '
-                "defined"
-            )
-        return changed
-
-    default = {}
-
-    # Move deprecated top-level keys under "default" module set.
-    for key in deprecated_top_level_keys:
-        if key in data:
-            default[key] = data.pop(key)
-
-    if default:
-        changed = True
-        data["default"] = default
-
-    return changed
-
-
 def update(data):
     """Update the data in place to remove deprecated properties.

@@ -291,10 +238,5 @@ def update(data):
     Returns:
         True if data was changed, False otherwise
     """
-    # deprecated top-level module config (everything in default module set)
-    changed = update_default_module_set(data)
-
     # translate blacklist/whitelist to exclude/include
-    changed |= update_keys(data, exclude_include_translations)
-
-    return changed
+    return update_keys(data, exclude_include_translations)
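With the default-module-set migration gone, `update_keys` carries the whole upgrade: it renames deprecated keys throughout the nested configuration. A toy recursive key translation of that shape (this sketch is not the actual `update_keys`):

def translate_keys(data, translations):
    # Rename keys in place, recursing into nested dicts; return True when
    # anything changed, mirroring the update() contract above.
    changed = False
    for old, new in translations.items():
        if old in data:
            data[new] = data.pop(old)
            changed = True
    for value in data.values():
        if isinstance(value, dict):
            changed |= translate_keys(value, translations)
    return changed


config = {"tcl": {"blacklist": ["%gcc"], "whitelist": ["zlib"]}}
translate_keys(config, {"blacklist": "exclude", "whitelist": "include"})
assert config == {"tcl": {"exclude": ["%gcc"], "include": ["zlib"]}}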
@@ -102,7 +102,15 @@ def getter(node):
 ast_sym = ast_getter("symbol", "term")

 #: Order of precedence for version origins. Topmost types are preferred.
-version_origin_fields = ["spec", "external", "packages_yaml", "package_py", "installed"]
+version_origin_fields = [
+    "spec",
+    "dev_spec",
+    "external",
+    "packages_yaml",
+    "package_py",
+    "installed",
+]

 #: Look up version precedence strings by enum id
 version_origin_str = {i: name for i, name in enumerate(version_origin_fields)}
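With `dev_spec` inserted at index 1, the derived lookup table shifts accordingly; worked out by hand:

version_origin_fields = ["spec", "dev_spec", "external", "packages_yaml", "package_py", "installed"]
version_origin_str = {i: name for i, name in enumerate(version_origin_fields)}
assert version_origin_str[1] == "dev_spec"
assert version_origin_str[5] == "installed"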
@@ -614,11 +622,13 @@ def solve(self, setup, specs, reuse=None, output=None, control=None):
|
||||
self.control = control or default_clingo_control()
|
||||
# set up the problem -- this generates facts and rules
|
||||
self.assumptions = []
|
||||
timer.start("setup")
|
||||
with self.control.backend() as backend:
|
||||
self.backend = backend
|
||||
setup.setup(self, specs, reuse=reuse)
|
||||
timer.phase("setup")
|
||||
timer.stop("setup")
|
||||
|
||||
timer.start("load")
|
||||
# read in the main ASP program and display logic -- these are
|
||||
# handwritten, not generated, so we load them as resources
|
||||
parent_dir = os.path.dirname(__file__)
|
||||
@@ -648,12 +658,13 @@ def visit(node):
|
||||
self.control.load(os.path.join(parent_dir, "concretize.lp"))
|
||||
self.control.load(os.path.join(parent_dir, "os_compatibility.lp"))
|
||||
self.control.load(os.path.join(parent_dir, "display.lp"))
|
||||
timer.phase("load")
|
||||
timer.stop("load")
|
||||
|
||||
# Grounding is the first step in the solve -- it turns our facts
|
||||
# and first-order logic rules into propositional logic.
|
||||
timer.start("ground")
|
||||
self.control.ground([("base", [])])
|
||||
timer.phase("ground")
|
||||
timer.stop("ground")
|
||||
|
||||
# With a grounded program, we can run the solve.
|
||||
result = Result(specs)
|
||||
@@ -671,8 +682,10 @@ def on_model(model):
|
||||
|
||||
if clingo_cffi:
|
||||
solve_kwargs["on_unsat"] = cores.append
|
||||
|
||||
timer.start("solve")
|
||||
solve_result = self.control.solve(**solve_kwargs)
|
||||
timer.phase("solve")
|
||||
timer.stop("solve")
|
||||
|
||||
# once done, construct the solve result
|
||||
result.satisfiable = solve_result.satisfiable
|
||||
@@ -1463,6 +1476,12 @@ class Body(object):
|
||||
if concrete_build_deps or dtype != "build":
|
||||
clauses.append(fn.depends_on(spec.name, dep.name, dtype))
|
||||
|
||||
# Ensure Spack will not coconcretize this with another provider
|
||||
# for the same virtual
|
||||
for virtual in dep.package.virtuals_provided:
|
||||
clauses.append(fn.virtual_node(virtual.name))
|
||||
clauses.append(fn.provider(dep.name, virtual.name))
|
||||
|
||||
# imposing hash constraints for all but pure build deps of
|
||||
# already-installed concrete specs.
|
||||
if concrete_build_deps or dspec.deptypes != ("build",):
@@ -1483,7 +1502,7 @@ class Body(object):

        return clauses

-    def build_version_dict(self, possible_pkgs, specs):
+    def build_version_dict(self, possible_pkgs):
        """Declare any versions in specs not declared in packages."""
        self.declared_versions = collections.defaultdict(list)
        self.possible_versions = collections.defaultdict(set)
@@ -1524,6 +1543,8 @@ def key_fn(item):
                DeclaredVersion(version=ver, idx=idx, origin=version_provenance.packages_yaml)
            )

+    def add_concrete_versions_from_specs(self, specs, origin):
+        """Add concrete versions to possible versions from lists of CLI/dev specs."""
        for spec in specs:
            for dep in spec.traverse():
                if not dep.versions.concrete:
@@ -1547,7 +1568,7 @@ def key_fn(item):
                # about*, add it to the known versions. Use idx=0, which is the
                # best possible, so they're guaranteed to be used preferentially.
                self.declared_versions[dep.name].append(
-                    DeclaredVersion(version=dep.version, idx=0, origin=version_provenance.spec)
+                    DeclaredVersion(version=dep.version, idx=0, origin=origin)
                )
                self.possible_versions[dep.name].add(dep.version)
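In outline, the new method records every already-concrete version at the best possible rank (idx=0) and tags it with the caller-supplied provenance. A toy version with stand-in types (this DeclaredVersion is a local namedtuple, not spack's own):

import collections
from typing import NamedTuple

class DeclaredVersion(NamedTuple):  # stand-in, not spack's own type
    version: str
    idx: int
    origin: str

declared_versions = collections.defaultdict(list)
possible_versions = collections.defaultdict(set)

def add_concrete_versions(concrete, origin):
    """concrete maps package name -> concrete version string."""
    for name, version in concrete.items():
        # idx=0 is the best possible rank, so these versions win ties
        declared_versions[name].append(DeclaredVersion(version, 0, origin))
        possible_versions[name].add(version)

add_concrete_versions({"zlib": "1.2.13"}, origin="spec")
add_concrete_versions({"mypkg": "0.0.1"}, origin="dev_spec")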
@@ -1938,11 +1959,28 @@ def setup(self, driver, specs, reuse=None):
        # rules to generate an ASP program.
        self.gen = driver

+        # Calculate develop specs
+        # they will be used in addition to command line specs
+        # in determining known versions/targets/os
+        dev_specs = ()
+        env = ev.active_environment()
+        if env:
+            dev_specs = tuple(
+                spack.spec.Spec(info["spec"]).constrained(
+                    "dev_path=%s"
+                    % spack.util.path.canonicalize_path(info["path"], default_wd=env.path)
+                )
+                for name, info in env.dev_specs.items()
+            )
+        specs = tuple(specs)  # ensure compatible types to add

        # get possible compilers
        self.possible_compilers = self.generate_possible_compilers(specs)

        # traverse all specs and packages to build dict of possible versions
-        self.build_version_dict(possible, specs)
+        self.build_version_dict(possible)
+        self.add_concrete_versions_from_specs(specs, version_provenance.spec)
+        self.add_concrete_versions_from_specs(dev_specs, version_provenance.dev_spec)

        self.gen.h1("Concrete input spec definitions")
        self.define_concrete_input_specs(specs, possible)
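Each develop: entry of the active environment becomes a spec constrained with a dev_path variant pointing at its canonicalized source directory. A hedged sketch of the same transformation over a plain dict (the helper name is hypothetical):

import os

def dev_specs_from_entries(dev_entries, env_path):
    """Hypothetical helper mirroring the comprehension above; dev_entries
    mimics env.dev_specs, e.g. {"mypkg": {"spec": "mypkg@1.0", "path": "src"}}."""
    result = []
    for name, info in dev_entries.items():
        # relative develop paths are resolved against the environment directory
        path = os.path.normpath(os.path.join(env_path, info["path"]))
        result.append("%s dev_path=%s" % (info["spec"], path))
    return tuple(result)

print(dev_specs_from_entries({"mypkg": {"spec": "mypkg@1.0", "path": "src"}}, "/env"))
# -> ('mypkg@1.0 dev_path=/env/src',)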
@@ -1960,8 +1998,8 @@ def setup(self, driver, specs, reuse=None):

        # architecture defaults
        self.platform_defaults()
-        self.os_defaults(specs)
-        self.target_defaults(specs)
+        self.os_defaults(specs + dev_specs)
+        self.target_defaults(specs + dev_specs)

        self.virtual_providers()
        self.provider_defaults()
@@ -1978,11 +2016,8 @@ def setup(self, driver, specs, reuse=None):
            self.target_preferences(pkg)

        # Inject dev_path from environment
-        env = ev.active_environment()
-        if env:
-            for spec in sorted(specs):
-                for dep in spec.traverse():
-                    _develop_specs_from_env(dep, env)
+        for ds in dev_specs:
+            self.condition(spack.spec.Spec(ds.name), ds, msg="%s is a develop spec" % ds.name)

        self.gen.h1("Spec Constraints")
        self.literal_specs(specs)
@@ -2290,6 +2325,12 @@ def build_specs(self, function_tuples):
            if isinstance(spec.version, spack.version.GitVersion):
                spec.version.generate_git_lookup(spec.fullname)

+        # Add synthetic edges for externals that are extensions
+        for root in self._specs.values():
+            for dep in root.traverse():
+                if dep.external:
+                    dep.package.update_external_dependencies()
+
        return self._specs
@@ -2305,8 +2346,7 @@ def _develop_specs_from_env(spec, env):
            "Internal Error: The dev_path for spec {name} is not connected to a valid environment"
            "path. Please note that develop specs can only be used inside an environment"
            "These paths should be the same:\n\tdev_path:{dev_path}\n\tenv_based_path:{env_path}"
-        )
-        error_msg.format(name=spec.name, dev_path=spec.variants["dev_path"], env_path=path)
+        ).format(name=spec.name, dev_path=spec.variants["dev_path"], env_path=path)

        assert spec.variants["dev_path"].value == path, error_msg
    else:
@@ -44,6 +44,8 @@ node_flag_set(Package, Flag, Value) :- attr("node_flag_set", Package, Flag, Val

node_compiler_version_set(Package, Compiler, Version)
  :- attr("node_compiler_version_set", Package, Compiler, Version).
+node_compiler_set(Package, Compiler)
+  :- attr("node_compiler_set", Package, Compiler).

variant_default_value_from_cli(Package, Variant, Value)
  :- attr("variant_default_value_from_cli", Package, Variant, Value).
@@ -73,7 +75,8 @@ version_declared(Package, Version, Weight) :- version_declared(Package, Version,
:- version_declared(Package, Version, Weight, "installed"),
    version(Package, Version),
    version_weight(Package, Weight),
-    not hash(Package, _).
+    not hash(Package, _),
+    internal_error("Reuse version weight used for built package").

% versions are declared w/priority -- declared with priority implies declared
version_declared(Package, Version) :- version_declared(Package, Version, _).
@@ -119,19 +122,22 @@ possible_version_weight(Package, Weight)
:- version(Package, Version),
    version_weight(Package, Weight),
    version_declared(Package, Version, Weight, "external"),
-    not external(Package).
+    not external(Package),
+    internal_error("External weight used for built package").

% we can't use a weight from an installed spec if we are building it
% and vice-versa
:- version(Package, Version),
    version_weight(Package, Weight),
    version_declared(Package, Version, Weight, "installed"),
-    build(Package).
+    build(Package),
+    internal_error("Reuse version weight used for build package").

:- version(Package, Version),
    version_weight(Package, Weight),
    not version_declared(Package, Version, Weight, "installed"),
-    not build(Package).
+    not build(Package),
+    internal_error("Build version weight used for reused package").

1 { version_weight(Package, Weight) : version_declared(Package, Version, Weight) } 1
  :- version(Package, Version),
@@ -195,12 +201,14 @@ attr(Name, A1, A2, A3, A4) :- impose(ID), imposed_constraint(ID, Name, A1, A2, A
% we cannot have additional variant values when we are working with concrete specs
:- node(Package), hash(Package, Hash),
    variant_value(Package, Variant, Value),
-    not imposed_constraint(Hash, "variant_value", Package, Variant, Value).
+    not imposed_constraint(Hash, "variant_value", Package, Variant, Value),
+    internal_error("imposed hash without imposing all variant values").

% we cannot have additional flag values when we are working with concrete specs
:- node(Package), hash(Package, Hash),
    node_flag(Package, FlagType, Flag),
-    not imposed_constraint(Hash, "node_flag", Package, FlagType, Flag).
+    not imposed_constraint(Hash, "node_flag", Package, FlagType, Flag),
+    internal_error("imposed hash without imposing all flag values").

#defined condition/2.
#defined condition_requirement/3.
@@ -555,7 +563,7 @@ requirement_weight(Package, W) :-
    requirement_policy(Package, X, "any_of"),
    requirement_group_satisfied(Package, X).

-error(2, "Cannot satisfy requirement group for package '{0}'", Package) :-
+error(2, "Cannot satisfy the requirements in packages.yaml for the '{0}' package. You may want to delete them to proceed with concretization. To check where the requirements are defined run 'spack config blame packages'", Package) :-
    activate_requirement_rules(Package),
    requirement_group(Package, X),
    not requirement_group_satisfied(Package, X).
@@ -835,7 +843,8 @@ os_compatible(OS1, OS3) :- os_compatible(OS1, OS2), os_compatible(OS2, OS3).
% for which we can build software. We need a cardinality constraint
% since we might have more than one "buildable_os(OS)" fact.
:- not 1 { os_compatible(CurrentOS, ReusedOS) : buildable_os(CurrentOS) },
-    node_os(Package, ReusedOS).
+    node_os(Package, ReusedOS),
+    internal_error("Reused OS incompatible with build OS").

% If an OS is set explicitly respect the value
node_os(Package, OS) :- node_os_set(Package, OS), node(Package).
@@ -960,10 +969,15 @@ error(2, "'{0}' compiler constraints '%{1}@{2}' and '%{3}@{4}' are incompatible"
node_compiler(Package, Compiler) :- node_compiler_version(Package, Compiler, _).

-% We can't have a compiler be enforced and select the version from another compiler
-:- node_compiler(Package, Compiler1),
-    node_compiler_version(Package, Compiler2, _),
-    Compiler1 != Compiler2,
-    internal_error("Mismatch between selected compiler and compiler version").
+error(2, "Cannot concretize {0} with two compilers {1}@{2} and {3}@{4}", Package, C1, V1, C2, V2)
+  :- node_compiler_version(Package, C1, V1),
+     node_compiler_version(Package, C2, V2),
+     (C1, V1) != (C2, V2).
+
+error(2, "Cannot concretize {0} with two compilers {1} and {2}@{3}", Package, Compiler1, Compiler2, Version)
+  :- node_compiler(Package, Compiler1),
+     node_compiler_version(Package, Compiler2, Version),
+     Compiler1 != Compiler2.

% If the compiler of a node cannot be satisfied, raise
error(1, "No valid compiler for {0} satisfies '%{1}'", Package, Compiler)
@@ -1011,6 +1025,12 @@ compiler_match(Package, Dependency)

compiler_mismatch(Package, Dependency)
  :- depends_on(Package, Dependency),
+     not node_compiler_set(Dependency, _),
     not compiler_match(Package, Dependency).

+compiler_mismatch_required(Package, Dependency)
+  :- depends_on(Package, Dependency),
+     node_compiler_set(Dependency, _),
+     not compiler_match(Package, Dependency).
+
#defined node_compiler_set/2.
@@ -1032,7 +1052,8 @@ compiler_weight(Package, 100)
    not default_compiler_preference(Compiler, Version, _).

% For the time being, be strict and reuse only if the compiler match one we have on the system
-:- node_compiler_version(Package, Compiler, Version), not compiler_version(Compiler, Version).
+error(2, "Compiler {1}@{2} requested for {0} cannot be found. Set install_missing_compilers:true if intended.", Package, Compiler, Version)
+  :- node_compiler_version(Package, Compiler, Version), not compiler_version(Compiler, Version).

#defined node_compiler_preference/4.
#defined default_compiler_preference/3.
@@ -1282,7 +1303,7 @@ opt_criterion(45, "preferred providers (non-roots)").
}.

% Try to minimize the number of compiler mismatches in the DAG.
-opt_criterion(40, "compiler mismatches").
+opt_criterion(40, "compiler mismatches that are not from CLI").
#minimize{ 0@240: #true }.
#minimize{ 0@40: #true }.
#minimize{
@@ -1291,6 +1312,15 @@ opt_criterion(40, "compiler mismatches").
    build_priority(Package, Priority)
}.

+opt_criterion(39, "compiler mismatches that are not from CLI").
+#minimize{ 0@239: #true }.
+#minimize{ 0@39: #true }.
+#minimize{
+    1@39+Priority,Package,Dependency
+    : compiler_mismatch_required(Package, Dependency),
+    build_priority(Package, Priority)
+}.
+
% Try to minimize the number of compiler mismatches in the DAG.
opt_criterion(35, "OS mismatches").
#minimize{ 0@235: #true }.
@@ -1558,10 +1558,24 @@ def _set_compiler(self, compiler):

    def _add_dependency(self, spec, deptypes):
        """Called by the parser to add another spec as a dependency."""
-        if spec.name in self._dependencies:
+        if spec.name not in self._dependencies:
+            self.add_dependency_edge(spec, deptypes)
+            return
+
+        # Keep the intersection of constraints when a dependency is added
+        # multiple times. Currently we only allow identical edge types.
+        orig = self._dependencies[spec.name]
+        try:
+            dspec = next(dspec for dspec in orig if deptypes == dspec.deptypes)
+        except StopIteration:
            raise DuplicateDependencyError("Cannot depend on '%s' twice" % spec)

-        self.add_dependency_edge(spec, deptypes)
+        try:
+            dspec.spec.constrain(spec)
+        except spack.error.UnsatisfiableSpecError:
+            raise DuplicateDependencyError(
+                "Cannot depend on incompatible specs '%s' and '%s'" % (dspec.spec, spec)
+            )

    def add_dependency_edge(self, dependency_spec, deptype):
        """Add a dependency edge to this spec.
@@ -1741,7 +1755,12 @@ def spec_hash(self, hash):
            return hash.override(self)
        node_dict = self.to_node_dict(hash=hash)
        json_text = sjson.dump(node_dict)
-        return spack.util.hash.b32_hash(json_text)
+        # This implements "frankenhashes", preserving the last 7 characters of the
+        # original hash when splicing so that we can avoid relocation issues
+        out = spack.util.hash.b32_hash(json_text)
+        if self.build_spec is not self:
+            return out[:-7] + self.build_spec.spec_hash(hash)[-7:]
+        return out

    def _cached_hash(self, hash, length=None, force=False):
        """Helper function for storing a cached hash on the spec.
@@ -2732,6 +2751,11 @@ def _old_concretize(self, tests=False, deprecation_warning=True):
        # If any spec in the DAG is deprecated, throw an error
        Spec.ensure_no_deprecated(self)

+        # Update externals as needed
+        for dep in self.traverse():
+            if dep.external:
+                dep.package.update_external_dependencies()
+
        # Now that the spec is concrete we should check if
        # there are declared conflicts
        #
@@ -3547,7 +3571,9 @@ def _constrain_dependencies(self, other):
            )

        # Update with additional constraints from other spec
-        for name in other.dep_difference(self):
+        # operate on direct dependencies only, because a concrete dep
+        # represented by hash may have structure that needs to be preserved
+        for name in other.direct_dep_difference(self):
            dep_spec_copy = other._get_dependency(name)
            dep_copy = dep_spec_copy.spec
            deptypes = dep_spec_copy.deptypes
@@ -3568,10 +3594,10 @@ def constrained(self, other, deps=True):
        clone.constrain(other, deps)
        return clone

-    def dep_difference(self, other):
+    def direct_dep_difference(self, other):
        """Returns dependencies in self that are not in other."""
-        mine = set(s.name for s in self.traverse(root=False))
-        mine.difference_update(s.name for s in other.traverse(root=False))
+        mine = set(dname for dname in self._dependencies)
+        mine.difference_update(dname for dname in other._dependencies)
        return mine

    def _autospec(self, spec_like):
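The rename makes the narrowed scope explicit: the old helper walked the full DAG via traverse(), while the new one compares only immediate dependency names. On toy dicts the distinction looks like this:

# Toy stand-ins for two specs' _dependencies mappings (name -> edge list)
mine = {"cmake": [], "zlib": []}
other = {"zlib": []}

# direct_dep_difference: names that are direct deps of self but not of other;
# a transitive-only dependency would never appear in these dicts at all
direct_only = set(mine).difference(other)
assert direct_only == {"cmake"}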
@@ -4850,7 +4876,7 @@ def merge_abstract_anonymous_specs(*abstract_specs):
        merged_spec[name].constrain(current_spec_constraint[name], deps=False)

    # Update with additional constraints from other spec
-    for name in current_spec_constraint.dep_difference(merged_spec):
+    for name in current_spec_constraint.direct_dep_difference(merged_spec):
        edge = next(iter(current_spec_constraint.edges_to_dependencies(name)))
        merged_spec._add_dependency(edge.spec.copy(), edge.deptypes)
@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import functools
+import warnings

import six

@@ -34,6 +35,14 @@ def _impl(self, other):
    return _impl


+#: Translation table from archspec deprecated names
+_DEPRECATED_ARCHSPEC_NAMES = {
+    "graviton": "cortex_a72",
+    "graviton2": "neoverse_n1",
+    "graviton3": "neoverse_v1",
+}
+
+
class Target(object):
    def __init__(self, name, module_name=None):
        """Target models microarchitectures and their compatibility.

@@ -45,6 +54,10 @@ def __init__(self, name, module_name=None):
            like Cray (e.g. craype-compiler)
        """
        if not isinstance(name, archspec.cpu.Microarchitecture):
+            if name in _DEPRECATED_ARCHSPEC_NAMES:
+                msg = "'target={}' is deprecated, use 'target={}' instead"
+                name, old_name = _DEPRECATED_ARCHSPEC_NAMES[name], name
+                warnings.warn(msg.format(old_name, name))
            name = archspec.cpu.TARGETS.get(name, archspec.cpu.generic_microarchitecture(name))
        self.microarchitecture = name
        self.module_name = module_name
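Isolated from Target.__init__, the translation step amounts to a dictionary lookup plus a deprecation warning. A sketch with the table copied from the diff (the helper function itself is hypothetical):

import warnings

_DEPRECATED_ARCHSPEC_NAMES = {
    "graviton": "cortex_a72",
    "graviton2": "neoverse_n1",
    "graviton3": "neoverse_v1",
}

def canonical_target_name(name):
    """Hypothetical helper showing the translation step in isolation."""
    if name in _DEPRECATED_ARCHSPEC_NAMES:
        new_name = _DEPRECATED_ARCHSPEC_NAMES[name]
        warnings.warn("'target={}' is deprecated, use 'target={}' instead".format(name, new_name))
        return new_name
    return name

print(canonical_target_name("graviton2"))  # neoverse_n1 (with a warning)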
@@ -2,7 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

+import inspect
import os
import platform
import posixpath
@@ -14,6 +14,7 @@

import spack.build_environment
import spack.config
+import spack.package_base
import spack.spec
import spack.util.spack_yaml as syaml
from spack.build_environment import (
@@ -521,3 +522,27 @@ def test_dirty_disable_module_unload(config, mock_packages, working_env, mock_mo
    assert mock_module_cmd.calls
    assert any(("unload", "cray-libsci") == item[0] for item in mock_module_cmd.calls)
    assert any(("unload", "cray-mpich") == item[0] for item in mock_module_cmd.calls)
+
+
+class TestModuleMonkeyPatcher:
+    def test_getting_attributes(self, config, mock_packages):
+        s = spack.spec.Spec("libelf").concretized()
+        module_wrapper = spack.build_environment.ModuleChangePropagator(s.package)
+        assert module_wrapper.Libelf == s.package.module.Libelf
+
+    def test_setting_attributes(self, config, mock_packages):
+        s = spack.spec.Spec("libelf").concretized()
+        module = s.package.module
+        module_wrapper = spack.build_environment.ModuleChangePropagator(s.package)
+
+        # Setting an attribute has an immediate effect
+        module_wrapper.SOME_ATTRIBUTE = 1
+        assert module.SOME_ATTRIBUTE == 1
+
+        # We can also propagate the settings to classes in the MRO
+        module_wrapper.propagate_changes_to_mro()
+        for cls in inspect.getmro(type(s.package)):
+            current_module = cls.module
+            if current_module == spack.package_base:
+                break
+            assert current_module.SOME_ATTRIBUTE == 1
@@ -121,3 +121,31 @@ def test_old_style_compatibility_with_super(spec_str, method_name, expected):
    builder = spack.builder.create(s.package)
    value = getattr(builder, method_name)()
    assert value == expected
+
+
+@pytest.mark.regression("33928")
+@pytest.mark.usefixtures("builder_test_repository", "config", "working_env")
+@pytest.mark.disable_clean_stage_check
+def test_build_time_tests_are_executed_from_default_builder():
+    s = spack.spec.Spec("old-style-autotools").concretized()
+    builder = spack.builder.create(s.package)
+    builder.pkg.run_tests = True
+    for phase_fn in builder:
+        phase_fn.execute()
+
+    assert os.environ.get("CHECK_CALLED") == "1", "Build time tests not executed"
+    assert os.environ.get("INSTALLCHECK_CALLED") == "1", "Install time tests not executed"
+
+
+@pytest.mark.regression("34518")
+@pytest.mark.usefixtures("builder_test_repository", "config", "working_env")
+def test_monkey_patching_wrapped_pkg():
+    s = spack.spec.Spec("old-style-autotools").concretized()
+    builder = spack.builder.create(s.package)
+    assert s.package.run_tests is False
+    assert builder.pkg.run_tests is False
+    assert builder.pkg_with_dispatcher.run_tests is False
+
+    s.package.run_tests = True
+    assert builder.pkg.run_tests is True
+    assert builder.pkg_with_dispatcher.run_tests is True
@@ -1,41 +0,0 @@
-# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import sys
-
-import pytest
-
-from spack.main import SpackCommand
-
-activate = SpackCommand("activate")
-deactivate = SpackCommand("deactivate")
-install = SpackCommand("install")
-extensions = SpackCommand("extensions")
-
-pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
-
-
-def test_activate(mock_packages, mock_archive, mock_fetch, config, install_mockery):
-    install("extension1")
-    activate("extension1")
-    output = extensions("--show", "activated", "extendee")
-    assert "extension1" in output
-
-
-def test_deactivate(mock_packages, mock_archive, mock_fetch, config, install_mockery):
-    install("extension1")
-    activate("extension1")
-    deactivate("extension1")
-    output = extensions("--show", "activated", "extendee")
-    assert "extension1" not in output
-
-
-def test_deactivate_all(mock_packages, mock_archive, mock_fetch, config, install_mockery):
-    install("extension1")
-    install("extension2")
-    activate("extension1")
-    activate("extension2")
-    deactivate("--all", "extendee")
-    output = extensions("--show", "activated", "extendee")
-    assert "extension1" not in output
@@ -1011,7 +1011,6 @@ def mystrip(s):

    assert "--keep-stage" in install_parts
    assert "--no-check-signature" not in install_parts
-    assert "--no-add" in install_parts
    assert "-f" in install_parts
    flag_index = install_parts.index("-f")
    assert "archive-files.json" in install_parts[flag_index + 1]
@@ -1261,7 +1260,7 @@ def test_push_mirror_contents(
        with open(json_path, "w") as ypfd:
            ypfd.write(spec_json)

-        install_cmd("--keep-stage", json_path)
+        install_cmd("--add", "--keep-stage", json_path)

        # env, spec, json_path, mirror_url, build_id, sign_binaries
        ci.push_mirror_contents(env, json_path, mirror_url, True)
@@ -1623,7 +1622,7 @@ def test_ci_rebuild_index(
        with open(json_path, "w") as ypfd:
            ypfd.write(spec_json)

-        install_cmd("--keep-stage", "-f", json_path)
+        install_cmd("--add", "--keep-stage", "-f", json_path)
        buildcache_cmd("create", "-u", "-a", "-f", "--mirror-url", mirror_url, "callpath")
        ci_cmd("rebuild-index")
@@ -15,7 +15,6 @@
uninstall = SpackCommand("uninstall")
deprecate = SpackCommand("deprecate")
find = SpackCommand("find")
-activate = SpackCommand("activate")

pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")

@@ -89,24 +88,6 @@ def test_deprecate_deps(mock_packages, mock_archive, mock_fetch, install_mockery
    assert sorted(deprecated) == sorted(list(old_spec.traverse()))


-def test_deprecate_fails_active_extensions(
-    mock_packages, mock_archive, mock_fetch, install_mockery
-):
-    """Tests that active extensions and their extendees cannot be
-    deprecated."""
-    install("extendee")
-    install("extension1")
-    activate("extension1")
-
-    output = deprecate("-yi", "extendee", "extendee@nonexistent", fail_on_error=False)
-    assert "extension1" in output
-    assert "Deactivate extensions before deprecating" in output
-
-    output = deprecate("-yiD", "extension1", "extension1@notaversion", fail_on_error=False)
-    assert "extendee" in output
-    assert "is an active extension of" in output


def test_uninstall_deprecated(mock_packages, mock_archive, mock_fetch, install_mockery):
    """Tests that we can still uninstall deprecated packages."""
    install("libelf@0.8.13")
@@ -254,13 +254,18 @@ def test_dev_build_env_version_mismatch(
def test_dev_build_multiple(
    tmpdir, mock_packages, install_mockery, mutable_mock_env_path, mock_fetch
):
-    """Test spack install with multiple developer builds"""
+    """Test spack install with multiple developer builds
+
+    Test that only the root needs to be specified in the environment
+    Test that versions known only from the dev specs are included in the solve,
+    even if they come from a non-root
+    """
-    # setup dev-build-test-install package for dev build
+    # Wait to concretize inside the environment to set dev_path on the specs;
+    # without the environment, the user would need to set dev_path for both the
+    # root and dependency if they wanted a dev build for both.
    leaf_dir = tmpdir.mkdir("leaf")
-    leaf_spec = spack.spec.Spec("dev-build-test-install@0.0.0")
+    leaf_spec = spack.spec.Spec("dev-build-test-install@1.0.0")
    leaf_pkg_cls = spack.repo.path.get_pkg_class(leaf_spec.name)
    with leaf_dir.as_cwd():
        with open(leaf_pkg_cls.filename, "w") as f:
@@ -283,13 +288,12 @@ def test_dev_build_multiple(
        """\
env:
  specs:
-  - dev-build-test-install@0.0.0
  - dev-build-test-dependent@0.0.0

  develop:
    dev-build-test-install:
      path: %s
-      spec: dev-build-test-install@0.0.0
+      spec: dev-build-test-install@1.0.0
    dev-build-test-dependent:
      spec: dev-build-test-dependent@0.0.0
      path: %s
@@ -300,6 +304,7 @@ def test_dev_build_multiple(
    env("create", "test", "./spack.yaml")
    with ev.read("test"):
        # Do concretization inside environment for dev info
+        # These specs are the source of truth to compare against the installs
        leaf_spec.concretize()
        root_spec.concretize()
@@ -18,6 +18,7 @@
import spack.cmd.env
import spack.environment as ev
import spack.environment.shell
+import spack.error
import spack.modules
import spack.paths
import spack.repo
@@ -220,7 +221,7 @@ def test_env_install_single_spec(install_mockery, mock_fetch):

    e = ev.read("test")
    with e:
-        install("cmake-client")
+        install("--add", "cmake-client")

    e = ev.read("test")
    assert e.user_specs[0].name == "cmake-client"
@@ -255,7 +256,7 @@ def test_env_modifications_error_on_activate(install_mockery, mock_fetch, monkey

    e = ev.read("test")
    with e:
-        install("cmake-client")
+        install("--add", "cmake-client")

    def setup_error(pkg, env):
        raise RuntimeError("cmake-client had issues!")
@@ -276,7 +277,7 @@ def test_activate_adds_transitive_run_deps_to_path(install_mockery, mock_fetch,

    e = ev.read("test")
    with e:
-        install("depends-on-run-env")
+        install("--add", "depends-on-run-env")

    env_variables = {}
    spack.environment.shell.activate(e).apply_modifications(env_variables)
@@ -289,7 +290,7 @@ def test_env_install_same_spec_twice(install_mockery, mock_fetch):
    e = ev.read("test")
    with e:
        # The first installation outputs the package prefix, updates the view
-        out = install("cmake-client")
+        out = install("--add", "cmake-client")
        assert "Updating view at" in out

        # The second installation reports all packages already installed
@@ -448,7 +449,7 @@ def test_env_status_broken_view(
):
    env_dir = str(tmpdir)
    with ev.Environment(env_dir):
-        install("trivial-install-test-package")
+        install("--add", "trivial-install-test-package")

    # switch to a new repo that doesn't include the installed package
    # test that Spack detects the missing package and warns the user
@@ -467,7 +468,7 @@ def test_env_activate_broken_view(
    mutable_mock_env_path, mock_archive, mock_fetch, mock_custom_repository, install_mockery
):
    with ev.create("test"):
-        install("trivial-install-test-package")
+        install("--add", "trivial-install-test-package")

    # switch to a new repo that doesn't include the installed package
    # test that Spack detects the missing package and fails gracefully
@@ -1056,7 +1057,9 @@ def test_roots_display_with_variants():
    assert "boost +shared" in out


-def test_uninstall_removes_from_env(mock_stage, mock_fetch, install_mockery):
+def test_uninstall_keeps_in_env(mock_stage, mock_fetch, install_mockery):
+    # 'spack uninstall' without --remove should not change the environment
+    # spack.yaml file, just uninstall specs
    env("create", "test")
    with ev.read("test"):
        add("mpileaks")
@@ -1064,12 +1067,32 @@ def test_uninstall_removes_from_env(mock_stage, mock_fetch, install_mockery):
        install("--fake")

    test = ev.read("test")
-    assert any(s.name == "mpileaks" for s in test.specs_by_hash.values())
-    assert any(s.name == "libelf" for s in test.specs_by_hash.values())
+    # Save this spec to check later if it is still in the env
+    (mpileaks_hash,) = list(x for x, y in test.specs_by_hash.items() if y.name == "mpileaks")
+    orig_user_specs = test.user_specs
+    orig_concretized_specs = test.concretized_order

    with ev.read("test"):
        uninstall("-ya")

    test = ev.read("test")
+    assert test.concretized_order == orig_concretized_specs
+    assert test.user_specs.specs == orig_user_specs.specs
+    assert mpileaks_hash in test.specs_by_hash
+    assert not test.specs_by_hash[mpileaks_hash].package.installed
+
+
+def test_uninstall_removes_from_env(mock_stage, mock_fetch, install_mockery):
+    # 'spack uninstall --remove' should update the environment
+    env("create", "test")
+    with ev.read("test"):
+        add("mpileaks")
+        add("libelf")
+        install("--fake")
+
+    with ev.read("test"):
+        uninstall("-y", "-a", "--remove")
+
+    test = ev.read("test")
    assert not test.specs_by_hash
    assert not test.concretized_order
@@ -1255,7 +1278,7 @@ def test_env_updates_view_install_package(tmpdir, mock_stage, mock_fetch, instal
    view_dir = tmpdir.join("view")
    env("create", "--with-view=%s" % view_dir, "test")
    with ev.read("test"):
-        install("--fake", "mpileaks")
+        install("--fake", "--add", "mpileaks")

    assert os.path.exists(str(view_dir.join(".spack/mpileaks")))

@@ -1275,7 +1298,7 @@ def test_env_updates_view_uninstall(tmpdir, mock_stage, mock_fetch, install_mock
    view_dir = tmpdir.join("view")
    env("create", "--with-view=%s" % view_dir, "test")
    with ev.read("test"):
-        install("--fake", "mpileaks")
+        install("--fake", "--add", "mpileaks")

    check_mpileaks_and_deps_in_view(view_dir)

@@ -1324,7 +1347,7 @@ def test_env_updates_view_force_remove(tmpdir, mock_stage, mock_fetch, install_m
    view_dir = tmpdir.join("view")
    env("create", "--with-view=%s" % view_dir, "test")
    with ev.read("test"):
-        install("--fake", "mpileaks")
+        install("--add", "--fake", "mpileaks")

    check_mpileaks_and_deps_in_view(view_dir)

@@ -2403,7 +2426,9 @@ def test_duplicate_packages_raise_when_concretizing_together():
    e.add("mpileaks~opt")
    e.add("mpich")

-    with pytest.raises(ev.SpackEnvironmentError, match=r"cannot contain more"):
+    with pytest.raises(
+        spack.error.UnsatisfiableSpecError, match=r"relax the concretizer strictness"
+    ):
        e.concretize()
@@ -2451,30 +2476,6 @@ def test_env_write_only_non_default_nested(tmpdir):
    assert manifest == contents


-@pytest.mark.parametrize("concretization,unify", [("together", "true"), ("separately", "false")])
-def test_update_concretization_to_concretizer_unify(concretization, unify, tmpdir):
-    spack_yaml = """\
-spack:
-  concretization: {}
-""".format(
-        concretization
-    )
-    tmpdir.join("spack.yaml").write(spack_yaml)
-    # Update the environment
-    env("update", "-y", str(tmpdir))
-    with open(str(tmpdir.join("spack.yaml"))) as f:
-        assert (
-            f.read()
-            == """\
-spack:
-  concretizer:
-    unify: {}
-""".format(
-                unify
-            )
-        )


@pytest.mark.regression("18147")
def test_can_update_attributes_with_override(tmpdir):
    spack_yaml = """
@@ -35,12 +35,11 @@ def python_database(mock_packages, mutable_database):
def test_extensions(mock_packages, python_database, config, capsys):
    ext2 = Spec("py-extension2").concretized()

-    def check_output(ni, na):
+    def check_output(ni):
        with capsys.disabled():
            output = extensions("python")
            packages = extensions("-s", "packages", "python")
            installed = extensions("-s", "installed", "python")
-            activated = extensions("-s", "activated", "python")
        assert "==> python@2.7.11" in output
        assert "==> 2 extensions" in output
        assert "py-extension1" in output
@@ -50,26 +49,13 @@ def check_output(ni, na):
        assert "py-extension1" in packages
        assert "py-extension2" in packages
        assert "installed" not in packages
-        assert "activated" not in packages

-        assert ("%s installed" % (ni if ni else "None")) in output
-        assert ("%s activated" % (na if na else "None")) in output
        assert ("%s installed" % (ni if ni else "None")) in installed
-        assert ("%s activated" % (na if na else "None")) in activated

-    check_output(2, 0)
-
-    ext2.package.do_activate()
-    check_output(2, 2)
-
-    ext2.package.do_deactivate(force=True)
-    check_output(2, 1)
-
-    ext2.package.do_activate()
-    check_output(2, 2)
-
+    check_output(2)
    ext2.package.do_uninstall(force=True)
-    check_output(1, 1)
+    check_output(1)


def test_extensions_no_arguments(mock_packages):
@@ -269,9 +269,9 @@ def test_find_format_deps(database, config):
        callpath-1.0
            dyninst-8.2
                libdwarf-20130729
                    libelf-0.8.13
-            zmpi-1.0
-                fake-1.0
+                libelf-0.8.13
+        zmpi-1.0
+            fake-1.0

"""
    )
@@ -291,9 +291,9 @@ def test_find_format_deps_paths(database, config):
        callpath-1.0 {1}
            dyninst-8.2 {2}
                libdwarf-20130729 {3}
                    libelf-0.8.13 {4}
-            zmpi-1.0 {5}
-                fake-1.0 {6}
+                libelf-0.8.13 {4}
+        zmpi-1.0 {5}
+            fake-1.0 {6}

""".format(
        *prefixes
@@ -356,7 +356,7 @@ def test_find_prefix_in_env(
    """Test `find` formats requiring concrete specs work in environments."""
    env("create", "test")
    with ev.read("test"):
-        install("mpileaks")
+        install("--add", "mpileaks")
        find("-p")
        find("-l")
        find("-L")
@@ -771,7 +771,7 @@ def test_install_only_dependencies_in_env(
    dep = Spec("dependency-install").concretized()
    root = Spec("dependent-install").concretized()

-    install("-v", "--only", "dependencies", "dependent-install")
+    install("-v", "--only", "dependencies", "--add", "dependent-install")

    assert os.path.exists(dep.prefix)
    assert not os.path.exists(root.prefix)
@@ -800,7 +800,7 @@ def test_install_only_dependencies_of_all_in_env(


def test_install_no_add_in_env(tmpdir, mock_fetch, install_mockery, mutable_mock_env_path):
-    # To test behavior of --no-add option, we create the following environment:
+    # To test behavior of --add option, we create the following environment:
    #
    # mpileaks
    #   ^callpath
@@ -849,18 +849,19 @@ def test_install_no_add_in_env(tmpdir, mock_fetch, install_mockery, mutable_mock
    # Assert using --no-add with a spec not in the env fails
    inst_out = install("--no-add", "boost", fail_on_error=False, output=str)

    assert "no such spec exists in environment" in inst_out
    assert "You can add it to the environment with 'spack add " in inst_out

-    # Ensure using --no-add with an ambiguous spec fails
+    # Without --add, ensure that install fails if the spec matches more
+    # than one root
    with pytest.raises(ev.SpackEnvironmentError) as err:
-        inst_out = install("--no-add", "a", output=str)
+        inst_out = install("a", output=str)

    assert "a matches multiple specs in the env" in str(err)

-    # With "--no-add", install an unambiguous dependency spec (that already
-    # exists as a dep in the environment) using --no-add and make sure it
-    # gets installed (w/ deps), but is not added to the environment.
-    install("--no-add", "dyninst")
+    # Install an unambiguous dependency spec (that already exists as a dep
+    # in the environment) and make sure it gets installed (w/ deps),
+    # but is not added to the environment.
+    install("dyninst")

    find_output = find("-l", output=str)
    assert "dyninst" in find_output
@@ -872,31 +873,30 @@ def test_install_no_add_in_env(tmpdir, mock_fetch, install_mockery, mutable_mock
    assert all([s in env_specs for s in post_install_specs])

    # Make sure we can install a concrete dependency spec from a spec.json
-    # file on disk, using the ``--no-add` option, and the spec is installed
-    # but not added as a root
+    # file on disk, and the spec is installed but not added as a root
    mpi_spec_json_path = tmpdir.join("{0}.json".format(mpi_spec.name))
    with open(mpi_spec_json_path.strpath, "w") as fd:
        fd.write(mpi_spec.to_json(hash=ht.dag_hash))

-    install("--no-add", "-f", mpi_spec_json_path.strpath)
+    install("-f", mpi_spec_json_path.strpath)
    assert mpi_spec not in e.roots()

    find_output = find("-l", output=str)
    assert mpi_spec.name in find_output

-    # Without "--no-add", install an unambiguous depependency spec (that
-    # already exists as a dep in the environment) without --no-add and make
-    # sure it is added as a root of the environment as well as installed.
+    # Install an unambiguous depependency spec (that already exists as a
+    # dep in the environment) with --add and make sure it is added as a
+    # root of the environment as well as installed.
    assert b_spec not in e.roots()

-    install("b")
+    install("--add", "b")

    assert b_spec in e.roots()
    assert b_spec not in e.uninstalled_specs()

-    # Without "--no-add", install a novel spec and make sure it is added
-    # as a root and installed.
-    install("bowtie")
+    # Install a novel spec with --add and make sure it is added as a root
+    # and installed.
+    install("--add", "bowtie")

    assert any([s.name == "bowtie" for s in e.roots()])
    assert not any([s.name == "bowtie" for s in e.uninstalled_specs()])
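Taken together, these test edits pin down the new default: inside an environment, a plain `spack install <spec>` installs without touching the roots, and adding a root is opt-in via `--add`. The pattern the tests rely on, using spack's own SpackCommand harness:

from spack.main import SpackCommand

install = SpackCommand("install")

# with an environment active:
install("dyninst")          # installs an existing dep; roots unchanged
install("--add", "bowtie")  # installs AND records bowtie as a new root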
@@ -333,20 +333,6 @@ def test_error_conditions(self, cli_args, error_str):
        with pytest.raises(spack.error.SpackError, match=error_str):
            spack.cmd.mirror.mirror_create(args)

-    @pytest.mark.parametrize(
-        "cli_args,expected_end",
-        [
-            ({"directory": None}, os.path.join("source")),
-            ({"directory": os.path.join("foo", "bar")}, os.path.join("foo", "bar")),
-        ],
-    )
-    def test_mirror_path_is_valid(self, cli_args, expected_end, config):
-        args = MockMirrorArgs(**cli_args)
-        local_push_url = spack.cmd.mirror.local_mirror_url_from_user(args.directory)
-        assert local_push_url.startswith("file:")
-        assert os.path.isabs(local_push_url.replace("file://", ""))
-        assert local_push_url.endswith(expected_end)

    @pytest.mark.parametrize(
        "cli_args,not_expected",
        [
@@ -3,6 +3,15 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

+import itertools
import sys

import pytest

+import llnl.util.tty as tty
+
+import spack.cmd.uninstall
+import spack.environment
import spack.store
from spack.main import SpackCommand, SpackCommandError

@@ -37,6 +42,39 @@ def test_installed_dependents(mutable_database):
    uninstall("-y", "libelf")


+@pytest.mark.db
+def test_correct_installed_dependents(mutable_database):
+    # Test whether we return the right dependents.
+
+    # Take callpath from the database
+    callpath = spack.store.db.query_local("callpath")[0]
+
+    # Ensure it still has dependents and dependencies
+    dependents = callpath.dependents(deptype="all")
+    dependencies = callpath.dependencies(deptype="all")
+    assert dependents and dependencies
+
+    # Uninstall it, so it's missing.
+    callpath.package.do_uninstall(force=True)
+
+    # Retrieve all dependent hashes
+    inside_dpts, outside_dpts = spack.cmd.uninstall.installed_dependents(dependencies, None)
+    dependent_hashes = [s.dag_hash() for s in itertools.chain(*outside_dpts.values())]
+    set_dependent_hashes = set(dependent_hashes)
+
+    # We dont have an env, so this should be empty.
+    assert not inside_dpts
+
+    # Assert uniqueness
+    assert len(dependent_hashes) == len(set_dependent_hashes)
+
+    # Ensure parents of callpath are listed
+    assert all(s.dag_hash() in set_dependent_hashes for s in dependents)
+
+    # Ensure callpath itself is not, since it was missing.
+    assert callpath.dag_hash() not in set_dependent_hashes


@pytest.mark.db
def test_recursive_uninstall(mutable_database):
    """Test recursive uninstall."""
@@ -166,3 +204,187 @@ def _warn(*args, **kwargs):
    monkeypatch.setattr(tty, "warn", _warn)
    # Now try to uninstall and check this doesn't trigger warnings
    uninstall("-y", "-a")
+
+
+# Note: I want to use https://docs.pytest.org/en/7.1.x/how-to/skipping.html#skip-all-test-functions-of-a-class-or-module
+# the style formatter insists on separating these two lines.
+pytest.mark.skipif(sys.platform == "win32", reason="Envs unsupported on Windows")
+
+
+class TestUninstallFromEnv(object):
+    """Tests an installation with two environments e1 and e2, which each have
+    shared package installations:
+
+    e1 has dt-diamond-left -> dt-diamond-bottom
+
+    e2 has dt-diamond-right -> dt-diamond-bottom
+    """
+
+    env = SpackCommand("env")
+    add = SpackCommand("add")
+    concretize = SpackCommand("concretize")
+    find = SpackCommand("find")
+
+    @pytest.fixture
+    def environment_setup(
+        self, mutable_mock_env_path, config, mock_packages, mutable_database, install_mockery
+    ):
+        TestUninstallFromEnv.env("create", "e1")
+        e1 = spack.environment.read("e1")
+        with e1:
+            TestUninstallFromEnv.add("dt-diamond-left")
+            TestUninstallFromEnv.add("dt-diamond-bottom")
+            TestUninstallFromEnv.concretize()
+            install("--fake")
+
+        TestUninstallFromEnv.env("create", "e2")
+        e2 = spack.environment.read("e2")
+        with e2:
+            TestUninstallFromEnv.add("dt-diamond-right")
+            TestUninstallFromEnv.add("dt-diamond-bottom")
+            TestUninstallFromEnv.concretize()
+            install("--fake")
+
+    def test_basic_env_sanity(self, environment_setup):
+        for env_name in ["e1", "e2"]:
+            e = spack.environment.read(env_name)
+            with e:
+                for _, concretized_spec in e.concretized_specs():
+                    assert concretized_spec.package.installed
+
+    def test_uninstall_force_dependency_shared_between_envs(self, environment_setup):
+        """If you "spack uninstall -f --dependents dt-diamond-bottom" from
+        e1, then all packages should be uninstalled (but not removed) from
+        both e1 and e2.
+        """
+        e1 = spack.environment.read("e1")
+        with e1:
+            uninstall("-f", "-y", "--dependents", "dt-diamond-bottom")
+
+            # The specs should still be in the environment, since
+            # --remove was not specified
+            assert set(root.name for (root, _) in e1.concretized_specs()) == set(
+                ["dt-diamond-left", "dt-diamond-bottom"]
+            )
+
+            for _, concretized_spec in e1.concretized_specs():
+                assert not concretized_spec.package.installed
+
+        # Everything in e2 depended on dt-diamond-bottom, so should also
+        # have been uninstalled. The roots should be unchanged though.
+        e2 = spack.environment.read("e2")
+        with e2:
+            assert set(root.name for (root, _) in e2.concretized_specs()) == set(
+                ["dt-diamond-right", "dt-diamond-bottom"]
+            )
+            for _, concretized_spec in e2.concretized_specs():
+                assert not concretized_spec.package.installed
+
+    def test_uninstall_remove_dependency_shared_between_envs(self, environment_setup):
+        """If you "spack uninstall --dependents --remove dt-diamond-bottom" from
+        e1, then all packages are removed from e1 (it is now empty);
+        dt-diamond-left is also uninstalled (since only e1 needs it) but
+        dt-diamond-bottom is not uninstalled (since e2 needs it).
+        """
+        e1 = spack.environment.read("e1")
+        with e1:
+            dtdiamondleft = next(
+                concrete
+                for (_, concrete) in e1.concretized_specs()
+                if concrete.name == "dt-diamond-left"
+            )
+            output = uninstall("-y", "--dependents", "--remove", "dt-diamond-bottom")
+            assert "The following specs will be removed but not uninstalled" in output
+            assert not list(e1.roots())
+            assert not dtdiamondleft.package.installed
+
+        # Since -f was not specified, all specs in e2 should still be installed
+        # (and e2 should be unchanged)
+        e2 = spack.environment.read("e2")
+        with e2:
+            assert set(root.name for (root, _) in e2.concretized_specs()) == set(
+                ["dt-diamond-right", "dt-diamond-bottom"]
+            )
+            for _, concretized_spec in e2.concretized_specs():
+                assert concretized_spec.package.installed
+
+    def test_uninstall_dependency_shared_between_envs_fail(self, environment_setup):
+        """If you "spack uninstall --dependents dt-diamond-bottom" from
+        e1 (without --remove or -f), then this should fail (this is needed by
+        e2).
+        """
+        e1 = spack.environment.read("e1")
+        with e1:
+            output = uninstall("-y", "--dependents", "dt-diamond-bottom", fail_on_error=False)
+            assert "There are still dependents." in output
+            assert "use `spack env remove`" in output
+
+            # The environment should be unchanged and nothing should have been
+            # uninstalled
+            assert set(root.name for (root, _) in e1.concretized_specs()) == set(
+                ["dt-diamond-left", "dt-diamond-bottom"]
+            )
+            for _, concretized_spec in e1.concretized_specs():
+                assert concretized_spec.package.installed
+
+    def test_uninstall_force_and_remove_dependency_shared_between_envs(self, environment_setup):
+        """If you "spack uninstall -f --dependents --remove dt-diamond-bottom" from
+        e1, then all packages should be uninstalled and removed from e1.
+        All packages will also be uninstalled from e2, but the roots will
+        remain unchanged.
+        """
+        e1 = spack.environment.read("e1")
+        with e1:
+            dtdiamondleft = next(
+                concrete
+                for (_, concrete) in e1.concretized_specs()
+                if concrete.name == "dt-diamond-left"
+            )
+            uninstall("-f", "-y", "--dependents", "--remove", "dt-diamond-bottom")
+            assert not list(e1.roots())
+            assert not dtdiamondleft.package.installed
+
+        e2 = spack.environment.read("e2")
+        with e2:
+            assert set(root.name for (root, _) in e2.concretized_specs()) == set(
+                ["dt-diamond-right", "dt-diamond-bottom"]
+            )
+            for _, concretized_spec in e2.concretized_specs():
+                assert not concretized_spec.package.installed
+
+    def test_uninstall_keep_dependents_dependency_shared_between_envs(self, environment_setup):
+        """If you "spack uninstall -f --remove dt-diamond-bottom" from
+        e1, then dt-diamond-bottom should be uninstalled, which leaves
+        "dangling" references in both environments, since
+        dt-diamond-left and dt-diamond-right both need it.
+        """
+        e1 = spack.environment.read("e1")
+        with e1:
+            dtdiamondleft = next(
+                concrete
+                for (_, concrete) in e1.concretized_specs()
+                if concrete.name == "dt-diamond-left"
+            )
+            uninstall("-f", "-y", "--remove", "dt-diamond-bottom")
+            # dt-diamond-bottom was removed from the list of roots (note that
+            # it would still be installed since dt-diamond-left depends on it)
+            assert set(x.name for x in e1.roots()) == set(["dt-diamond-left"])
+            assert dtdiamondleft.package.installed
+
+        e2 = spack.environment.read("e2")
+        with e2:
+            assert set(root.name for (root, _) in e2.concretized_specs()) == set(
+                ["dt-diamond-right", "dt-diamond-bottom"]
+            )
+            dtdiamondright = next(
+                concrete
+                for (_, concrete) in e2.concretized_specs()
+                if concrete.name == "dt-diamond-right"
+            )
+            assert dtdiamondright.package.installed
+            dtdiamondbottom = next(
+                concrete
+                for (_, concrete) in e2.concretized_specs()
+                if concrete.name == "dt-diamond-bottom"
+            )
+            assert not dtdiamondbottom.package.installed
@@ -10,8 +10,8 @@

import spack.util.spack_yaml as s_yaml
from spack.main import SpackCommand
+from spack.spec import Spec

-activate = SpackCommand("activate")
extensions = SpackCommand("extensions")
install = SpackCommand("install")
view = SpackCommand("view")
@@ -134,46 +134,9 @@ def test_view_extension(tmpdir, mock_packages, mock_archive, mock_fetch, config,
    assert "extension1@1.0" in all_installed
    assert "extension1@2.0" in all_installed
    assert "extension2@1.0" in all_installed
-    global_activated = extensions("--show", "activated", "extendee")
-    assert "extension1@1.0" not in global_activated
-    assert "extension1@2.0" not in global_activated
-    assert "extension2@1.0" not in global_activated
-    view_activated = extensions("--show", "activated", "-v", viewpath, "extendee")
-    assert "extension1@1.0" in view_activated
-    assert "extension1@2.0" not in view_activated
-    assert "extension2@1.0" not in view_activated
    assert os.path.exists(os.path.join(viewpath, "bin", "extension1"))


-def test_view_extension_projection(
-    tmpdir, mock_packages, mock_archive, mock_fetch, config, install_mockery
-):
-    install("extendee@1.0")
-    install("extension1@1.0")
-    install("extension1@2.0")
-    install("extension2@1.0")
-
-    viewpath = str(tmpdir.mkdir("view"))
-    view_projection = {"all": "{name}-{version}"}
-    projection_file = create_projection_file(tmpdir, view_projection)
-    view("symlink", viewpath, "--projection-file={0}".format(projection_file), "extension1@1.0")
-
-    all_installed = extensions("--show", "installed", "extendee")
-    assert "extension1@1.0" in all_installed
-    assert "extension1@2.0" in all_installed
-    assert "extension2@1.0" in all_installed
-    global_activated = extensions("--show", "activated", "extendee")
-    assert "extension1@1.0" not in global_activated
-    assert "extension1@2.0" not in global_activated
-    assert "extension2@1.0" not in global_activated
-    view_activated = extensions("--show", "activated", "-v", viewpath, "extendee")
-    assert "extension1@1.0" in view_activated
-    assert "extension1@2.0" not in view_activated
-    assert "extension2@1.0" not in view_activated
-
-    assert os.path.exists(os.path.join(viewpath, "extendee-1.0", "bin", "extension1"))


def test_view_extension_remove(
    tmpdir, mock_packages, mock_archive, mock_fetch, config, install_mockery
):
@@ -184,10 +147,6 @@ def test_view_extension_remove(
    view("remove", viewpath, "extension1@1.0")
    all_installed = extensions("--show", "installed", "extendee")
    assert "extension1@1.0" in all_installed
-    global_activated = extensions("--show", "activated", "extendee")
-    assert "extension1@1.0" not in global_activated
-    view_activated = extensions("--show", "activated", "-v", viewpath, "extendee")
-    assert "extension1@1.0" not in view_activated
    assert not os.path.exists(os.path.join(viewpath, "bin", "extension1"))


@@ -216,48 +175,39 @@ def test_view_extension_conflict_ignored(
        assert fin.read() == "1.0"


-def test_view_extension_global_activation(
-    tmpdir, mock_packages, mock_archive, mock_fetch, config, install_mockery
-):
-    install("extendee")
-    install("extension1@1.0")
-    install("extension1@2.0")
-    install("extension2@1.0")
-    viewpath = str(tmpdir.mkdir("view"))
-    view("symlink", viewpath, "extension1@1.0")
-    activate("extension1@2.0")
-    activate("extension2@1.0")
-    all_installed = extensions("--show", "installed", "extendee")
-    assert "extension1@1.0" in all_installed
-    assert "extension1@2.0" in all_installed
-    assert "extension2@1.0" in all_installed
-    global_activated = extensions("--show", "activated", "extendee")
-    assert "extension1@1.0" not in global_activated
-    assert "extension1@2.0" in global_activated
-    assert "extension2@1.0" in global_activated
-    view_activated = extensions("--show", "activated", "-v", viewpath, "extendee")
-    assert "extension1@1.0" in view_activated
-    assert "extension1@2.0" not in view_activated
-    assert "extension2@1.0" not in view_activated
-    assert os.path.exists(os.path.join(viewpath, "bin", "extension1"))
-    assert not os.path.exists(os.path.join(viewpath, "bin", "extension2"))
-
-
-def test_view_extendee_with_global_activations(
-    tmpdir, mock_packages, mock_archive, mock_fetch, config, install_mockery
-):
-    install("extendee")
-    install("extension1@1.0")
-    install("extension1@2.0")
-    install("extension2@1.0")
-    viewpath = str(tmpdir.mkdir("view"))
-    activate("extension1@2.0")
-    output = view("symlink", viewpath, "extension1@1.0")
-    assert "Error: Globally activated extensions cannot be used" in output


def test_view_fails_with_missing_projections_file(tmpdir):
    viewpath = str(tmpdir.mkdir("view"))
    projection_file = os.path.join(str(tmpdir), "nonexistent")
    with pytest.raises(SystemExit):
        view("symlink", "--projection-file", projection_file, viewpath, "foo")


+@pytest.mark.parametrize("with_projection", [False, True])
+@pytest.mark.parametrize("cmd", ["symlink", "copy"])
+def test_view_files_not_ignored(
+    tmpdir, mock_packages, mock_archive, mock_fetch, config, install_mockery, cmd, with_projection
+):
+    spec = Spec("view-not-ignored").concretized()
+    pkg = spec.package
+    pkg.do_install()
+    pkg.assert_installed(spec.prefix)
+
+    install("view-dir-file")  # Arbitrary package to add noise
+
+    viewpath = str(tmpdir.mkdir("view_{0}".format(cmd)))
+
+    if with_projection:
+        proj = str(tmpdir.join("proj.yaml"))
+        with open(proj, "w") as f:
+            f.write('{"projections":{"all":"{name}"}}')
+        prefix_in_view = os.path.join(viewpath, "view-not-ignored")
+        args = ["--projection-file", proj]
+    else:
+        prefix_in_view = viewpath
+        args = []
+
+    view(cmd, *(args + [viewpath, "view-not-ignored", "view-dir-file"]))
+    pkg.assert_installed(prefix_in_view)
+
+    view("remove", viewpath, "view-not-ignored")
+    pkg.assert_not_installed(prefix_in_view)
|
||||
|
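A note on the projections file written in test_view_files_not_ignored: it is one-line JSON, which any YAML loader also accepts, so proj.yaml works as a projections file. A minimal sketch of the equivalence, assuming PyYAML purely for illustration:

import yaml  # assumption: used only to illustrate that JSON parses as YAML

doc = yaml.safe_load('{"projections":{"all":"{name}"}}')
assert doc == {"projections": {"all": "{name}"}}
# Projecting "all" onto "{name}" places every spec at <view>/<package name>,
# which is why prefix_in_view becomes <view>/view-not-ignored above.
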
@@ -352,6 +352,17 @@ def test_concretize_propagate_compiler_flag_not_passed_to_dependent(self):
        assert set(spec.compiler_flags["cflags"]) == set(["-g"])
        assert spec.satisfies("^openblas cflags='-O3'")

    def test_mixing_compilers_only_affects_subdag(self):
        spack.config.set("packages:all:compiler", ["clang", "gcc"])
        spec = Spec("dt-diamond%gcc ^dt-diamond-bottom%clang").concretized()
        for dep in spec.traverse():
            assert ("%clang" in dep) == (dep.name == "dt-diamond-bottom")

    def test_compiler_inherited_upwards(self):
        spec = Spec("dt-diamond ^dt-diamond-bottom%clang").concretized()
        for dep in spec.traverse():
            assert "%clang" in dep

    def test_architecture_inheritance(self):
        """test_architecture_inheritance is likely to fail with an
        UnavailableCompilerVersionError if the architecture is concretized
@@ -1695,6 +1706,28 @@ def test_best_effort_coconcretize_preferences(self, specs, expected_spec, occurances
            counter += 1
        assert counter == occurances, concrete_specs

    def test_coconcretize_reuse_and_virtuals(self):
        import spack.solver.asp

        if spack.config.get("config:concretizer") == "original":
            pytest.skip("Original concretizer cannot reuse")

        reusable_specs = []
        for s in ["mpileaks ^mpich", "zmpi"]:
            reusable_specs.extend(spack.spec.Spec(s).concretized().traverse(root=True))

        root_specs = [spack.spec.Spec("mpileaks"), spack.spec.Spec("zmpi")]

        import spack.solver.asp

        with spack.config.override("concretizer:reuse", True):
            solver = spack.solver.asp.Solver()
            setup = spack.solver.asp.SpackSolverSetup()
            result, _, _ = solver.driver.solve(setup, root_specs, reuse=reusable_specs)

        for spec in result.specs:
            assert "zmpi" in spec

    @pytest.mark.regression("30864")
    def test_misleading_error_message_on_version(self, mutable_database):
        # For this bug to be triggered we need a reusable dependency
@@ -1912,3 +1945,18 @@ def test_require_targets_are_allowed(self, mutable_database):

        for s in spec.traverse():
            assert s.satisfies("target=%s" % spack.platforms.test.Test.front_end)

    def test_external_python_extensions_have_dependency(self):
        """Test that python extensions have access to a python dependency"""
        external_conf = {
            "py-extension1": {
                "buildable": False,
                "externals": [{"spec": "py-extension1@2.0", "prefix": "/fake"}],
            }
        }
        spack.config.set("packages", external_conf)

        spec = Spec("py-extension2").concretized()

        assert "python" in spec["py-extension1"]
        assert spec["python"] == spec["py-extension1"]["python"]

@@ -8,8 +8,6 @@

import pytest

import archspec

import spack.config
import spack.package_prefs
import spack.repo
@@ -105,28 +103,16 @@ def test_preferred_variants_from_wildcard(self):
        update_packages("multivalue-variant", "variants", "foo=bar")
        assert_variant_values("multivalue-variant foo=*", foo=("bar",))

    def test_preferred_compilers(self):
    @pytest.mark.parametrize(
        "compiler_str,spec_str",
        [("gcc@4.5.0", "mpileaks"), ("clang@12.0.0", "mpileaks"), ("gcc@4.5.0", "openmpi")],
    )
    def test_preferred_compilers(self, compiler_str, spec_str):
        """Test preferred compilers are applied correctly"""
        if spack.config.get("config:concretizer") == "original":
            pytest.skip("Fixing the parser broke this test for the original concretizer.")

        # Need to make sure the test uses an available compiler
        arch = spack.spec.ArchSpec(("test", "redhat6", archspec.cpu.host().name))

        compiler_list = spack.compilers.compiler_specs_for_arch(arch)
        assert compiler_list

        # Try the first available compiler
        compiler = str(compiler_list[0])
        update_packages("mpileaks", "compiler", [compiler])
        spec = concretize("mpileaks")
        assert spec.compiler == spack.spec.CompilerSpec(compiler)

        # Try the last available compiler
        compiler = str(compiler_list[-1])
        update_packages("mpileaks", "compiler", [compiler])
        spec = concretize("mpileaks os=redhat6")
        assert spec.compiler == spack.spec.CompilerSpec(compiler)
        spec = spack.spec.Spec(spec_str)
        update_packages(spec.name, "compiler", [compiler_str])
        spec.concretize()
        assert spec.compiler == spack.spec.CompilerSpec(compiler_str)

    def test_preferred_target(self, mutable_mock_repo):
        """Test preferred targets are applied correctly"""
@@ -180,7 +166,7 @@ def test_preferred_providers(self):
        assert "zmpi" in spec

    def test_config_set_pkg_property_url(self, mutable_mock_repo):
        """Test setting an attribute that is explicitly-handled in the schema"""
        """Test setting an existing attribute in the package class"""
        update_packages(
            "mpileaks",
            "package_attributes",
@@ -195,26 +181,36 @@ def test_config_set_pkg_property_url(self, mutable_mock_repo):

    def test_config_set_pkg_property_new(self, mutable_mock_repo):
        """Test that you can set arbitrary attributes on the Package class"""
        update_packages(
            "mpileaks",
            "package_attributes",
            {"x": 1, "y": True, "z": "yesterday"},
        conf = syaml.load_config(
            """\
mpileaks:
  package_attributes:
    v1: 1
    v2: true
    v3: yesterday
    v4: "true"
    v5:
      x: 1
      y: 2
    v6:
    - 1
    - 2
"""
        )
        spack.config.set("packages", conf, scope="concretize")

        spec = concretize("mpileaks")
        assert spec.package.x == 1
        assert spec.package.y is True
        assert spec.package.z == "yesterday"
        assert spec.package.v1 == 1
        assert spec.package.v2 is True
        assert spec.package.v3 == "yesterday"
        assert spec.package.v4 == "true"
        assert dict(spec.package.v5) == {"x": 1, "y": 2}
        assert list(spec.package.v6) == [1, 2]

        update_packages("mpileaks", "package_attributes", {})
        spec = concretize("mpileaks")
        with pytest.raises(AttributeError):
            spec.package.x

    def test_config_set_pkg_property_collection_unsupported(self, mutable_mock_repo):
        """Test that an error is raised if you attempt to assign a list value"""
        update_packages("mpileaks", "package_attributes", {"x": ["a", "b"]})
        with pytest.raises(ConfigError):
            concretize("mpileaks")
            spec.package.v1

    def test_preferred(self):
        """ "Test packages with some version marked as preferred=True"""
@@ -385,6 +381,23 @@ def test_buildable_false_all_true_virtual(self):
        spec = Spec("mpich")
        assert spack.package_prefs.is_spec_buildable(spec)

    def test_buildable_false_virtual_true_pacakge(self):
        conf = syaml.load_config(
            """\
mpi:
  buildable: false
mpich:
  buildable: true
"""
        )
        spack.config.set("packages", conf, scope="concretize")

        spec = Spec("zmpi")
        assert not spack.package_prefs.is_spec_buildable(spec)

        spec = Spec("mpich")
        assert spack.package_prefs.is_spec_buildable(spec)

    def test_config_permissions_from_all(self, configure_permissions):
        # Although these aren't strictly about concretization, they are
        # configured in the same file and therefore convenient to test here.

@@ -379,6 +379,17 @@ def test_substitute_config_variables(mock_low_high_config, monkeypatch):
        os.path.join(mock_low_high_config.scopes["low"].path, os.path.join("foo", "bar", "baz"))
    )

    # test architecture information is in replacements
    assert spack_path.canonicalize_path(
        os.path.join("foo", "$platform", "bar")
    ) == os.path.abspath(os.path.join("foo", "test", "bar"))

    host_target = spack.platforms.host().target("default_target")
    host_target_family = str(host_target.microarchitecture.family)
    assert spack_path.canonicalize_path(
        os.path.join("foo", "$target_family", "bar")
    ) == os.path.abspath(os.path.join("foo", host_target_family, "bar"))


packages_merge_low = {"packages": {"foo": {"variants": ["+v1"]}, "bar": {"variants": ["+v2"]}}}


@@ -28,7 +28,7 @@ def test_set_install_hash_length(hash_length, mutable_config, tmpdir):
    assert len(hash_str) == hash_length


@pytest.mark.use_fixtures("mock_packages")
@pytest.mark.usefixtures("mock_packages")
def test_set_install_hash_length_upper_case(mutable_config, tmpdir):
    mutable_config.set("config:install_hash_length", 5)
    mutable_config.set(

@@ -1643,7 +1643,7 @@ def mock_executable(tmpdir):
    """
    import jinja2

    shebang = "#!/bin/bash\n" if not is_windows else "@ECHO OFF"
    shebang = "#!/bin/sh\n" if not is_windows else "@ECHO OFF"

    def _factory(name, output, subdir=("bin",)):
        f = tmpdir.ensure(*subdir, dir=True).join(name)
@@ -1808,14 +1808,24 @@ def mock_tty_stdout(monkeypatch):
    monkeypatch.setattr(sys.stdout, "isatty", lambda: True)


@pytest.fixture
def prefix_like():
    return "package-0.0.0.a1-hashhashhashhashhashhashhashhash"


@pytest.fixture()
def binary_with_rpaths(tmpdir):
def prefix_tmpdir(tmpdir, prefix_like):
    return tmpdir.mkdir(prefix_like)


@pytest.fixture()
def binary_with_rpaths(prefix_tmpdir):
    """Factory fixture that compiles an ELF binary setting its RPATH. Relative
    paths are encoded with `$ORIGIN` prepended.
    """

    def _factory(rpaths, message="Hello world!"):
        source = tmpdir.join("main.c")
        source = prefix_tmpdir.join("main.c")
        source.write(
            """
            #include <stdio.h>

@@ -252,12 +252,8 @@ def test_install_times(install_mockery, mock_fetch, mutable_mock_repo):

    # The order should be maintained
    phases = [x["name"] for x in times["phases"]]
    total = sum([x["seconds"] for x in times["phases"]])
    for name in ["one", "two", "three", "install"]:
        assert name in phases

    # Give a generous difference threshold
    assert abs(total - times["total"]["seconds"]) < 5
    assert phases == ["one", "two", "three", "install"]
    assert all(isinstance(x["seconds"], float) for x in times["phases"])


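For orientation, the timing data those assertions walk has the following shape; the keys come from the assertions themselves, while the values here are invented for illustration:

times = {
    "phases": [
        {"name": "one", "seconds": 0.1},
        {"name": "two", "seconds": 0.2},
        {"name": "three", "seconds": 0.3},
        {"name": "install", "seconds": 0.4},
    ],
    "total": {"seconds": 1.0},
}
assert [x["name"] for x in times["phases"]] == ["one", "two", "three", "install"]
assert all(isinstance(x["seconds"], float) for x in times["phases"])
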
def test_flatten_deps(install_mockery, mock_fetch, mutable_mock_repo):

@@ -467,6 +467,37 @@ def _conc_spec(compiler):
    assert packages


def test_update_tasks_for_compiler_packages_as_compiler(mock_packages, config, monkeypatch):
    spec = spack.spec.Spec("trivial-install-test-package").concretized()
    installer = inst.PackageInstaller([(spec.package, {})])

    # Add a task to the queue
    installer._add_init_task(spec.package, installer.build_requests[0], False, {})

    # monkeypatch to make the list of compilers be what we test
    def fake_package_list(compiler, architecture, pkgs):
        return [(spec.package, True)]

    monkeypatch.setattr(inst, "_packages_needed_to_bootstrap_compiler", fake_package_list)

    installer._add_bootstrap_compilers("fake", "fake", "fake", None, {})

    # Check that the only task is now a compiler task
    assert len(installer.build_pq) == 1
    assert installer.build_pq[0][1].compiler


def test_bootstrapping_compilers_with_different_names_from_spec(
    install_mockery, mutable_config, mock_fetch
):
    with spack.config.override("config:install_missing_compilers", True):
        with spack.concretize.disable_compiler_existence_check():
            spec = spack.spec.Spec("trivial-install-test-package%oneapi@22.2.0").concretized()
            spec.package.do_install()

    assert spack.spec.CompilerSpec("oneapi@22.2.0") in spack.compilers.all_compiler_specs()


def test_dump_packages_deps_ok(install_mockery, tmpdir, mock_packages):
    """Test happy path for dump_packages with dependencies."""

@@ -591,7 +622,7 @@ def test_combine_phase_logs(tmpdir):

    # This is the output log we will combine them into
    combined_log = os.path.join(str(tmpdir), "combined-out.txt")
    spack.installer.combine_phase_logs(phase_log_files, combined_log)
    inst.combine_phase_logs(phase_log_files, combined_log)
    with open(combined_log, "r") as log_file:
        out = log_file.read()

@@ -600,6 +631,22 @@ def test_combine_phase_logs(tmpdir):
        assert "Output from %s\n" % log_file in out


def test_combine_phase_logs_does_not_care_about_encoding(tmpdir):
    # this is invalid utf-8 at a minimum
    data = b"\x00\xF4\xBF\x00\xBF\xBF"
    input = [str(tmpdir.join("a")), str(tmpdir.join("b"))]
    output = str(tmpdir.join("c"))

    for path in input:
        with open(path, "wb") as f:
            f.write(data)

    inst.combine_phase_logs(input, output)

    with open(output, "rb") as f:
        assert f.read() == data * 2

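The expectation above (raw bytes in, the same bytes out twice) pins down that phase logs are concatenated in binary mode, so invalid UTF-8 passes through untouched. A minimal sketch of that idea using only the standard library; this is an illustration, not Spack's combine_phase_logs:

import shutil

def combine_logs_sketch(inputs, output):
    # Concatenate files byte-for-byte; no decoding, so any encoding survives.
    with open(output, "wb") as out:
        for path in inputs:
            with open(path, "rb") as f:
                shutil.copyfileobj(f, out)
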
def test_check_deps_status_install_failure(install_mockery, monkeypatch):
    const_arg = installer_args(["a"], {})
    installer = create_installer(const_arg)

@@ -81,6 +81,15 @@ def test_file_layout(self, compiler, provider, factory, module_configuration):
        else:
            assert repetitions == 1

    def test_compilers_provided_different_name(self, factory, module_configuration):
        module_configuration("complex_hierarchy")
        module, spec = factory("intel-oneapi-compilers%clang@3.3")

        provides = module.conf.provides

        assert "compiler" in provides
        assert provides["compiler"] == spack.spec.CompilerSpec("oneapi@3.0")

    def test_simple_case(self, modulefile_content, module_configuration):
        """Tests the generation of a simple TCL module file."""

@@ -298,7 +307,7 @@ def test_modules_relative_to_view(
):
    with ev.Environment(str(tmpdir), with_view=True) as e:
        module_configuration("with_view")
        install("cmake")
        install("--add", "cmake")

        spec = spack.spec.Spec("cmake").concretized()


@@ -84,12 +84,6 @@ def test_inheritance_of_patches(self):
        # Will error if inheritor package cannot find inherited patch files
        s.concretize()

    def test_dependency_extensions(self):
        s = Spec("extension2")
        s.concretize()
        deps = set(x.name for x in s.package.dependency_activations())
        assert deps == set(["extension1"])

    def test_import_class_from_package(self):
        from spack.pkg.builtin.mock.mpich import Mpich  # noqa: F401

@@ -2,6 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import io
import os
import os.path
import re
@@ -141,13 +142,13 @@ def _factory():


@pytest.fixture()
def copy_binary():
def copy_binary(prefix_like):
    """Returns a function that copies a binary somewhere and
    returns the new location.
    """

    def _copy_somewhere(orig_binary):
        new_root = orig_binary.mkdtemp()
        new_root = orig_binary.mkdtemp().mkdir(prefix_like)
        new_binary = new_root.join("main.x")
        shutil.copy(str(orig_binary), str(new_binary))
        return new_binary
@@ -261,29 +262,33 @@ def test_set_elf_rpaths_warning(mock_patchelf):

@pytest.mark.requires_executables("patchelf", "strings", "file", "gcc")
@skip_unless_linux
def test_replace_prefix_bin(binary_with_rpaths):
def test_replace_prefix_bin(binary_with_rpaths, prefix_like):
    prefix = "/usr/" + prefix_like
    prefix_bytes = prefix.encode("utf-8")
    new_prefix = "/foo/" + prefix_like
    new_prefix_bytes = new_prefix.encode("utf-8")
    # Compile an "Hello world!" executable and set RPATHs
    executable = binary_with_rpaths(rpaths=["/usr/lib", "/usr/lib64"])
    executable = binary_with_rpaths(rpaths=[prefix + "/lib", prefix + "/lib64"])

    # Relocate the RPATHs
    spack.relocate._replace_prefix_bin(str(executable), {b"/usr": b"/foo"})
    spack.relocate._replace_prefix_bin(str(executable), {prefix_bytes: new_prefix_bytes})

    # Some compilers add rpaths so ensure changes included in final result
    assert "/foo/lib:/foo/lib64" in rpaths_for(executable)
    assert "%s/lib:%s/lib64" % (new_prefix, new_prefix) in rpaths_for(executable)


@pytest.mark.requires_executables("patchelf", "strings", "file", "gcc")
@skip_unless_linux
def test_relocate_elf_binaries_absolute_paths(binary_with_rpaths, copy_binary, tmpdir):
def test_relocate_elf_binaries_absolute_paths(binary_with_rpaths, copy_binary, prefix_tmpdir):
    # Create an executable, set some RPATHs, copy it to another location
    orig_binary = binary_with_rpaths(rpaths=[str(tmpdir.mkdir("lib")), "/usr/lib64"])
    orig_binary = binary_with_rpaths(rpaths=[str(prefix_tmpdir.mkdir("lib")), "/usr/lib64"])
    new_binary = copy_binary(orig_binary)

    spack.relocate.relocate_elf_binaries(
        binaries=[str(new_binary)],
        orig_root=str(orig_binary.dirpath()),
        new_root=None,  # Not needed when relocating absolute paths
        new_prefixes={str(tmpdir): "/foo"},
        new_prefixes={str(orig_binary.dirpath()): "/foo"},
        rel=False,
        # Not needed when relocating absolute paths
        orig_prefix=None,
@@ -317,9 +322,13 @@ def test_relocate_elf_binaries_relative_paths(binary_with_rpaths, copy_binary):

@pytest.mark.requires_executables("patchelf", "strings", "file", "gcc")
@skip_unless_linux
def test_make_elf_binaries_relative(binary_with_rpaths, copy_binary, tmpdir):
def test_make_elf_binaries_relative(binary_with_rpaths, copy_binary, prefix_tmpdir):
    orig_binary = binary_with_rpaths(
        rpaths=[str(tmpdir.mkdir("lib")), str(tmpdir.mkdir("lib64")), "/opt/local/lib"]
        rpaths=[
            str(prefix_tmpdir.mkdir("lib")),
            str(prefix_tmpdir.mkdir("lib64")),
            "/opt/local/lib",
        ]
    )
    new_binary = copy_binary(orig_binary)

@@ -339,15 +348,19 @@ def test_raise_if_not_relocatable(monkeypatch):

@pytest.mark.requires_executables("patchelf", "strings", "file", "gcc")
@skip_unless_linux
def test_relocate_text_bin(binary_with_rpaths, copy_binary, tmpdir):
def test_relocate_text_bin(binary_with_rpaths, copy_binary, prefix_tmpdir):
    orig_binary = binary_with_rpaths(
        rpaths=[str(tmpdir.mkdir("lib")), str(tmpdir.mkdir("lib64")), "/opt/local/lib"],
        message=str(tmpdir),
        rpaths=[
            str(prefix_tmpdir.mkdir("lib")),
            str(prefix_tmpdir.mkdir("lib64")),
            "/opt/local/lib",
        ],
        message=str(prefix_tmpdir),
    )
    new_binary = copy_binary(orig_binary)

    # Check original directory is in the executabel and the new one is not
    assert text_in_bin(str(tmpdir), new_binary)
    # Check original directory is in the executable and the new one is not
    assert text_in_bin(str(prefix_tmpdir), new_binary)
    assert not text_in_bin(str(new_binary.dirpath()), new_binary)

    # Check this call succeed
@@ -358,7 +371,7 @@ def test_relocate_text_bin(binary_with_rpaths, copy_binary, tmpdir):

    # Check original directory is not there anymore and it was
    # substituted with the new one
    assert not text_in_bin(str(tmpdir), new_binary)
    assert not text_in_bin(str(prefix_tmpdir), new_binary)
    assert text_in_bin(str(new_binary.dirpath()), new_binary)


@@ -450,30 +463,144 @@ def test_utf8_paths_to_single_binary_regex():
    assert regex.search(string).group(0) == b"/safe/[a-z]/file"


def test_ordered_replacement(tmpdir):
def test_ordered_replacement():
    # This tests whether binary text replacement respects order, so that
    # a long package prefix is replaced before a shorter sub-prefix like
    # the root of the spack store (as a fallback).
    def replace_and_expect(prefix_map, before, after):
        file = str(tmpdir.join("file"))
        with open(file, "wb") as f:
            f.write(before)
        spack.relocate._replace_prefix_bin(file, prefix_map)
        with open(file, "rb") as f:
            assert f.read() == after
    def replace_and_expect(prefix_map, before, after=None, suffix_safety_size=7):
        f = io.BytesIO(before)
        spack.relocate.apply_binary_replacements(f, OrderedDict(prefix_map), suffix_safety_size)
        f.seek(0)
        assert f.read() == after

    # The case of having a non-null terminated common suffix.
    replace_and_expect(
        OrderedDict(
            [(b"/old-spack/opt/specific-package", b"/first"), (b"/old-spack/opt", b"/second")]
        ),
        [
            (b"/old-spack/opt/specific-package", b"/first/specific-package"),
            (b"/old-spack/opt", b"/sec/spack/opt"),
        ],
        b"Binary with /old-spack/opt/specific-package and /old-spack/opt",
        b"Binary with /first///////////////////////// and /second///////",
        b"Binary with /////////first/specific-package and /sec/spack/opt",
        suffix_safety_size=7,
    )

    # The case of having a direct null terminated common suffix.
    replace_and_expect(
        OrderedDict(
            [(b"/old-spack/opt", b"/second"), (b"/old-spack/opt/specific-package", b"/first")]
        ),
        b"Binary with /old-spack/opt/specific-package and /old-spack/opt",
        b"Binary with /second////////specific-package and /second///////",
        [
            (b"/old-spack/opt/specific-package", b"/first/specific-package"),
            (b"/old-spack/opt", b"/sec/spack/opt"),
        ],
        b"Binary with /old-spack/opt/specific-package\0 and /old-spack/opt\0",
        b"Binary with /////////first/specific-package\0 and /sec/spack/opt\0",
        suffix_safety_size=7,
    )

    # Testing the order of operations (not null terminated, long enough common suffix)
    replace_and_expect(
        [
            (b"/old-spack/opt", b"/s/spack/opt"),
            (b"/old-spack/opt/specific-package", b"/first/specific-package"),
        ],
        b"Binary with /old-spack/opt/specific-package and /old-spack/opt",
        b"Binary with ///s/spack/opt/specific-package and ///s/spack/opt",
        suffix_safety_size=7,
    )

    # Testing the order of operations (null terminated, long enough common suffix)
    replace_and_expect(
        [
            (b"/old-spack/opt", b"/s/spack/opt"),
            (b"/old-spack/opt/specific-package", b"/first/specific-package"),
        ],
        b"Binary with /old-spack/opt/specific-package\0 and /old-spack/opt\0",
        b"Binary with ///s/spack/opt/specific-package\0 and ///s/spack/opt\0",
        suffix_safety_size=7,
    )

    # Null terminated within the lookahead window, common suffix long enough
    replace_and_expect(
        [(b"/old-spack/opt/specific-package", b"/opt/specific-XXXXage")],
        b"Binary with /old-spack/opt/specific-package/sub\0 data",
        b"Binary with ///////////opt/specific-XXXXage/sub\0 data",
        suffix_safety_size=7,
    )

    # Null terminated within the lookahead window, common suffix too short, but
    # shortening is enough to spare more than 7 bytes of old suffix.
    replace_and_expect(
        [(b"/old-spack/opt/specific-package", b"/opt/specific-XXXXXge")],
        b"Binary with /old-spack/opt/specific-package/sub\0 data",
        b"Binary with /opt/specific-XXXXXge/sub\0ckage/sub\0 data",  # ckage/sub = 9 bytes
        suffix_safety_size=7,
    )

    # Null terminated within the lookahead window, common suffix too short,
    # shortening leaves exactly 7 suffix bytes untouched, amazing!
    replace_and_expect(
        [(b"/old-spack/opt/specific-package", b"/spack/specific-XXXXXge")],
        b"Binary with /old-spack/opt/specific-package/sub\0 data",
        b"Binary with /spack/specific-XXXXXge/sub\0age/sub\0 data",  # age/sub = 7 bytes
        suffix_safety_size=7,
    )

    # Null terminated within the lookahead window, common suffix too short,
    # shortening doesn't leave space for 7 bytes, sad!
    error_msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format(
        b"/old-spack/opt/specific-package",
        b"/snacks/specific-XXXXXge",
        b"/old-spack/opt/specific-package/sub",
    )
    with pytest.raises(spack.relocate.CannotShrinkCString, match=error_msg):
        replace_and_expect(
            [(b"/old-spack/opt/specific-package", b"/snacks/specific-XXXXXge")],
            b"Binary with /old-spack/opt/specific-package/sub\0 data",
            # expect failure!
            suffix_safety_size=7,
        )

    # Check that it works when changing suffix_safety_size.
    replace_and_expect(
        [(b"/old-spack/opt/specific-package", b"/snacks/specific-XXXXXXe")],
        b"Binary with /old-spack/opt/specific-package/sub\0 data",
        b"Binary with /snacks/specific-XXXXXXe/sub\0ge/sub\0 data",
        suffix_safety_size=6,
    )

    # Finally check the case of no shortening but a long enough common suffix.
    replace_and_expect(
        [(b"pkg-gwixwaalgczp6", b"pkg-zkesfralgczp6")],
        b"Binary with pkg-gwixwaalgczp6/config\0 data",
        b"Binary with pkg-zkesfralgczp6/config\0 data",
        suffix_safety_size=7,
    )

    # Too short matching suffix, identical string length
    error_msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format(
        b"pkg-gwixwaxlgczp6",
        b"pkg-zkesfrzlgczp6",
        b"pkg-gwixwaxlgczp6",
    )
    with pytest.raises(spack.relocate.CannotShrinkCString, match=error_msg):
        replace_and_expect(
            [(b"pkg-gwixwaxlgczp6", b"pkg-zkesfrzlgczp6")],
            b"Binary with pkg-gwixwaxlgczp6\0 data",
            # expect failure
            suffix_safety_size=7,
        )

    # Finally, make sure that the regex is not greedily finding the LAST null byte;
    # it should find the first null byte in the window. In this test we put one null
    # at a distance where we can't keep a long enough suffix, and one where we can,
    # so we should expect failure when the first null is used.
    error_msg = "Cannot replace {!r} with {!r} in the C-string {!r}.".format(
        b"pkg-abcdef",
        b"pkg-xyzabc",
        b"pkg-abcdef",
    )
    with pytest.raises(spack.relocate.CannotShrinkCString, match=error_msg):
        replace_and_expect(
            [(b"pkg-abcdef", b"pkg-xyzabc")],
            b"Binary with pkg-abcdef\0/xx\0",  # def\0/xx is 7 bytes.
            # expect failure
            suffix_safety_size=7,
        )

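The padding scheme behind those expected outputs deserves a note: when the new prefix is shorter than the old one, the replacement is left-padded with "/" so the stored C-string keeps its length and its path suffix stays aligned. A minimal sketch of just the padding idea; the real apply_binary_replacements additionally handles null terminators and the suffix-safety window:

def pad_replace_sketch(data: bytes, old: bytes, new: bytes) -> bytes:
    # Binaries cannot grow in place, so the new prefix must not be longer.
    assert len(new) <= len(old)
    # Left-pad with "/" to preserve total length and keep the suffix aligned.
    padded = b"/" * (len(old) - len(new)) + new
    return data.replace(old, padded)

assert (
    pad_replace_sketch(
        b"Binary with /old-spack/opt/specific-package",
        b"/old-spack/opt/specific-package",
        b"/first/specific-package",
    )
    == b"Binary with /////////first/specific-package"
)
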
@@ -18,7 +18,7 @@
if sys.platform == "darwin":
    args.extend(["/usr/bin/clang++", "install_name_tool"])
else:
    args.extend(["/usr/bin/g++", "patchelf"])
    args.extend(["g++", "patchelf"])


@pytest.mark.requires_executables(*args)

@@ -200,3 +200,22 @@ def test_spec_list_matrix_exclude(self, mock_packages):
        ]
        speclist = SpecList("specs", matrix)
        assert len(speclist.specs) == 1

    @pytest.mark.regression("22991")
    def test_spec_list_constraints_with_structure(
        self, mock_packages, mock_fetch, install_mockery
    ):
        # Setup by getting hash and installing package with dep
        libdwarf_spec = Spec("libdwarf").concretized()
        libdwarf_spec.package.do_install()

        # Create matrix
        matrix = {
            "matrix": [["mpileaks"], ["^callpath"], ["^libdwarf/%s" % libdwarf_spec.dag_hash()]]
        }

        # ensure the concrete spec was retained in the matrix entry of which
        # it is a dependency
        speclist = SpecList("specs", [matrix])
        assert len(speclist.specs) == 1
        assert libdwarf_spec in speclist.specs[0]
@@ -293,6 +293,12 @@ def test_canonicalize(self):

        self.check_parse("x ^y", "x@: ^y@:")

    def test_parse_redundant_deps(self):
        self.check_parse("x ^y@foo", "x ^y@foo ^y@foo")
        self.check_parse("x ^y@foo+bar", "x ^y@foo ^y+bar")
        self.check_parse("x ^y@foo+bar", "x ^y@foo+bar ^y")
        self.check_parse("x ^y@foo+bar", "x ^y ^y@foo+bar")

    def test_parse_errors(self):
        errors = ["x@@1.2", "x ^y@@1.2", "x@1.2::", "x::"]
        self._check_raises(SpecParseError, errors)
@@ -481,7 +487,7 @@ def test_multiple_versions(self):
        self._check_raises(MultipleVersionError, multiples)

    def test_duplicate_dependency(self):
        self._check_raises(DuplicateDependencyError, ["x ^y ^y"])
        self._check_raises(DuplicateDependencyError, ["x ^y@1 ^y@2"])

    def test_duplicate_compiler(self):
        duplicates = [

@@ -1,402 +0,0 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""This includes tests for customized activation logic for specific packages
(e.g. python and perl).
"""

import os
import sys

import pytest

from llnl.util.link_tree import MergeConflictError

import spack.package_base
import spack.spec
from spack.directory_layout import DirectoryLayout
from spack.filesystem_view import YamlFilesystemView

pytestmark = pytest.mark.skipif(
    sys.platform == "win32",
    reason="Python activation not currently supported on Windows",
)


def create_ext_pkg(name, prefix, extendee_spec, monkeypatch):
    ext_spec = spack.spec.Spec(name)
    ext_spec._concrete = True

    ext_spec.package.spec.prefix = prefix
    ext_pkg = ext_spec.package

    # temporarily override extendee_spec property on the package
    monkeypatch.setattr(ext_pkg.__class__, "extendee_spec", extendee_spec)

    return ext_pkg


def create_python_ext_pkg(name, prefix, python_spec, monkeypatch, namespace=None):
    ext_pkg = create_ext_pkg(name, prefix, python_spec, monkeypatch)
    ext_pkg.py_namespace = namespace
    return ext_pkg


def create_dir_structure(tmpdir, dir_structure):
    for fname, children in dir_structure.items():
        tmpdir.ensure(fname, dir=fname.endswith("/"))
        if children:
            create_dir_structure(tmpdir.join(fname), children)


@pytest.fixture()
def builtin_and_mock_packages():
    # These tests use mock_repo packages to test functionality of builtin
    # packages for python and perl. To test this we put the mock repo at lower
    # precedence than the builtin repo, so we test builtin.perl against
    # builtin.mock.perl-extension.
    repo_dirs = [spack.paths.packages_path, spack.paths.mock_packages_path]
    with spack.repo.use_repositories(*repo_dirs):
        yield


@pytest.fixture()
def python_and_extension_dirs(tmpdir, builtin_and_mock_packages):
    python_dirs = {"bin/": {"python": None}, "lib/": {"python2.7/": {"site-packages/": None}}}

    python_name = "python"
    python_prefix = tmpdir.join(python_name)
    create_dir_structure(python_prefix, python_dirs)

    python_spec = spack.spec.Spec("python@2.7.12")
    python_spec._concrete = True
    python_spec.package.spec.prefix = str(python_prefix)

    ext_dirs = {
        "bin/": {"py-ext-tool": None},
        "lib/": {"python2.7/": {"site-packages/": {"py-extension1/": {"sample.py": None}}}},
    }

    ext_name = "py-extension1"
    ext_prefix = tmpdir.join(ext_name)
    create_dir_structure(ext_prefix, ext_dirs)

    easy_install_location = "lib/python2.7/site-packages/easy-install.pth"
    with open(str(ext_prefix.join(easy_install_location)), "w") as f:
        f.write(
            """path/to/ext1.egg
path/to/setuptools.egg"""
        )

    return str(python_prefix), str(ext_prefix)


@pytest.fixture()
def namespace_extensions(tmpdir, builtin_and_mock_packages):
    ext1_dirs = {
        "bin/": {"py-ext-tool1": None},
        "lib/": {
            "python2.7/": {
                "site-packages/": {
                    "examplenamespace/": {"__init__.py": None, "ext1_sample.py": None}
                }
            }
        },
    }

    ext2_dirs = {
        "bin/": {"py-ext-tool2": None},
        "lib/": {
            "python2.7/": {
                "site-packages/": {
                    "examplenamespace/": {"__init__.py": None, "ext2_sample.py": None}
                }
            }
        },
    }

    ext1_name = "py-extension1"
    ext1_prefix = tmpdir.join(ext1_name)
    create_dir_structure(ext1_prefix, ext1_dirs)

    ext2_name = "py-extension2"
    ext2_prefix = tmpdir.join(ext2_name)
    create_dir_structure(ext2_prefix, ext2_dirs)

    return str(ext1_prefix), str(ext2_prefix), "examplenamespace"


def test_python_activation_with_files(
    tmpdir, python_and_extension_dirs, monkeypatch, builtin_and_mock_packages
):
    python_prefix, ext_prefix = python_and_extension_dirs

    python_spec = spack.spec.Spec("python@2.7.12")
    python_spec._concrete = True
    python_spec.package.spec.prefix = python_prefix

    ext_pkg = create_python_ext_pkg("py-extension1", ext_prefix, python_spec, monkeypatch)

    python_pkg = python_spec.package
    python_pkg.activate(ext_pkg, python_pkg.view())

    assert os.path.exists(os.path.join(python_prefix, "bin/py-ext-tool"))

    easy_install_location = "lib/python2.7/site-packages/easy-install.pth"
    with open(os.path.join(python_prefix, easy_install_location), "r") as f:
        easy_install_contents = f.read()

    assert "ext1.egg" in easy_install_contents
    assert "setuptools.egg" not in easy_install_contents


def test_python_activation_view(
    tmpdir, python_and_extension_dirs, builtin_and_mock_packages, monkeypatch
):
    python_prefix, ext_prefix = python_and_extension_dirs

    python_spec = spack.spec.Spec("python@2.7.12")
    python_spec._concrete = True
    python_spec.package.spec.prefix = python_prefix

    ext_pkg = create_python_ext_pkg("py-extension1", ext_prefix, python_spec, monkeypatch)

    view_dir = str(tmpdir.join("view"))
    layout = DirectoryLayout(view_dir)
    view = YamlFilesystemView(view_dir, layout)

    python_pkg = python_spec.package
    python_pkg.activate(ext_pkg, view)

    assert not os.path.exists(os.path.join(python_prefix, "bin/py-ext-tool"))

    assert os.path.exists(os.path.join(view_dir, "bin/py-ext-tool"))


def test_python_ignore_namespace_init_conflict(
    tmpdir, namespace_extensions, builtin_and_mock_packages, monkeypatch
):
    """Test the view update logic in PythonPackage ignores conflicting
    instances of __init__ for packages which are in the same namespace.
    """
    ext1_prefix, ext2_prefix, py_namespace = namespace_extensions

    python_spec = spack.spec.Spec("python@2.7.12")
    python_spec._concrete = True

    ext1_pkg = create_python_ext_pkg(
        "py-extension1", ext1_prefix, python_spec, monkeypatch, py_namespace
    )
    ext2_pkg = create_python_ext_pkg(
        "py-extension2", ext2_prefix, python_spec, monkeypatch, py_namespace
    )

    view_dir = str(tmpdir.join("view"))
    layout = DirectoryLayout(view_dir)
    view = YamlFilesystemView(view_dir, layout)

    python_pkg = python_spec.package
    python_pkg.activate(ext1_pkg, view)
    # Normally handled by Package.do_activate, but here we activate directly
    view.extensions_layout.add_extension(python_spec, ext1_pkg.spec)
    python_pkg.activate(ext2_pkg, view)

    f1 = "lib/python2.7/site-packages/examplenamespace/ext1_sample.py"
    f2 = "lib/python2.7/site-packages/examplenamespace/ext2_sample.py"
    init_file = "lib/python2.7/site-packages/examplenamespace/__init__.py"

    assert os.path.exists(os.path.join(view_dir, f1))
    assert os.path.exists(os.path.join(view_dir, f2))
    assert os.path.exists(os.path.join(view_dir, init_file))


def test_python_keep_namespace_init(
    tmpdir, namespace_extensions, builtin_and_mock_packages, monkeypatch
):
    """Test the view update logic in PythonPackage keeps the namespace
    __init__ file as long as one package in the namespace still
    exists.
    """
    ext1_prefix, ext2_prefix, py_namespace = namespace_extensions

    python_spec = spack.spec.Spec("python@2.7.12")
    python_spec._concrete = True

    ext1_pkg = create_python_ext_pkg(
        "py-extension1", ext1_prefix, python_spec, monkeypatch, py_namespace
    )
    ext2_pkg = create_python_ext_pkg(
        "py-extension2", ext2_prefix, python_spec, monkeypatch, py_namespace
    )

    view_dir = str(tmpdir.join("view"))
    layout = DirectoryLayout(view_dir)
    view = YamlFilesystemView(view_dir, layout)

    python_pkg = python_spec.package
    python_pkg.activate(ext1_pkg, view)
    # Normally handled by Package.do_activate, but here we activate directly
    view.extensions_layout.add_extension(python_spec, ext1_pkg.spec)
    python_pkg.activate(ext2_pkg, view)
    view.extensions_layout.add_extension(python_spec, ext2_pkg.spec)

    f1 = "lib/python2.7/site-packages/examplenamespace/ext1_sample.py"
    init_file = "lib/python2.7/site-packages/examplenamespace/__init__.py"

    python_pkg.deactivate(ext1_pkg, view)
    view.extensions_layout.remove_extension(python_spec, ext1_pkg.spec)

    assert not os.path.exists(os.path.join(view_dir, f1))
    assert os.path.exists(os.path.join(view_dir, init_file))

    python_pkg.deactivate(ext2_pkg, view)
    view.extensions_layout.remove_extension(python_spec, ext2_pkg.spec)

    assert not os.path.exists(os.path.join(view_dir, init_file))


def test_python_namespace_conflict(
    tmpdir, namespace_extensions, monkeypatch, builtin_and_mock_packages
):
    """Test the view update logic in PythonPackage reports an error when two
    python extensions with different namespaces have a conflicting __init__
    file.
    """
    ext1_prefix, ext2_prefix, py_namespace = namespace_extensions
    other_namespace = py_namespace + "other"

    python_spec = spack.spec.Spec("python@2.7.12")
    python_spec._concrete = True

    ext1_pkg = create_python_ext_pkg(
        "py-extension1", ext1_prefix, python_spec, monkeypatch, py_namespace
    )
    ext2_pkg = create_python_ext_pkg(
        "py-extension2", ext2_prefix, python_spec, monkeypatch, other_namespace
    )

    view_dir = str(tmpdir.join("view"))
    layout = DirectoryLayout(view_dir)
    view = YamlFilesystemView(view_dir, layout)

    python_pkg = python_spec.package
    python_pkg.activate(ext1_pkg, view)
    view.extensions_layout.add_extension(python_spec, ext1_pkg.spec)
    with pytest.raises(MergeConflictError):
        python_pkg.activate(ext2_pkg, view)


@pytest.fixture()
def perl_and_extension_dirs(tmpdir, builtin_and_mock_packages):
    perl_dirs = {
        "bin/": {"perl": None},
        "lib/": {"site_perl/": {"5.24.1/": {"x86_64-linux/": None}}},
    }

    perl_name = "perl"
    perl_prefix = tmpdir.join(perl_name)
    create_dir_structure(perl_prefix, perl_dirs)

    perl_spec = spack.spec.Spec("perl@5.24.1")
    perl_spec._concrete = True
    perl_spec.package.spec.prefix = str(perl_prefix)

    ext_dirs = {
        "bin/": {"perl-ext-tool": None},
        "lib/": {"site_perl/": {"5.24.1/": {"x86_64-linux/": {"TestExt/": {}}}}},
    }

    ext_name = "perl-extension"
    ext_prefix = tmpdir.join(ext_name)
    create_dir_structure(ext_prefix, ext_dirs)

    return str(perl_prefix), str(ext_prefix)


def test_perl_activation(tmpdir, builtin_and_mock_packages, monkeypatch):
    # Note the lib directory is based partly on the perl version
    perl_spec = spack.spec.Spec("perl@5.24.1")
    perl_spec._concrete = True

    perl_name = "perl"
    tmpdir.ensure(perl_name, dir=True)

    perl_prefix = str(tmpdir.join(perl_name))
    # Set the prefix on the package's spec reference because that is a copy of
    # the original spec
    perl_spec.package.spec.prefix = perl_prefix

    ext_name = "perl-extension"
    tmpdir.ensure(ext_name, dir=True)
    ext_pkg = create_ext_pkg(ext_name, str(tmpdir.join(ext_name)), perl_spec, monkeypatch)

    perl_pkg = perl_spec.package
    perl_pkg.activate(ext_pkg, perl_pkg.view())


def test_perl_activation_with_files(
    tmpdir, perl_and_extension_dirs, monkeypatch, builtin_and_mock_packages
):
    perl_prefix, ext_prefix = perl_and_extension_dirs

    perl_spec = spack.spec.Spec("perl@5.24.1")
    perl_spec._concrete = True
    perl_spec.package.spec.prefix = perl_prefix

    ext_pkg = create_ext_pkg("perl-extension", ext_prefix, perl_spec, monkeypatch)

    perl_pkg = perl_spec.package
    perl_pkg.activate(ext_pkg, perl_pkg.view())

    assert os.path.exists(os.path.join(perl_prefix, "bin/perl-ext-tool"))


def test_perl_activation_view(
    tmpdir, perl_and_extension_dirs, monkeypatch, builtin_and_mock_packages
):
    perl_prefix, ext_prefix = perl_and_extension_dirs

    perl_spec = spack.spec.Spec("perl@5.24.1")
    perl_spec._concrete = True
    perl_spec.package.spec.prefix = perl_prefix

    ext_pkg = create_ext_pkg("perl-extension", ext_prefix, perl_spec, monkeypatch)

    view_dir = str(tmpdir.join("view"))
    layout = DirectoryLayout(view_dir)
    view = YamlFilesystemView(view_dir, layout)

    perl_pkg = perl_spec.package
    perl_pkg.activate(ext_pkg, view)

    assert not os.path.exists(os.path.join(perl_prefix, "bin/perl-ext-tool"))

    assert os.path.exists(os.path.join(view_dir, "bin/perl-ext-tool"))


def test_is_activated_upstream_extendee(tmpdir, builtin_and_mock_packages, monkeypatch):
    """When an extendee is installed upstream, make sure that the extension
    spec is never considered to be globally activated for it.
    """
    extendee_spec = spack.spec.Spec("python")
    extendee_spec._concrete = True

    python_name = "python"
    tmpdir.ensure(python_name, dir=True)

    python_prefix = str(tmpdir.join(python_name))
    # Set the prefix on the package's spec reference because that is a copy of
    # the original spec
    extendee_spec.package.spec.prefix = python_prefix
    monkeypatch.setattr(extendee_spec.__class__, "installed_upstream", True)

    ext_name = "py-extension1"
    tmpdir.ensure(ext_name, dir=True)
    ext_pkg = create_ext_pkg(ext_name, str(tmpdir.join(ext_name)), extendee_spec, monkeypatch)

    # The view should not be checked at all if the extendee is installed
    # upstream, so use 'None' here
    mock_view = None
    assert not ext_pkg.is_activated(mock_view)
@@ -32,6 +32,27 @@ def test_write_and_read_cache_file(file_cache):
    assert text == "foobar\n"


@pytest.mark.skipif(sys.platform == "win32", reason="Locks not supported on Windows")
def test_failed_write_and_read_cache_file(file_cache):
    """Test failing to write then attempting to read a cached file."""
    with pytest.raises(RuntimeError, match=r"^foobar$"):
        with file_cache.write_transaction("test.yaml") as (old, new):
            assert old is None
            assert new is not None
            raise RuntimeError("foobar")

    # Cache dir should have exactly one (lock) file
    assert os.listdir(file_cache.root) == [".test.yaml.lock"]

    # File does not exist
    assert not file_cache.init_entry("test.yaml")

    # Attempting to read will cause a file not found error
    with pytest.raises((IOError, OSError), match=r"test\.yaml"):
        with file_cache.read_transaction("test.yaml"):
            pass


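For contrast with the failure path above, a usage sketch of a successful transaction; it relies only on calls the tests make, plus the assumption that the second element of the (old, new) tuple is a writable stream:

with file_cache.write_transaction("test.yaml") as (old, new):
    new.write("foobar\n")  # assumed writable stream, per the (old, new) tuple

with file_cache.read_transaction("test.yaml") as f:
    assert f.read() == "foobar\n"  # matches the read checked further above
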
def test_write_and_remove_cache_file(file_cache):
    """Test two write transactions on a cached file. Then try to remove an
    entry from it.

Some files were not shown because too many files have changed in this diff