Compare commits: e4s-22.05...features/r (213 commits)
Commit SHA1s (author and date columns did not survive extraction):

719d31682f 1190d03b0f 5faa927fe6 c60d220f81 61d3d60414 174258c09a
73c6a8f73d 86dc904080 9e1c87409d 2b30dc2e30 b1ce756d69 1194ac6985
954f961208 47ac710796 d7fb5a6db4 e0624b9278 e86614f7b8 d166b948ce
9cc3a2942d 5d685f9ff6 9ddf45964d b88cc77f16 f3af38ba9b adc9f887ea
9461f482d9 e014b889c6 181ac574bb 055c9d125d a94438b1f5 f583e471b8
f67f3b1796 77c86c759c 8084259bd3 98860c6a5f e6929b9ff9 18c2f1a57a
3054cd0eff 9016b79270 9f5c6fb398 19087c9d35 4116b04368 1485931695
78cac4d840 2f628c3a97 a3a8710cbe 0bf3a9c2af cff955f7bd 3d43ebec72
6fd07479e3 03bc36f8b0 93e1b283b7 df2c0fbfbd 54a69587c3 294312f02b
0636fdbfef 85e13260cf b5a519fa51 2e2d0b3211 d51f949768 1b955e66c1
5f8a3527e7 ec02369dba 8ceac2ba9b 85eeed650e 14d4203722 1bc742c13e
2712ea6299 a9c064cd7e 17fc244cba 334c786b52 492541b9cb 369f825523
aba9149b71 b29f27aec7 0176d9830d 0c9370ce72 3620204db6 13984a4e8d
d5c68fdc0d 93649f6b68 d367f1e787 1c44999192 ad506ac2a8 806521b4a0
70824e4a5e 0fe5e72744 ba907defca 87b078d1f3 54ea1f4bf6 067800bc31
0d2eae8da0 f2a81af70e 494e567fe5 6a57aede57 ba701a7cf8 557845cccc
c5297523af 6883868896 1c5587f72d 6e7eb49888 3df4a32c4f 95b03e7bc9
817ee81eaa 330832c22c 306bed48d7 63402c512b 736fddc079 036048c26f
8616ba04db 07e9c0695a f24886acb5 5031578c39 7c4cc1c71c f7258e246f
ff980a1452 51130abf86 383356452b 5fc1547886 68cd6c72c7 3d2ff57e7b
3bc656808c 7ded692a76 aa3c7a138a 42441cddcc b78025345b 2113b625d1
c6c3d243e1 870b997cb6 c9492f1cd4 24f370491e d688a699fa 4fbb822072
f86c481280 91a99882b3 74bef2105a 630ebb9d8b 183465321e 580f9ec86e
0b0920bc90 ee04a1ab0b 55f4950ed4 23960ed623 fb2730d87f 30f2394782
262c3f07bf b018eb041f 3f4398dd67 a225a5b276 c9cfc548da 3b30886a3a
c2fd98ccd2 a0fe6ab2ed c3be777ea8 8fe39be3df f5250da611 c2af154cd2
1f6b880fff 2c211d95ee c46f673c16 8ff2b4b747 9e05dde28c b1ef5a75f0
f9aa7c611c 9a2e01e22d 2090351d7f c775c322ec 1185eb9199 51fa8e7b5e
f505c50770 2fdc817f03 e1d0b35d5b ace5a7c4bf a91ae8cafe b9e3ee6dd0
10ea0a2a3e d6f8ffc6bc 02be2f27d1 dfd0702aec f454a683b5 d7d0c892d8
d566330a33 446cbf4b5a 5153c9e98c d74f2d0be5 021b65d76f 45312d49be
3fcd85efe9 6f3a082c3e 23e2820547 22b999fcd4 1df7de62ca 97ec8f1d19
63b6e484fc 2b12d19314 c37fcccd7c 6034b5afc2 17bc937083 ad8db0680d
4f033b155b ad829ccee1 4b60a17174 edb91f4077 0fdc3bf420 8b34cabb16
77fb651e01 35ed7973e2 e73b19024f 7803bc9e5f 55c400297c 8686e18494
d28967bbf3 5f928f71c0 dc7bdf5f24
.github/workflows/bootstrap.yml (vendored, 11 changes)

@@ -24,6 +24,7 @@ jobs:
   fedora-clingo-sources:
     runs-on: ubuntu-latest
     container: "fedora:latest"
+    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         run: |
@@ -57,6 +58,7 @@ jobs:
   ubuntu-clingo-sources:
     runs-on: ubuntu-latest
     container: "ubuntu:latest"
+    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         env:
@@ -93,6 +95,7 @@ jobs:
   ubuntu-clingo-binaries-and-patchelf:
     runs-on: ubuntu-latest
     container: "ubuntu:latest"
+    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         env:
@@ -126,6 +129,7 @@ jobs:
   opensuse-clingo-sources:
     runs-on: ubuntu-latest
     container: "opensuse/leap:latest"
+    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         run: |
@@ -154,6 +158,7 @@ jobs:

   macos-clingo-sources:
     runs-on: macos-latest
+    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         run: |
@@ -174,6 +179,7 @@ jobs:
     strategy:
       matrix:
         python-version: ['3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
+    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         run: |
@@ -195,6 +201,7 @@ jobs:
     strategy:
       matrix:
         python-version: ['2.7', '3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
+    if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
         uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
@@ -216,6 +223,7 @@ jobs:
   ubuntu-gnupg-binaries:
     runs-on: ubuntu-latest
     container: "ubuntu:latest"
+    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         env:
@@ -250,6 +258,7 @@ jobs:
   ubuntu-gnupg-sources:
     runs-on: ubuntu-latest
     container: "ubuntu:latest"
+    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         env:
@@ -285,6 +294,7 @@ jobs:

   macos-gnupg-binaries:
     runs-on: macos-latest
+    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         run: |
@@ -302,6 +312,7 @@ jobs:

   macos-gnupg-sources:
     runs-on: macos-latest
+    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         run: |
.github/workflows/build-containers.yml (vendored, 5 changes)

@@ -43,6 +43,7 @@ jobs:
           [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
           [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04']]
     name: Build ${{ matrix.dockerfile[0] }}
+    if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
         uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
@@ -75,7 +76,7 @@ jobs:
           fi

       - name: Upload Dockerfile
-        uses: actions/upload-artifact@6673cd052c4cd6fcf4b4e6e60ea986c889389535
+        uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
         with:
           name: dockerfiles
           path: dockerfiles
@@ -94,7 +95,7 @@ jobs:
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Log in to DockerHub
-        if: ${{ github.event_name != 'pull_request' }}
+        if: github.event_name != 'pull_request'
         uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b # @v1
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
.github/workflows/macos_python.yml (vendored, 3 changes)

@@ -22,6 +22,7 @@ on:
 jobs:
   install_gcc:
     name: gcc with clang
+    if: github.repository == 'spack/spack'
     runs-on: macos-latest
     steps:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
@@ -36,6 +37,7 @@ jobs:

   install_jupyter_clang:
     name: jupyter
+    if: github.repository == 'spack/spack'
     runs-on: macos-latest
     timeout-minutes: 700
     steps:
@@ -50,6 +52,7 @@ jobs:

   install_scipy_clang:
     name: scipy, mpl, pd
+    if: github.repository == 'spack/spack'
     runs-on: macos-latest
     steps:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
.github/workflows/setup_git.ps1 (vendored, 1 change)

@@ -4,6 +4,7 @@ Set-Location spack

 git config --global user.email "spack@example.com"
 git config --global user.name "Test User"
+git config --global core.longpaths true

 if ($(git branch --show-current) -ne "develop")
 {
.github/workflows/windows_python.yml (vendored, 4 changes)

@@ -139,11 +139,11 @@ jobs:
           echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
         env:
           ProgressPreference: SilentlyContinue
-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
        with:
          name: Windows Spack Installer Bundle
          path: ${{ env.installer_root }}\pkg\Spack.exe
-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
        with:
          name: Windows Spack Installer
          path: ${{ env.installer_root}}\pkg\Spack.msi
CHANGELOG.md (204 changes)
@@ -1,3 +1,205 @@
# v0.18.0 (2022-05-28)

`v0.18.0` is a major feature release.

## Major features in this release

1. **Concretizer now reuses by default**

   `spack install --reuse` was introduced in `v0.17.0`, and `--reuse`
   is now the default concretization mode. Spack will try hard to
   resolve dependencies using installed packages or binaries (#30396).

   To avoid reuse and to use the latest package configurations (the
   old default), you can use `spack install --fresh`, or add
   configuration like this to your environment or `concretizer.yaml`:

   ```yaml
   concretizer:
     reuse: false
   ```
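   A minimal invocation sketch of the two modes (the package name is
   just an example):

   ```console
   $ spack install --reuse hdf5   # v0.18 default: prefer installed dependencies
   $ spack install --fresh hdf5   # previous default: prefer latest configurations
   ```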
2. **Finer-grained hashes**

   Spack hashes now include `link`, `run`, *and* `build` dependencies,
   as well as a canonical hash of package recipes. Previously, hashes
   only included `link` and `run` dependencies (though `build`
   dependencies were stored by environments). We coarsened the hash to
   reduce churn in user installations, but the new default concretizer
   behavior mitigates this concern and gets us reuse *and* provenance.
   You will be able to see the build dependencies of new installations
   with `spack find`. Old installations will not change and their
   hashes will not be affected. (#28156, #28504, #30717, #30861)

3. **Improved error messages**

   Error handling with the new concretizer is now done with
   optimization criteria rather than with unsatisfiable cores, and
   Spack reports many more details about conflicting constraints.
   (#30669)

4. **Unify environments when possible**

   Environments have thus far supported `concretization: together` or
   `concretization: separately`. These have been replaced by a new
   preference in `concretizer.yaml`:

   ```yaml
   concretizer:
     unify: [true|false|when_possible]
   ```

   `concretizer:unify:when_possible` will *try* to resolve a fully
   unified environment, but if it cannot, it will create multiple
   configurations of some packages where it has to. For large
   environments that previously had to be concretized separately, this
   can result in a huge speedup (40-50x). (#28941)

5. **Automatically find externals on Cray machines**

   Spack can now automatically discover installed packages in the Cray
   Programming Environment by running `spack external find` (or `spack
   external read-cray-manifest` to *only* query the PE). Packages from
   the PE (e.g., `cray-mpich`) are added to the database with full
   dependency information, and compilers from the PE are added to
   `compilers.yaml`. Available with the June 2022 release of the Cray
   Programming Environment. (#24894, #30428)
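   Both commands are quoted above; a minimal invocation sketch:

   ```console
   $ spack external find                 # detect externals, including the Cray PE
   $ spack external read-cray-manifest   # query only the Cray PE manifest
   ```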
6. **New binary format and hardened signing**

   Spack now has an updated binary format, with improvements for
   security. The new format has a detached signature file, and Spack
   verifies the signature before untarring or decompressing the binary
   package. The previous format embedded the signature in a `tar`
   file, which required the client to run `tar` *before* verifying
   (#30750). Spack can still install from build caches using the old
   format, but we encourage users to switch to the new format going
   forward.

   Production GitLab pipelines have been hardened to securely sign
   binaries. There is now a separate signing stage so that signing
   keys are never exposed to build system code, and signing keys are
   ephemeral and only live as long as the signing pipeline stage.
   (#30753)

7. **Bootstrap mirror generation**

   The `spack bootstrap mirror` command can automatically create a
   mirror for bootstrapping the concretizer and other needed
   dependencies in an air-gapped environment. (#28556)

8. **Nascent Windows support**

   Spack now has initial support for Windows. Spack core has been
   refactored to run in the Windows environment, and a small number of
   packages can now build for Windows. More details are
   [in the documentation](https://spack.rtfd.io/en/latest/getting_started.html#spack-on-windows)
   (#27021, #28385, many more)

9. **Makefile generation**

   `spack env depfile` can be used to generate a `Makefile` from an
   environment, which can be used to build the packages in the
   environment in parallel on a single node. e.g.:

   ```console
   spack -e myenv env depfile > Makefile
   make
   ```

   Spack propagates `gmake` jobserver information to builds so that
   their jobs can share cores. (#30039, #30254, #30302, #30526)

10. **New variant features**

    In addition to being conditional themselves, variants can now have
    [conditional *values*](https://spack.readthedocs.io/en/latest/packaging_guide.html#conditional-possible-values)
    that are only possible for certain configurations of a package. (#29530)

    Variants can be
    [declared "sticky"](https://spack.readthedocs.io/en/latest/packaging_guide.html#sticky-variants),
    which prevents them from being enabled or disabled by the
    concretizer. Sticky variants must be set explicitly by users
    on the command line or in `packages.yaml`. (#28630)

    * Allow conditional possible values in variants (see the sketch below)
    * Add a "sticky" property to variants
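    A hedged sketch of both features in a hypothetical package recipe
    (the package, variant names, and version bound are invented):

    ```python
    from spack.package import *


    class Foo(Package):
        """Hypothetical package exercising the v0.18 variant features."""

        # "17" only becomes a possible value for specs at version 2.0 or newer.
        variant(
            "cxxstd",
            default="11",
            values=("11", "14", conditional("17", when="@2.0:")),
            multi=False,
            description="C++ standard to build with",
        )
        # The concretizer may never flip a sticky variant; only users can.
        variant("bar", default=False, sticky=True, description="enable bar support")
    ```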
## Other new features of note

* Environment views can optionally link only `run` dependencies
  with `link:run` (#29336)
* `spack external find --all` finds library-only packages in
  addition to build dependencies (#28005)
* Customizable `config:license_dir` option (#30135)
* `spack external find --path PATH` takes a custom search path (#30479)
* `spack spec` has a new `--format` argument like `spack find` (#27908)
* `spack concretize --quiet` skips printing concretized specs (#30272)
* `spack info` now has cleaner output and displays test info (#22097)
* Package-level submodule option for git commit versions (#30085, #30037)
* Using `/hash` syntax to refer to concrete specs in an environment
  now works even if `/hash` is not installed. (#30276)

## Major internal refactors

* full hash (see above)
* new develop versioning scheme `0.19.0-dev0`
* Allow for multiple dependencies/dependents from the same package (#28673)
* Splice differing virtual packages (#27919)

## Performance Improvements

* Concretization of large environments with `unify: when_possible` is
  much faster than concretizing separately (#28941, see above)
* Single-pass view generation algorithm is 2.6x faster (#29443)

## Archspec improvements

* `oneapi` and `dpcpp` flag support (#30783)
* better support for `M1` and `a64fx` (#30683)

## Removals and Deprecations

* Spack no longer supports Python `2.6` (#27256)
* Removed deprecated `--run-tests` option of `spack install`;
  use `spack test` (#30461)
* Removed deprecated `spack flake8`; use `spack style` (#27290)
* Deprecate `spack:concretization` config option; use
  `concretizer:unify` (#30038)
* Deprecate top-level module configuration; use module sets (#28659)
* `spack activate` and `spack deactivate` are deprecated in favor of
  environments; will be removed in `0.19.0` (#29430; see also `link:run`
  in #29336 above)

## Notable Bugfixes

* Fix bug that broke locks with many parallel builds (#27846)
* Many bugfixes and consistency improvements for the new concretizer
  and `--reuse` (#30357, #30092, #29835, #29933, #28605, #29694, #28848)

## Packages

* `CMakePackage` uses `CMAKE_INSTALL_RPATH_USE_LINK_PATH` (#29703)
* Refactored `lua` support: `lua-lang` virtual supports both
  `lua` and `luajit` via new `LuaPackage` build system (#28854)
* PythonPackage: now installs packages with `pip` (#27798)
* Python: improve site_packages_dir handling (#28346)
* Extends: support spec, not just package name (#27754)
* `find_libraries`: search for both .so and .dylib on macOS (#28924)
* Use stable URLs and `?full_index=1` for all github patches (#29239)

## Spack community stats

* 6,416 total packages, 458 new since `v0.17.0`
  * 219 new Python packages
  * 60 new R packages
* 377 people contributed to this release
  * 337 committers to packages
  * 85 committers to core


# v0.17.2 (2022-04-13)

### Spack bugfixes
@@ -11,7 +213,7 @@
 * Fixed a few bugs affecting the spack ci command (#29518, #29419)
 * Fix handling of Intel compiler environment (#29439)
 * Fix a few edge cases when reindexing the DB (#28764)
 * Remove "Known issues" from documentation (#29664)
 * Other miscellaneous bugfixes (0b72e070583fc5bcd016f5adc8a84c99f2b7805f, #28403, #29261)

# v0.17.1 (2021-12-23)
@@ -6,34 +6,15 @@ bootstrap:
   # by Spack is installed in a "store" subfolder of this root directory
   root: $user_cache_path/bootstrap
   # Methods that can be used to bootstrap software. Each method may or
-  # may not be able to bootstrap all of the software that Spack needs,
+  # may not be able to bootstrap all the software that Spack needs,
   # depending on its type.
   sources:
   - name: 'github-actions-v0.2'
-    type: buildcache
-    description: |
-      Buildcache generated from a public workflow using Github Actions.
-      The sha256 checksum of binaries is checked before installation.
-    info:
-      url: https://mirror.spack.io/bootstrap/github-actions/v0.2
-      homepage: https://github.com/spack/spack-bootstrap-mirrors
-      releases: https://github.com/spack/spack-bootstrap-mirrors/releases
+    metadata: $spack/share/spack/bootstrap/github-actions-v0.2
   - name: 'github-actions-v0.1'
-    type: buildcache
-    description: |
-      Buildcache generated from a public workflow using Github Actions.
-      The sha256 checksum of binaries is checked before installation.
-    info:
-      url: https://mirror.spack.io/bootstrap/github-actions/v0.1
-      homepage: https://github.com/spack/spack-bootstrap-mirrors
-      releases: https://github.com/spack/spack-bootstrap-mirrors/releases
-  # This method is just Spack bootstrapping the software it needs from sources.
-  # It has been added here so that users can selectively disable bootstrapping
-  # from sources by "untrusting" it.
-  - name: spack-install
-    type: install
-    description: |
-      Specs built from sources by Spack. May take a long time.
+    metadata: $spack/share/spack/bootstrap/github-actions-v0.1
+  - name: 'spack-install'
+    metadata: $spack/share/spack/bootstrap/spack-install
   trusted:
     # By default we trust bootstrapping from sources and from binaries
     # produced on Github via the workflow
@@ -28,3 +28,9 @@ concretizer:
   # instance concretize with target "icelake" while running on "haswell").
   # If "true" only allow targets that are compatible with the host.
   host_compatible: true
+  # When "true" concretize root specs of environments together, so that each unique
+  # package in an environment corresponds to one concrete spec. This ensures
+  # environments can always be activated. When "false" perform concretization separately
+  # on each root spec, allowing different versions and variants of the same package in
+  # an environment.
+  unify: false
@@ -33,6 +33,9 @@ config:
   template_dirs:
     - $spack/share/spack/templates

+  # Directory where licenses should be located
+  license_dir: $spack/etc/spack/licenses
+
   # Temporary locations Spack can try to use for builds.
   #
   # Recommended options are given below.
@@ -50,6 +50,13 @@ build cache files for the "ninja" spec:
 Note that the targeted spec must already be installed. Once you have a build cache,
 you can add it as a mirror, discussed next.

+.. warning::
+
+   Spack improved the format used for binary caches in v0.18. The entire v0.18 series
+   will be able to verify and install binary caches both in the new and in the old format.
+   Support for using the old format is expected to end in v0.19, so we advise users to
+   recreate relevant buildcaches using Spack v0.18 or higher.
+
 ---------------------------------------
 Finding or installing build cache files
 ---------------------------------------
lib/spack/docs/bootstrapping.rst (new file, 160 lines)

@@ -0,0 +1,160 @@
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _bootstrapping:

=============
Bootstrapping
=============

In the :ref:`Getting started <getting_started>` section we already mentioned that
Spack can bootstrap some of its dependencies, including ``clingo``. In fact, there
is an entire command dedicated to the management of every aspect of bootstrapping:

.. command-output:: spack bootstrap --help

The first thing to know to understand bootstrapping in Spack is that each of
Spack's dependencies is bootstrapped lazily, i.e. the first time it is needed and
cannot be found. You can readily check if any prerequisite for using Spack
is missing by running:

.. code-block:: console

   % spack bootstrap status
   Spack v0.17.1 - python@3.8

   [FAIL] Core Functionalities
     [B] MISSING "clingo": required to concretize specs

   [FAIL] Binary packages
     [B] MISSING "gpg2": required to sign/verify buildcaches

Spack will take care of bootstrapping any missing dependency marked as [B].
Dependencies marked as [-] are instead required to be found on the system.

In the case of the output shown above Spack detected that both ``clingo`` and ``gnupg``
are missing, and it gives detailed information on why they are needed and whether
they can be bootstrapped. Running a command that concretizes a spec, like:

.. code-block:: console

   % spack solve zlib
   ==> Bootstrapping clingo from pre-built binaries
   ==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.1/build_cache/darwin-catalina-x86_64/apple-clang-12.0.0/clingo-bootstrap-spack/darwin-catalina-x86_64-apple-clang-12.0.0-clingo-bootstrap-spack-p5on7i4hejl775ezndzfdkhvwra3hatn.spack
   ==> Installing "clingo-bootstrap@spack%apple-clang@12.0.0~docs~ipo+python build_type=Release arch=darwin-catalina-x86_64" from a buildcache
   [ ... ]

triggers the bootstrapping of clingo from pre-built binaries as expected.

-----------------------
The Bootstrapping store
-----------------------

The software installed for bootstrapping purposes is deployed in a separate store.
Its location can be checked with the following command:

.. code-block:: console

   % spack bootstrap root

It can also be changed with the same command by just specifying the newly desired path:

.. code-block:: console

   % spack bootstrap root /opt/spack/bootstrap

You can check what is installed in the bootstrapping store at any time using:

.. code-block:: console

   % spack find -b
   ==> Showing internal bootstrap store at "/Users/spack/.spack/bootstrap/store"
   ==> 11 installed packages
   -- darwin-catalina-x86_64 / apple-clang@12.0.0 ------------------
   clingo-bootstrap@spack  libassuan@2.5.5  libgpg-error@1.42  libksba@1.5.1  pinentry@1.1.1  zlib@1.2.11
   gnupg@2.3.1             libgcrypt@1.9.3  libiconv@1.16      npth@1.6       python@3.8

In case it is needed you can remove all the software in the current bootstrapping store with:

.. code-block:: console

   % spack clean -b
   ==> Removing bootstrapped software and configuration in "/Users/spack/.spack/bootstrap"

   % spack find -b
   ==> Showing internal bootstrap store at "/Users/spack/.spack/bootstrap/store"
   ==> 0 installed packages

--------------------------------------------
Enabling and disabling bootstrapping methods
--------------------------------------------

Bootstrapping is always performed by trying the methods listed by:

.. command-output:: spack bootstrap list

in the order they appear, from top to bottom. By default Spack is
configured to try bootstrapping from pre-built binaries first and to
fall back to bootstrapping from sources if that fails.

If need be, you can disable bootstrapping altogether by running:

.. code-block:: console

   % spack bootstrap disable

in which case it's your responsibility to ensure Spack runs in an
environment where all its prerequisites are installed. You can
also configure Spack to skip certain bootstrapping methods by *untrusting*
them. For instance:

.. code-block:: console

   % spack bootstrap untrust github-actions
   ==> "github-actions" is now untrusted and will not be used for bootstrapping

tells Spack to skip trying to bootstrap from binaries. To add the "github-actions" method back you can:

.. code-block:: console

   % spack bootstrap trust github-actions

There is also an option to reset the bootstrapping configuration to Spack's defaults:

.. code-block:: console

   % spack bootstrap reset
   ==> Bootstrapping configuration is being reset to Spack's defaults. Current configuration will be lost.
   Do you want to continue? [Y/n]
   %

----------------------------------------
Creating a mirror for air-gapped systems
----------------------------------------

Spack's default configuration for bootstrapping relies on the user having
access to the internet, either to fetch pre-compiled binaries or source tarballs.
Sometimes, though, Spack is deployed on air-gapped systems where such access is denied.

To help with similar situations Spack has a command that recreates, in a local folder
of choice, a mirror containing the source tarballs and/or binary packages needed for
bootstrapping:

.. code-block:: console

   % spack bootstrap mirror --binary-packages /opt/bootstrap
   ==> Adding "clingo-bootstrap@spack+python %apple-clang target=x86_64" and dependencies to the mirror at /opt/bootstrap/local-mirror
   ==> Adding "gnupg@2.3: %apple-clang target=x86_64" and dependencies to the mirror at /opt/bootstrap/local-mirror
   ==> Adding "patchelf@0.13.1:0.13.99 %apple-clang target=x86_64" and dependencies to the mirror at /opt/bootstrap/local-mirror
   ==> Adding binary packages from "https://github.com/alalazo/spack-bootstrap-mirrors/releases/download/v0.1-rc.2/bootstrap-buildcache.tar.gz" to the mirror at /opt/bootstrap/local-mirror

   To register the mirror on the platform where it's supposed to be used run the following command(s):
     % spack bootstrap add --trust local-sources /opt/bootstrap/metadata/sources
     % spack bootstrap add --trust local-binaries /opt/bootstrap/metadata/binaries

This command needs to be run on a machine with internet access and the resulting folder
has to be moved over to the air-gapped system. Once the local sources are added using the
commands suggested at the prompt, they can be used to bootstrap Spack.
@@ -39,6 +39,7 @@ on these ideas for each distinct build system that Spack supports:

    build_systems/autotoolspackage
    build_systems/cmakepackage
+   build_systems/cachedcmakepackage
    build_systems/mesonpackage
    build_systems/qmakepackage
    build_systems/sippackage
lib/spack/docs/build_systems/cachedcmakepackage.rst (new file, 123 lines)

@@ -0,0 +1,123 @@
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _cachedcmakepackage:

------------------
CachedCMakePackage
------------------

The CachedCMakePackage base class is used for CMake-based workflows
that create a CMake cache file prior to running ``cmake``. This is
useful for packages with arguments longer than the system limit, and
for reproducibility.

The documentation for this class assumes that the user is familiar with
the ``CMakePackage`` class from which it inherits. See the documentation
for :ref:`CMakePackage <cmakepackage>`.

^^^^^^
Phases
^^^^^^

The ``CachedCMakePackage`` base class comes with the following phases:

#. ``initconfig`` - generate the CMake cache file
#. ``cmake`` - generate the Makefile
#. ``build`` - build the package
#. ``install`` - install the package

By default, these phases run:

.. code-block:: console

   $ mkdir spack-build
   $ cd spack-build
   $ cat << EOF > name-arch-compiler@version.cmake
   # Write information on compilers and dependencies
   # includes information on mpi and cuda if applicable
   EOF
   $ cmake .. -DCMAKE_INSTALL_PREFIX=/path/to/installation/prefix -C name-arch-compiler@version.cmake
   $ make
   $ make test  # optional
   $ make install

The ``CachedCMakePackage`` class inherits from the ``CMakePackage``
class, and accepts all of the same options and adds all of the same
flags to the ``cmake`` command. Similar to the ``CMakePackage`` class,
you may need to add a few arguments yourself, and the
``CachedCMakePackage`` provides the same interface to add those
flags.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Adding entries to the CMake cache
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

In addition to adding flags to the ``cmake`` command, you may need to
add entries to the CMake cache in the ``initconfig`` phase. This can
be done by overriding one of four methods:

#. ``CachedCMakePackage.initconfig_compiler_entries``
#. ``CachedCMakePackage.initconfig_mpi_entries``
#. ``CachedCMakePackage.initconfig_hardware_entries``
#. ``CachedCMakePackage.initconfig_package_entries``

Each of these methods returns a list of CMake cache strings. The
distinction between these methods is merely to provide a
well-structured and legible cmake cache file -- otherwise, entries
from each of these methods are handled identically.

Spack also provides convenience methods for generating CMake cache
entries. These methods are available at module scope in every Spack
package. Because CMake parses boolean options, strings, and paths
differently, there are three such methods:

#. ``cmake_cache_option``
#. ``cmake_cache_string``
#. ``cmake_cache_path``

These methods each accept three parameters -- the name of the CMake
variable associated with the entry, the value of the entry, and an
optional comment -- and return strings in the appropriate format to be
returned from any of the ``initconfig*`` methods. Additionally, these
methods may return comments beginning with the ``#`` character.

A typical usage of these methods may look something like this:

.. code-block:: python

   def initconfig_mpi_entries(self):
       # Get existing MPI configurations
       entries = super(Foo, self).initconfig_mpi_entries()

       # The existing MPI configurations key on whether ``mpi`` is in the spec
       # This spec has an MPI variant, and we need to enable MPI when it is on.
       # This hypothetical package controls MPI with the ``FOO_MPI`` option to
       # cmake.
       if '+mpi' in self.spec:
           entries.append(cmake_cache_option('FOO_MPI', True, "enable mpi"))
       else:
           entries.append(cmake_cache_option('FOO_MPI', False, "disable mpi"))
       return entries

   def initconfig_package_entries(self):
       # Package specific options
       entries = []

       entries.append('#Entries for build options')

       bar_on = '+bar' in self.spec
       entries.append(cmake_cache_option('FOO_BAR', bar_on, 'toggle bar'))

       entries.append('#Entries for dependencies')

       if self.spec['blas'].name == 'baz':  # baz is our blas provider
           entries.append(cmake_cache_string('FOO_BLAS', 'baz', 'Use baz'))
           entries.append(cmake_cache_path('BAZ_PREFIX', self.spec['baz'].prefix))
       return entries

^^^^^^^^^^^^^^^^^^^^^^
External documentation
^^^^^^^^^^^^^^^^^^^^^^

For more information on CMake cache files, see:
https://cmake.org/cmake/help/latest/manual/cmake.1.html
@@ -59,7 +59,8 @@ other techniques to minimize the size of the final image:
 &&   echo "  specs:" \
 &&   echo "  - gromacs+mpi" \
 &&   echo "  - mpich" \
-&&   echo "  concretization: together" \
+&&   echo "  concretizer:" \
+&&   echo "    unify: true" \
 &&   echo "  config:" \
 &&   echo "    install_tree: /opt/software" \
 &&   echo "  view: /opt/view") > /opt/spack-environment/spack.yaml
@@ -245,7 +246,8 @@ software is respectively built and installed:
 &&   echo "  specs:" \
 &&   echo "  - gromacs+mpi" \
 &&   echo "  - mpich" \
-&&   echo "  concretization: together" \
+&&   echo "  concretizer:" \
+&&   echo "    unify: true" \
 &&   echo "  config:" \
 &&   echo "    install_tree: /opt/software" \
 &&   echo "  view: /opt/view") > /opt/spack-environment/spack.yaml
@@ -366,7 +368,8 @@ produces, for instance, the following ``Dockerfile``:
 &&   echo "    externals:" \
 &&   echo "    - spec: cuda%gcc" \
 &&   echo "      prefix: /usr/local/cuda" \
-&&   echo "  concretization: together" \
+&&   echo "  concretizer:" \
+&&   echo "    unify: true" \
 &&   echo "  config:" \
 &&   echo "    install_tree: /opt/software" \
 &&   echo "  view: /opt/view") > /opt/spack-environment/spack.yaml
@@ -151,7 +151,7 @@ Package-related modules
 ^^^^^^^^^^^^^^^^^^^^^^^

 :mod:`spack.package`
-  Contains the :class:`~spack.package.Package` class, which
+  Contains the :class:`~spack.package_base.Package` class, which
   is the superclass for all packages in Spack. Methods on ``Package``
   implement all phases of the :ref:`package lifecycle
   <package-lifecycle>` and manage the build process.
@@ -273,19 +273,9 @@ or
 Concretizing
 ^^^^^^^^^^^^

-Once some user specs have been added to an environment, they can be
-concretized. *By default specs are concretized separately*, one after
-the other. This mode of operation permits to deploy a full
-software stack where multiple configurations of the same package
-need to be installed alongside each other. Central installations done
-at HPC centers by system administrators or user support groups
-are a common case that fits in this behavior.
-Environments *can also be configured to concretize all
-the root specs in a self-consistent way* to ensure that
-each package in the environment comes with a single configuration. This
-mode of operation is usually what is required by software developers that
-want to deploy their development environment.
-
+Once some user specs have been added to an environment, they can be concretized.
+There are at the moment three different modes of operation to concretize an environment,
+which are explained in detail in :ref:`environments_concretization_config`.
 Regardless of which mode of operation has been chosen, the following
 command will ensure all the root specs are concretized according to the
 constraints that are prescribed in the configuration:
@@ -493,32 +483,76 @@ Appending to this list in the yaml is identical to using the ``spack
 add`` command from the command line. However, there is more power
 available from the yaml file.

+.. _environments_concretization_config:
+
 ^^^^^^^^^^^^^^^^^^^
 Spec concretization
 ^^^^^^^^^^^^^^^^^^^

-Specs can be concretized separately or together, as already
-explained in :ref:`environments_concretization`. The behavior active
-under any environment is determined by the ``concretization`` property:
+An environment can be concretized in three different modes, and the behavior active under any environment
+is determined by the ``concretizer:unify`` property. By default specs are concretized *separately*, one after the other:

 .. code-block:: yaml

    spack:
      specs:
-       - ncview
-       - netcdf
-       - nco
-       - py-sphinx
-     concretization: together
+       - hdf5~mpi
+       - hdf5+mpi
+       - zlib@1.2.8
+     concretizer:
+       unify: false

-which can currently take either one of the two allowed values ``together`` or ``separately``
-(the default).
+This mode of operation makes it possible to deploy a full software stack where multiple configurations of the same package
+need to be installed alongside each other using the best possible selection of transitive dependencies. The downside
+is that redundancy of installations is disregarded completely, and thus environments might be more bloated than
+strictly needed. In the example above, for instance, if a version of ``zlib`` newer than ``1.2.8`` is known to Spack,
+then it will be used for both ``hdf5`` installations.
+
+If redundancy of the environment is a concern, Spack provides a way to install it *together where possible*,
+i.e. trying to maximize reuse of dependencies across different specs:
+
+.. code-block:: yaml
+
+   spack:
+     specs:
+       - hdf5~mpi
+       - hdf5+mpi
+       - zlib@1.2.8
+     concretizer:
+       unify: when_possible
+
+Also in this case Spack allows having multiple configurations of the same package, but privileges the reuse of
+specs over other factors. Going back to our example, this means that both ``hdf5`` installations will use
+``zlib@1.2.8`` as a dependency even if newer versions of that library are available.
+Central installations done at HPC centers by system administrators or user support groups are a common case
+that fits either of these two modes.
+
+Environments can also be configured to concretize all the root specs *together*, in a self-consistent way, to
+ensure that each package in the environment comes with a single configuration:
+
+.. code-block:: yaml
+
+   spack:
+     specs:
+       - hdf5+mpi
+       - zlib@1.2.8
+     concretizer:
+       unify: true
+
+This mode of operation is usually what is required by software developers that want to deploy their development
+environment and have a single view of it in the filesystem.
+
+.. note::
+
+   The ``concretizer:unify`` config option was introduced in Spack 0.18 to
+   replace the ``concretization`` property. For reference,
+   ``concretization: together`` is replaced by ``concretizer:unify:true``,
+   and ``concretization: separately`` is replaced by ``concretizer:unify:false``.

 .. admonition:: Re-concretization of user specs

-   When concretizing specs together the entire set of specs will be
+   When concretizing specs *together* or *together where possible* the entire set of specs will be
    re-concretized after any addition of new user specs, to ensure that
-   the environment remains consistent. When instead the specs are concretized
+   the environment remains consistent / minimal. When instead the specs are concretized
    separately only the new specs will be re-concretized after any addition.

 ^^^^^^^^^^^^^
@@ -63,6 +63,7 @@ or refer to the full manual below.

    configuration
    config_yaml
+   bootstrapping
    build_settings
    environments
    containers
@@ -2393,9 +2393,9 @@ Influence how dependents are built or run

 Spack provides a mechanism for dependencies to influence the
 environment of their dependents by overriding the
-:meth:`setup_dependent_run_environment <spack.package.PackageBase.setup_dependent_run_environment>`
+:meth:`setup_dependent_run_environment <spack.package_base.PackageBase.setup_dependent_run_environment>`
 or the
-:meth:`setup_dependent_build_environment <spack.package.PackageBase.setup_dependent_build_environment>`
+:meth:`setup_dependent_build_environment <spack.package_base.PackageBase.setup_dependent_build_environment>`
 methods.
 The Qt package, for instance, uses this call:

@@ -2417,7 +2417,7 @@ will have the ``PYTHONPATH``, ``PYTHONHOME`` and ``PATH`` environment
 variables set appropriately before starting the installation. To make things
 even simpler the ``python setup.py`` command is also inserted into the module
 scope of dependents by overriding a third method called
-:meth:`setup_dependent_package <spack.package.PackageBase.setup_dependent_package>`
+:meth:`setup_dependent_package <spack.package_base.PackageBase.setup_dependent_package>`
 :

 .. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py
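For readers without the linked sources at hand, a hedged sketch of what overriding one of these renamed hooks can look like; the package name ``Mylib`` and the variable ``MYLIB_ROOT`` are invented, not taken from the diff:

```python
from spack.package import *


class Mylib(Package):
    """Hypothetical dependency that exports a setting to its dependents."""

    def setup_dependent_build_environment(self, env, dependent_spec):
        # Every package that declares depends_on("mylib") sees MYLIB_ROOT
        # in its build environment before its own build phases run.
        env.set("MYLIB_ROOT", self.prefix)
```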
@@ -3022,7 +3022,7 @@ The classes that are currently provided by Spack are:
 +----------------------------------------------------------+----------------------------------+
 | **Base Class**                                           | **Purpose**                      |
 +==========================================================+==================================+
-| :class:`~spack.package.Package`                          | General base class not           |
+| :class:`~spack.package_base.Package`                     | General base class not           |
 |                                                          | specialized for any build system |
 +----------------------------------------------------------+----------------------------------+
 | :class:`~spack.build_systems.makefile.MakefilePackage`   | Specialized class for packages   |

@@ -3153,7 +3153,7 @@ for the install phase is:
 For those not used to Python instance methods, this is the
 package itself. In this case it's an instance of ``Foo``, which
 extends ``Package``. For API docs on Package objects, see
-:py:class:`Package <spack.package.Package>`.
+:py:class:`Package <spack.package_base.Package>`.

 ``spec``
    This is the concrete spec object created by Spack from an
@@ -115,7 +115,8 @@ And here's the spack environment built by the pipeline represented as a

   spack:
     view: false
-    concretization: separately
+    concretizer:
+      unify: false

    definitions:
    - pkgs:
@@ -61,7 +61,7 @@ You can see the packages we added earlier in the ``specs:`` section. If you
 ever want to add more packages, you can either use ``spack add`` or manually
 edit this file.

-We also need to change the ``concretization:`` option. By default, Spack
+We also need to change the ``concretizer:unify`` option. By default, Spack
 concretizes each spec *separately*, allowing multiple versions of the same
 package to coexist. Since we want a single consistent environment, we want to
 concretize all of the specs *together*.
@@ -78,7 +78,8 @@ Here is what your ``spack.yaml`` looks like with this new setting:
   # add package specs to the `specs` list
   specs: [bash@5, python, py-numpy, py-scipy, py-matplotlib]
   view: true
-  concretization: together
+  concretizer:
+    unify: true

 ^^^^^^^^^^^^^^^^
 Symlink location
@@ -25,4 +25,5 @@ spack:
   - subversion
   # Plotting
   - graphviz
-  concretization: together
+  concretizer:
+    unify: true
lib/spack/env/cc (vendored, 6 changes)

@@ -1,4 +1,4 @@
-#!/bin/sh
+#!/bin/sh -f
 # shellcheck disable=SC2034 # evals in this script fool shellcheck
 #
 # Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
@@ -768,7 +768,9 @@ if [ "$SPACK_DEBUG" = TRUE ]; then
   input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.in.log"
   output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.out.log"
   echo "[$mode] $command $input_command" >> "$input_log"
-  echo "[$mode] ${full_command_list}" >> "$output_log"
+  IFS="$lsep"
+  echo "[$mode] "$full_command_list >> "$output_log"
+  unset IFS
 fi

 # Execute the full command, preserving spaces with IFS set
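The `IFS="$lsep"` / `unset IFS` pair added above is a standard POSIX sh idiom: an unquoted expansion splits only on the chosen separator, so embedded spaces in the individual fields survive. A self-contained sketch under invented values (`$lsep` appears in the hunk, but the separator byte and sample list here are assumptions, not Spack's actual data):

```sh
#!/bin/sh
# Join fields with an unusual separator, then split them back by setting
# IFS around an unquoted expansion, as the hunk does for $full_command_list.
lsep=$(printf '\036')                    # ASCII record separator
list="gcc${lsep}-O2${lsep}my file.c"     # three fields, one containing a space
IFS="$lsep"
set -- $list                             # unquoted: splits on $lsep only
unset IFS
printf '%s\n' "$@"                       # prints exactly three lines
```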
lib/spack/external/__init__.py (vendored, 2 changes)

@@ -18,7 +18,7 @@

 * Homepage: https://pypi.python.org/pypi/archspec
 * Usage: Labeling, comparison and detection of microarchitectures
-* Version: 0.1.2 (commit 85757b6666422fca86aa882a769bf78b0f992f54)
+* Version: 0.1.4 (commit 53fc4ac91e9b4c5e4079f15772503a80bece72ad)

 argparse
 --------
lib/spack/external/archspec/cpu/detect.py (vendored, 75 changes)

@@ -61,7 +61,7 @@ def proc_cpuinfo():
     ``/proc/cpuinfo``
     """
     info = {}
-    with open("/proc/cpuinfo") as file:
+    with open("/proc/cpuinfo") as file:  # pylint: disable=unspecified-encoding
         for line in file:
             key, separator, value = line.partition(":")

@@ -80,26 +80,46 @@ def proc_cpuinfo():


 def _check_output(args, env):
-    output = subprocess.Popen(args, stdout=subprocess.PIPE, env=env).communicate()[0]
+    output = subprocess.Popen(  # pylint: disable=consider-using-with
+        args, stdout=subprocess.PIPE, env=env
+    ).communicate()[0]
     return six.text_type(output.decode("utf-8"))


+def _machine():
+    """Return the machine architecture we are on"""
+    operating_system = platform.system()
+
+    # If we are not on Darwin, trust what Python tells us
+    if operating_system != "Darwin":
+        return platform.machine()
+
+    # On Darwin it might happen that we are on M1, but using an interpreter
+    # built for x86_64. In that case "platform.machine() == 'x86_64'", so we
+    # need to fix that.
+    #
+    # See: https://bugs.python.org/issue42704
+    output = _check_output(
+        ["sysctl", "-n", "machdep.cpu.brand_string"], env=_ensure_bin_usrbin_in_path()
+    ).strip()
+
+    if "Apple" in output:
+        # Note that a native Python interpreter on Apple M1 would return
+        # "arm64" instead of "aarch64". Here we normalize to the latter.
+        return "aarch64"
+
+    return "x86_64"
+
+
 @info_dict(operating_system="Darwin")
 def sysctl_info_dict():
     """Returns a raw info dictionary parsing the output of sysctl."""
-    # Make sure that /sbin and /usr/sbin are in PATH as sysctl is
-    # usually found there
-    child_environment = dict(os.environ.items())
-    search_paths = child_environment.get("PATH", "").split(os.pathsep)
-    for additional_path in ("/sbin", "/usr/sbin"):
-        if additional_path not in search_paths:
-            search_paths.append(additional_path)
-    child_environment["PATH"] = os.pathsep.join(search_paths)
+    child_environment = _ensure_bin_usrbin_in_path()

     def sysctl(*args):
         return _check_output(["sysctl"] + list(args), env=child_environment).strip()

-    if platform.machine() == "x86_64":
+    if _machine() == "x86_64":
         flags = (
             sysctl("-n", "machdep.cpu.features").lower()
             + " "

@@ -125,6 +145,18 @@ def sysctl(*args):
     return info


+def _ensure_bin_usrbin_in_path():
+    # Make sure that /sbin and /usr/sbin are in PATH as sysctl is
+    # usually found there
+    child_environment = dict(os.environ.items())
+    search_paths = child_environment.get("PATH", "").split(os.pathsep)
+    for additional_path in ("/sbin", "/usr/sbin"):
+        if additional_path not in search_paths:
+            search_paths.append(additional_path)
+    child_environment["PATH"] = os.pathsep.join(search_paths)
+    return child_environment
+
+
 def adjust_raw_flags(info):
     """Adjust the flags detected on the system to homogenize
     slightly different representations.

@@ -184,12 +216,7 @@ def compatible_microarchitectures(info):
     Args:
         info (dict): dictionary containing information on the host cpu
     """
-    architecture_family = platform.machine()
-    # On Apple M1 platform.machine() returns "arm64" instead of "aarch64"
-    # so we should normalize the name here
-    if architecture_family == "arm64":
-        architecture_family = "aarch64"
-
+    architecture_family = _machine()
     # If a tester is not registered, be conservative and assume no known
     # target is compatible with the host
     tester = COMPATIBILITY_CHECKS.get(architecture_family, lambda x, y: False)

@@ -244,12 +271,7 @@ def compatibility_check(architecture_family):
         architecture_family = (architecture_family,)

     def decorator(func):
-        # pylint: disable=fixme
-        # TODO: on removal of Python 2.6 support this can be re-written as
-        # TODO: an update + a dict comprehension
-        for arch_family in architecture_family:
-            COMPATIBILITY_CHECKS[arch_family] = func
-
+        COMPATIBILITY_CHECKS.update({family: func for family in architecture_family})
         return func

     return decorator

@@ -288,7 +310,7 @@ def compatibility_check_for_x86_64(info, target):
     arch_root = TARGETS[basename]
     return (
         (target == arch_root or arch_root in target.ancestors)
-        and (target.vendor == vendor or target.vendor == "generic")
+        and target.vendor in (vendor, "generic")
         and target.features.issubset(features)
     )

@@ -303,8 +325,9 @@ def compatibility_check_for_aarch64(info, target):
     arch_root = TARGETS[basename]
     return (
         (target == arch_root or arch_root in target.ancestors)
-        and (target.vendor == vendor or target.vendor == "generic")
-        and target.features.issubset(features)
+        and target.vendor in (vendor, "generic")
+        # On macOS it seems impossible to get all the CPU features with sysctl info
+        and (target.features.issubset(features) or platform.system() == "Darwin")
     )
lib/spack/external/archspec/cpu/schema.py (vendored, 4 changes)

@@ -11,7 +11,7 @@
 try:
     from collections.abc import MutableMapping  # novm
 except ImportError:
-    from collections import MutableMapping
+    from collections import MutableMapping  # pylint: disable=deprecated-class


 class LazyDictionary(MutableMapping):

@@ -56,7 +56,7 @@ def _load_json_file(json_file):

     def _factory():
         filename = os.path.join(json_dir, json_file)
-        with open(filename, "r") as file:
+        with open(filename, "r") as file:  # pylint: disable=unspecified-encoding
             return json.load(file)

     return _factory
lib/spack/external/archspec/json/cpu/microarchitectures.json (vendored)

@@ -88,6 +88,20 @@
       "name": "pentium4",
       "flags": "-march={name} -mtune=generic"
     }
   ],
+  "oneapi": [
+    {
+      "versions": ":",
+      "name": "pentium4",
+      "flags": "-march={name} -mtune=generic"
+    }
+  ],
+  "dpcpp": [
+    {
+      "versions": ":",
+      "name": "pentium4",
+      "flags": "-march={name} -mtune=generic"
+    }
+  ]
 }
},

@@ -291,6 +305,20 @@
       "name": "pentium4",
       "flags": "-march={name} -mtune=generic"
     }
   ],
+  "oneapi": [
+    {
+      "versions": ":",
+      "name": "pentium4",
+      "flags": "-march={name} -mtune=generic"
+    }
+  ],
+  "dpcpp": [
+    {
+      "versions": ":",
+      "name": "pentium4",
+      "flags": "-march={name} -mtune=generic"
+    }
+  ]
 }
},

@@ -333,6 +361,18 @@
       "versions": "16.0:",
       "flags": "-march={name} -mtune={name}"
     }
   ],
+  "oneapi": [
+    {
+      "versions": ":",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ],
+  "dpcpp": [
+    {
+      "versions": ":",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ]
 }
},
@@ -384,6 +424,20 @@
       "name": "corei7",
       "flags": "-march={name} -mtune={name}"
     }
   ],
+  "oneapi": [
+    {
+      "versions": ":",
+      "name": "corei7",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ],
+  "dpcpp": [
+    {
+      "versions": ":",
+      "name": "corei7",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ]
 }
},

@@ -432,6 +486,20 @@
       "name": "corei7",
       "flags": "-march={name} -mtune={name}"
     }
   ],
+  "oneapi": [
+    {
+      "versions": ":",
+      "name": "corei7",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ],
+  "dpcpp": [
+    {
+      "versions": ":",
+      "name": "corei7",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ]
 }
},

@@ -490,6 +558,18 @@
       "versions": "18.0:",
       "flags": "-march={name} -mtune={name}"
     }
   ],
+  "oneapi": [
+    {
+      "versions": ":",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ],
+  "dpcpp": [
+    {
+      "versions": ":",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ]
 }
},

@@ -550,6 +630,18 @@
       "versions": "18.0:",
       "flags": "-march={name} -mtune={name}"
     }
   ],
+  "oneapi": [
+    {
+      "versions": ":",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ],
+  "dpcpp": [
+    {
+      "versions": ":",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ]
 }
},

@@ -615,6 +707,18 @@
       "versions": "18.0:",
       "flags": "-march={name} -mtune={name}"
     }
   ],
+  "oneapi": [
+    {
+      "versions": ":",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ],
+  "dpcpp": [
+    {
+      "versions": ":",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ]
 }
},

@@ -672,6 +776,18 @@
       "versions": "18.0:",
       "flags": "-march={name} -mtune={name}"
     }
   ],
+  "oneapi": [
+    {
+      "versions": ":",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ],
+  "dpcpp": [
+    {
+      "versions": ":",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ]
 }
},
@@ -732,6 +848,18 @@
       "versions": "18.0:",
       "flags": "-march={name} -mtune={name}"
     }
   ],
+  "oneapi": [
+    {
+      "versions": ":",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ],
+  "dpcpp": [
+    {
+      "versions": ":",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ]
 }
},

@@ -798,6 +926,20 @@
       "name": "knl",
       "flags": "-march={name} -mtune={name}"
     }
   ],
+  "oneapi": [
+    {
+      "versions": ":",
+      "name": "knl",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ],
+  "dpcpp": [
+    {
+      "versions": ":",
+      "name": "knl",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ]
 }
},

@@ -868,6 +1010,20 @@
       "name": "skylake-avx512",
       "flags": "-march={name} -mtune={name}"
     }
   ],
+  "oneapi": [
+    {
+      "versions": ":",
+      "name": "skylake-avx512",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ],
+  "dpcpp": [
+    {
+      "versions": ":",
+      "name": "skylake-avx512",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ]
 }
},

@@ -937,6 +1093,18 @@
       "versions": "18.0:",
       "flags": "-march={name} -mtune={name}"
     }
   ],
+  "oneapi": [
+    {
+      "versions": ":",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ],
+  "dpcpp": [
+    {
+      "versions": ":",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ]
 }
},

@@ -1004,6 +1172,18 @@
       "versions": "19.0.1:",
       "flags": "-march={name} -mtune={name}"
     }
   ],
+  "oneapi": [
+    {
+      "versions": ":",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ],
+  "dpcpp": [
+    {
+      "versions": ":",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ]
 }
},

@@ -1098,6 +1278,20 @@
       "name": "icelake-client",
       "flags": "-march={name} -mtune={name}"
     }
   ],
+  "oneapi": [
+    {
+      "versions": ":",
+      "name": "icelake-client",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ],
+  "dpcpp": [
+    {
+      "versions": ":",
+      "name": "icelake-client",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ]
 }
},
@@ -1142,6 +1336,20 @@
       "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
       "flags": "-msse2"
     }
   ],
+  "oneapi": [
+    {
+      "versions": ":",
+      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+      "flags": "-msse2"
+    }
+  ],
+  "dpcpp": [
+    {
+      "versions": ":",
+      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+      "flags": "-msse2"
+    }
+  ]
 }
},

@@ -1192,6 +1400,20 @@
       "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
       "flags": "-msse3"
     }
   ],
+  "oneapi": [
+    {
+      "versions": ":",
+      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+      "flags": "-msse3"
+    }
+  ],
+  "dpcpp": [
+    {
+      "versions": ":",
+      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+      "flags": "-msse3"
+    }
+  ]
 }
},

@@ -1246,6 +1468,20 @@
       "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
       "flags": "-msse3"
     }
   ],
+  "oneapi": [
+    {
+      "versions": ":",
+      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+      "flags": "-msse3"
+    }
+  ],
+  "dpcpp": [
+    {
+      "versions": ":",
+      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+      "flags": "-msse3"
+    }
+  ]
 }
},

@@ -1301,6 +1537,20 @@
       "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
       "flags": "-msse4.2"
     }
   ],
+  "oneapi": [
+    {
+      "versions": ":",
+      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+      "flags": "-msse4.2"
+    }
+  ],
+  "dpcpp": [
+    {
+      "versions": ":",
+      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+      "flags": "-msse4.2"
+    }
+  ]
 }
},
@@ -1360,6 +1610,22 @@
       "name": "core-avx2",
       "flags": "-march={name} -mtune={name}"
     }
   ],
+  "oneapi": [
+    {
+      "versions": ":",
+      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+      "name": "core-avx2",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ],
+  "dpcpp": [
+    {
+      "versions": ":",
+      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+      "name": "core-avx2",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ]
 }
},

@@ -1422,6 +1688,22 @@
       "name": "core-avx2",
       "flags": "-march={name} -mtune={name}"
     }
   ],
+  "oneapi": [
+    {
+      "versions": ":",
+      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+      "name": "core-avx2",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ],
+  "dpcpp": [
+    {
+      "versions": ":",
+      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+      "name": "core-avx2",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ]
 }
},

@@ -1485,6 +1767,22 @@
       "name": "core-avx2",
       "flags": "-march={name} -mtune={name}"
     }
   ],
+  "oneapi": [
+    {
+      "versions": ":",
+      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+      "name": "core-avx2",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ],
+  "dpcpp": [
+    {
+      "versions": ":",
+      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+      "name": "core-avx2",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ]
 }
},

@@ -1543,6 +1841,30 @@
       "name": "znver3",
       "flags": "-march={name} -mtune={name}"
     }
   ],
+  "intel": [
+    {
+      "versions": "16.0:",
+      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+      "name": "core-avx2",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ],
+  "oneapi": [
+    {
+      "versions": ":",
+      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+      "name": "core-avx2",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ],
+  "dpcpp": [
+    {
+      "versions": ":",
+      "warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
+      "name": "core-avx2",
+      "flags": "-march={name} -mtune={name}"
+    }
+  ]
 }
},
@@ -1788,7 +2110,6 @@
   "fp",
   "asimd",
   "evtstrm",
   "aes",
   "pmull",
   "sha1",
   "sha2",
@@ -1821,18 +2142,26 @@
       "flags": "-march=armv8.2-a+crc+crypto+fp16"
     },
     {
-      "versions": "8:",
-      "flags": "-march=armv8.2-a+crc+aes+sha2+fp16+sve -msve-vector-bits=512"
+      "versions": "8:10.2",
+      "flags": "-march=armv8.2-a+crc+sha2+fp16+sve -msve-vector-bits=512"
     },
+    {
+      "versions": "10.3:",
+      "flags": "-mcpu=a64fx -msve-vector-bits=512"
+    }
   ],
   "clang": [
     {
       "versions": "3.9:4.9",
-      "flags": "-march=armv8.2-a+crc+crypto+fp16"
+      "flags": "-march=armv8.2-a+crc+sha2+fp16"
     },
     {
-      "versions": "5:",
-      "flags": "-march=armv8.2-a+crc+crypto+fp16+sve"
+      "versions": "5:10",
+      "flags": "-march=armv8.2-a+crc+sha2+fp16+sve"
     },
+    {
+      "versions": "11:",
+      "flags": "-mcpu=a64fx"
+    }
   ],
   "arm": [
@@ -1954,7 +2283,40 @@
 "m1": {
   "from": ["aarch64"],
   "vendor": "Apple",
-  "features": [],
+  "features": [
+    "fp",
+    "asimd",
+    "evtstrm",
+    "aes",
+    "pmull",
+    "sha1",
+    "sha2",
+    "crc32",
+    "atomics",
+    "fphp",
+    "asimdhp",
+    "cpuid",
+    "asimdrdm",
+    "jscvt",
+    "fcma",
+    "lrcpc",
+    "dcpop",
+    "sha3",
+    "asimddp",
+    "sha512",
+    "asimdfhm",
+    "dit",
+    "uscat",
+    "ilrcpc",
+    "flagm",
+    "ssbs",
+    "sb",
+    "paca",
+    "pacg",
+    "dcpodp",
+    "flagm2",
+    "frint"
+  ],
   "compilers": {
     "gcc": [
       {
@@ -1964,14 +2326,22 @@
   ],
   "clang" : [
     {
-      "versions": "9.0:",
+      "versions": "9.0:12.0",
       "flags" : "-march=armv8.4-a"
     },
+    {
+      "versions": "13.0:",
+      "flags" : "-mcpu=apple-m1"
+    }
   ],
   "apple-clang": [
     {
-      "versions": "11.0:",
+      "versions": "11.0:12.5",
       "flags" : "-march=armv8.4-a"
     },
+    {
+      "versions": "13.0:",
+      "flags" : "-mcpu=apple-m1"
+    }
   ]
 }
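A usage sketch, not part of the diff: these JSON entries drive archspec's flag selection at runtime. The lookup below uses archspec's documented Python interface (TARGETS mapping and optimization_flags); exact output depends on the vendored data:

import archspec.cpu

# Look up the Apple M1 entry in the microarchitecture database
target = archspec.cpu.TARGETS["m1"]

# With the version ranges above, clang 13 and newer should resolve to the
# dedicated -mcpu flag, while 9.0:12.0 falls back to a generic baseline.
print(target.optimization_flags("clang", "13.0.0"))  # expected: -mcpu=apple-m1
print(target.optimization_flags("clang", "12.0.0"))  # expected: -march=armv8.4-a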
@@ -367,7 +367,7 @@ def group_ids(uid=None):


 @system_path_filter(arg_slice=slice(1))
-def chgrp(path, group):
+def chgrp(path, group, follow_symlinks=True):
     """Implement the bash chgrp function on a single path"""
     if is_windows:
         raise OSError("Function 'chgrp' is not supported on Windows")

@@ -376,7 +376,10 @@ def chgrp(path, group):
         gid = grp.getgrnam(group).gr_gid
     else:
         gid = group
-    os.chown(path, -1, gid)
+    if follow_symlinks:
+        os.chown(path, -1, gid)
+    else:
+        os.lchown(path, -1, gid)


 @system_path_filter(arg_slice=slice(1))
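A self-contained, POSIX-only restatement of the behavior added above, not part of the diff, using only the standard library:

import grp
import os


def chgrp(path, group, follow_symlinks=True):
    """Change the group of path, optionally without dereferencing symlinks."""
    if isinstance(group, str):
        gid = grp.getgrnam(group).gr_gid
    else:
        gid = group
    if follow_symlinks:
        # Affects the file the symlink points to
        os.chown(path, -1, gid)
    else:
        # Affects the symlink itself
        os.lchown(path, -1, gid)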
@@ -4,7 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 #: (major, minor, micro, dev release) tuple
-spack_version_info = (0, 18, 0, 'dev0')
+spack_version_info = (0, 19, 0, 'dev0')

 #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
 spack_version = '.'.join(str(s) for s in spack_version_info)
@@ -12,7 +12,7 @@
 import spack.error
 import spack.hooks
 import spack.monitor
-import spack.package
+import spack.package_base
 import spack.repo
 import spack.util.executable
@@ -210,7 +210,7 @@ def get_all_built_specs(self):

         return spec_list

-    def find_built_spec(self, spec):
+    def find_built_spec(self, spec, mirrors_to_check=None):
         """Look in our cache for the built spec corresponding to ``spec``.

         If the spec can be found among the configured binary mirrors, a

@@ -225,6 +225,8 @@ def find_built_spec(self, spec):

         Args:
             spec (spack.spec.Spec): Concrete spec to find
+            mirrors_to_check: Optional mapping containing mirrors to check. If
+                None, just assumes all configured mirrors.

         Returns:
             An list of objects containing the found specs and mirror url where

@@ -240,17 +242,23 @@ def find_built_spec(self, spec):
             ]
         """
         self.regenerate_spec_cache()
-        return self.find_by_hash(spec.dag_hash())
+        return self.find_by_hash(spec.dag_hash(), mirrors_to_check=mirrors_to_check)

-    def find_by_hash(self, find_hash):
+    def find_by_hash(self, find_hash, mirrors_to_check=None):
         """Same as find_built_spec but uses the hash of a spec.

         Args:
             find_hash (str): hash of the spec to search
+            mirrors_to_check: Optional mapping containing mirrors to check. If
+                None, just assumes all configured mirrors.
         """
         if find_hash not in self._mirrors_for_spec:
             return None
-        return self._mirrors_for_spec[find_hash]
+        results = self._mirrors_for_spec[find_hash]
+        if not mirrors_to_check:
+            return results
+        mirror_urls = mirrors_to_check.values()
+        return [r for r in results if r['mirror_url'] in mirror_urls]

     def update_spec(self, spec, found_list):
         """
@@ -563,6 +571,13 @@ def __init__(self, msg):
         super(NewLayoutException, self).__init__(msg)


+class UnsignedPackageException(spack.error.SpackError):
+    """
+    Raised if installation of unsigned package is attempted without
+    the use of ``--no-check-signature``.
+    """
+
+
 def compute_hash(data):
     return hashlib.sha256(data.encode('utf-8')).hexdigest()
@@ -609,10 +624,14 @@ def get_buildfile_manifest(spec):
     """
     data = {"text_to_relocate": [], "binary_to_relocate": [],
             "link_to_relocate": [], "other": [],
-            "binary_to_relocate_fullpath": []}
+            "binary_to_relocate_fullpath": [], "offsets": {}}

     blacklist = (".spack", "man")

+    # Get all the paths we will want to relocate in binaries
+    paths_to_relocate = [s.prefix for s in spec.traverse(root=True)]
+    paths_to_relocate.append(spack.store.layout.root)
+
     # Do this at during tarball creation to save time when tarball unpacked.
     # Used by make_package_relative to determine binaries to change.
     for root, dirs, files in os.walk(spec.prefix, topdown=True):

@@ -647,6 +666,11 @@ def get_buildfile_manifest(spec):
                 (m_subtype in ('x-mach-binary')
                     and sys.platform == 'darwin') or
                 (not filename.endswith('.o'))):
+
+                # Last path to relocate is the layout root, which is a substring
+                # of the others
+                indices = relocate.compute_indices(path_name, paths_to_relocate)
+                data['offsets'][rel_path_name] = indices
                 data['binary_to_relocate'].append(rel_path_name)
                 data['binary_to_relocate_fullpath'].append(path_name)
                 added = True

@@ -685,6 +709,7 @@ def write_buildinfo_file(spec, workdir, rel=False):
     buildinfo['relocate_binaries'] = manifest['binary_to_relocate']
     buildinfo['relocate_links'] = manifest['link_to_relocate']
     buildinfo['prefix_to_hash'] = prefix_to_hash
+    buildinfo['offsets'] = manifest['offsets']
     filename = buildinfo_file_name(workdir)
     with open(filename, 'w') as outfile:
         outfile.write(syaml.dump(buildinfo, default_flow_style=True))
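A sketch, not part of the diff: the offsets recorded above let relocation avoid re-scanning binaries at install time. Below is a hypothetical restatement of what a compute_indices-style helper has to produce, using only the standard library; the real helper's return format may differ:

def compute_offsets(filename, prefixes):
    """Return {prefix: [byte offsets]} for every occurrence of each
    prefix string inside a binary file."""
    with open(filename, "rb") as f:
        blob = f.read()
    offsets = {}
    for prefix in prefixes:
        needle = prefix.encode("utf-8")
        found, start = [], blob.find(needle)
        while start != -1:
            found.append(start)
            start = blob.find(needle, start + 1)
        offsets[prefix] = found
    return offsets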
@@ -751,15 +776,16 @@ def select_signing_key(key=None):
     return key


-def sign_tarball(key, force, specfile_path):
-    if os.path.exists('%s.asc' % specfile_path):
+def sign_specfile(key, force, specfile_path):
+    signed_specfile_path = '%s.sig' % specfile_path
+    if os.path.exists(signed_specfile_path):
         if force:
-            os.remove('%s.asc' % specfile_path)
+            os.remove(signed_specfile_path)
         else:
-            raise NoOverwriteException('%s.asc' % specfile_path)
+            raise NoOverwriteException(signed_specfile_path)

     key = select_signing_key(key)
-    spack.util.gpg.sign(key, specfile_path, '%s.asc' % specfile_path)
+    spack.util.gpg.sign(key, specfile_path, signed_specfile_path, clearsign=True)


 def _fetch_spec_from_mirror(spec_url):

@@ -768,7 +794,10 @@ def _fetch_spec_from_mirror(spec_url):
     _, _, spec_file = web_util.read_from_url(spec_url)
     spec_file_contents = codecs.getreader('utf-8')(spec_file).read()
     # Need full spec.json name or this gets confused with index.json.
-    if spec_url.endswith('.json'):
+    if spec_url.endswith('.json.sig'):
+        specfile_json = Spec.extract_json_from_clearsig(spec_file_contents)
+        s = Spec.from_dict(specfile_json)
+    elif spec_url.endswith('.json'):
         s = Spec.from_json(spec_file_contents)
     elif spec_url.endswith('.yaml'):
         s = Spec.from_yaml(spec_file_contents)
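A sketch, not part of the diff: a clearsigned spec file wraps the JSON payload in an ASCII-armored envelope (RFC 4880), so the payload can be recovered without invoking gpg. The version below assumes the standard armor markers; the real extract_json_from_clearsig may differ in details:

import json


def extract_json_from_clearsig(contents):
    """Strip the PGP clearsign envelope and parse the embedded JSON.

    A clearsigned file looks like:
        -----BEGIN PGP SIGNED MESSAGE-----
        Hash: SHA256
        <blank line>
        { ... JSON payload ... }
        -----BEGIN PGP SIGNATURE-----
        ...
        -----END PGP SIGNATURE-----
    """
    lines = contents.splitlines()
    # Payload starts after the first blank line and ends at the signature block
    start = lines.index('') + 1
    end = lines.index('-----BEGIN PGP SIGNATURE-----')
    return json.loads('\n'.join(lines[start:end]))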
@@ -829,7 +858,9 @@ def generate_package_index(cache_prefix):
         file_list = (
             entry
             for entry in web_util.list_url(cache_prefix)
-            if entry.endswith('.yaml') or entry.endswith('spec.json'))
+            if entry.endswith('.yaml') or
+            entry.endswith('spec.json') or
+            entry.endswith('spec.json.sig'))
     except KeyError as inst:
         msg = 'No packages at {0}: {1}'.format(cache_prefix, inst)
         tty.warn(msg)

@@ -944,7 +975,7 @@ def _build_tarball(
     tmpdir = tempfile.mkdtemp()
     cache_prefix = build_cache_prefix(tmpdir)

-    tarfile_name = tarball_name(spec, '.tar.gz')
+    tarfile_name = tarball_name(spec, '.spack')
     tarfile_dir = os.path.join(cache_prefix, tarball_directory_name(spec))
     tarfile_path = os.path.join(tarfile_dir, tarfile_name)
     spackfile_path = os.path.join(

@@ -967,10 +998,12 @@ def _build_tarball(
     spec_file = spack.store.layout.spec_file_path(spec)
     specfile_name = tarball_name(spec, '.spec.json')
     specfile_path = os.path.realpath(os.path.join(cache_prefix, specfile_name))
+    signed_specfile_path = '{0}.sig'.format(specfile_path)
     deprecated_specfile_path = specfile_path.replace('.spec.json', '.spec.yaml')

     remote_specfile_path = url_util.join(
         outdir, os.path.relpath(specfile_path, os.path.realpath(tmpdir)))
+    remote_signed_specfile_path = '{0}.sig'.format(remote_specfile_path)
     remote_specfile_path_deprecated = url_util.join(
         outdir, os.path.relpath(deprecated_specfile_path,
                                 os.path.realpath(tmpdir)))

@@ -979,9 +1012,12 @@ def _build_tarball(
     if force:
         if web_util.url_exists(remote_specfile_path):
             web_util.remove_url(remote_specfile_path)
+        if web_util.url_exists(remote_signed_specfile_path):
+            web_util.remove_url(remote_signed_specfile_path)
         if web_util.url_exists(remote_specfile_path_deprecated):
             web_util.remove_url(remote_specfile_path_deprecated)
     elif (web_util.url_exists(remote_specfile_path) or
+          web_util.url_exists(remote_signed_specfile_path) or
           web_util.url_exists(remote_specfile_path_deprecated)):
         raise NoOverwriteException(url_util.format(remote_specfile_path))

@@ -1043,6 +1079,7 @@ def _build_tarball(
         raise ValueError(
             '{0} not a valid spec file type (json or yaml)'.format(
                 spec_file))
+    spec_dict['buildcache_layout_version'] = 1
     bchecksum = {}
     bchecksum['hash_algorithm'] = 'sha256'
     bchecksum['hash'] = checksum

@@ -1061,25 +1098,15 @@ def _build_tarball(
     # sign the tarball and spec file with gpg
     if not unsigned:
         key = select_signing_key(key)
-        sign_tarball(key, force, specfile_path)
-
-    # put tarball, spec and signature files in .spack archive
-    with closing(tarfile.open(spackfile_path, 'w')) as tar:
-        tar.add(name=tarfile_path, arcname='%s' % tarfile_name)
-        tar.add(name=specfile_path, arcname='%s' % specfile_name)
-        if not unsigned:
-            tar.add(name='%s.asc' % specfile_path,
-                    arcname='%s.asc' % specfile_name)
-
-    # cleanup file moved to archive
-    os.remove(tarfile_path)
-    if not unsigned:
-        os.remove('%s.asc' % specfile_path)
+        sign_specfile(key, force, specfile_path)

     # push tarball and signed spec json to remote mirror
     web_util.push_to_url(
         spackfile_path, remote_spackfile_path, keep_original=False)
     web_util.push_to_url(
-        specfile_path, remote_specfile_path, keep_original=False)
+        signed_specfile_path if not unsigned else specfile_path,
+        remote_signed_specfile_path if not unsigned else remote_specfile_path,
+        keep_original=False)

     tty.debug('Buildcache for "{0}" written to \n {1}'
               .format(spec, remote_spackfile_path))
@@ -1162,48 +1189,174 @@ def push(specs, push_url, specs_kwargs=None, **kwargs):
         warnings.warn(str(e))


-def download_tarball(spec, preferred_mirrors=None):
+def try_verify(specfile_path):
+    """Utility function to attempt to verify a local file.  Assumes the
+    file is a clearsigned signature file.
+
+    Args:
+        specfile_path (str): Path to file to be verified.
+
+    Returns:
+        ``True`` if the signature could be verified, ``False`` otherwise.
+    """
+    suppress = config.get('config:suppress_gpg_warnings', False)
+
+    try:
+        spack.util.gpg.verify(specfile_path, suppress_warnings=suppress)
+    except Exception:
+        return False
+
+    return True
+
+
+def try_fetch(url_to_fetch):
+    """Utility function to try and fetch a file from a url, stage it
+    locally, and return the path to the staged file.
+
+    Args:
+        url_to_fetch (str): Url pointing to remote resource to fetch
+
+    Returns:
+        Path to locally staged resource or ``None`` if it could not be fetched.
+    """
+    stage = Stage(url_to_fetch, keep=True)
+    stage.create()
+
+    try:
+        stage.fetch()
+    except fs.FetchError:
+        stage.destroy()
+        return None
+
+    return stage
+
+
+def _delete_staged_downloads(download_result):
+    """Clean up stages used to download tarball and specfile"""
+    download_result['tarball_stage'].destroy()
+    download_result['specfile_stage'].destroy()
+
+
+def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
     """
     Download binary tarball for given package into stage area, returning
     path to downloaded tarball if successful, None otherwise.

     Args:
         spec (spack.spec.Spec): Concrete spec
-        preferred_mirrors (list): If provided, this is a list of preferred
-            mirror urls.  Other configured mirrors will only be used if the
-            tarball can't be retrieved from one of these.
+        unsigned (bool): Whether or not to require signed binaries
+        mirrors_for_spec (list): Optional list of concrete specs and mirrors
+            obtained by calling binary_distribution.get_mirrors_for_spec().
+            These will be checked in order first before looking in other
+            configured mirrors.

     Returns:
-        Path to the downloaded tarball, or ``None`` if the tarball could not
-        be downloaded from any configured mirrors.
+        ``None`` if the tarball could not be downloaded (maybe also verified,
+        depending on whether new-style signed binary packages were found).
+        Otherwise, return an object indicating the path to the downloaded
+        tarball, the path to the downloaded specfile (in the case of new-style
+        buildcache), and whether or not the tarball is already verified.
+
+        .. code-block:: JSON
+
+           {
+               "tarball_path": "path-to-locally-saved-tarfile",
+               "specfile_path": "none-or-path-to-locally-saved-specfile",
+               "signature_verified": "true-if-binary-pkg-was-already-verified"
+           }
     """
     if not spack.mirror.MirrorCollection():
         tty.die("Please add a spack mirror to allow " +
                 "download of pre-compiled packages.")

     tarball = tarball_path_name(spec, '.spack')
+    specfile_prefix = tarball_name(spec, '.spec')

-    urls_to_try = []
+    mirrors_to_try = []

-    if preferred_mirrors:
-        for preferred_url in preferred_mirrors:
-            urls_to_try.append(url_util.join(
-                preferred_url, _build_cache_relative_path, tarball))
+    # Note on try_first and try_next:
+    # mirrors_for_spec mostly likely came from spack caching remote
+    # mirror indices locally and adding their specs to a local data
+    # structure supporting quick lookup of concrete specs.  Those
+    # mirrors are likely a subset of all configured mirrors, and
+    # we'll probably find what we need in one of them.  But we'll
+    # look in all configured mirrors if needed, as maybe the spec
+    # we need was in an un-indexed mirror.  No need to check any
+    # mirror for the spec twice though.
+    try_first = [i['mirror_url'] for i in mirrors_for_spec] if mirrors_for_spec else []
+    try_next = [
+        i.fetch_url for i in spack.mirror.MirrorCollection().values()
+        if i.fetch_url not in try_first
+    ]

-    for mirror in spack.mirror.MirrorCollection().values():
-        if not preferred_mirrors or mirror.fetch_url not in preferred_mirrors:
-            urls_to_try.append(url_util.join(
-                mirror.fetch_url, _build_cache_relative_path, tarball))
+    for url in try_first + try_next:
+        mirrors_to_try.append({
+            'specfile': url_util.join(url,
+                _build_cache_relative_path, specfile_prefix),
+            'spackfile': url_util.join(url,
+                _build_cache_relative_path, tarball)
+        })

-    for try_url in urls_to_try:
-        # stage the tarball into standard place
-        stage = Stage(try_url, name="build_cache", keep=True)
-        stage.create()
-        try:
-            stage.fetch()
-            return stage.save_filename
-        except fs.FetchError:
-            continue
+    tried_to_verify_sigs = []
+
+    # Assumes we care more about finding a spec file by preferred ext
+    # than by mirrory priority.  This can be made less complicated as
+    # we remove support for deprecated spec formats and buildcache layouts.
+    for ext in ['json.sig', 'json', 'yaml']:
+        for mirror_to_try in mirrors_to_try:
+            specfile_url = '{0}.{1}'.format(mirror_to_try['specfile'], ext)
+            spackfile_url = mirror_to_try['spackfile']
+            local_specfile_stage = try_fetch(specfile_url)
+            if local_specfile_stage:
+                local_specfile_path = local_specfile_stage.save_filename
+                signature_verified = False
+
+                if ext.endswith('.sig') and not unsigned:
+                    # If we found a signed specfile at the root, try to verify
+                    # the signature immediately.  We will not download the
+                    # tarball if we could not verify the signature.
+                    tried_to_verify_sigs.append(specfile_url)
+                    signature_verified = try_verify(local_specfile_path)
+                    if not signature_verified:
+                        tty.warn("Failed to verify: {0}".format(specfile_url))
+
+                if unsigned or signature_verified or not ext.endswith('.sig'):
+                    # We will download the tarball in one of three cases:
+                    #     1. user asked for --no-check-signature
+                    #     2. user didn't ask for --no-check-signature, but we
+                    #     found a spec.json.sig and verified the signature already
+                    #     3. neither of the first two cases are true, but this file
+                    #     is *not* a signed json (not a spec.json.sig file).  That
+                    #     means we already looked at all the mirrors and either didn't
+                    #     find any .sig files or couldn't verify any of them.  But it
+                    #     is still possible to find an old style binary package where
+                    #     the signature is a detached .asc file in the outer archive
+                    #     of the tarball, and in that case, the only way to know is to
+                    #     download the tarball.  This is a deprecated use case, so if
+                    #     something goes wrong during the extraction process (can't
+                    #     verify signature, checksum doesn't match) we will fail at
+                    #     that point instead of trying to download more tarballs from
+                    #     the remaining mirrors, looking for one we can use.
+                    tarball_stage = try_fetch(spackfile_url)
+                    if tarball_stage:
+                        return {
+                            'tarball_stage': tarball_stage,
+                            'specfile_stage': local_specfile_stage,
+                            'signature_verified': signature_verified,
+                        }
+
+                local_specfile_stage.destroy()
+
+    # Falling through the nested loops meeans we exhaustively searched
+    # for all known kinds of spec files on all mirrors and did not find
+    # an acceptable one for which we could download a tarball.
+
+    if tried_to_verify_sigs:
+        raise NoVerifyException(("Spack found new style signed binary packages, "
+                                 "but was unable to verify any of them.  Please "
+                                 "obtain and trust the correct public key.  If "
+                                 "these are public spack binaries, please see the "
+                                 "spack docs for locations where keys can be found."))

     tty.warn("download_tarball() was unable to download " +
              "{0} from any configured mirrors".format(spec))
@@ -1330,11 +1483,25 @@ def is_backup_file(file):

     # If we are not installing back to the same install tree do the relocation
     if old_prefix != new_prefix:
-        files_to_relocate = [os.path.join(workdir, filename)
-                             for filename in buildinfo.get('relocate_binaries')
-                             ]
+        # Relocate links to the new install prefix
+        links = [link for link in buildinfo.get('relocate_links', [])]
+        relocate.relocate_links(
+            links, old_layout_root, old_prefix, new_prefix
+        )
+
+        # For all buildcaches
+        # relocate the install prefixes in text files including dependencies
+        relocate.relocate_text(text_names, prefix_to_prefix_text)

         # If the buildcache was not created with relativized rpaths
-        # do the relocation of path in binaries
+        # do the relocation of rpaths in binaries
+        # TODO: Is this necessary?  How are null-terminated strings handled
+        # in the rpath header?
+        files_to_relocate = [
+            os.path.join(workdir, filename)
+            for filename in buildinfo.get('relocate_binaries')
+        ]

         platform = spack.platforms.by_name(spec.platform)
         if 'macho' in platform.binary_formats:
             relocate.relocate_macho_binaries(files_to_relocate,

@@ -1350,25 +1517,11 @@ def is_backup_file(file):
                                              prefix_to_prefix_bin, rel,
                                              old_prefix,
                                              new_prefix)
-        # Relocate links to the new install prefix
-        links = [link for link in buildinfo.get('relocate_links', [])]
-        relocate.relocate_links(
-            links, old_layout_root, old_prefix, new_prefix
-        )
-
-        # For all buildcaches
-        # relocate the install prefixes in text files including dependencies
-        relocate.relocate_text(text_names, prefix_to_prefix_text)
-
-        paths_to_relocate = [old_prefix, old_layout_root]
-        paths_to_relocate.extend(prefix_to_hash.keys())
-        files_to_relocate = list(filter(
-            lambda pathname: not relocate.file_is_relocatable(
-                pathname, paths_to_relocate=paths_to_relocate),
-            map(lambda filename: os.path.join(workdir, filename),
-                buildinfo['relocate_binaries'])))
         # relocate the install prefixes in binary files including dependencies
-        relocate.relocate_text_bin(files_to_relocate, prefix_to_prefix_bin)
+        # If offsets is None, we will recompute offsets when needed
+        offsets = buildinfo.get('offsets', None)
+        relocate.relocate_text_bin(
+            files_to_relocate, prefix_to_prefix_bin, offsets, workdir)

         # If we are installing back to the same location
         # relocate the sbang location if the spack directory changed
@@ -1377,7 +1530,55 @@ def is_backup_file(file):
         relocate.relocate_text(text_names, prefix_to_prefix_text)


-def extract_tarball(spec, filename, allow_root=False, unsigned=False,
+def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum):
+    stagepath = os.path.dirname(filename)
+    spackfile_name = tarball_name(spec, '.spack')
+    spackfile_path = os.path.join(stagepath, spackfile_name)
+    tarfile_name = tarball_name(spec, '.tar.gz')
+    tarfile_path = os.path.join(extract_to, tarfile_name)
+    deprecated_yaml_name = tarball_name(spec, '.spec.yaml')
+    deprecated_yaml_path = os.path.join(extract_to, deprecated_yaml_name)
+    json_name = tarball_name(spec, '.spec.json')
+    json_path = os.path.join(extract_to, json_name)
+    with closing(tarfile.open(spackfile_path, 'r')) as tar:
+        tar.extractall(extract_to)
+    # some buildcache tarfiles use bzip2 compression
+    if not os.path.exists(tarfile_path):
+        tarfile_name = tarball_name(spec, '.tar.bz2')
+        tarfile_path = os.path.join(extract_to, tarfile_name)
+
+    if os.path.exists(json_path):
+        specfile_path = json_path
+    elif os.path.exists(deprecated_yaml_path):
+        specfile_path = deprecated_yaml_path
+    else:
+        raise ValueError('Cannot find spec file for {0}.'.format(extract_to))
+
+    if not unsigned:
+        if os.path.exists('%s.asc' % specfile_path):
+            suppress = config.get('config:suppress_gpg_warnings', False)
+            try:
+                spack.util.gpg.verify('%s.asc' % specfile_path, specfile_path, suppress)
+            except Exception:
+                raise NoVerifyException("Spack was unable to verify package "
+                                        "signature, please obtain and trust the "
+                                        "correct public key.")
+        else:
+            raise UnsignedPackageException(
+                "To install unsigned packages, use the --no-check-signature option.")
+    # get the sha256 checksum of the tarball
+    local_checksum = checksum_tarball(tarfile_path)
+
+    # if the checksums don't match don't install
+    if local_checksum != remote_checksum['hash']:
+        raise NoChecksumException(
+            "Package tarball failed checksum verification.\n"
+            "It cannot be installed.")
+
+    return tarfile_path
+
+
+def extract_tarball(spec, download_result, allow_root=False, unsigned=False,
                     force=False):
     """
     extract binary tarball for given package into install area
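A sketch, not part of the diff: checksum_tarball, used above, hashes the downloaded tarball. A minimal streaming sha256 version with hashlib is shown below; the behavior is assumed equivalent, though the real helper may use a different chunk size:

import hashlib


def checksum_tarball(file_path, chunk_size=65536):
    """Stream a file through sha256 and return the hex digest."""
    digest = hashlib.sha256()
    with open(file_path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()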
@@ -1388,66 +1589,56 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
     else:
         raise NoOverwriteException(str(spec.prefix))

-    tmpdir = tempfile.mkdtemp()
-    stagepath = os.path.dirname(filename)
-    spackfile_name = tarball_name(spec, '.spack')
-    spackfile_path = os.path.join(stagepath, spackfile_name)
-    tarfile_name = tarball_name(spec, '.tar.gz')
-    tarfile_path = os.path.join(tmpdir, tarfile_name)
-    specfile_is_json = True
-    deprecated_yaml_name = tarball_name(spec, '.spec.yaml')
-    deprecated_yaml_path = os.path.join(tmpdir, deprecated_yaml_name)
-    json_name = tarball_name(spec, '.spec.json')
-    json_path = os.path.join(tmpdir, json_name)
-    with closing(tarfile.open(spackfile_path, 'r')) as tar:
-        tar.extractall(tmpdir)
-    # some buildcache tarfiles use bzip2 compression
-    if not os.path.exists(tarfile_path):
-        tarfile_name = tarball_name(spec, '.tar.bz2')
-        tarfile_path = os.path.join(tmpdir, tarfile_name)
+    specfile_path = download_result['specfile_stage'].save_filename

-    if os.path.exists(json_path):
-        specfile_path = json_path
-    elif os.path.exists(deprecated_yaml_path):
-        specfile_is_json = False
-        specfile_path = deprecated_yaml_path
-    else:
-        raise ValueError('Cannot find spec file for {0}.'.format(tmpdir))
-
-    if not unsigned:
-        if os.path.exists('%s.asc' % specfile_path):
-            try:
-                suppress = config.get('config:suppress_gpg_warnings', False)
-                spack.util.gpg.verify(
-                    '%s.asc' % specfile_path, specfile_path, suppress)
-            except Exception as e:
-                shutil.rmtree(tmpdir)
-                raise e
-        else:
-            shutil.rmtree(tmpdir)
-            raise NoVerifyException(
-                "Package spec file failed signature verification.\n"
-                "Use spack buildcache keys to download "
-                "and install a key for verification from the mirror.")
-    # get the sha256 checksum of the tarball
-    checksum = checksum_tarball(tarfile_path)
-
-    # get the sha256 checksum recorded at creation
     spec_dict = {}
     with open(specfile_path, 'r') as inputfile:
         content = inputfile.read()
-        if specfile_is_json:
+        if specfile_path.endswith('.json.sig'):
+            spec_dict = Spec.extract_json_from_clearsig(content)
+        elif specfile_path.endswith('.json'):
             spec_dict = sjson.load(content)
         else:
             spec_dict = syaml.load(content)
-    bchecksum = spec_dict['binary_cache_checksum']

-    # if the checksums don't match don't install
-    if bchecksum['hash'] != checksum:
-        shutil.rmtree(tmpdir)
-        raise NoChecksumException(
-            "Package tarball failed checksum verification.\n"
-            "It cannot be installed.")
+    bchecksum = spec_dict['binary_cache_checksum']
+    filename = download_result['tarball_stage'].save_filename
+    signature_verified = download_result['signature_verified']
+    tmpdir = None
+
+    if ('buildcache_layout_version' not in spec_dict or
+            int(spec_dict['buildcache_layout_version']) < 1):
+        # Handle the older buildcache layout where the .spack file
+        # contains a spec json/yaml, maybe an .asc file (signature),
+        # and another tarball containing the actual install tree.
+        tmpdir = tempfile.mkdtemp()
+        try:
+            tarfile_path = _extract_inner_tarball(
+                spec, filename, tmpdir, unsigned, bchecksum)
+        except Exception as e:
+            _delete_staged_downloads(download_result)
+            shutil.rmtree(tmpdir)
+            raise e
+    else:
+        # Newer buildcache layout: the .spack file contains just
+        # in the install tree, the signature, if it exists, is
+        # wrapped around the spec.json at the root.  If sig verify
+        # was required, it was already done before downloading
+        # the tarball.
+        tarfile_path = filename
+
+        if not unsigned and not signature_verified:
+            raise UnsignedPackageException(
+                "To install unsigned packages, use the --no-check-signature option.")
+
+        # compute the sha256 checksum of the tarball
+        local_checksum = checksum_tarball(tarfile_path)
+
+        # if the checksums don't match don't install
+        if local_checksum != bchecksum['hash']:
+            _delete_staged_downloads(download_result)
+            raise NoChecksumException(
+                "Package tarball failed checksum verification.\n"
+                "It cannot be installed.")

     new_relative_prefix = str(os.path.relpath(spec.prefix,
                                               spack.store.layout.root))
@@ -1472,11 +1663,13 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
     try:
         tar.extractall(path=extract_tmp)
     except Exception as e:
+        _delete_staged_downloads(download_result)
         shutil.rmtree(extracted_dir)
         raise e
     try:
         shutil.move(extracted_dir, spec.prefix)
     except Exception as e:
+        _delete_staged_downloads(download_result)
         shutil.rmtree(extracted_dir)
         raise e
     os.remove(tarfile_path)

@@ -1495,9 +1688,11 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
         spec_id = spec.format('{name}/{hash:7}')
         tty.warn('No manifest file in tarball for spec %s' % spec_id)
     finally:
-        shutil.rmtree(tmpdir)
+        if tmpdir:
+            shutil.rmtree(tmpdir)
+        if os.path.exists(filename):
+            os.remove(filename)
+        _delete_staged_downloads(download_result)


 def install_root_node(spec, allow_root, unsigned=False, force=False, sha256=None):

@@ -1525,21 +1720,23 @@ def install_root_node(spec, allow_root, unsigned=False, force=False, sha256=None
         warnings.warn("Package for spec {0} already installed.".format(spec.format()))
         return

-    tarball = download_tarball(spec)
-    if not tarball:
+    download_result = download_tarball(spec, unsigned)
+    if not download_result:
         msg = 'download of binary cache file for spec "{0}" failed'
         raise RuntimeError(msg.format(spec.format()))

     if sha256:
         checker = spack.util.crypto.Checker(sha256)
         msg = 'cannot verify checksum for "{0}" [expected={1}]'
-        msg = msg.format(tarball, sha256)
-        if not checker.check(tarball):
+        tarball_path = download_result['tarball_stage'].save_filename
+        msg = msg.format(tarball_path, sha256)
+        if not checker.check(tarball_path):
+            _delete_staged_downloads(download_result)
             raise spack.binary_distribution.NoChecksumException(msg)
         tty.debug('Verified SHA256 checksum of the build cache')

     tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
-    extract_tarball(spec, tarball, allow_root, unsigned, force)
+    extract_tarball(spec, download_result, allow_root, unsigned, force)
     spack.hooks.post_install(spec)
     spack.store.db.add(spec, spack.store.layout)
@@ -1565,6 +1762,8 @@ def try_direct_fetch(spec, mirrors=None):
     """
     deprecated_specfile_name = tarball_name(spec, '.spec.yaml')
     specfile_name = tarball_name(spec, '.spec.json')
+    signed_specfile_name = tarball_name(spec, '.spec.json.sig')
+    specfile_is_signed = False
     specfile_is_json = True
     found_specs = []

@@ -1573,24 +1772,35 @@ def try_direct_fetch(spec, mirrors=None):
             mirror.fetch_url, _build_cache_relative_path, deprecated_specfile_name)
         buildcache_fetch_url_json = url_util.join(
             mirror.fetch_url, _build_cache_relative_path, specfile_name)
+        buildcache_fetch_url_signed_json = url_util.join(
+            mirror.fetch_url, _build_cache_relative_path, signed_specfile_name)
         try:
-            _, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
+            _, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json)
+            specfile_is_signed = True
         except (URLError, web_util.SpackWebError, HTTPError) as url_err:
             try:
-                _, _, fs = web_util.read_from_url(buildcache_fetch_url_yaml)
-                specfile_is_json = False
-            except (URLError, web_util.SpackWebError, HTTPError) as url_err_y:
-                tty.debug('Did not find {0} on {1}'.format(
-                    specfile_name, buildcache_fetch_url_json), url_err)
-                tty.debug('Did not find {0} on {1}'.format(
-                    specfile_name, buildcache_fetch_url_yaml), url_err_y)
-                continue
+                _, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
+            except (URLError, web_util.SpackWebError, HTTPError) as url_err_x:
+                try:
+                    _, _, fs = web_util.read_from_url(buildcache_fetch_url_yaml)
+                    specfile_is_json = False
+                except (URLError, web_util.SpackWebError, HTTPError) as url_err_y:
+                    tty.debug('Did not find {0} on {1}'.format(
+                        specfile_name, buildcache_fetch_url_signed_json), url_err)
+                    tty.debug('Did not find {0} on {1}'.format(
+                        specfile_name, buildcache_fetch_url_json), url_err_x)
+                    tty.debug('Did not find {0} on {1}'.format(
+                        specfile_name, buildcache_fetch_url_yaml), url_err_y)
+                    continue
         specfile_contents = codecs.getreader('utf-8')(fs).read()

         # read the spec from the build cache file. All specs in build caches
         # are concrete (as they are built) so we need to mark this spec
         # concrete on read-in.
-        if specfile_is_json:
+        if specfile_is_signed:
+            specfile_json = Spec.extract_json_from_clearsig(specfile_contents)
+            fetched_spec = Spec.from_dict(specfile_json)
+        elif specfile_is_json:
             fetched_spec = Spec.from_json(specfile_contents)
         else:
             fetched_spec = Spec.from_yaml(specfile_contents)
@@ -1627,7 +1837,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
         tty.debug("No Spack mirrors are currently configured")
         return {}

-    results = binary_index.find_built_spec(spec)
+    results = binary_index.find_built_spec(spec, mirrors_to_check=mirrors_to_check)

     # Maybe we just didn't have the latest information from the mirror, so
     # try to fetch directly, unless we are only considering the indices.

@@ -1917,7 +2127,8 @@ def download_single_spec(
             'path': local_tarball_path,
             'required': True,
         }, {
-            'url': [tarball_name(concrete_spec, '.spec.json'),
+            'url': [tarball_name(concrete_spec, '.spec.json.sig'),
+                    tarball_name(concrete_spec, '.spec.json'),
                     tarball_name(concrete_spec, '.spec.yaml')],
             'path': destination,
             'required': True,
@@ -5,6 +5,7 @@
 from __future__ import print_function

 import contextlib
+import copy
 import fnmatch
 import functools
 import json

@@ -37,6 +38,11 @@
 import spack.util.environment
 import spack.util.executable
 import spack.util.path
+import spack.util.spack_yaml
+import spack.util.url
+
+#: Name of the file containing metadata about the bootstrapping source
+METADATA_YAML_FILENAME = 'metadata.yaml'

 #: Map a bootstrapper type to the corresponding class
 _bootstrap_methods = {}
@@ -204,12 +210,43 @@ def _executables_in_store(executables, query_spec, query_info=None):
     return False


-@_bootstrapper(type='buildcache')
-class _BuildcacheBootstrapper(object):
-    """Install the software needed during bootstrapping from a buildcache."""
+class _BootstrapperBase(object):
+    """Base class to derive types that can bootstrap software for Spack"""
+    config_scope_name = ''
+
+    def __init__(self, conf):
+        self.name = conf['name']
+        self.url = conf['info']['url']
+
+    @property
+    def mirror_url(self):
+        # Absolute paths
+        if os.path.isabs(self.url):
+            return spack.util.url.format(self.url)
+
+        # Check for :// and assume it's an url if we find it
+        if '://' in self.url:
+            return self.url
+
+        # Otherwise, it's a relative path
+        return spack.util.url.format(os.path.join(self.metadata_dir, self.url))
+
+    @property
+    def mirror_scope(self):
+        return spack.config.InternalConfigScope(
+            self.config_scope_name, {'mirrors:': {self.name: self.mirror_url}}
+        )
+
+
+@_bootstrapper(type='buildcache')
+class _BuildcacheBootstrapper(_BootstrapperBase):
+    """Install the software needed during bootstrapping from a buildcache."""
+
+    config_scope_name = 'bootstrap_buildcache'

     def __init__(self, conf):
+        super(_BuildcacheBootstrapper, self).__init__(conf)
+        self.metadata_dir = spack.util.path.canonicalize_path(conf['metadata'])
         self.last_search = None

     @staticmethod

@@ -232,9 +269,8 @@ def _spec_and_platform(abstract_spec_str):
     def _read_metadata(self, package_name):
         """Return metadata about the given package."""
         json_filename = '{0}.json'.format(package_name)
-        json_path = os.path.join(
-            spack.paths.share_path, 'bootstrap', self.name, json_filename
-        )
+        json_dir = self.metadata_dir
+        json_path = os.path.join(json_dir, json_filename)
         with open(json_path) as f:
             data = json.load(f)
         return data

@@ -308,12 +344,6 @@ def _install_and_test(
                 return True
         return False

-    @property
-    def mirror_scope(self):
-        return spack.config.InternalConfigScope(
-            'bootstrap_buildcache', {'mirrors:': {self.name: self.url}}
-        )
-
     def try_import(self, module, abstract_spec_str):
         test_fn, info = functools.partial(_try_import_from_store, module), {}
         if test_fn(query_spec=abstract_spec_str, query_info=info):
@@ -343,9 +373,13 @@ def try_search_path(self, executables, abstract_spec_str):


 @_bootstrapper(type='install')
-class _SourceBootstrapper(object):
+class _SourceBootstrapper(_BootstrapperBase):
     """Install the software needed during bootstrapping from sources."""
+    config_scope_name = 'bootstrap_source'

     def __init__(self, conf):
+        super(_SourceBootstrapper, self).__init__(conf)
+        self.metadata_dir = spack.util.path.canonicalize_path(conf['metadata'])
         self.conf = conf
         self.last_search = None

@@ -378,7 +412,8 @@ def try_import(self, module, abstract_spec_str):
         tty.debug(msg.format(module, abstract_spec_str))

         # Install the spec that should make the module importable
-        concrete_spec.package.do_install(fail_fast=True)
+        with spack.config.override(self.mirror_scope):
+            concrete_spec.package.do_install(fail_fast=True)

         if _try_import_from_store(module, query_spec=concrete_spec, query_info=info):
             self.last_search = info

@@ -391,6 +426,8 @@ def try_search_path(self, executables, abstract_spec_str):
             self.last_search = info
             return True

+        tty.info("Bootstrapping {0} from sources".format(abstract_spec_str))
+
         # If we compile code from sources detecting a few build tools
         # might reduce compilation time by a fair amount
         _add_externals_if_missing()

@@ -403,7 +440,8 @@ def try_search_path(self, executables, abstract_spec_str):

         msg = "[BOOTSTRAP] Try installing '{0}' from sources"
         tty.debug(msg.format(abstract_spec_str))
-        concrete_spec.package.do_install()
+        with spack.config.override(self.mirror_scope):
+            concrete_spec.package.do_install()
         if _executables_in_store(executables, concrete_spec, query_info=info):
             self.last_search = info
             return True
@@ -486,11 +524,10 @@ def ensure_module_importable_or_raise(module, abstract_spec=None):
         return

     abstract_spec = abstract_spec or module
-    source_configs = spack.config.get('bootstrap:sources', [])

     h = GroupedExceptionHandler()

-    for current_config in source_configs:
+    for current_config in bootstrapping_sources():
         with h.forward(current_config['name']):
             _validate_source_is_trusted(current_config)

@@ -529,11 +566,10 @@ def ensure_executables_in_path_or_raise(executables, abstract_spec):
         return cmd

     executables_str = ', '.join(executables)
-    source_configs = spack.config.get('bootstrap:sources', [])

     h = GroupedExceptionHandler()

-    for current_config in source_configs:
+    for current_config in bootstrapping_sources():
         with h.forward(current_config['name']):
             _validate_source_is_trusted(current_config)
@@ -818,6 +854,19 @@ def ensure_flake8_in_path_or_raise():
|
||||
return ensure_executables_in_path_or_raise([executable], abstract_spec=root_spec)
|
||||
|
||||
|
||||
def all_root_specs(development=False):
|
||||
"""Return a list of all the root specs that may be used to bootstrap Spack.
|
||||
|
||||
Args:
|
||||
development (bool): if True include dev dependencies
|
||||
"""
|
||||
specs = [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec()]
|
||||
if development:
|
||||
specs += [isort_root_spec(), mypy_root_spec(),
|
||||
black_root_spec(), flake8_root_spec()]
|
||||
return specs
|
||||
|
||||
|
||||
def _missing(name, purpose, system_only=True):
|
||||
"""Message to be printed if an executable is not found"""
|
||||
msg = '[{2}] MISSING "{0}": {1}'
|
||||
@@ -955,3 +1004,23 @@ def status_message(section):
|
||||
msg += '\n'
|
||||
msg = msg.format(pass_token if not missing_software else fail_token)
|
||||
return msg, missing_software
|
||||
|
||||
|
||||
def bootstrapping_sources(scope=None):
|
||||
"""Return the list of configured sources of software for bootstrapping Spack
|
||||
|
||||
Args:
|
||||
scope (str or None): if a valid configuration scope is given, return the
|
||||
list only from that scope
|
||||
"""
|
||||
source_configs = spack.config.get('bootstrap:sources', default=None, scope=scope)
|
||||
source_configs = source_configs or []
|
||||
list_of_sources = []
|
||||
for entry in source_configs:
|
||||
current = copy.copy(entry)
|
||||
metadata_dir = spack.util.path.canonicalize_path(entry['metadata'])
|
||||
metadata_yaml = os.path.join(metadata_dir, METADATA_YAML_FILENAME)
|
||||
with open(metadata_yaml) as f:
|
||||
current.update(spack.util.spack_yaml.load(f))
|
||||
list_of_sources.append(current)
|
||||
return list_of_sources
|
||||
|
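For reference, bootstrapping_sources() above merges each configured entry with the metadata.yaml found in its metadata directory. A self-contained sketch of that merge, using PyYAML as a stand-in for spack.util.spack_yaml and assuming 'metadata.yaml' for METADATA_YAML_FILENAME:

import copy
import os

import yaml  # stand-in for spack.util.spack_yaml

def bootstrapping_sources_sketch(source_configs):
    # Overlay each configured entry with the keys from its metadata.yaml,
    # mirroring the loop in bootstrapping_sources() above.
    sources = []
    for entry in source_configs:
        current = copy.copy(entry)
        metadata_yaml = os.path.join(entry['metadata'], 'metadata.yaml')
        with open(metadata_yaml) as f:
            current.update(yaml.safe_load(f))
        sources.append(current)
    return sources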
@@ -55,7 +55,7 @@
import spack.config
import spack.install_test
import spack.main
import spack.package
import spack.package_base
import spack.paths
import spack.platforms
import spack.repo
@@ -722,7 +722,7 @@ def get_std_cmake_args(pkg):
package were a CMakePackage instance.

Args:
pkg (spack.package.PackageBase): package under consideration
pkg (spack.package_base.PackageBase): package under consideration

Returns:
list: arguments for cmake
@@ -738,7 +738,7 @@ def get_std_meson_args(pkg):
package were a MesonPackage instance.

Args:
pkg (spack.package.PackageBase): package under consideration
pkg (spack.package_base.PackageBase): package under consideration

Returns:
list: arguments for meson
@@ -748,12 +748,12 @@ def get_std_meson_args(pkg):

def parent_class_modules(cls):
"""
Get list of superclass modules that descend from spack.package.PackageBase
Get list of superclass modules that descend from spack.package_base.PackageBase

Includes cls.__module__
"""
if (not issubclass(cls, spack.package.PackageBase) or
issubclass(spack.package.PackageBase, cls)):
if (not issubclass(cls, spack.package_base.PackageBase) or
issubclass(spack.package_base.PackageBase, cls)):
return []
result = []
module = sys.modules.get(cls.__module__)
@@ -771,7 +771,7 @@ def load_external_modules(pkg):
associated with them.

Args:
pkg (spack.package.PackageBase): package to load deps for
pkg (spack.package_base.PackageBase): package to load deps for
"""
for dep in list(pkg.spec.traverse()):
external_modules = dep.external_modules or []
@@ -1109,7 +1109,7 @@ def start_build_process(pkg, function, kwargs):

Args:

pkg (spack.package.PackageBase): package whose environment we should set up the
pkg (spack.package_base.PackageBase): package whose environment we should set up the
child process for.
function (typing.Callable): argless function to run in the child
process.
@@ -1234,7 +1234,7 @@ def make_stack(tb, stack=None):
if 'self' in frame.f_locals:
# Find the first proper subclass of PackageBase.
obj = frame.f_locals['self']
if isinstance(obj, spack.package.PackageBase):
if isinstance(obj, spack.package_base.PackageBase):
break

# We found obj, the Package implementation we care about.
@@ -9,7 +9,7 @@

from spack.build_systems.autotools import AutotoolsPackage
from spack.directives import extends
from spack.package import ExtensionError
from spack.package_base import ExtensionError
from spack.util.executable import which


@@ -16,7 +16,7 @@
from spack.build_environment import InstallError
from spack.directives import conflicts, depends_on
from spack.operating_systems.mac_os import macos_version
from spack.package import PackageBase, run_after, run_before
from spack.package_base import PackageBase, run_after, run_before
from spack.util.executable import Executable
from spack.version import Version

@@ -8,7 +8,7 @@
from llnl.util.filesystem import install, mkdirp

from spack.build_systems.cmake import CMakePackage
from spack.package import run_after
from spack.package_base import run_after


def cmake_cache_path(name, value, comment=""):
@@ -210,6 +210,10 @@ def std_initconfig_entries(self):
"#------------------{0}\n".format("-" * 60),
]

def initconfig_package_entries(self):
"""This method is to be overwritten by the package"""
return []

def initconfig(self, spec, prefix):
cache_entries = (self.std_initconfig_entries() +
self.initconfig_compiler_entries() +

@@ -18,7 +18,7 @@

import spack.build_environment
from spack.directives import conflicts, depends_on, variant
from spack.package import InstallError, PackageBase, run_after
from spack.package_base import InstallError, PackageBase, run_after
from spack.util.path import convert_to_posix_path

# Regex to extract the primary generator from the CMake generator

@@ -6,7 +6,7 @@
import spack.variant
from spack.directives import conflicts, depends_on, variant
from spack.multimethod import when
from spack.package import PackageBase
from spack.package_base import PackageBase


class CudaPackage(PackageBase):
@@ -37,6 +37,7 @@ class CudaPackage(PackageBase):
variant('cuda_arch',
description='CUDA architecture',
values=spack.variant.any_combination_of(*cuda_arch_values),
sticky=True,
when='+cuda')

# https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#nvcc-examples

@@ -3,14 +3,16 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import spack.package
from typing import Optional

import spack.package_base
import spack.util.url


class GNUMirrorPackage(spack.package.PackageBase):
class GNUMirrorPackage(spack.package_base.PackageBase):
"""Mixin that takes care of setting url and mirrors for GNU packages."""
#: Path of the package in a GNU mirror
gnu_mirror_path = None
gnu_mirror_path = None # type: Optional[str]

#: List of GNU mirrors used by Spack
base_mirrors = [
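The `# type: Optional[str]` comments introduced above (and repeated in the other mixins below) are PEP 484 type comments: mypy reads them exactly like annotations, while the syntax stays valid on Python 2, which Spack still supported at this point. The two equivalent spellings:

from typing import Optional

class Example:
    gnu_mirror_path = None  # type: Optional[str]  # comment form, Python 2 safe
    # On Python 3 only, the same declaration could be written as:
    # gnu_mirror_path: Optional[str] = None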
@@ -26,7 +26,7 @@

import spack.error
from spack.build_environment import dso_suffix
from spack.package import InstallError, PackageBase, run_after
from spack.package_base import InstallError, PackageBase, run_after
from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable
from spack.util.prefix import Prefix
@@ -1115,7 +1115,7 @@ def _setup_dependent_env_callback(
raise InstallError('compilers_of_client arg required for MPI')

def setup_dependent_package(self, module, dep_spec):
# https://spack.readthedocs.io/en/latest/spack.html#spack.package.PackageBase.setup_dependent_package
# https://spack.readthedocs.io/en/latest/spack.html#spack.package_base.PackageBase.setup_dependent_package
# Reminder: "module" refers to Python module.
# Called before the install() method of dependents.


@@ -10,7 +10,7 @@

from spack.directives import depends_on, extends
from spack.multimethod import when
from spack.package import PackageBase
from spack.package_base import PackageBase
from spack.util.executable import Executable


@@ -11,7 +11,7 @@
from llnl.util.filesystem import working_dir

from spack.directives import conflicts
from spack.package import PackageBase, run_after
from spack.package_base import PackageBase, run_after


class MakefilePackage(PackageBase):

@@ -7,7 +7,7 @@
from llnl.util.filesystem import install_tree, working_dir

from spack.directives import depends_on
from spack.package import PackageBase, run_after
from spack.package_base import PackageBase, run_after
from spack.util.executable import which


@@ -11,7 +11,7 @@
from llnl.util.filesystem import working_dir

from spack.directives import depends_on, variant
from spack.package import PackageBase, run_after
from spack.package_base import PackageBase, run_after


class MesonPackage(PackageBase):

@@ -6,7 +6,7 @@
import inspect

from spack.directives import extends
from spack.package import PackageBase, run_after
from spack.package_base import PackageBase, run_after


class OctavePackage(PackageBase):

@@ -14,7 +14,7 @@

from llnl.util.filesystem import find_headers, find_libraries, join_path

from spack.package import Package
from spack.package_base import Package
from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable

@@ -10,7 +10,7 @@
from llnl.util.filesystem import filter_file

from spack.directives import extends
from spack.package import PackageBase, run_after
from spack.package_base import PackageBase, run_after
from spack.util.executable import Executable

@@ -6,6 +6,7 @@
import os
import re
import shutil
from typing import Optional

import llnl.util.tty as tty
from llnl.util.filesystem import (
@@ -19,13 +20,13 @@
from llnl.util.lang import match_predicate

from spack.directives import depends_on, extends
from spack.package import PackageBase, run_after
from spack.package_base import PackageBase, run_after


class PythonPackage(PackageBase):
"""Specialized class for packages that are built using pip."""
#: Package name, version, and extension on PyPI
pypi = None
pypi = None # type: Optional[str]

maintainers = ['adamjstewart']

@@ -46,7 +47,7 @@ class PythonPackage(PackageBase):
# package manually
depends_on('py-wheel', type='build')

py_namespace = None
py_namespace = None # type: Optional[str]

@staticmethod
def _std_args(cls):

@@ -9,7 +9,7 @@
from llnl.util.filesystem import working_dir

from spack.directives import depends_on
from spack.package import PackageBase, run_after
from spack.package_base import PackageBase, run_after


class QMakePackage(PackageBase):

@@ -5,9 +5,10 @@


import inspect
from typing import Optional

from spack.directives import extends
from spack.package import PackageBase, run_after
from spack.package_base import PackageBase, run_after


class RPackage(PackageBase):
@@ -28,10 +29,10 @@ class RPackage(PackageBase):
# package attributes that can be expanded to set the homepage, url,
# list_url, and git values
# For CRAN packages
cran = None
cran = None # type: Optional[str]

# For Bioconductor packages
bioc = None
bioc = None # type: Optional[str]

maintainers = ['glennpj']


@@ -3,13 +3,14 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
from typing import Optional

import llnl.util.tty as tty
from llnl.util.filesystem import working_dir

from spack.build_environment import SPACK_NO_PARALLEL_MAKE, determine_number_of_jobs
from spack.directives import extends
from spack.package import PackageBase
from spack.package_base import PackageBase
from spack.util.environment import env_flag
from spack.util.executable import Executable, ProcessError

@@ -36,8 +37,8 @@ class RacketPackage(PackageBase):
extends('racket')

pkgs = False
subdirectory = None
name = None
subdirectory = None # type: Optional[str]
name = None # type: Optional[str]
parallel = True

@property

@@ -77,7 +77,7 @@

import spack.variant
from spack.directives import conflicts, depends_on, variant
from spack.package import PackageBase
from spack.package_base import PackageBase


class ROCmPackage(PackageBase):

@@ -7,7 +7,7 @@
import inspect

from spack.directives import extends
from spack.package import PackageBase, run_after
from spack.package_base import PackageBase, run_after


class RubyPackage(PackageBase):

@@ -7,7 +7,7 @@
import inspect

from spack.directives import depends_on
from spack.package import PackageBase, run_after
from spack.package_base import PackageBase, run_after


class SConsPackage(PackageBase):

@@ -11,7 +11,7 @@
from llnl.util.filesystem import find, join_path, working_dir

from spack.directives import depends_on, extends
from spack.package import PackageBase, run_after
from spack.package_base import PackageBase, run_after


class SIPPackage(PackageBase):

@@ -3,15 +3,17 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import spack.package
from typing import Optional

import spack.package_base
import spack.util.url


class SourceforgePackage(spack.package.PackageBase):
class SourceforgePackage(spack.package_base.PackageBase):
"""Mixin that takes care of setting url and mirrors for Sourceforge
packages."""
#: Path of the package in a Sourceforge mirror
sourceforge_mirror_path = None
sourceforge_mirror_path = None # type: Optional[str]

#: List of Sourceforge mirrors used by Spack
base_mirrors = [

@@ -2,16 +2,17 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from typing import Optional

import spack.package
import spack.package_base
import spack.util.url


class SourcewarePackage(spack.package.PackageBase):
class SourcewarePackage(spack.package_base.PackageBase):
"""Mixin that takes care of setting url and mirrors for Sourceware.org
packages."""
#: Path of the package in a Sourceware mirror
sourceware_mirror_path = None
sourceware_mirror_path = None # type: Optional[str]

#: List of Sourceware mirrors used by Spack
base_mirrors = [

@@ -9,7 +9,7 @@
from llnl.util.filesystem import working_dir

from spack.directives import depends_on
from spack.package import PackageBase, run_after
from spack.package_base import PackageBase, run_after


class WafPackage(PackageBase):

@@ -3,15 +3,17 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import spack.package
from typing import Optional

import spack.package_base
import spack.util.url


class XorgPackage(spack.package.PackageBase):
class XorgPackage(spack.package_base.PackageBase):
"""Mixin that takes care of setting url and mirrors for x.org
packages."""
#: Path of the package in a x.org mirror
xorg_mirror_path = None
xorg_mirror_path = None # type: Optional[str]

#: List of x.org mirrors used by Spack
# Note: x.org mirrors are a bit tricky, since many are out-of-sync or off.
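All of the build-system and mixin hunks above apply one mechanical rename: the PackageBase machinery moves from spack.package to spack.package_base. For out-of-tree code that must run on both sides of the rename, a hedged compatibility shim (the editor's illustration, not part of this series) could look like:

try:
    from spack.package_base import PackageBase  # after this series
except ImportError:
    from spack.package import PackageBase  # older Spack releases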
@@ -33,7 +33,6 @@
import spack.util.executable as exe
import spack.util.gpg as gpg_util
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
import spack.util.web as web_util
from spack.error import SpackError
from spack.spec import Spec
@@ -42,10 +41,8 @@
'always',
]

SPACK_PR_MIRRORS_ROOT_URL = 's3://spack-binaries-prs'
SPACK_SHARED_PR_MIRROR_URL = url_util.join(SPACK_PR_MIRRORS_ROOT_URL,
'shared_pr_mirror')
TEMP_STORAGE_MIRROR_NAME = 'ci_temporary_mirror'
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]

spack_gpg = spack.main.SpackCommand('gpg')
spack_compiler = spack.main.SpackCommand('compiler')
@@ -199,6 +196,11 @@ def _get_cdash_build_name(spec, build_group):
spec.name, spec.version, spec.compiler, spec.architecture, build_group)


def _remove_reserved_tags(tags):
"""Convenience function to strip reserved tags from jobs"""
return [tag for tag in tags if tag not in SPACK_RESERVED_TAGS]

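A quick illustration of the helper above, using the SPACK_RESERVED_TAGS list defined earlier in this file:

SPACK_RESERVED_TAGS = ["public", "protected", "notary"]

def _remove_reserved_tags(tags):
    """Convenience function to strip reserved tags from jobs"""
    return [tag for tag in tags if tag not in SPACK_RESERVED_TAGS]

print(_remove_reserved_tags(["spack", "public", "x86_64"]))  # ['spack', 'x86_64']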

def _get_spec_string(spec):
format_elements = [
'{name}{@version}',
@@ -231,8 +233,10 @@ def _add_dependency(spec_label, dep_label, deps):
deps[spec_label].add(dep_label)


def _get_spec_dependencies(specs, deps, spec_labels, check_index_only=False):
spec_deps_obj = _compute_spec_deps(specs, check_index_only=check_index_only)
def _get_spec_dependencies(specs, deps, spec_labels, check_index_only=False,
mirrors_to_check=None):
spec_deps_obj = _compute_spec_deps(specs, check_index_only=check_index_only,
mirrors_to_check=mirrors_to_check)

if spec_deps_obj:
dependencies = spec_deps_obj['dependencies']
@@ -249,7 +253,7 @@ def _get_spec_dependencies(specs, deps, spec_labels, check_index_only=False):
_add_dependency(entry['spec'], entry['depends'], deps)


def stage_spec_jobs(specs, check_index_only=False):
def stage_spec_jobs(specs, check_index_only=False, mirrors_to_check=None):
"""Take a set of release specs and generate a list of "stages", where the
jobs in any stage are dependent only on jobs in previous stages. This
allows us to maximize build parallelism within the gitlab-ci framework.
@@ -261,6 +265,8 @@ def stage_spec_jobs(specs, check_index_only=False):
are up to date on those mirrors. This flag limits that search to
the binary cache indices on those mirrors to speed the process up,
even though there is no guarantee the index is up to date.
mirrors_to_check: Optional mapping giving mirrors to check instead of
any configured mirrors.

Returns: A tuple of information objects describing the specs, dependencies
and stages:
@@ -297,8 +303,8 @@ def _remove_satisfied_deps(deps, satisfied_list):
deps = {}
spec_labels = {}

_get_spec_dependencies(
specs, deps, spec_labels, check_index_only=check_index_only)
_get_spec_dependencies(specs, deps, spec_labels, check_index_only=check_index_only,
mirrors_to_check=mirrors_to_check)

# Save the original deps, as we need to return them at the end of the
# function. In the while loop below, the "dependencies" variable is
@@ -340,7 +346,7 @@ def _print_staging_summary(spec_labels, dependencies, stages):
_get_spec_string(s)))


def _compute_spec_deps(spec_list, check_index_only=False):
def _compute_spec_deps(spec_list, check_index_only=False, mirrors_to_check=None):
"""
Computes all the dependencies for the spec(s) and generates a JSON
object which provides both a list of unique spec names as well as a
@@ -413,7 +419,7 @@ def append_dep(s, d):
continue

up_to_date_mirrors = bindist.get_mirrors_for_spec(
spec=s, index_only=check_index_only)
spec=s, mirrors_to_check=mirrors_to_check, index_only=check_index_only)

skey = _spec_deps_key(s)
spec_labels[skey] = {
@@ -602,8 +608,8 @@ def get_spec_filter_list(env, affected_pkgs, dependencies=True, dependents=True)
def generate_gitlab_ci_yaml(env, print_summary, output_file,
prune_dag=False, check_index_only=False,
run_optimizer=False, use_dependencies=False,
artifacts_root=None):
""" Generate a gitlab yaml file to run a dynamic chile pipeline from
artifacts_root=None, remote_mirror_override=None):
""" Generate a gitlab yaml file to run a dynamic child pipeline from
the spec matrix in the active environment.

Arguments:
@@ -629,6 +635,10 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
artifacts_root (str): Path where artifacts like logs, environment
files (spack.yaml, spack.lock), etc should be written. GitLab
requires this to be within the project directory.
remote_mirror_override (str): Typically only needed when one spack.yaml
is used to populate several mirrors with binaries, based on some
criteria. Spack protected pipelines populate different mirrors based
on branch name, facilitated by this option.
"""
with spack.concretize.disable_compiler_existence_check():
with env.write_transaction():
@@ -678,17 +688,19 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
for s in affected_specs:
tty.debug(' {0}'.format(s.name))

generate_job_name = os.environ.get('CI_JOB_NAME', None)
parent_pipeline_id = os.environ.get('CI_PIPELINE_ID', None)
# Downstream jobs will "need" (depend on, for both scheduling and
# artifacts, which include spack.lock file) this pipeline generation
# job by both name and pipeline id.  If those environment variables
# do not exist, then maybe this is just running in a shell, in which
# case, there is no expectation gitlab will ever run the generated
# pipeline and those environment variables do not matter.
generate_job_name = os.environ.get('CI_JOB_NAME', 'job-does-not-exist')
parent_pipeline_id = os.environ.get('CI_PIPELINE_ID', 'pipeline-does-not-exist')

# Values: "spack_pull_request", "spack_protected_branch", or not set
spack_pipeline_type = os.environ.get('SPACK_PIPELINE_TYPE', None)
is_pr_pipeline = spack_pipeline_type == 'spack_pull_request'

spack_pr_branch = os.environ.get('SPACK_PR_BRANCH', None)
pr_mirror_url = None
if spack_pr_branch:
pr_mirror_url = url_util.join(SPACK_PR_MIRRORS_ROOT_URL,
spack_pr_branch)
spack_buildcache_copy = os.environ.get('SPACK_COPY_BUILDCACHE', None)

if 'mirrors' not in yaml_root or len(yaml_root['mirrors'].values()) < 1:
tty.die('spack ci generate requires an env containing a mirror')
@@ -743,14 +755,25 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
'strip-compilers': False,
})

# Add per-PR mirror (and shared PR mirror) if enabled, as some specs might
# be up to date in one of those and thus not need to be rebuilt.
if pr_mirror_url:
spack.mirror.add(
'ci_pr_mirror', pr_mirror_url, cfg.default_modify_scope())
spack.mirror.add('ci_shared_pr_mirror',
SPACK_SHARED_PR_MIRROR_URL,
cfg.default_modify_scope())
# If a remote mirror override (alternate buildcache destination) was
# specified, add it here in case it has already built hashes we might
# generate.
mirrors_to_check = None
if remote_mirror_override:
if spack_pipeline_type == 'spack_protected_branch':
# Overriding the main mirror in this case might result
# in skipping jobs on a release pipeline because specs are
# up to date in develop. Eventually we want to notice and take
# advantage of this by scheduling a job to copy the spec from
# develop to the release, but until we have that, this makes
# sure we schedule a rebuild job if the spec isn't already in
# override mirror.
mirrors_to_check = {
'override': remote_mirror_override
}
else:
spack.mirror.add(
'ci_pr_mirror', remote_mirror_override, cfg.default_modify_scope())

pipeline_artifacts_dir = artifacts_root
if not pipeline_artifacts_dir:
@@ -825,11 +848,13 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
phase_spec.concretize()
staged_phases[phase_name] = stage_spec_jobs(
concrete_phase_specs,
check_index_only=check_index_only)
check_index_only=check_index_only,
mirrors_to_check=mirrors_to_check)
finally:
# Clean up PR mirror if enabled
if pr_mirror_url:
spack.mirror.remove('ci_pr_mirror', cfg.default_modify_scope())
# Clean up remote mirror override if enabled
if remote_mirror_override:
if spack_pipeline_type != 'spack_protected_branch':
spack.mirror.remove('ci_pr_mirror', cfg.default_modify_scope())

all_job_names = []
output_object = {}
@@ -889,6 +914,14 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,

tags = [tag for tag in runner_attribs['tags']]

if spack_pipeline_type is not None:
# For spack pipelines "public" and "protected" are reserved tags
tags = _remove_reserved_tags(tags)
if spack_pipeline_type == 'spack_protected_branch':
tags.extend(['aws', 'protected'])
elif spack_pipeline_type == 'spack_pull_request':
tags.extend(['public'])

variables = {}
if 'variables' in runner_attribs:
variables.update(runner_attribs['variables'])
@@ -1174,6 +1207,10 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
service_job_config,
cleanup_job)

if 'tags' in cleanup_job:
service_tags = _remove_reserved_tags(cleanup_job['tags'])
cleanup_job['tags'] = service_tags

cleanup_job['stage'] = 'cleanup-temp-storage'
cleanup_job['script'] = [
'spack -d mirror destroy --mirror-url {0}/$CI_PIPELINE_ID'.format(
@@ -1181,9 +1218,74 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
]
cleanup_job['when'] = 'always'
cleanup_job['retry'] = service_job_retries
cleanup_job['interruptible'] = True

output_object['cleanup'] = cleanup_job

if ('signing-job-attributes' in gitlab_ci and
spack_pipeline_type == 'spack_protected_branch'):
# External signing: generate a job to check and sign binary pkgs
stage_names.append('stage-sign-pkgs')
signing_job_config = gitlab_ci['signing-job-attributes']
signing_job = {}

signing_job_attrs_to_copy = [
'image',
'tags',
'variables',
'before_script',
'script',
'after_script',
]

_copy_attributes(signing_job_attrs_to_copy,
signing_job_config,
signing_job)

signing_job_tags = []
if 'tags' in signing_job:
signing_job_tags = _remove_reserved_tags(signing_job['tags'])

for tag in ['aws', 'protected', 'notary']:
if tag not in signing_job_tags:
signing_job_tags.append(tag)
signing_job['tags'] = signing_job_tags

signing_job['stage'] = 'stage-sign-pkgs'
signing_job['when'] = 'always'
signing_job['retry'] = {
'max': 2,
'when': ['always']
}
signing_job['interruptible'] = True

output_object['sign-pkgs'] = signing_job

if spack_buildcache_copy:
# Generate a job to copy the contents from wherever the builds are getting
# pushed to the url specified in the "SPACK_BUILDCACHE_COPY" environment
# variable.
src_url = remote_mirror_override or remote_mirror_url
dest_url = spack_buildcache_copy

stage_names.append('stage-copy-buildcache')
copy_job = {
'stage': 'stage-copy-buildcache',
'tags': ['spack', 'public', 'medium', 'aws', 'x86_64'],
'image': 'ghcr.io/spack/python-aws-bash:0.0.1',
'when': 'on_success',
'interruptible': True,
'retry': service_job_retries,
'script': [
'. ./share/spack/setup-env.sh',
'spack --version',
'aws s3 sync --exclude *index.json* --exclude *pgp* {0} {1}'.format(
src_url, dest_url)
]
}

output_object['copy-mirror'] = copy_job

if rebuild_index_enabled:
# Add a final job to regenerate the index
stage_names.append('stage-rebuild-index')
@@ -1194,9 +1296,13 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
service_job_config,
final_job)

if 'tags' in final_job:
service_tags = _remove_reserved_tags(final_job['tags'])
final_job['tags'] = service_tags

index_target_mirror = mirror_urls[0]
if is_pr_pipeline:
index_target_mirror = pr_mirror_url
if remote_mirror_override:
index_target_mirror = remote_mirror_override

final_job['stage'] = 'stage-rebuild-index'
final_job['script'] = [
@@ -1205,6 +1311,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
]
final_job['when'] = 'always'
final_job['retry'] = service_job_retries
final_job['interruptible'] = True

output_object['rebuild-index'] = final_job

@@ -1237,8 +1344,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
'SPACK_PIPELINE_TYPE': str(spack_pipeline_type)
}

if pr_mirror_url:
output_object['variables']['SPACK_PR_MIRROR_URL'] = pr_mirror_url
if remote_mirror_override:
(output_object['variables']
['SPACK_REMOTE_MIRROR_OVERRIDE']) = remote_mirror_override

spack_stack_name = os.environ.get('SPACK_CI_STACK_NAME', None)
if spack_stack_name:
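Taken together, these ci.py hunks thread one new value through the pipeline: the override passed to generate_gitlab_ci_yaml() is exported as SPACK_REMOTE_MIRROR_OVERRIDE and read back by the rebuild step. A condensed sketch of the decision both sides make (function name and example URL are the editor's; the logic follows the diff):

def pick_mirrors_to_check(remote_mirror_override, spack_pipeline_type):
    # On protected branches only the override mirror is consulted, so a spec
    # already published to develop still gets rebuilt for the release mirror.
    if remote_mirror_override and spack_pipeline_type == 'spack_protected_branch':
        return {'override': remote_mirror_override}
    return None  # fall back to all configured mirrors

print(pick_mirrors_to_check('s3://example-release-mirror', 'spack_protected_branch'))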
@@ -155,31 +155,17 @@ def parse_specs(args, **kwargs):
normalize = kwargs.get('normalize', False)
tests = kwargs.get('tests', False)

try:
sargs = args
if not isinstance(args, six.string_types):
sargs = ' '.join(spack.util.string.quote(args))
specs = spack.spec.parse(sargs)
for spec in specs:
if concretize:
spec.concretize(tests=tests)  # implies normalize
elif normalize:
spec.normalize(tests=tests)
sargs = args
if not isinstance(args, six.string_types):
sargs = ' '.join(spack.util.string.quote(args))
specs = spack.spec.parse(sargs)
for spec in specs:
if concretize:
spec.concretize(tests=tests)  # implies normalize
elif normalize:
spec.normalize(tests=tests)

return specs

except spack.spec.SpecParseError as e:
msg = e.message + "\n" + str(e.string) + "\n"
msg += (e.pos + 2) * " " + "^"
raise spack.error.SpackError(msg)

except spack.error.SpecError as e:

msg = e.message
if e.long_message:
msg += e.long_message

raise spack.error.SpackError(msg)
return specs


def matching_spec_from_env(spec):
@@ -6,7 +6,9 @@

import os.path
import shutil
import tempfile

import llnl.util.filesystem
import llnl.util.tty
import llnl.util.tty.color

@@ -15,6 +17,9 @@
import spack.cmd.common.arguments
import spack.config
import spack.main
import spack.mirror
import spack.spec
import spack.stage
import spack.util.path

description = "manage bootstrap configuration"
@@ -22,6 +27,38 @@
level = "long"


# Tarball to be downloaded if binary packages are requested in a local mirror
BINARY_TARBALL = 'https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.2/bootstrap-buildcache.tar.gz'

#: Subdirectory where to create the mirror
LOCAL_MIRROR_DIR = 'bootstrap_cache'

# Metadata for a generated binary mirror
BINARY_METADATA = {
'type': 'buildcache',
'description': ('Buildcache copied from a public tarball available on Github.'
'The sha256 checksum of binaries is checked before installation.'),
'info': {
'url': os.path.join('..', '..', LOCAL_MIRROR_DIR),
'homepage': 'https://github.com/spack/spack-bootstrap-mirrors',
'releases': 'https://github.com/spack/spack-bootstrap-mirrors/releases',
'tarball': BINARY_TARBALL
}
}

CLINGO_JSON = '$spack/share/spack/bootstrap/github-actions-v0.2/clingo.json'
GNUPG_JSON = '$spack/share/spack/bootstrap/github-actions-v0.2/gnupg.json'

# Metadata for a generated source mirror
SOURCE_METADATA = {
'type': 'install',
'description': 'Mirror with software needed to bootstrap Spack',
'info': {
'url': os.path.join('..', '..', LOCAL_MIRROR_DIR)
}
}


def _add_scope_option(parser):
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
@@ -67,24 +104,61 @@ def setup_parser(subparser):
)

list = sp.add_parser(
'list', help='list the methods available for bootstrapping'
'list', help='list all the sources of software to bootstrap Spack'
)
_add_scope_option(list)

trust = sp.add_parser(
'trust', help='trust a bootstrapping method'
'trust', help='trust a bootstrapping source'
)
_add_scope_option(trust)
trust.add_argument(
'name', help='name of the method to be trusted'
'name', help='name of the source to be trusted'
)

untrust = sp.add_parser(
'untrust', help='untrust a bootstrapping method'
'untrust', help='untrust a bootstrapping source'
)
_add_scope_option(untrust)
untrust.add_argument(
'name', help='name of the method to be untrusted'
'name', help='name of the source to be untrusted'
)

add = sp.add_parser(
'add', help='add a new source for bootstrapping'
)
_add_scope_option(add)
add.add_argument(
'--trust', action='store_true',
help='trust the source immediately upon addition')
add.add_argument(
'name', help='name of the new source of software'
)
add.add_argument(
'metadata_dir', help='directory where to find metadata files'
)

remove = sp.add_parser(
'remove', help='remove a bootstrapping source'
)
remove.add_argument(
'name', help='name of the source to be removed'
)

mirror = sp.add_parser(
'mirror', help='create a local mirror to bootstrap Spack'
)
mirror.add_argument(
'--binary-packages', action='store_true',
help='download public binaries in the mirror'
)
mirror.add_argument(
'--dev', action='store_true',
help='download dev dependencies too'
)
mirror.add_argument(
metavar='DIRECTORY', dest='root_dir',
help='root directory in which to create the mirror and metadata'
)


@@ -137,10 +211,7 @@ def _root(args):


def _list(args):
sources = spack.config.get(
'bootstrap:sources', default=None, scope=args.scope
)

sources = spack.bootstrap.bootstrapping_sources(scope=args.scope)
if not sources:
llnl.util.tty.msg(
"No method available for bootstrapping Spack's dependencies"
@@ -249,6 +320,119 @@ def _status(args):
print()


def _add(args):
initial_sources = spack.bootstrap.bootstrapping_sources()
names = [s['name'] for s in initial_sources]

# If the name is already used error out
if args.name in names:
msg = 'a source named "{0}" already exists. Please choose a different name'
raise RuntimeError(msg.format(args.name))

# Check that the metadata file exists
metadata_dir = spack.util.path.canonicalize_path(args.metadata_dir)
if not os.path.exists(metadata_dir) or not os.path.isdir(metadata_dir):
raise RuntimeError(
'the directory "{0}" does not exist'.format(args.metadata_dir)
)

file = os.path.join(metadata_dir, 'metadata.yaml')
if not os.path.exists(file):
raise RuntimeError('the file "{0}" does not exist'.format(file))

# Insert the new source as the highest priority one
write_scope = args.scope or spack.config.default_modify_scope(section='bootstrap')
sources = spack.config.get('bootstrap:sources', scope=write_scope) or []
sources = [
{'name': args.name, 'metadata': args.metadata_dir}
] + sources
spack.config.set('bootstrap:sources', sources, scope=write_scope)

msg = 'New bootstrapping source "{0}" added in the "{1}" configuration scope'
llnl.util.tty.msg(msg.format(args.name, write_scope))
if args.trust:
_trust(args)


def _remove(args):
initial_sources = spack.bootstrap.bootstrapping_sources()
names = [s['name'] for s in initial_sources]
if args.name not in names:
msg = ('cannot find any bootstrapping source named "{0}". '
'Run `spack bootstrap list` to see available sources.')
raise RuntimeError(msg.format(args.name))

for current_scope in spack.config.scopes():
sources = spack.config.get('bootstrap:sources', scope=current_scope) or []
if args.name in [s['name'] for s in sources]:
sources = [s for s in sources if s['name'] != args.name]
spack.config.set('bootstrap:sources', sources, scope=current_scope)
msg = ('Removed the bootstrapping source named "{0}" from the '
'"{1}" configuration scope.')
llnl.util.tty.msg(msg.format(args.name, current_scope))
trusted = spack.config.get('bootstrap:trusted', scope=current_scope) or []
if args.name in trusted:
trusted.pop(args.name)
spack.config.set('bootstrap:trusted', trusted, scope=current_scope)
msg = 'Deleting information on "{0}" from list of trusted sources'
llnl.util.tty.msg(msg.format(args.name))


def _mirror(args):
mirror_dir = os.path.join(args.root_dir, LOCAL_MIRROR_DIR)

# TODO: Here we are adding gnuconfig manually, but this can be fixed
# TODO: as soon as we have an option to add to a mirror all the possible
# TODO: dependencies of a spec
root_specs = spack.bootstrap.all_root_specs(development=args.dev) + ['gnuconfig']
for spec_str in root_specs:
msg = 'Adding "{0}" and dependencies to the mirror at {1}'
llnl.util.tty.msg(msg.format(spec_str, mirror_dir))
# Suppress tty from the call below for terser messages
llnl.util.tty.set_msg_enabled(False)
spec = spack.spec.Spec(spec_str).concretized()
for node in spec.traverse():
spack.mirror.create(mirror_dir, [node])
llnl.util.tty.set_msg_enabled(True)

if args.binary_packages:
msg = 'Adding binary packages from "{0}" to the mirror at {1}'
llnl.util.tty.msg(msg.format(BINARY_TARBALL, mirror_dir))
llnl.util.tty.set_msg_enabled(False)
stage = spack.stage.Stage(BINARY_TARBALL, path=tempfile.mkdtemp())
stage.create()
stage.fetch()
stage.expand_archive()
build_cache_dir = os.path.join(stage.source_path, 'build_cache')
shutil.move(build_cache_dir, mirror_dir)
llnl.util.tty.set_msg_enabled(True)

def write_metadata(subdir, metadata):
metadata_rel_dir = os.path.join('metadata', subdir)
metadata_yaml = os.path.join(
args.root_dir, metadata_rel_dir, 'metadata.yaml'
)
llnl.util.filesystem.mkdirp(os.path.dirname(metadata_yaml))
with open(metadata_yaml, mode='w') as f:
spack.util.spack_yaml.dump(metadata, stream=f)
return os.path.dirname(metadata_yaml), metadata_rel_dir

instructions = ('\nTo register the mirror on the platform where it\'s supposed '
'to be used, move "{0}" to its final location and run the '
'following command(s):\n\n').format(args.root_dir)
cmd = '  % spack bootstrap add --trust {0} <final-path>/{1}\n'
_, rel_directory = write_metadata(subdir='sources', metadata=SOURCE_METADATA)
instructions += cmd.format('local-sources', rel_directory)
if args.binary_packages:
abs_directory, rel_directory = write_metadata(
subdir='binaries', metadata=BINARY_METADATA
)
shutil.copy(spack.util.path.canonicalize_path(CLINGO_JSON), abs_directory)
shutil.copy(spack.util.path.canonicalize_path(GNUPG_JSON), abs_directory)
instructions += cmd.format('local-binaries', rel_directory)
print(instructions)


def bootstrap(parser, args):
callbacks = {
'status': _status,
@@ -258,6 +442,9 @@ def bootstrap(parser, args):
'root': _root,
'list': _list,
'trust': _trust,
'untrust': _untrust
'untrust': _untrust,
'add': _add,
'remove': _remove,
'mirror': _mirror
}
callbacks[args.subcommand](args)
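`spack bootstrap add` above requires a metadata.yaml in the given directory, and `spack bootstrap mirror` generates one from the SOURCE_METADATA / BINARY_METADATA dictionaries. A minimal sketch of writing the source variant, with PyYAML standing in for spack.util.spack_yaml:

import os
import yaml  # stand-in for spack.util.spack_yaml

SOURCE_METADATA = {
    'type': 'install',
    'description': 'Mirror with software needed to bootstrap Spack',
    'info': {'url': os.path.join('..', '..', 'bootstrap_cache')},
}

os.makedirs('metadata/sources', exist_ok=True)
with open('metadata/sources/metadata.yaml', 'w') as f:
    yaml.safe_dump(SOURCE_METADATA, stream=f)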
@@ -478,11 +478,12 @@ def save_specfile_fn(args):
if args.root_specfile:
with open(args.root_specfile) as fd:
root_spec_as_json = fd.read()
spec_format = 'yaml' if args.root_specfile.endswith('yaml') else 'json'
else:
root_spec = Spec(args.root_spec)
root_spec.concretize()
root_spec_as_json = root_spec.to_json(hash=ht.dag_hash)
spec_format = 'yaml' if args.root_specfile.endswith('yaml') else 'json'
spec_format = 'json'
save_dependency_specfiles(
root_spec_as_json, args.specfile_dir, args.specs.split(), spec_format)


@@ -14,7 +14,7 @@
import spack.repo
import spack.stage
import spack.util.crypto
from spack.package import preferred_version
from spack.package_base import preferred_version
from spack.util.naming import valid_fully_qualified_module_name
from spack.version import Version, ver
||||
|
@@ -64,6 +64,11 @@ def setup_parser(subparser):
|
||||
'--dependencies', action='store_true', default=False,
|
||||
help="(Experimental) disable DAG scheduling; use "
|
||||
' "plain" dependencies.')
|
||||
generate.add_argument(
|
||||
'--buildcache-destination', default=None,
|
||||
help="Override the mirror configured in the environment (spack.yaml) " +
|
||||
"in order to push binaries from the generated pipeline to a " +
|
||||
"different location.")
|
||||
prune_group = generate.add_mutually_exclusive_group()
|
||||
prune_group.add_argument(
|
||||
'--prune-dag', action='store_true', dest='prune_dag',
|
||||
@@ -127,6 +132,7 @@ def ci_generate(args):
|
||||
prune_dag = args.prune_dag
|
||||
index_only = args.index_only
|
||||
artifacts_root = args.artifacts_root
|
||||
buildcache_destination = args.buildcache_destination
|
||||
|
||||
if not output_file:
|
||||
output_file = os.path.abspath(".gitlab-ci.yml")
|
||||
@@ -140,7 +146,8 @@ def ci_generate(args):
|
||||
spack_ci.generate_gitlab_ci_yaml(
|
||||
env, True, output_file, prune_dag=prune_dag,
|
||||
check_index_only=index_only, run_optimizer=run_optimizer,
|
||||
use_dependencies=use_dependencies, artifacts_root=artifacts_root)
|
||||
use_dependencies=use_dependencies, artifacts_root=artifacts_root,
|
||||
remote_mirror_override=buildcache_destination)
|
||||
|
||||
if copy_yaml_to:
|
||||
copy_to_dir = os.path.dirname(copy_yaml_to)
|
||||
@@ -180,6 +187,9 @@ def ci_rebuild(args):
|
||||
if not gitlab_ci:
|
||||
tty.die('spack ci rebuild requires an env containing gitlab-ci cfg')
|
||||
|
||||
tty.msg('SPACK_BUILDCACHE_DESTINATION={0}'.format(
|
||||
os.environ.get('SPACK_BUILDCACHE_DESTINATION', None)))
|
||||
|
||||
# Grab the environment variables we need. These either come from the
|
||||
# pipeline generation step ("spack ci generate"), where they were written
|
||||
# out as variables, or else provided by GitLab itself.
|
||||
@@ -196,7 +206,7 @@ def ci_rebuild(args):
|
||||
compiler_action = get_env_var('SPACK_COMPILER_ACTION')
|
||||
cdash_build_name = get_env_var('SPACK_CDASH_BUILD_NAME')
|
||||
spack_pipeline_type = get_env_var('SPACK_PIPELINE_TYPE')
|
||||
pr_mirror_url = get_env_var('SPACK_PR_MIRROR_URL')
|
||||
remote_mirror_override = get_env_var('SPACK_REMOTE_MIRROR_OVERRIDE')
|
||||
remote_mirror_url = get_env_var('SPACK_REMOTE_MIRROR_URL')
|
||||
|
||||
# Construct absolute paths relative to current $CI_PROJECT_DIR
|
||||
@@ -244,6 +254,10 @@ def ci_rebuild(args):
|
||||
tty.debug('Pipeline type - PR: {0}, develop: {1}'.format(
|
||||
spack_is_pr_pipeline, spack_is_develop_pipeline))
|
||||
|
||||
# If no override url exists, then just push binary package to the
|
||||
# normal remote mirror url.
|
||||
buildcache_mirror_url = remote_mirror_override or remote_mirror_url
|
||||
|
||||
# Figure out what is our temporary storage mirror: Is it artifacts
|
||||
# buildcache? Or temporary-storage-url-prefix? In some cases we need to
|
||||
# force something or pipelines might not have a way to propagate build
|
||||
@@ -373,7 +387,24 @@ def ci_rebuild(args):
|
||||
cfg.default_modify_scope())
|
||||
|
||||
# Check configured mirrors for a built spec with a matching hash
|
||||
matches = bindist.get_mirrors_for_spec(job_spec, index_only=False)
|
||||
mirrors_to_check = None
|
||||
if remote_mirror_override and spack_pipeline_type == 'spack_protected_branch':
|
||||
# Passing "mirrors_to_check" below means we *only* look in the override
|
||||
# mirror to see if we should skip building, which is what we want.
|
||||
mirrors_to_check = {
|
||||
'override': remote_mirror_override
|
||||
}
|
||||
|
||||
# Adding this mirror to the list of configured mirrors means dependencies
|
||||
# could be installed from either the override mirror or any other configured
|
||||
# mirror (e.g. remote_mirror_url which is defined in the environment or
|
||||
# pipeline_mirror_url), which is also what we want.
|
||||
spack.mirror.add('mirror_override',
|
||||
remote_mirror_override,
|
||||
cfg.default_modify_scope())
|
||||
|
||||
matches = bindist.get_mirrors_for_spec(
|
||||
job_spec, mirrors_to_check=mirrors_to_check, index_only=False)
|
||||
|
||||
if matches:
|
||||
# Got a hash match on at least one configured mirror. All
|
||||
@@ -517,13 +548,6 @@ def ci_rebuild(args):
|
||||
# any logs from the staging directory to artifacts now
|
||||
spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)
|
||||
|
||||
# Create buildcache on remote mirror, either on pr-specific mirror or
|
||||
# on the main mirror defined in the gitlab-enabled spack environment
|
||||
if spack_is_pr_pipeline:
|
||||
buildcache_mirror_url = pr_mirror_url
|
||||
else:
|
||||
buildcache_mirror_url = remote_mirror_url
|
||||
|
||||
# If the install succeeded, create a buildcache entry for this job spec
|
||||
# and push it to one or more mirrors. If the install did not succeed,
|
||||
# print out some instructions on how to reproduce this build failure
|
||||
|
@@ -57,7 +57,7 @@
# See the Spack documentation for more information on packaging.
# ----------------------------------------------------------------------------

from spack import *
from spack.package import *


class {class_name}({base_class_name}):

@@ -11,7 +11,7 @@
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.package
import spack.package_base
import spack.repo
import spack.store

@@ -57,7 +57,7 @@ def dependencies(parser, args):

else:
spec = specs[0]
dependencies = spack.package.possible_dependencies(
dependencies = spack.package_base.possible_dependencies(
spec,
transitive=args.transitive,
expand_virtuals=args.expand_virtuals,

@@ -200,7 +200,7 @@ def external_list(args):
list(spack.repo.path.all_packages())
# Print all the detectable packages
tty.msg("Detectable packages per repository")
for namespace, pkgs in sorted(spack.package.detectable_packages.items()):
for namespace, pkgs in sorted(spack.package_base.detectable_packages.items()):
print("Repository:", namespace)
colify.colify(pkgs, indent=4, output=sys.stdout)

@@ -18,7 +18,7 @@
import spack.fetch_strategy as fs
import spack.repo
import spack.spec
from spack.package import has_test_method, preferred_version
from spack.package_base import has_test_method, preferred_version

description = 'get detailed information on a particular package'
section = 'basic'
@@ -269,14 +269,14 @@ def print_tests(pkg):
names = []
pkg_cls = pkg if inspect.isclass(pkg) else pkg.__class__
if has_test_method(pkg_cls):
pkg_base = spack.package.PackageBase
pkg_base = spack.package_base.PackageBase
test_pkgs = [str(cls.test) for cls in inspect.getmro(pkg_cls) if
issubclass(cls, pkg_base) and cls.test != pkg_base.test]
test_pkgs = list(set(test_pkgs))
names.extend([(test.split()[1]).lower() for test in test_pkgs])

# TODO Refactor START
# Use code from package.py's test_process IF this functionality is
# Use code from package_base.py's test_process IF this functionality is
# accepted.
v_names = list(set([vspec.name for vspec in pkg.virtuals_provided]))


@@ -302,7 +302,7 @@ def install(parser, args, **kwargs):
)

reporter = spack.report.collect_info(
spack.package.PackageInstaller, '_install_task', args.log_format, args)
spack.package_base.PackageInstaller, '_install_task', args.log_format, args)
if args.log_file:
reporter.filename = args.log_file


@@ -12,7 +12,7 @@
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.error
import spack.package
import spack.package_base
import spack.repo
import spack.store
from spack.database import InstallStatuses
@@ -15,8 +15,10 @@
import spack
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment
import spack.hash_types as ht
import spack.package
import spack.package_base
import spack.solver.asp as asp

description = "concretize specs using an ASP solver"
@@ -74,6 +76,51 @@ def setup_parser(subparser):
spack.cmd.common.arguments.add_concretizer_args(subparser)


def _process_result(result, show, required_format, kwargs):
result.raise_if_unsat()
opt, _, _ = min(result.answers)
if ("opt" in show) and (not required_format):
tty.msg("Best of %d considered solutions." % result.nmodels)
tty.msg("Optimization Criteria:")

maxlen = max(len(s[2]) for s in result.criteria)
color.cprint(
"@*{ Priority  Criterion %sInstalled  ToBuild}" % ((maxlen - 10) * " ")
)

fmt = "  @K{%%-8d}  %%-%ds%%9s  %%7s" % maxlen
for i, (installed_cost, build_cost, name) in enumerate(result.criteria, 1):
color.cprint(
fmt % (
i,
name,
"-" if build_cost is None else installed_cost,
installed_cost if build_cost is None else build_cost,
)
)
print()

# dump the solutions as concretized specs
if 'solutions' in show:
for spec in result.specs:
# With -y, just print YAML to output.
if required_format == 'yaml':
# use write because to_yaml already has a newline.
sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
elif required_format == 'json':
sys.stdout.write(spec.to_json(hash=ht.dag_hash))
else:
sys.stdout.write(
spec.tree(color=sys.stdout.isatty(), **kwargs))
print()

if result.unsolved_specs and "solutions" in show:
tty.msg("Unsolved specs")
for spec in result.unsolved_specs:
print(spec)
print()

def solve(parser, args):
# these are the same options as `spack spec`
name_fmt = '{namespace}.{name}' if args.namespaces else '{name}'
@@ -102,58 +149,42 @@ def solve(parser, args):
if models < 0:
tty.die("model count must be non-negative: %d")

specs = spack.cmd.parse_specs(args.specs)
# Format required for the output (JSON, YAML or None)
required_format = args.format

# If we have an active environment, pick the specs from there
env = spack.environment.active_environment()
if env and args.specs:
msg = "cannot give explicit specs when an environment is active"
raise RuntimeError(msg)

specs = list(env.user_specs) if env else spack.cmd.parse_specs(args.specs)

# set up solver parameters
# Note: reuse and other concretizer prefs are passed as configuration
solver = asp.Solver()
output = sys.stdout if "asp" in show else None
result = solver.solve(
specs,
out=output,
models=models,
timers=args.timers,
stats=args.stats,
setup_only=(set(show) == {'asp'})
)
if 'solutions' not in show:
return

# die if no solution was found
result.raise_if_unsat()

# show the solutions as concretized specs
if 'solutions' in show:
opt, _, _ = min(result.answers)

if ("opt" in show) and (not args.format):
tty.msg("Best of %d considered solutions." % result.nmodels)
tty.msg("Optimization Criteria:")

maxlen = max(len(s[2]) for s in result.criteria)
color.cprint(
"@*{ Priority  Criterion %sInstalled  ToBuild}" % ((maxlen - 10) * " ")
)

fmt = "  @K{%%-8d}  %%-%ds%%9s  %%7s" % maxlen
for i, (idx, build_idx, name) in enumerate(result.criteria, 1):
color.cprint(
fmt % (
i,
name,
"-" if build_idx is None else opt[idx],
opt[idx] if build_idx is None else opt[build_idx],
)
)
print()

for spec in result.specs:
# With -y, just print YAML to output.
if args.format == 'yaml':
# use write because to_yaml already has a newline.
sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
elif args.format == 'json':
sys.stdout.write(spec.to_json(hash=ht.dag_hash))
setup_only = set(show) == {'asp'}
unify = spack.config.get('concretizer:unify')
if unify != 'when_possible':
# set up solver parameters
# Note: reuse and other concretizer prefs are passed as configuration
result = solver.solve(
specs,
out=output,
models=models,
timers=args.timers,
stats=args.stats,
setup_only=setup_only
)
if not setup_only:
_process_result(result, show, required_format, kwargs)
else:
for idx, result in enumerate(solver.solve_in_rounds(
specs, out=output, models=models, timers=args.timers, stats=args.stats
)):
if "solutions" in show:
tty.msg("ROUND {0}".format(idx))
tty.msg("")
else:
sys.stdout.write(
spec.tree(color=sys.stdout.isatty(), **kwargs))
print("% END ROUND {0}\n".format(idx))
if not setup_only:
_process_result(result, show, required_format, kwargs)
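The refactor above extracts result printing into _process_result() so it can run once per solve, or once per round when `concretizer:unify` is set to when_possible. The dispatch reduces to roughly this shape (a sketch reusing _process_result from the hunk above, with most solver arguments elided):

def run_solve(solver, specs, show, required_format, kwargs, setup_only=False):
    import spack.config
    if spack.config.get('concretizer:unify') != 'when_possible':
        result = solver.solve(specs, setup_only=setup_only)
        if not setup_only:
            _process_result(result, show, required_format, kwargs)
    else:
        # Each round is processed (and printed) as soon as it is solved.
        for idx, result in enumerate(solver.solve_in_rounds(specs)):
            if 'solutions' in show:
                print('ROUND {0}'.format(idx))
            if not setup_only:
                _process_result(result, show, required_format, kwargs)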
@@ -65,7 +65,7 @@ def is_package(f):
packages, since we allow `from spack import *` and poking globals
into packages.
"""
return f.startswith("var/spack/repos/")
return f.startswith("var/spack/repos/") and f.endswith('package.py')

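The tightened predicate now matches only package recipes instead of every file under the repository tree; for example (paths are illustrative):

def is_package(f):
    return f.startswith("var/spack/repos/") and f.endswith('package.py')

print(is_package("var/spack/repos/builtin/packages/zlib/package.py"))  # True
print(is_package("var/spack/repos/builtin/packages/zlib/example.patch"))  # False (matched before)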
|
||||
#: decorator for adding tools to the list
|
||||
@@ -236,7 +236,7 @@ def translate(match):
|
||||
continue
|
||||
if not args.root_relative and re_obj:
|
||||
line = re_obj.sub(translate, line)
|
||||
print(" " + line)
|
||||
print(line)
|
||||
|
||||
|
||||
def print_style_header(file_list, args):
|
||||
@@ -290,20 +290,26 @@ def run_flake8(flake8_cmd, file_list, args):
|
||||
@tool("mypy")
|
||||
def run_mypy(mypy_cmd, file_list, args):
|
||||
# always run with config from running spack prefix
|
||||
mypy_args = [
|
||||
common_mypy_args = [
|
||||
"--config-file", os.path.join(spack.paths.prefix, "pyproject.toml"),
|
||||
"--package", "spack",
|
||||
"--package", "llnl",
|
||||
"--show-error-codes",
|
||||
]
|
||||
# not yet, need other updates to enable this
|
||||
# if any([is_package(f) for f in file_list]):
|
||||
# mypy_args.extend(["--package", "packages"])
|
||||
mypy_arg_sets = [common_mypy_args + [
|
||||
"--package", "spack",
|
||||
"--package", "llnl",
|
||||
]]
|
||||
if 'SPACK_MYPY_CHECK_PACKAGES' in os.environ:
|
||||
mypy_arg_sets.append(common_mypy_args + [
|
||||
'--package', 'packages',
|
||||
'--disable-error-code', 'no-redef',
|
||||
])
|
||||
|
||||
output = mypy_cmd(*mypy_args, fail_on_error=False, output=str)
|
||||
returncode = mypy_cmd.returncode
|
||||
returncode = 0
|
||||
for mypy_args in mypy_arg_sets:
|
||||
output = mypy_cmd(*mypy_args, fail_on_error=False, output=str)
|
||||
returncode |= mypy_cmd.returncode
|
||||
|
||||
rewrite_and_print_output(output, args)
|
||||
rewrite_and_print_output(output, args)
|
||||
|
||||
print_tool_result("mypy", returncode)
|
||||
return returncode
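The loop above runs mypy once per argument set and folds the exit codes together with a bitwise OR, so the combined status is nonzero if any invocation failed. A minimal sketch of that accumulation pattern (`run_check` is a hypothetical stand-in for the `mypy_cmd` wrapper):

```python
# Sketch: fold exit codes from several tool runs into one status.
# `run_check` is hypothetical; it stands in for an external command wrapper.
def run_check(args):
    return 1 if "packages" in args else 0  # pretend the packages run fails

arg_sets = [["--package", "spack"], ["--package", "packages"]]

returncode = 0
for args in arg_sets:
    returncode |= run_check(args)  # nonzero as soon as any run fails

print(returncode)  # 1
```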
@@ -318,16 +324,29 @@ def run_isort(isort_cmd, file_list, args):

    pat = re.compile("ERROR: (.*) Imports are incorrectly sorted")
    replacement = "ERROR: {0} Imports are incorrectly sorted"
-    returncode = 0
-    for chunk in grouper(file_list, 100):
-        packed_args = isort_args + tuple(chunk)
-        output = isort_cmd(*packed_args, fail_on_error=False, output=str, error=str)
-        returncode |= isort_cmd.returncode
+    returncode = [0]

-        rewrite_and_print_output(output, args, pat, replacement)
+    def process_files(file_list, is_args):
+        for chunk in grouper(file_list, 100):
+            packed_args = is_args + tuple(chunk)
+            output = isort_cmd(*packed_args, fail_on_error=False, output=str, error=str)
+            returncode[0] |= isort_cmd.returncode

-    print_tool_result("isort", returncode)
-    return returncode
+            rewrite_and_print_output(output, args, pat, replacement)
+
+    packages_isort_args = ('--rm', 'spack', '--rm', 'spack.pkgkit', '--rm',
+                           'spack.package_defs', '-a', 'from spack.package import *')
+    packages_isort_args = packages_isort_args + isort_args
+
+    # packages
+    process_files(filter(is_package, file_list),
+                  packages_isort_args)
+    # non-packages
+    process_files(filter(lambda f: not is_package(f), file_list),
+                  isort_args)
+
+    print_tool_result("isort", returncode[0])
+    return returncode[0]
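Note that `returncode` becomes a one-element list so the nested `process_files` closure can update it without `nonlocal`, which is unavailable on Python 2. The `grouper(file_list, 100)` calls batch the files so a single isort invocation never exceeds command-line length limits; a rough sketch of such a chunker, assuming behavior similar to Spack's own helper:

```python
import itertools

def grouper(iterable, n):
    # Yield successive tuples of at most n items (simplified sketch).
    it = iter(iterable)
    while True:
        chunk = tuple(itertools.islice(it, n))
        if not chunk:
            return
        yield chunk

files = ["file{0}.py".format(i) for i in range(250)]
print([len(c) for c in grouper(files, 100)])  # [100, 100, 50]
```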


@tool("black")
@@ -20,7 +20,7 @@
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.install_test
-import spack.package
+import spack.package_base
import spack.repo
import spack.report

@@ -189,7 +189,7 @@ def test_run(args):
    # Set up reporter
    setattr(args, 'package', [s.format() for s in test_suite.specs])
    reporter = spack.report.collect_info(
-        spack.package.PackageBase, 'do_test', args.log_format, args)
+        spack.package_base.PackageBase, 'do_test', args.log_format, args)
    if not reporter.filename:
        if args.log_file:
            if os.path.isabs(args.log_file):
@@ -217,7 +217,7 @@ def test_list(args):
        else set()

    def has_test_and_tags(pkg_class):
-        return spack.package.has_test_method(pkg_class) and \
+        return spack.package_base.has_test_method(pkg_class) and \
            (not args.tag or pkg_class.name in tagged)

    if args.list_all:
@@ -24,7 +24,7 @@


# tutorial configuration parameters
-tutorial_branch = "releases/v0.17"
+tutorial_branch = "releases/v0.18"
tutorial_mirror = "file:///mirror"
tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")

@@ -15,7 +15,7 @@
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.error
-import spack.package
+import spack.package_base
import spack.repo
import spack.store
from spack.database import InstallStatuses
@@ -221,7 +221,7 @@ def do_uninstall(env, specs, force):
        except spack.repo.UnknownEntityError:
            # The package.py file has gone away -- but still
            # want to uninstall.
-            spack.package.Package.uninstall_by_spec(item, force=True)
+            spack.package_base.Package.uninstall_by_spec(item, force=True)

    # A package is ready to be uninstalled when nothing else references it,
    # unless we are requested to force uninstall it.
@@ -422,7 +422,7 @@ def url_list_parsing(args, urls, url, pkg):
        urls (set): List of URLs that have already been added
        url (str or None): A URL to potentially add to ``urls`` depending on
            ``args``
-        pkg (spack.package.PackageBase): The Spack package
+        pkg (spack.package_base.PackageBase): The Spack package

    Returns:
        set: The updated set of ``urls``
@@ -470,7 +470,7 @@ def name_parsed_correctly(pkg, name):
    """Determine if the name of a package was correctly parsed.

    Args:
-        pkg (spack.package.PackageBase): The Spack package
+        pkg (spack.package_base.PackageBase): The Spack package
        name (str): The name that was extracted from the URL

    Returns:
@@ -487,7 +487,7 @@ def version_parsed_correctly(pkg, version):
    """Determine if the version of a package was correctly parsed.

    Args:
-        pkg (spack.package.PackageBase): The Spack package
+        pkg (spack.package_base.PackageBase): The Spack package
        version (str): The version that was extracted from the URL

    Returns:
@@ -766,7 +766,8 @@ def name_matches(name, name_list):
            toolchains.add(compiler_cls.__name__)

    if len(toolchains) > 1:
-        if toolchains == set(['Clang', 'AppleClang', 'Aocc']):
+        if toolchains == set(['Clang', 'AppleClang', 'Aocc']) or \
+                toolchains == set(['Dpcpp', 'Oneapi']):
            return False
        tty.debug("[TOOLCHAINS] {0}".format(toolchains))
        return True
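The added branch treats a DPC++/oneAPI pair the same way as the Clang family: front-ends of a single toolchain are not a suspicious mix. A compressed sketch of the predicate (the class names are illustrative):

```python
# Sketch: a set of compiler class names counts as mixed only if it is not
# a known family of front-ends of the same toolchain.
COMPATIBLE_FAMILIES = [
    {'Clang', 'AppleClang', 'Aocc'},
    {'Dpcpp', 'Oneapi'},
]

def is_mixed_toolchain(toolchains):
    return len(toolchains) > 1 and toolchains not in COMPATIBLE_FAMILIES

print(is_mixed_toolchain({'Dpcpp', 'Oneapi'}))  # False
print(is_mixed_toolchain({'Gcc', 'Clang'}))     # True
```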
@@ -88,7 +88,7 @@

#: Path to the default configuration
configuration_defaults_path = (
-    'defaults', os.path.join(spack.paths.etc_path, 'spack', 'defaults')
+    'defaults', os.path.join(spack.paths.etc_path, 'defaults')
)

#: Hard-coded default values for some key configuration options.
@@ -104,6 +104,7 @@
        'build_jobs': min(16, cpus_available()),
        'build_stage': '$tempdir/spack-stage',
        'concretizer': 'clingo',
+        'license_dir': spack.paths.default_license_dir,
    }
}

@@ -815,7 +816,7 @@ def _config():
    # Site configuration is per spack instance, for sites or projects
    # No site-level configs should be checked into spack by default.
    configuration_paths.append(
-        ('site', os.path.join(spack.paths.etc_path, 'spack')),
+        ('site', os.path.join(spack.paths.etc_path)),
    )

    # User configuration can override both spack defaults and site config
@@ -356,10 +356,10 @@ def __init__(self, root, db_dir=None, upstream_dbs=None,
        self.prefix_fail_path = os.path.join(self._db_dir, 'prefix_failures')

        # Create needed directories and files
-        if not os.path.exists(self._db_dir):
+        if not is_upstream and not os.path.exists(self._db_dir):
            fs.mkdirp(self._db_dir)

-        if not os.path.exists(self._failure_dir) and not is_upstream:
+        if not is_upstream and not os.path.exists(self._failure_dir):
            fs.mkdirp(self._failure_dir)

        self.is_upstream = is_upstream
@@ -1064,9 +1064,7 @@ def _read(self):
            self._state_is_inconsistent = False
            return
        elif self.is_upstream:
-            raise UpstreamDatabaseLockingError(
-                "No database index file is present, and upstream"
-                " databases cannot generate an index file")
+            tty.warn('upstream not found: {0}'.format(self._index_path))

    def _add(
            self,
@@ -240,7 +240,7 @@ def compute_windows_program_path_for_package(pkg):
    program files location, return list of best guesses

    Args:
-        pkg (spack.package.Package): package for which
+        pkg (spack.package_base.Package): package for which
            Program Files location is to be computed
    """
    if not is_windows:
@@ -79,8 +79,9 @@
env_subdir_name = '.spack-env'


-#: default spack.yaml file to put in new environments
-default_manifest_yaml = """\
+def default_manifest_yaml():
+    """default spack.yaml file to put in new environments"""
+    return """\
# This is a Spack Environment file.
#
# It describes a set of packages to be installed, along with
@@ -89,7 +90,11 @@
  # add package specs to the `specs` list
  specs: []
  view: true
-"""
+  concretizer:
+    unify: {}
+""".format('true' if spack.config.get('concretizer:unify') else 'false')


#: regex for validating environment names
valid_environment_name_re = r'^\w[\w-]*$'

@@ -623,7 +628,7 @@ def __init__(self, path, init_file=None, with_view=None, keep_relative=False):

        # This attribute will be set properly from configuration
        # during concretization
-        self.concretization = None
+        self.unify = None
        self.clear()

        if init_file:
@@ -632,11 +637,11 @@ def __init__(self, path, init_file=None, with_view=None, keep_relative=False):
            # the init file.
            with fs.open_if_filename(init_file) as f:
                if hasattr(f, 'name') and f.name.endswith('.lock'):
-                    self._read_manifest(default_manifest_yaml)
+                    self._read_manifest(default_manifest_yaml())
                    self._read_lockfile(f)
                    self._set_user_specs_from_lockfile()
                else:
-                    self._read_manifest(f, raw_yaml=default_manifest_yaml)
+                    self._read_manifest(f, raw_yaml=default_manifest_yaml())

            # Rewrite relative develop paths when initializing a new
            # environment in a different location from the spack.yaml file.
@@ -700,7 +705,7 @@ def _read(self):
        default_manifest = not os.path.exists(self.manifest_path)
        if default_manifest:
            # No manifest, use default yaml
-            self._read_manifest(default_manifest_yaml)
+            self._read_manifest(default_manifest_yaml())
        else:
            with open(self.manifest_path) as f:
                self._read_manifest(f)
@@ -766,8 +771,15 @@ def _read_manifest(self, f, raw_yaml=None):
        self.views = {}
        # Retrieve the current concretization strategy
        configuration = config_dict(self.yaml)
-        # default concretization to separately
-        self.concretization = configuration.get('concretization', 'separately')
+
+        # Let `concretization` overrule `concretize:unify` config for now,
+        # but use a translation table to have internally a representation
+        # as if we were using the new configuration
+        translation = {'separately': False, 'together': True}
+        try:
+            self.unify = translation[configuration['concretization']]
+        except KeyError:
+            self.unify = spack.config.get('concretizer:unify', False)

        # Retrieve dev-build packages:
        self.dev_specs = configuration.get('develop', {})
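So the legacy per-environment `concretization: together|separately` key still wins, but it is translated into the new boolean `unify` representation; only when the key is absent does the global `concretizer:unify` setting apply. A condensed sketch of that precedence:

```python
# Sketch of the precedence implemented above: the manifest-level
# 'concretization' key overrules the global 'concretizer:unify' setting.
translation = {'separately': False, 'together': True}

def resolve_unify(manifest_config, global_unify=False):
    # global_unify stands in for spack.config.get('concretizer:unify', False)
    try:
        return translation[manifest_config['concretization']]
    except KeyError:
        return global_unify

print(resolve_unify({'concretization': 'together'}))    # True
print(resolve_unify({}, global_unify='when_possible'))  # 'when_possible'
```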
@@ -1148,14 +1160,44 @@ def concretize(self, force=False, tests=False):
        self.specs_by_hash = {}

        # Pick the right concretization strategy
-        if self.concretization == 'together':
+        if self.unify == 'when_possible':
+            return self._concretize_together_where_possible(tests=tests)
+
+        if self.unify is True:
            return self._concretize_together(tests=tests)

-        if self.concretization == 'separately':
+        if self.unify is False:
            return self._concretize_separately(tests=tests)

        msg = 'concretization strategy not implemented [{0}]'
-        raise SpackEnvironmentError(msg.format(self.concretization))
+        raise SpackEnvironmentError(msg.format(self.unify))
+
+    def _concretize_together_where_possible(self, tests=False):
+        # Avoid cyclic dependency
+        import spack.solver.asp
+
+        # Exit early if the set of concretized specs is the set of user specs
+        user_specs_did_not_change = not bool(
+            set(self.user_specs) - set(self.concretized_user_specs)
+        )
+        if user_specs_did_not_change:
+            return []
+
+        # Proceed with concretization
+        self.concretized_user_specs = []
+        self.concretized_order = []
+        self.specs_by_hash = {}
+
+        result_by_user_spec = {}
+        solver = spack.solver.asp.Solver()
+        for result in solver.solve_in_rounds(self.user_specs, tests=tests):
+            result_by_user_spec.update(result.specs_by_input)
+
+        result = []
+        for abstract, concrete in sorted(result_by_user_spec.items()):
+            self._add_concrete_spec(abstract, concrete)
+            result.append((abstract, concrete))
+        return result

    def _concretize_together(self, tests=False):
        """Concretization strategy that concretizes all the specs
@@ -1308,7 +1350,7 @@ def concretize_and_add(self, user_spec, concrete_spec=None, tests=False):
            concrete_spec: if provided, then it is assumed that it is the
                result of concretizing the provided ``user_spec``
        """
-        if self.concretization == 'together':
+        if self.unify is True:
            msg = 'cannot install a single spec in an environment that is ' \
                  'configured to be concretized together. Run instead:\n\n' \
                  '    $ spack add <spec>\n' \
@@ -1611,7 +1653,14 @@ def all_specs(self):
        """Return all specs, even those a user spec would shadow."""
        all_specs = set()
        for h in self.concretized_order:
-            all_specs.update(self.specs_by_hash[h].traverse())
+            try:
+                spec = self.specs_by_hash[h]
+            except KeyError:
+                tty.warn(
+                    'Environment %s appears to be corrupt: missing spec '
+                    '"%s"' % (self.name, h))
+                continue
+            all_specs.update(spec.traverse())

        return sorted(all_specs)

@@ -1869,17 +1918,15 @@ def write(self, regenerate=True):
            regenerate (bool): regenerate views and run post-write hooks as
                well as writing if True.
        """
-        # Intercept environment not using the latest schema format and prevent
-        # them from being modified
-        manifest_exists = os.path.exists(self.manifest_path)
-        if manifest_exists and not is_latest_format(self.manifest_path):
-            msg = ('The environment "{0}" needs to be written to disk, but '
-                   'is currently using a deprecated format. Please update it '
-                   'using:\n\n'
-                   '\tspack env update {0}\n\n'
-                   'Note that previous versions of Spack will not be able to '
+        # Warn that environments are not in the latest format.
+        if not is_latest_format(self.manifest_path):
+            ver = '.'.join(str(s) for s in spack.spack_version_info[:2])
+            msg = ('The environment "{}" is written to disk in a deprecated format. '
+                   'Please update it using:\n\n'
+                   '\tspack env update {}\n\n'
+                   'Note that versions of Spack older than {} may not be able to '
                   'use the updated configuration.')
-            raise RuntimeError(msg.format(self.name))
+            tty.warn(msg.format(self.name, self.name, ver))

        # ensure path in var/spack/environments
        fs.mkdirp(self.path)
@@ -2231,14 +2278,16 @@ def _top_level_key(data):


def is_latest_format(manifest):
-    """Return True if the manifest file is at the latest schema format,
-    False otherwise.
+    """Return False if the manifest file exists and is not in the latest schema format.

    Args:
        manifest (str): manifest file to be analyzed
    """
-    with open(manifest) as f:
-        data = syaml.load(f)
+    try:
+        with open(manifest) as f:
+            data = syaml.load(f)
+    except (OSError, IOError):
+        return True
    top_level_key = _top_level_key(data)
    changed = spack.schema.env.update(data[top_level_key])
    return not changed
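Two behavioral shifts are bundled here: `write()` now only warns about a deprecated manifest instead of refusing to write, and `is_latest_format` treats a missing manifest as up to date, so environments built purely from a lockfile are not flagged. A hedged sketch of the new control flow, with `load_yaml` and `needs_update` as stand-ins for `syaml.load` and the schema update call:

```python
def is_latest_format_sketch(manifest, load_yaml, needs_update):
    # load_yaml / needs_update are hypothetical stand-ins for
    # syaml.load and spack.schema.env.update(data[top_level_key]).
    try:
        with open(manifest) as f:
            data = load_yaml(f)
    except (OSError, IOError):
        return True  # no manifest on disk: nothing to migrate
    return not needs_update(data)  # unchanged data means latest format
```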

@@ -10,9 +10,9 @@

import llnl.util.tty as tty

-#: whether we should write stack traces or short error messages
+#: at what level we should write stack traces or short error messages
#: this is module-scoped because it needs to be set very early
-debug = False
+debug = 0


class SpackError(Exception):
@@ -1556,7 +1556,7 @@ def _extrapolate(pkg, version):
    try:
        return URLFetchStrategy(pkg.url_for_version(version),
                                fetch_options=pkg.fetch_options)
-    except spack.package.NoURLError:
+    except spack.package_base.NoURLError:
        msg = ("Can't extrapolate a URL for version %s "
               "because package %s defines no URLs")
        raise ExtrapolationError(msg % (version, pkg.name))
@@ -95,10 +95,7 @@ def view_copy(src, dst, view, spec=None):
                view.get_projection_for_spec(dep)

    if spack.relocate.is_binary(dst):
-        spack.relocate.relocate_text_bin(
-            binaries=[dst],
-            prefixes=prefix_to_projection
-        )
+        spack.relocate.relocate_text_bin([dst], prefix_to_projection)
    else:
        prefix_to_projection[spack.store.layout.root] = view._root
        prefix_to_projection[orig_sbang] = new_sbang
@@ -493,9 +493,11 @@ def write(self, spec, color=None, out=None):

                # Replace node with its dependencies
                self._frontier.pop(i)
-                deps = node.dependencies(deptype=self.deptype)
-                if deps:
-                    deps = sorted((d.dag_hash() for d in deps), reverse=True)
+                edges = sorted(
+                    node.edges_to_dependencies(deptype=self.deptype), reverse=True
+                )
+                if edges:
+                    deps = [e.spec.dag_hash() for e in edges]
                    self._connect_deps(i, deps, "new-deps")  # anywhere.

                elif self._frontier:
@@ -50,7 +50,7 @@
import spack.error
import spack.hooks
import spack.monitor
-import spack.package
+import spack.package_base
import spack.package_prefs as prefs
import spack.repo
import spack.store
@@ -103,7 +103,7 @@ def _check_last_phase(pkg):
    package already.

    Args:
-        pkg (spack.package.PackageBase): the package being installed
+        pkg (spack.package_base.PackageBase): the package being installed

    Raises:
        ``BadInstallPhase`` if stop_before or last phase is invalid
@@ -125,7 +125,7 @@ def _handle_external_and_upstream(pkg, explicit):
    database if it is external package.

    Args:
-        pkg (spack.package.Package): the package whose installation is under
+        pkg (spack.package_base.Package): the package whose installation is under
            consideration
        explicit (bool): the package was explicitly requested by the user
    Return:
@@ -265,7 +265,7 @@ def _install_from_cache(pkg, cache_only, explicit, unsigned=False):
    Extract the package from binary cache

    Args:
-        pkg (spack.package.PackageBase): the package to install from the binary cache
+        pkg (spack.package_base.PackageBase): package to install from the binary cache
        cache_only (bool): only extract from binary cache
        explicit (bool): ``True`` if installing the package was explicitly
            requested by the user, otherwise, ``False``
@@ -350,27 +350,27 @@ def _process_external_package(pkg, explicit):


def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned,
-                                  preferred_mirrors=None):
+                                  mirrors_for_spec=None):
    """
    Process the binary cache tarball.

    Args:
-        pkg (spack.package.PackageBase): the package being installed
+        pkg (spack.package_base.PackageBase): the package being installed
        binary_spec (spack.spec.Spec): the spec whose cache has been confirmed
        explicit (bool): the package was explicitly requested by the user
        unsigned (bool): ``True`` if binary package signatures to be checked,
            otherwise, ``False``
-        preferred_mirrors (list): Optional list of urls to prefer when
-            attempting to download the tarball
+        mirrors_for_spec (list): Optional list of concrete specs and mirrors
+            obtained by calling binary_distribution.get_mirrors_for_spec().

    Return:
        bool: ``True`` if the package was extracted from binary cache,
            else ``False``
    """
-    tarball = binary_distribution.download_tarball(
-        binary_spec, preferred_mirrors=preferred_mirrors)
+    download_result = binary_distribution.download_tarball(
+        binary_spec, unsigned, mirrors_for_spec=mirrors_for_spec)
    # see #10063 : install from source if tarball doesn't exist
-    if tarball is None:
+    if download_result is None:
        tty.msg('{0} exists in binary cache but with different hash'
                .format(pkg.name))
        return False
@@ -380,9 +380,9 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned,

    # don't print long padded paths while extracting/relocating binaries
    with spack.util.path.filter_padding():
-        binary_distribution.extract_tarball(
-            binary_spec, tarball, allow_root=False, unsigned=unsigned, force=False
-        )
+        binary_distribution.extract_tarball(binary_spec, download_result,
+                                            allow_root=False, unsigned=unsigned,
+                                            force=False)

    pkg.installed_from_binary_cache = True
    spack.store.db.add(pkg.spec, spack.store.layout, explicit=explicit)
@@ -394,7 +394,7 @@ def _try_install_from_binary_cache(pkg, explicit, unsigned=False):
    Try to extract the package from binary cache.

    Args:
-        pkg (spack.package.PackageBase): the package to be extracted from binary cache
+        pkg (spack.package_base.PackageBase): package to be extracted from binary cache
        explicit (bool): the package was explicitly requested by the user
        unsigned (bool): ``True`` if binary package signatures to be checked,
            otherwise, ``False``
@@ -406,12 +406,8 @@ def _try_install_from_binary_cache(pkg, explicit, unsigned=False):
    if not matches:
        return False

-    # In the absence of guidance from user or some other reason to prefer one
-    # mirror over another, any match will suffice, so just pick the first one.
-    preferred_mirrors = [match['mirror_url'] for match in matches]
-    binary_spec = matches[0]['spec']
-    return _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned,
-                                         preferred_mirrors=preferred_mirrors)
+    return _process_binary_cache_tarball(pkg, pkg.spec, explicit, unsigned,
+                                         mirrors_for_spec=matches)


def clear_failures():
@@ -534,7 +530,7 @@ def log(pkg):
    Copy provenance into the install directory on success

    Args:
-        pkg (spack.package.Package): the package that was built and installed
+        pkg (spack.package_base.Package): the package that was built and installed
    """
    packages_dir = spack.store.layout.build_packages_path(pkg.spec)

@@ -620,7 +616,7 @@ def package_id(pkg):
    and packages for combinatorial environments.

    Args:
-        pkg (spack.package.PackageBase): the package from which the identifier is
+        pkg (spack.package_base.PackageBase): the package from which the identifier is
            derived
    """
    if not pkg.spec.concrete:
@@ -773,7 +769,7 @@ def _add_bootstrap_compilers(
        Args:
            compiler: the compiler to bootstrap
            architecture: the architecture for which to bootstrap the compiler
-            pkgs (spack.package.PackageBase): the package with possible compiler
+            pkgs (spack.package_base.PackageBase): the package with possible compiler
                dependencies
            request (BuildRequest): the associated install request
            all_deps (defaultdict(set)): dictionary of all dependencies and
@@ -790,7 +786,7 @@ def _add_init_task(self, pkg, request, is_compiler, all_deps):
        Creates and queues the initial build task for the package.

        Args:
-            pkg (spack.package.Package): the package to be built and installed
+            pkg (spack.package_base.Package): the package to be built and installed
            request (BuildRequest or None): the associated install request
                where ``None`` can be used to indicate the package was
                explicitly requested by the user
@@ -972,7 +968,7 @@ def _cleanup_task(self, pkg):
        Cleanup the build task for the spec

        Args:
-            pkg (spack.package.PackageBase): the package being installed
+            pkg (spack.package_base.PackageBase): the package being installed
        """
        self._remove_task(package_id(pkg))

@@ -986,7 +982,7 @@ def _ensure_install_ready(self, pkg):
        already locked.

        Args:
-            pkg (spack.package.PackageBase): the package being locally installed
+            pkg (spack.package_base.PackageBase): the package being locally installed
        """
        pkg_id = package_id(pkg)
        pre = "{0} cannot be installed locally:".format(pkg_id)
@@ -1018,7 +1014,8 @@ def _ensure_locked(self, lock_type, pkg):

        Args:
            lock_type (str): 'read' for a read lock, 'write' for a write lock
-            pkg (spack.package.PackageBase): the package whose spec is being installed
+            pkg (spack.package_base.PackageBase): the package whose spec is being
+                installed

        Return:
            (lock_type, lock) tuple where lock will be None if it could not
@@ -1232,7 +1229,7 @@ def _install_task(self, task):

        # Create a child process to do the actual installation.
        # Preserve verbosity settings across installs.
-        spack.package.PackageBase._verbose = (
+        spack.package_base.PackageBase._verbose = (
            spack.build_environment.start_build_process(
                pkg, build_process, install_args)
        )
@@ -1377,7 +1374,7 @@ def _setup_install_dir(self, pkg):
        Write a small metadata file with the current spack environment.

        Args:
-            pkg (spack.package.Package): the package to be built and installed
+            pkg (spack.package_base.Package): the package to be built and installed
        """
        if not os.path.exists(pkg.spec.prefix):
            tty.debug('Creating the installation directory {0}'.format(pkg.spec.prefix))
@@ -1451,7 +1448,7 @@ def _flag_installed(self, pkg, dependent_ids=None):
        known dependents.

        Args:
-            pkg (spack.package.Package): Package that has been installed locally,
+            pkg (spack.package_base.Package): Package that has been installed locally,
                externally or upstream
            dependent_ids (list or None): list of the package's
                dependent ids, or None if the dependent ids are limited to
@@ -1540,7 +1537,7 @@ def install(self):
        Install the requested package(s) and or associated dependencies.

        Args:
-            pkg (spack.package.Package): the package to be built and installed"""
+            pkg (spack.package_base.Package): the package to be built and installed"""

        self._init_queue()
        fail_fast_err = 'Terminating after first install failure'
@@ -1792,7 +1789,7 @@ def __init__(self, pkg, install_args):
        process in the build.

        Arguments:
-            pkg (spack.package.PackageBase) the package being installed.
+            pkg (spack.package_base.PackageBase) the package being installed.
            install_args (dict) arguments to do_install() from parent process.

        """
@@ -1852,8 +1849,8 @@ def run(self):

        # get verbosity from do_install() parameter or saved value
        self.echo = self.verbose
-        if spack.package.PackageBase._verbose is not None:
-            self.echo = spack.package.PackageBase._verbose
+        if spack.package_base.PackageBase._verbose is not None:
+            self.echo = spack.package_base.PackageBase._verbose

        self.pkg.stage.keep = self.keep_stage

@@ -2005,7 +2002,7 @@ def build_process(pkg, install_args):
    This function's return value is returned to the parent process.

    Arguments:
-        pkg (spack.package.PackageBase): the package being installed.
+        pkg (spack.package_base.PackageBase): the package being installed.
        install_args (dict): arguments to do_install() from parent process.

    """
@@ -2053,7 +2050,7 @@ def __init__(self, pkg, request, compiler, start, attempts, status,
        Instantiate a build task for a package.

        Args:
-            pkg (spack.package.Package): the package to be built and installed
+            pkg (spack.package_base.Package): the package to be built and installed
            request (BuildRequest or None): the associated install request
                where ``None`` can be used to indicate the package was
                explicitly requested by the user
@@ -2066,7 +2063,7 @@ def __init__(self, pkg, request, compiler, start, attempts, status,
        """

        # Ensure dealing with a package that has a concrete spec
-        if not isinstance(pkg, spack.package.PackageBase):
+        if not isinstance(pkg, spack.package_base.PackageBase):
            raise ValueError("{0} must be a package".format(str(pkg)))

        self.pkg = pkg
@@ -2244,11 +2241,11 @@ def __init__(self, pkg, install_args):
        Instantiate a build request for a package.

        Args:
-            pkg (spack.package.Package): the package to be built and installed
+            pkg (spack.package_base.Package): the package to be built and installed
            install_args (dict): the install arguments associated with ``pkg``
        """
        # Ensure dealing with a package that has a concrete spec
-        if not isinstance(pkg, spack.package.PackageBase):
+        if not isinstance(pkg, spack.package_base.PackageBase):
            raise ValueError("{0} must be a package".format(str(pkg)))

        self.pkg = pkg
@@ -2318,7 +2315,7 @@ def get_deptypes(self, pkg):
        """Determine the required dependency types for the associated package.

        Args:
-            pkg (spack.package.PackageBase): explicit or implicit package being
+            pkg (spack.package_base.PackageBase): explicit or implicit package being
                installed

        Returns:
@@ -2341,7 +2338,7 @@ def run_tests(self, pkg):
        """Determine if the tests should be run for the provided packages

        Args:
-            pkg (spack.package.PackageBase): explicit or implicit package being
+            pkg (spack.package_base.PackageBase): explicit or implicit package being
                installed

        Returns:
@@ -375,13 +375,6 @@ def make_argument_parser(**kwargs):
    # stat names in groups of 7, for nice wrapping.
    stat_lines = list(zip(*(iter(stat_names),) * 7))

-    # help message for --show-cores
-    show_cores_help = 'provide additional information on concretization failures\n'
-    show_cores_help += 'off (default): show only the violated rule\n'
-    show_cores_help += 'full: show raw unsat cores from clingo\n'
-    show_cores_help += 'minimized: show subset-minimal unsat cores '
-    show_cores_help += '(Warning: this may take hours for some specs)'
-
    parser.add_argument(
        '-h', '--help',
        dest='help', action='store_const', const='short', default=None,
@@ -405,9 +398,6 @@ def make_argument_parser(**kwargs):
        '-d', '--debug', action='count', default=0,
        help="write out debug messages "
             "(more d's for more verbosity: -d, -dd, -ddd, etc.)")
-    parser.add_argument(
-        '--show-cores', choices=["off", "full", "minimized"], default="off",
-        help=show_cores_help)
    parser.add_argument(
        '--timestamp', action='store_true',
        help="Add a timestamp to tty output")
@@ -490,18 +480,11 @@ def setup_main_options(args):
    # errors raised by spack.config.

    if args.debug:
-        spack.error.debug = True
+        spack.error.debug = args.debug
        spack.util.debug.register_interrupt_handler()
        spack.config.set('config:debug', True, scope='command_line')
        spack.util.environment.tracing_enabled = True

-    if args.show_cores != "off":
-        # minimize_cores defaults to true, turn it off if we're showing full core
-        # but don't want to wait to minimize it.
-        spack.solver.asp.full_cores = True
-        if args.show_cores == 'full':
-            spack.solver.asp.minimize_cores = False
-
    if args.timestamp:
        tty.set_timestamp(True)

@@ -196,6 +196,14 @@ def provides(self):
        if self.spec.name == 'llvm-amdgpu':
            provides['compiler'] = spack.spec.CompilerSpec(str(self.spec))
            provides['compiler'].name = 'rocmcc'
+        # Special case for oneapi
+        if self.spec.name == 'intel-oneapi-compilers':
+            provides['compiler'] = spack.spec.CompilerSpec(str(self.spec))
+            provides['compiler'].name = 'oneapi'
+        # Special case for oneapi classic
+        if self.spec.name == 'intel-oneapi-compilers-classic':
+            provides['compiler'] = spack.spec.CompilerSpec(str(self.spec))
+            provides['compiler'].name = 'intel'

        # All the other tokens in the hierarchy must be virtual dependencies
        for x in self.hierarchy_tokens:
lib/spack/spack/package_base.py (new file, 3107 lines): file diff suppressed because it is too large.
@@ -123,11 +123,11 @@ def accept(self, id):

    def next_token_error(self, message):
        """Raise an error about the next token in the stream."""
-        raise ParseError(message, self.text, self.token.end)
+        raise ParseError(message, self.text[0], self.token.end)

    def last_token_error(self, message):
        """Raise an error about the previous token in the stream."""
-        raise ParseError(message, self.text, self.token.start)
+        raise ParseError(message, self.text[0], self.token.start)

    def unexpected_token(self):
        self.next_token_error("Unexpected token: '%s'" % self.next.value)
@@ -356,7 +356,7 @@ def patch_for_package(self, sha256, pkg):

        Arguments:
            sha256 (str): sha256 hash to look up
-            pkg (spack.package.Package): Package object to get patch for.
+            pkg (spack.package_base.Package): Package object to get patch for.

        We build patch objects lazily because building them requires that
        we have information about the package's location in its repo.
@@ -43,8 +43,12 @@
hooks_path = os.path.join(module_path, "hooks")
opt_path = os.path.join(prefix, "opt")
share_path = os.path.join(prefix, "share", "spack")
-etc_path = os.path.join(prefix, "etc")
+etc_path = os.path.join(prefix, "etc", "spack")

+#
+# Things in $spack/etc/spack
+#
+default_license_dir = os.path.join(etc_path, "licenses")

#
# Things in $spack/var/spack
@@ -1,83 +0,0 @@
-# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-# flake8: noqa: F401
-"""pkgkit is a set of useful build tools and directives for packages.
-
-Everything in this module is automatically imported into Spack package files.
-"""
-import llnl.util.filesystem
-from llnl.util.filesystem import *
-
-import spack.directives
-import spack.util.executable
-from spack.build_systems.aspell_dict import AspellDictPackage
-from spack.build_systems.autotools import AutotoolsPackage
-from spack.build_systems.cached_cmake import (
-    CachedCMakePackage,
-    cmake_cache_option,
-    cmake_cache_path,
-    cmake_cache_string,
-)
-from spack.build_systems.cmake import CMakePackage
-from spack.build_systems.cuda import CudaPackage
-from spack.build_systems.gnu import GNUMirrorPackage
-from spack.build_systems.intel import IntelPackage
-from spack.build_systems.lua import LuaPackage
-from spack.build_systems.makefile import MakefilePackage
-from spack.build_systems.maven import MavenPackage
-from spack.build_systems.meson import MesonPackage
-from spack.build_systems.octave import OctavePackage
-from spack.build_systems.oneapi import (
-    IntelOneApiLibraryPackage,
-    IntelOneApiPackage,
-    IntelOneApiStaticLibraryList,
-)
-from spack.build_systems.perl import PerlPackage
-from spack.build_systems.python import PythonPackage
-from spack.build_systems.qmake import QMakePackage
-from spack.build_systems.r import RPackage
-from spack.build_systems.racket import RacketPackage
-from spack.build_systems.rocm import ROCmPackage
-from spack.build_systems.ruby import RubyPackage
-from spack.build_systems.scons import SConsPackage
-from spack.build_systems.sip import SIPPackage
-from spack.build_systems.sourceforge import SourceforgePackage
-from spack.build_systems.sourceware import SourcewarePackage
-from spack.build_systems.waf import WafPackage
-from spack.build_systems.xorg import XorgPackage
-from spack.dependency import all_deptypes
-from spack.directives import *
-from spack.install_test import get_escaped_text_output
-from spack.installer import (
-    ExternalPackageError,
-    InstallError,
-    InstallLockError,
-    UpstreamPackageError,
-)
-from spack.mixins import filter_compiler_wrappers
-from spack.multimethod import when
-from spack.package import (
-    BundlePackage,
-    DependencyConflictError,
-    Package,
-    build_system_flags,
-    env_flags,
-    flatten_dependencies,
-    inject_flags,
-    install_dependency_symlinks,
-    on_package_attributes,
-    run_after,
-    run_before,
-)
-from spack.spec import InvalidSpecDetected, Spec
-from spack.util.executable import *
-from spack.variant import (
-    any_combination_of,
-    auto_or_any_combination_of,
-    conditional,
-    disjoint_sets,
-)
-from spack.version import Version, ver
@@ -469,47 +469,6 @@ def _replace_prefix_text(filename, compiled_prefixes):
    f.truncate()


-def _replace_prefix_bin(filename, byte_prefixes):
-    """Replace all the occurrences of the old install prefix with a
-    new install prefix in binary files.
-
-    The new install prefix is prefixed with ``os.sep`` until the
-    lengths of the prefixes are the same.
-
-    Args:
-        filename (str): target binary file
-        byte_prefixes (OrderedDict): OrderedDictionary where the keys are
-            precompiled regex of the old prefixes and the values are the new
-            prefixes (utf-8 encoded)
-    """
-
-    with open(filename, 'rb+') as f:
-        data = f.read()
-        f.seek(0)
-        for orig_bytes, new_bytes in byte_prefixes.items():
-            original_data_len = len(data)
-            # Skip this hassle if not found
-            if orig_bytes not in data:
-                continue
-            # We only care about this problem if we are about to replace
-            length_compatible = len(new_bytes) <= len(orig_bytes)
-            if not length_compatible:
-                tty.debug('Binary failing to relocate is %s' % filename)
-                raise BinaryTextReplaceError(orig_bytes, new_bytes)
-            pad_length = len(orig_bytes) - len(new_bytes)
-            padding = os.sep * pad_length
-            padding = padding.encode('utf-8')
-            data = data.replace(orig_bytes, new_bytes + padding)
-            # Really needs to be the same length
-            if not len(data) == original_data_len:
-                print('Length of pad:', pad_length, 'should be', len(padding))
-                print(new_bytes, 'was to replace', orig_bytes)
-                raise BinaryStringReplacementError(
-                    filename, original_data_len, len(data))
-        f.write(data)
-        f.truncate()
-
-
def relocate_macho_binaries(path_names, old_layout_root, new_layout_root,
                            prefix_to_prefix, rel, old_prefix, new_prefix):
    """
@@ -817,49 +776,6 @@ def relocate_text(files, prefixes, concurrency=32):
    tp.join()


-def relocate_text_bin(binaries, prefixes, concurrency=32):
-    """Replace null terminated path strings hard coded into binaries.
-
-    The new install prefix must be shorter than the original one.
-
-    Args:
-        binaries (list): binaries to be relocated
-        prefixes (OrderedDict): String prefixes which need to be changed.
-        concurrency (int): Desired degree of parallelism.
-
-    Raises:
-        BinaryTextReplaceError: when the new path is longer than the old path
-    """
-    byte_prefixes = collections.OrderedDict({})
-
-    for orig_prefix, new_prefix in prefixes.items():
-        if orig_prefix != new_prefix:
-            if isinstance(orig_prefix, bytes):
-                orig_bytes = orig_prefix
-            else:
-                orig_bytes = orig_prefix.encode('utf-8')
-            if isinstance(new_prefix, bytes):
-                new_bytes = new_prefix
-            else:
-                new_bytes = new_prefix.encode('utf-8')
-            byte_prefixes[orig_bytes] = new_bytes
-
-    # Do relocations on text in binaries that refers to the install tree
-    # multiprocessing.ThreadPool.map requires single argument
-    args = []
-
-    for binary in binaries:
-        args.append((binary, byte_prefixes))
-
-    tp = multiprocessing.pool.ThreadPool(processes=concurrency)
-
-    try:
-        tp.map(llnl.util.lang.star(_replace_prefix_bin), args)
-    finally:
-        tp.terminate()
-        tp.join()
-
-
def is_relocatable(spec):
    """Returns True if an installed spec is relocatable.

@@ -1126,3 +1042,120 @@ def fixup_macos_rpaths(spec):
        ))
    else:
        tty.debug('No rpath fixup needed for ' + specname)
+
+
+def compute_indices(filename, paths_to_relocate):
+    """
+    Compute the indices in filename at which each of paths_to_relocate occurs.
+
+    Arguments:
+        filename (str): file to compute indices for
+        paths_to_relocate (List[str]): paths to find indices of
+    Returns:
+        Dict[int, str]: mapping from byte index to the path found at that index
+    """
+    with open(filename, 'rb') as f:
+        contents = f.read()
+
+    substring_prefix = os.path.commonprefix(paths_to_relocate).encode('utf-8')
+
+    indices = {}
+    index = 0
+    max_length = max(len(path) for path in paths_to_relocate)
+    while True:
+        try:
+            # We search for the smallest substring of all paths we relocate
+            # In practice, this is the spack install root, and we relocate
+            # prefixes in the root and the root itself
+            index = contents.index(substring_prefix, index)
+        except ValueError:
+            # The string isn't found in the rest of the binary
+            break
+        else:
+            # only copy the smallest portion of the binary for comparisons
+            substring_to_check = contents[index:index + max_length]
+            for path in paths_to_relocate:
+                # We guarantee any substring in the list comes after any superstring
+                p = path.encode('utf-8')
+                if substring_to_check.startswith(p):
+                    indices[index] = str(path)
+                    index += len(path)
+                    break
+            else:
+                index += 1
+    return indices
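A hedged usage sketch for `compute_indices`: write a small fake binary, then recover the byte offsets of two embedded prefixes. The paths are hypothetical, and the longer path is listed first because, as the comment above notes, any substring must come after its superstrings:

```python
import os
import tempfile

paths = ['/opt/spack/linux/gcc-9/zlib-1.2.12', '/opt/spack']  # superstring first

blob = (b'\x7fELF...' + paths[0].encode('utf-8') + b'\x00junk'
        + paths[1].encode('utf-8') + b'\x00')

with tempfile.NamedTemporaryFile(delete=False) as f:
    f.write(blob)
    name = f.name

print(compute_indices(name, paths))
# {7: '/opt/spack/linux/gcc-9/zlib-1.2.12', 46: '/opt/spack'}
os.unlink(name)
```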
+
+
+def _relocate_binary_text(filename, offsets, prefix_to_prefix):
+    """
+    Relocate the text of a single binary file, given the offsets at which the
+    replacements need to be made.
+
+    Arguments:
+        filename (str): file to modify
+        offsets (Dict[int, str]): locations of the strings to replace
+        prefix_to_prefix (Dict[str, str]): strings to replace and their replacements
+    """
+    with open(filename, 'rb+') as f:
+        for index, prefix in offsets.items():
+            replacement = prefix_to_prefix[prefix].encode('utf-8')
+            if len(replacement) > len(prefix):
+                raise BinaryTextReplaceError(prefix, replacement)
+
+            # read forward until we find the end of the string including
+            # the prefix and compute the replacement as we go
+            f.seek(index + len(prefix))
+            c = f.read(1)
+            while c not in (None, b'\x00'):
+                replacement += c
+                c = f.read(1)
+
+            # seek back to the index position and write the replacement in
+            # and add null-terminator
+            f.seek(index)
+            f.write(replacement)
+            f.write(b'\x00')
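The read-ahead matters because the stored C string is typically `<prefix>/rest/of/path\0`: the suffix after the matched prefix is preserved and the string re-terminated. A standalone sketch of the same in-place edit on a byte buffer; leftover bytes after the new terminator are inert, since C string readers stop at the first NUL:

```python
import io

def replace_prefix_at(buf, index, prefix, replacement):
    # Sketch of the in-place, null-terminated replacement used above;
    # the replacement must not be longer than the prefix.
    assert len(replacement) <= len(prefix)
    buf.seek(index + len(prefix))
    suffix = b''
    c = buf.read(1)
    while c not in (b'', b'\x00'):
        suffix += c
        c = buf.read(1)
    buf.seek(index)
    buf.write(replacement + suffix + b'\x00')

buf = io.BytesIO(b'/old-prefix/lib/libz.so\x00rest')
replace_prefix_at(buf, 0, b'/old-prefix', b'/new-pfx')
print(buf.getvalue())  # b'/new-pfx/lib/libz.so\x00so\x00rest'
```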
+
+
+def relocate_text_bin(
+    files_to_relocate, prefix_to_prefix, offsets=None,
+    relative_root=None, concurrency=32
+):
+    """
+    For each file given, replace all keys in the given translation dict with
+    the associated values. Optionally executes using precomputed memoized offsets
+    for the substitutions.
+
+    Arguments:
+        files_to_relocate (List[str]): The files to modify
+        prefix_to_prefix (Dict[str, str]): keys are strings to replace, values are
+            replacements
+        offsets (Dict[str, Dict[int, str]]): (optional) Mapping from relative filenames
+            to a mapping from indices to strings to replace found at each index
+        relative_root (str): (optional) prefix for relative paths in offsets
+    """
+    # defaults to the common prefix of all input files
+    rel_root = relative_root or os.path.commonprefix(files_to_relocate)
+
+    if offsets is None:
+        offsets = {}
+        for filename in files_to_relocate:
+            indices = compute_indices(
+                filename,
+                list(prefix_to_prefix.keys()),
+            )
+            relpath = os.path.relpath(filename, rel_root)
+            offsets[relpath] = indices
+
+    args = [
+        (filename, offsets[os.path.relpath(filename, rel_root)], prefix_to_prefix)
+        for filename in files_to_relocate
+    ]
+
+    tp = multiprocessing.pool.ThreadPool(processes=concurrency)
+
+    try:
+        tp.map(llnl.util.lang.star(_relocate_binary_text), args)
+    finally:
+        tp.terminate()
+        tp.join()
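A hedged usage sketch of the new signature, with hypothetical paths: a one-shot call that scans the files itself, and a second form that reuses offsets computed once (for example at buildcache creation time) so the binaries need not be re-scanned:

```python
import os

prefix_to_prefix = {'/old/opt/spack': '/new/opt'}  # new prefix must be shorter
files = ['/new/opt/pkg/bin/tool', '/new/opt/pkg/lib/libtool.so']

# One-shot: offsets are computed internally via compute_indices().
relocate_text_bin(files, prefix_to_prefix)

# Reuse: precompute offsets keyed by path relative to the common root.
offsets = {
    os.path.relpath(f, '/new/opt'): compute_indices(f, list(prefix_to_prefix))
    for f in files
}
relocate_text_bin(files, prefix_to_prefix, offsets=offsets,
                  relative_root='/new/opt')
```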


@@ -202,17 +202,11 @@ class RepoLoader(_PrependFileLoader):
    """Loads a Python module associated with a package in specific repository"""
    #: Code in ``_package_prepend`` is prepended to imported packages.
    #:
-    #: Spack packages were originally expected to call `from spack import *`
-    #: themselves, but it became difficult to manage, and imports in the Spack
-    #: core left the top-level namespace polluted by package symbols. To
-    #: solve this, the top-level ``spack`` package contains very few symbols
-    #: of its own, and importing ``*`` is essentially a no-op. The common
-    #: routines and directives that packages need are now in ``spack.pkgkit``,
-    #: and the import system forces packages to automatically include
-    #: this. This way, old packages that call ``from spack import *`` will
-    #: continue to work without modification, but it's no longer required.
+    #: Spack packages are expected to call `from spack.package import *`
+    #: themselves, but we are allowing a deprecation period before breaking
+    #: external repos that don't do this yet.
    _package_prepend = ('from __future__ import absolute_import;'
-                        'from spack.pkgkit import *')
+                        'from spack.package import *')

    def __init__(self, fullname, repo, package_name):
        self.repo = repo
@@ -450,10 +444,10 @@ def is_package_file(filename):
    # Package files are named `package.py` and are not in lib/spack/spack
    # We have to remove the file extension because it can be .py and can be
    # .pyc depending on context, and can differ between the files
-    import spack.package  # break cycle
+    import spack.package_base  # break cycle
    filename_noext = os.path.splitext(filename)[0]
    packagebase_filename_noext = os.path.splitext(
-        inspect.getfile(spack.package.PackageBase))[0]
+        inspect.getfile(spack.package_base.PackageBase))[0]
    return (filename_noext != packagebase_filename_noext and
            os.path.basename(filename_noext) == 'package')
@@ -15,7 +15,7 @@

import spack.build_environment
import spack.fetch_strategy
-import spack.package
+import spack.package_base
from spack.install_test import TestSuite
from spack.reporter import Reporter
from spack.reporters.cdash import CDash
@@ -131,7 +131,7 @@ def gather_info(do_fn):
    """
    @functools.wraps(do_fn)
    def wrapper(instance, *args, **kwargs):
-        if isinstance(instance, spack.package.PackageBase):
+        if isinstance(instance, spack.package_base.PackageBase):
            pkg = instance
        elif hasattr(args[0], 'pkg'):
            pkg = args[0].pkg
@@ -22,7 +22,7 @@

import spack.build_environment
import spack.fetch_strategy
-import spack.package
+import spack.package_base
from spack.error import SpackError
from spack.reporter import Reporter
from spack.util.crypto import checksum
@@ -7,7 +7,7 @@

import spack.build_environment
import spack.fetch_strategy
-import spack.package
+import spack.package_base
from spack.reporter import Reporter

__all__ = ['JUnit']
|
||||
False,
|
||||
spec.build_spec.prefix,
|
||||
spec.prefix)
|
||||
relocate.relocate_text_bin(binaries=bins_to_relocate,
|
||||
prefixes=prefix_to_prefix)
|
||||
|
||||
# Relocate text strings of prefixes embedded in binaries
|
||||
relocate.relocate_text_bin(bins_to_relocate, prefix_to_prefix)
|
||||
|
||||
# Copy package into place, except for spec.json (because spec.json
|
||||
# describes the old spec and not the new spliced spec).
|
||||
shutil.copytree(os.path.join(tempdir, spec.dag_hash()), spec.prefix,
|
||||
|
Some files were not shown because too many files have changed in this diff.