Compare commits
132 Commits
packages/v ... v0.22.4

Commit SHA1s:

1f55b59171 6a46a1c27d c78545d5f1 05dcd1d2b7 67e5b4fecf ba6cb62df8 e1437349d1 c1eb3f965b
e0d246210f 88b47d2714 342520175d 8a55299214 174308ca3d 506c176d25 88171ff353 fb8f2d8301
27beb100e1 02613d778d a6065334ad 243ee7203a 271106e5dd 9fb9156e82 d88cfbd839 4127a93a91
5df2189e43 667c1960d0 4449058257 1b03f1d5bc cf4e9cb3b3 d189b12050 5134504fd8 ae5018ee09
106ebeb502 ba89754ee1 e733eb0fd9 07b344bf10 a71c65399e d8b73331f6 4741ea683c f33c18290b
5ea67e8882 8ade071253 dca09e6e0f 2776402c90 3961a86f86 39e594d096 7a91bed5c9 594a376c52
1538c48616 683e50b8d9 9b32fb0beb 2c6df0d491 ce7218acae 246eeb2b69 cc47ee3984 7b644719c1
d8a6aa551e ac7b18483a 39f37de4ce 703e153404 aa013611bc 6a7ccd4e46 1c6c4b4690 68558b3dd0
5440fe09cd 03c22f403f f339225d22 22c815f3d4 4354288e44 ea2d43b4a6 85e67d60a0 bf6a9ff5ed
1bdc30979d ef1eabe5b3 43d673f915 8a9c501030 9f035ca030 d66dce2d66 ef2aa2f5f5 41f5f6eaab
cba347e0b7 a3cef0f02e 45fca040c3 eb2b5739b2 d299e17d43 d883883be0 249dcb49e2 8628add66b
aeccba8bc0 d94e8ab36f e66c26871f 2db4ff7061 c248932a94 f15d302fc7 74ef630241 a70ea11e69
a79b1bd9af ac5d5485b9 04258f9cce 1b14170bd1 a3bc9dbfe8 e7c86259bd 2605aeb072 94536d2b66
5e580fc82e 195bad8675 bd9f3f100a b5962613a0 cbcfc7e10a 579fadacd0 b86d08b022 02d62cf40f
97369776f0 47af0159dc db6ead6fc1 b4aa2c3cab 4108de1ce4 5fe93fee1e 8207f11333 5bb5d2696f
55f37dffe5 252a5bd71b f55224f161 189ae4b06e 5e9c702fa7 965bb4d3c0 354f98c94a 5dce480154
f634d48b7c 4daee565ae 8e4dbdc2d7 4f6adc03cd
.github/workflows/bootstrap.yml — 24 changed lines

```diff
@@ -83,14 +83,12 @@ jobs:
     steps:
       - name: Setup macOS
         if: ${{ matrix.runner != 'ubuntu-latest' }}
+        run: brew install tree gawk
+      - name: Remove system executables
         run: |
-          brew install tree
-          # Remove GnuPG since we want to bootstrap it
-          sudo rm -rf /usr/local/bin/gpg
-      - name: Setup Ubuntu
-        if: ${{ matrix.runner == 'ubuntu-latest' }}
-        run: |
-          sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
+          while [ -n "$(command -v gpg gpg2 patchelf)" ]; do
+            sudo rm $(command -v gpg gpg2 patchelf)
+          done
       - name: Checkout
         uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
         with:
@@ -112,14 +110,12 @@ jobs:
     steps:
       - name: Setup macOS
         if: ${{ matrix.runner != 'ubuntu-latest' }}
+        run: brew install tree
+      - name: Remove system executables
         run: |
-          brew install tree
-          # Remove GnuPG since we want to bootstrap it
-          sudo rm -rf /usr/local/bin/gpg
-      - name: Setup Ubuntu
-        if: ${{ matrix.runner == 'ubuntu-latest' }}
-        run: |
-          sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
+          while [ -n "$(command -v gpg gpg2 patchelf)" ]; do
+            sudo rm $(command -v gpg gpg2 patchelf)
+          done
       - name: Checkout
         uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
         with:
```
.github/workflows/ci.yaml — 7 changed lines

```diff
@@ -77,13 +77,8 @@ jobs:
     needs: [ prechecks, changes ]
     uses: ./.github/workflows/unit_tests.yaml
     secrets: inherit
-  windows:
-    if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
-    needs: [ prechecks ]
-    uses: ./.github/workflows/windows_python.yml
-    secrets: inherit
   all:
-    needs: [ windows, unit-tests, bootstrap ]
+    needs: [ unit-tests, bootstrap ]
    runs-on: ubuntu-latest
    steps:
    - name: Success
```
.github/workflows/unit_tests.yaml — 51 changed lines

```diff
@@ -14,14 +14,14 @@ jobs:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
-        os: [ubuntu-latest]
+        os: [ubuntu-22.04]
        python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12']
        concretizer: ['clingo']
        on_develop:
        - ${{ github.ref == 'refs/heads/develop' }}
        include:
        - python-version: '3.11'
-          os: ubuntu-latest
+          os: ubuntu-20.04
          concretizer: original
          on_develop: ${{ github.ref == 'refs/heads/develop' }}
        - python-version: '3.6'
@@ -30,24 +30,24 @@ jobs:
          on_develop: ${{ github.ref == 'refs/heads/develop' }}
        exclude:
        - python-version: '3.7'
-          os: ubuntu-latest
          concretizer: 'clingo'
+          os: ubuntu-22.04
          on_develop: false
        - python-version: '3.8'
-          os: ubuntu-latest
          concretizer: 'clingo'
+          os: ubuntu-22.04
          on_develop: false
        - python-version: '3.9'
-          os: ubuntu-latest
          concretizer: 'clingo'
+          os: ubuntu-22.04
          on_develop: false
        - python-version: '3.10'
-          os: ubuntu-latest
          concretizer: 'clingo'
+          os: ubuntu-22.04
          on_develop: false
        - python-version: '3.11'
-          os: ubuntu-latest
          concretizer: 'clingo'
+          os: ubuntu-22.04
          on_develop: false

    steps:
@@ -98,7 +98,7 @@ jobs:
          verbose: true
  # Test shell integration
  shell:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
    steps:
    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
      with:
@@ -158,7 +158,7 @@ jobs:
        spack unit-test -k 'not cvs and not svn and not hg' -x --verbose
  # Test for the clingo based solver (using clingo-cffi)
  clingo-cffi:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-22.04
    steps:
    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
      with:
@@ -195,7 +195,7 @@ jobs:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
-        os: [macos-latest, macos-14]
+        os: [macos-13, macos-14]
        python-version: ["3.11"]
    steps:
    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
@@ -228,3 +228,34 @@ jobs:
          flags: unittests,macos
          token: ${{ secrets.CODECOV_TOKEN }}
          verbose: true
+  # Run unit tests on Windows
+  windows:
+    defaults:
+      run:
+        shell:
+          powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
+    runs-on: windows-latest
+    steps:
+    - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
+      with:
+        fetch-depth: 0
+    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
+      with:
+        python-version: 3.9
+    - name: Install Python packages
+      run: |
+        python -m pip install --upgrade pip pywin32 setuptools pytest-cov clingo
+    - name: Create local develop
+      run: |
+        ./.github/workflows/setup_git.ps1
+    - name: Unit Test
+      run: |
+        spack unit-test -x --verbose --cov --cov-config=pyproject.toml
+        ./share/spack/qa/validate_last_exit.ps1
+        coverage combine -a
+        coverage xml
+    - uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c
+      with:
+        flags: unittests,windows
+        token: ${{ secrets.CODECOV_TOKEN }}
+        verbose: true
```
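The matrix changes above use the standard GitHub Actions `include`/`exclude` mechanism: `exclude` removes combinations from the generated product, and `include` adds back hand-picked combinations. A minimal, self-contained sketch of the same pattern (hypothetical job, not part of this diff):

```yaml
strategy:
  matrix:
    os: [ubuntu-22.04]
    python-version: ['3.7', '3.11']
    exclude:                   # drop one combination from the generated product
      - os: ubuntu-22.04
        python-version: '3.7'
    include:                   # add back a hand-picked combination on another runner
      - os: ubuntu-20.04
        python-version: '3.7'
```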
.github/workflows/windows_python.yml — 83 changed lines (file removed)

```diff
@@ -1,83 +0,0 @@
-name: windows
-
-on:
-  workflow_call:
-
-concurrency:
-  group: windows-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}
-  cancel-in-progress: true
-
-defaults:
-  run:
-    shell:
-      powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}
-jobs:
-  unit-tests:
-    runs-on: windows-latest
-    steps:
-    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
-      with:
-        fetch-depth: 0
-    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
-      with:
-        python-version: 3.9
-    - name: Install Python packages
-      run: |
-        python -m pip install --upgrade pip pywin32 setuptools pytest-cov clingo
-    - name: Create local develop
-      run: |
-        ./.github/workflows/setup_git.ps1
-    - name: Unit Test
-      run: |
-        spack unit-test -x --verbose --cov --cov-config=pyproject.toml --ignore=lib/spack/spack/test/cmd
-        ./share/spack/qa/validate_last_exit.ps1
-        coverage combine -a
-        coverage xml
-    - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
-      with:
-        flags: unittests,windows
-        token: ${{ secrets.CODECOV_TOKEN }}
-        verbose: true
-  unit-tests-cmd:
-    runs-on: windows-latest
-    steps:
-    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
-      with:
-        fetch-depth: 0
-    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
-      with:
-        python-version: 3.9
-    - name: Install Python packages
-      run: |
-        python -m pip install --upgrade pip pywin32 setuptools coverage pytest-cov clingo
-    - name: Create local develop
-      run: |
-        ./.github/workflows/setup_git.ps1
-    - name: Command Unit Test
-      run: |
-        spack unit-test -x --verbose --cov --cov-config=pyproject.toml lib/spack/spack/test/cmd
-        ./share/spack/qa/validate_last_exit.ps1
-        coverage combine -a
-        coverage xml
-    - uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
-      with:
-        flags: unittests,windows
-        token: ${{ secrets.CODECOV_TOKEN }}
-        verbose: true
-  build-abseil:
-    runs-on: windows-latest
-    steps:
-    - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
-      with:
-        fetch-depth: 0
-    - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
-      with:
-        python-version: 3.9
-    - name: Install Python packages
-      run: |
-        python -m pip install --upgrade pip pywin32 setuptools coverage
-    - name: Build Test
-      run: |
-        spack compiler find
-        spack -d external find cmake ninja
-        spack -d install abseil-cpp
```
```diff
@@ -14,3 +14,26 @@ sphinx:
 python:
   install:
     - requirements: lib/spack/docs/requirements.txt
+
+search:
+  ranking:
+    spack.html: -10
+    spack.*.html: -10
+    llnl.html: -10
+    llnl.*.html: -10
+    _modules/*: -10
+    command_index.html: -9
+    basic_usage.html: 5
+    configuration.html: 5
+    config_yaml.html: 5
+    packages_yaml.html: 5
+    build_settings.html: 5
+    environments.html: 5
+    containers.html: 5
+    mirrors.html: 5
+    module_file_support.html: 5
+    repositories.html: 5
+    binary_caches.html: 5
+    chain.html: 5
+    pipelines.html: 5
+    packaging_guide.html: 5
```
CHANGELOG.md — 463 changed lines

@@ -1,3 +1,466 @@

# v0.22.4 (2025-02-18)

## Bugfixes
- Continue to mark non-roots as implicitly installed on partial env installs (#47183)

# v0.22.3 (2024-11-18)

## Bugfixes
- Forward compatibility with Python 3.13 (#46775, #46983, #47035, #47175)
- `archspec` was updated to v0.2.5 (#46503, #46958)
- Fix path to Spack in `spack env depfile` makefile (#46966)
- Fix `glibc` detection in Chinese locales (#47434)
- Fix pickle round-trip of specs propagating variants (#47351)
- Fix a bug where concurrent spack install commands would not always update explicits correctly (#47358)
- Fix a bug where autopush would run before all post install hooks modifying the install prefix had run (#47329)
- Fix `spack find -u` (#47102)
- Fix a bug where sometimes the wrong Python interpreter was used for build dependencies such as `py-setuptools` (#46980)
- Fix default config errors found by `spack audit externals` (#47308)
- Fix duplicate printing of external roots in installer (#44917)
- Fix modules schema in `compilers.yaml` (#47197)
- Reduce the size of generated YAML for Gitlab CI (#44995)
- Handle missing metadata file gracefully in bootstrap (#47278)
- Show underlying errors on fetch failure (#45714)
- Recognize `.` and `..` as paths instead of names in buildcache commands (#47105)
- Documentation and style (#46991, #47107, #47110, #47111, #47346, #47307, #47309, #47328, #47160, #47402, #47557, #46709, #47080)
- Tests and CI fixes (#47165, #46711)

## Package updates
- ffmpeg: fix hash of patch (#45574)

# v0.22.2 (2024-09-21)

## Bugfixes
- Forward compatibility with Spack 0.23 packages with language dependencies (#45205, #45191)
- Forward compatibility with `urllib` from Python 3.12.6+ (#46453, #46483)
- Bump vendored `archspec` for better aarch64 support (#45721, #46445)
- Support macOS Sequoia (#45018, #45127)
- Fix regression in `{variants.X}` and `{variants.X.value}` format strings (#46206)
- Ensure shell escaping of environment variable values in load and activate commands (#42780)
- Fix an issue where `spec[pkg]` considers specs outside the current DAG (#45090)
- Do not halt concretization on unknown variants in externals (#45326)
- Improve validation of `develop` config section (#46485)
- Explicitly disable `ccache` if turned off in config, to avoid cache pollution (#45275)
- Improve backwards compatibility in `include_concrete` (#45766)
- Fix issue where package tags were sometimes repeated (#45160)
- Make `setup-env.sh` "sourced only" by dropping execution bits (#45641)
- Make certain source/binary fetch errors recoverable instead of a hard error (#45683)
- Remove debug statements in package hash computation (#45235)
- Remove redundant clingo warnings (#45269)
- Remove hard-coded layout version (#45645)
- Do not initialize previous store state in `use_store` (#45268)
- Docs improvements (#46475)

## Package updates
- `chapel` major update (#42197, #44931, #45304)

# v0.22.1 (2024-07-04)

## Bugfixes
- Fix reuse of externals on Linux (#44316)
- Ensure parent gcc-runtime version >= child (#44834, #44870)
- Ensure the latest gcc-runtime is rpath'ed when multiple exist among link deps (#44219)
- Improve version detection of glibc (#44154)
- Improve heuristics for solver (#44893, #44976, #45023)
- Make strong preferences override reuse (#44373)
- Reduce verbosity when C compiler is missing (#44182)
- Make missing ccache executable an error when required (#44740)
- Make every environment view containing `python` a `venv` (#44382)
- Fix external detection for compilers with os but no target (#44156)
- Fix version optimization for roots (#44272)
- Handle common implementations of pagination of tags in OCI build caches (#43136)
- Apply fetched patches to develop specs (#44950)
- Avoid Windows wrappers for filesystem utilities on non-Windows (#44126)
- Fix issue with long filenames in build caches on Windows (#43851)
- Fix formatting issue in `spack audit` (#45045)
- CI fixes (#44582, #43965, #43967, #44279, #44213)

## Package updates
- protobuf: fix 3.4:3.21 patch checksum (#44443)
- protobuf: update hash for patch needed when="@3.4:3.21" (#44210)
- git: bump v2.39 to 2.45; deprecate unsafe versions (#44248)
- gcc: use -rpath {rpath_dir} not -rpath={rpath dir} (#44315)
- Remove mesa18 and libosmesa (#44264)
- Enforce consistency of `gl` providers (#44307)
- Require libiconv for iconv (#44335, #45026). Notice that glibc/musl also provide iconv, but are not guaranteed to be complete. Set `packages:iconv:require:[glibc]` to restore the old behavior.
- py-matplotlib: qualify when to do a post install (#44191)
- rust: fix v1.78.0 instructions (#44127)
- suite-sparse: improve setting of the `libs` property (#44214)
- netlib-lapack: provide blas and lapack together (#44981)


# v0.22.0 (2024-05-12)

`v0.22.0` is a major feature release.

## Features in this release

1. **Compiler dependencies**

   We are in the process of making compilers proper dependencies in Spack, and a number of changes in `v0.22` support that effort. You may notice nodes in your dependency graphs for compiler runtime libraries like `gcc-runtime` or `libgfortran`, and you may notice that Spack graphs now include `libc`. We've also begun moving compiler configuration from `compilers.yaml` to `packages.yaml` to make it consistent with other externals. We are trying to do this with the least disruption possible, so your existing `compilers.yaml` files should still work. We expect to be done with this transition by the `v0.23` release in November.

   * #41104: Packages compiled with `%gcc` on Linux, macOS and FreeBSD now depend on a new package `gcc-runtime`, which contains a copy of the shared compiler runtime libraries. This enables gcc runtime libraries to be installed and relocated when using a build cache. When building minimal Spack-generated container images it is no longer necessary to install libgfortran, libgomp etc. using the system package manager.

   * #42062: Packages compiled with `%oneapi` now depend on a new package `intel-oneapi-runtime`. This is similar to `gcc-runtime`, and the runtimes can provide virtuals and compilers can inject dependencies on virtuals into compiled packages. This allows us to model library soname compatibility and allows compilers like `%oneapi` to provide virtuals like `sycl` (which can also be provided by standalone libraries). Note that until we have an agreement in place with intel, Intel packages are marked `redistribute(source=False, binary=False)` and must be downloaded outside of Spack.

   * #43272: changes to the optimization criteria of the solver improve the hit-rate of buildcaches by a fair amount. The solver has more relaxed compatibility rules and will not try to strictly match compilers or targets of reused specs. Users can still enforce the previous strict behavior with `require:` sections in `packages.yaml`. Note that to enforce correct linking, Spack will *not* reuse old `%gcc` and `%oneapi` specs that do not have the runtime libraries as a dependency.

   * #43539: Spack will reuse specs built with compilers that are *not* explicitly configured in `compilers.yaml`. Because we can now keep runtime libraries in build cache, we do not require you to also have a local configured compiler to *use* the runtime libraries. This improves reuse in buildcaches and avoids conflicts with OS updates that happen underneath Spack.

   * #43190: binary compatibility on `linux` is now based on the `libc` version, instead of on the `os` tag. Spack builds now detect the host `libc` (`glibc` or `musl`) and add it as an implicit external node in the dependency graph. Binaries with a `libc` with the same name and a version less than or equal to that of the detected `libc` can be reused. This is only on `linux`, not `macos` or `Windows`.

   * #43464: each package that can provide a compiler is now detectable using `spack external find`. External packages defining compiler paths are effectively used as compilers, and `spack external find -t compiler` can be used as a substitute for `spack compiler find`. More details on this transition are in [the docs](https://spack.readthedocs.io/en/latest/getting_started.html#manual-compiler-configuration)
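As a quick illustration of the detection workflow described in #43464, the two commands below are alternative ways to register system compilers (a sketch; output is not reproduced here):

```console
$ spack external find -t compiler   # new in v0.22: detect compiler packages as externals
$ spack compiler find               # older equivalent that writes compilers.yaml
```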
2. **Improved `spack find` UI for Environments**

   If you're working in an environment, you likely care about:

   * What are the roots
   * Which ones are installed / not installed
   * What's been added that still needs to be concretized

   We've tweaked `spack find` in environments to show this information much more clearly. Installation status is shown next to each root, so you can see what is installed. Roots are also shown in bold in the list of installed packages. There is also a new option for `spack find -r` / `--only-roots` that will only show env roots, if you don't want to look at all the installed specs.

   More details in #42334.
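A minimal sketch of the new environment-aware listing, assuming an activated environment (the exact output format is omitted):

```console
$ spack find               # roots are listed with their installation status
$ spack find --only-roots  # same as `spack find -r`: show only the environment roots
```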
3. **Improved command-line string quoting**

   We are making some breaking changes to how Spack parses specs on the CLI in order to respect shell quoting instead of trying to fight it. If you (sadly) had to write something like this on the command line:

   ```
   spack install zlib cflags=\"-O2 -g\"
   ```

   That will now result in an error, but you can now write what you probably expected to work in the first place:

   ```
   spack install zlib cflags="-O2 -g"
   ```

   Quoted strings can also now include special characters, so you can supply flags like:

   ```
   spack install zlib ldflags='-Wl,-rpath=$ORIGIN/_libs'
   ```

   To reduce ambiguity in parsing, we now require that you *not* put spaces around `=` and `==` when specifying flags or variants. This would not have broken before, but will now result in an error:

   ```
   spack install zlib cflags = "-O2 -g"
   ```

   More details and discussion in #30634.
4. **Revert default `spack install` behavior to `--reuse`**

   We changed the default concretizer behavior from `--reuse` to `--reuse-deps` in #30990 (in `v0.20`), which meant that *every* `spack install` invocation would attempt to build a new version of the requested package / any environment roots. While this is a common ask for *upgrading* and for *developer* workflows, we don't think it should be the default for a package manager.

   We are going to try to stick to this policy:
   1. Prioritize reuse and build as little as possible by default.
   2. Only upgrade or install duplicates if they are explicitly asked for, or if there is a known security issue that necessitates an upgrade.

   With the install command you now have three options:

   * `--reuse` (default): reuse as many existing installations as possible.
   * `--reuse-deps` / `--fresh-roots`: upgrade (freshen) roots but reuse dependencies if possible.
   * `--fresh`: install fresh versions of requested packages (roots) and their dependencies.

   We've also introduced `--fresh-roots` as an alias for `--reuse-deps` to make it more clear that it may give you fresh versions. More details in #41302 and #43988.
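For example, with a placeholder spec name, the three modes correspond to the following invocations:

```console
$ spack install hdf5                 # --reuse is the default: reuse as much as possible
$ spack install --fresh-roots hdf5   # alias for --reuse-deps: fresh root, reused dependencies
$ spack install --fresh hdf5         # fresh root and fresh dependencies
```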
5. **More control over reused specs**

   You can now control which packages to reuse and how. There is a new `concretizer:reuse` config option, which accepts the following properties:

   - `roots`: `true` to reuse roots, `false` to reuse just dependencies
   - `exclude`: list of constraints used to select which specs *not* to reuse
   - `include`: list of constraints used to select which specs *to* reuse
   - `from`: list of sources for reused specs (some combination of `local`, `buildcache`, or `external`)

   For example, to reuse only specs compiled with GCC, you could write:

   ```yaml
   concretizer:
     reuse:
       roots: true
       include:
       - "%gcc"
   ```

   Or, if `openmpi` must be used from externals, and it must be the only external used:

   ```yaml
   concretizer:
     reuse:
       roots: true
       from:
       - type: local
         exclude: ["openmpi"]
       - type: buildcache
         exclude: ["openmpi"]
       - type: external
         include: ["openmpi"]
   ```

6. **New `redistribute()` directive**

   Some packages can't be redistributed in source or binary form. We need an explicit way to say that in a package.

   Now there is a `redistribute()` directive so that package authors can write:

   ```python
   class MyPackage(Package):
       redistribute(source=False, binary=False)
   ```

   Like other directives, this works with `when=`:

   ```python
   class MyPackage(Package):
       # 12.0 and higher are proprietary
       redistribute(source=False, binary=False, when="@12.0:")

       # can't redistribute when we depend on some proprietary dependency
       redistribute(source=False, binary=False, when="^proprietary-dependency")
   ```

   More in #20185.

7. **New `conflict:` and `prefer:` syntax for package preferences**

   Previously, you could express conflicts and preferences in `packages.yaml` through some contortions with `require:`:

   ```yaml
   packages:
     zlib-ng:
       require:
       - one_of: ["%clang", "@:"]   # conflict on %clang
       - any_of: ["+shared", "@:"]  # strong preference for +shared
   ```

   You can now use `require:` and `prefer:` for a much more readable configuration:

   ```yaml
   packages:
     zlib-ng:
       conflict:
       - "%clang"
       prefer:
       - "+shared"
   ```

   See [the documentation](https://spack.readthedocs.io/en/latest/packages_yaml.html#conflicts-and-strong-preferences) and #41832 for more details.

8. **`include_concrete` in environments**

   You may want to build on the *concrete* contents of another environment without changing that environment. You can now include the concrete specs from another environment's `spack.lock` with `include_concrete`:

   ```yaml
   spack:
     specs: []
     concretizer:
       unify: true
     include_concrete:
     - /path/to/environment1
     - /path/to/environment2
   ```

   Now, when *this* environment is concretized, it will bring in the already concrete specs from `environment1` and `environment2`, and build on top of them without changing them. This is useful if you have phased deployments, where old deployments should not be modified but you want to use as many of them as possible. More details in #33768.

9. **`python-venv` isolation**

   Spack has unique requirements for Python because it:
   1. installs every package in its own independent directory, and
   2. allows users to register *external* python installations.

   External installations may contain their own installed packages that can interfere with Spack installations, and some distributions (Debian and Ubuntu) even change the `sysconfig` in ways that alter the installation layout of installed Python packages (e.g., with the addition of a `/local` prefix on Debian or Ubuntu). To isolate Spack from these and other issues, we now insert a small `python-venv` package in between `python` and packages that need to install Python code. This isolates Spack's build environment, isolates Spack from any issues with an external python, and resolves a large number of issues we've had with Python installations.

   See #40773 for further details.

## New commands, options, and directives

* Allow packages to be pushed to build cache after install from source (#42423)
* `spack develop`: stage build artifacts in same root as non-dev builds (#41373)
* Don't delete `spack develop` build artifacts after install (#43424)
* `spack find`: add options for local/upstream only (#42999)
* `spack logs`: print log files for packages (either partially built or installed) (#42202)
* `patch`: support reversing patches (#43040)
* `develop`: Add -b/--build-directory option to set build_directory package attribute (#39606)
* `spack list`: add `--namespace` / `--repo` option (#41948)
* directives: add `checked_by` field to `license()`, add some license checks
* `spack gc`: add options for environments and build dependencies (#41731)
* Add `--create` to `spack env activate` (#40896)
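Two of the additions above in assumed typical use (environment and package names are placeholders, and the exact argument forms may differ):

```console
$ spack env activate --create myenv   # create the environment on the fly if it does not exist yet
$ spack logs zlib                     # print the build log of an installed or partially built package
```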
## Performance improvements

* environment.py: fix excessive re-reads (#43746)
* ruamel yaml: fix quadratic complexity bug (#43745)
* Refactor to improve `spec format` speed (#43712)
* Do not acquire a write lock on the env post install if no views (#43505)
* asp.py: fewer calls to `spec.copy()` (#43715)
* spec.py: early return in `__str__`
* avoid `jinja2` import at startup unless needed (#43237)

## Other new features of note

* `archspec`: update to `v0.2.4`: support for Windows, bugfixes for `neoverse-v1` and `neoverse-v2` detection.
* `spack config get`/`blame`: with no args, show entire config
* `spack env create <env>`: dir if dir-like (#44024)
* ASP-based solver: update os compatibility for macOS (#43862)
* Add handling of custom ssl certs in urllib ops (#42953)
* Add ability to rename environments (#43296)
* Add config option and compiler support to reuse across OS's (#42693)
* Support for prereleases (#43140)
* Only reuse externals when configured (#41707)
* Environments: Add support for including views (#42250)

## Binary caches

* Build cache: make signed/unsigned a mirror property (#41507)
* tools stack

## Removals, deprecations, and syntax changes

* remove `dpcpp` compiler and package (#43418)
* spack load: remove --only argument (#42120)

## Notable Bugfixes

* repo.py: drop deleted packages from provider cache (#43779)
* Allow `+` in module file names (#41999)
* `cmd/python`: use runpy to allow multiprocessing in scripts (#41789)
* Show extension commands with spack -h (#41726)
* Support environment variable expansion inside module projections (#42917)
* Alert user to failed concretizations (#42655)
* shell: fix zsh color formatting for PS1 in environments (#39497)
* spack mirror create --all: include patches (#41579)

## Spack community stats

* 7,994 total packages; 525 since `v0.21.0`
* 178 new Python packages, 5 new R packages
* 358 people contributed to this release
* 344 committers to packages
* 45 committers to core


# v0.21.2 (2024-03-01)

## Bugfixes

- Containerize: accommodate nested or pre-existing spack-env paths (#41558)
- Fix setup-env script, when going back and forth between instances (#40924)
- Fix using fully-qualified namespaces from root specs (#41957)
- Fix a bug when a required provider is requested for multiple virtuals (#42088)
- OCI buildcaches:
  - only push in parallel when forking (#42143)
  - use pickleable errors (#42160)
- Fix using sticky variants in externals (#42253)
- Fix a rare issue with conditional requirements and multi-valued variants (#42566)

## Package updates
- rust: add v1.75, rework a few variants (#41161, #41903)
- py-transformers: add v4.35.2 (#41266)
- mgard: fix OpenMP on AppleClang (#42933)

# v0.21.1 (2024-01-11)

## New features
- Add support for reading buildcaches created by Spack v0.22 (#41773)

## Bugfixes

- spack graph: fix coloring with environments (#41240)
- spack info: sort variants in --variants-by-name (#41389)
- Spec.format: error on old style format strings (#41934)
- ASP-based solver:
  - fix infinite recursion when computing concretization errors (#41061)
  - don't error for type mismatch on preferences (#41138)
  - don't emit spurious debug output (#41218)
- Improve the error message for deprecated preferences (#41075)
- Fix MSVC preview version breaking clingo build on Windows (#41185)
- Fix multi-word aliases (#41126)
- Add a warning for unconfigured compiler (#41213)
- environment: fix an issue with deconcretization/reconcretization of specs (#41294)
- buildcache: don't error if a patch is missing, when installing from binaries (#41986)
- Multiple improvements to unit-tests (#41215, #41369, #41495, #41359, #41361, #41345, #41342, #41308, #41226)

## Package updates
- root: add a webgui patch to address security issue (#41404)
- BerkeleyGW: update source urls (#38218)

# v0.21.0 (2023-11-11)

`v0.21.0` is a major feature release.
```diff
@@ -144,3 +144,5 @@ switch($SpackSubCommand)
     "unload" {Invoke-SpackLoad}
     default {python "$Env:SPACK_ROOT/bin/spack" $SpackCMD_params $SpackSubCommand $SpackSubCommandArgs}
 }
+
+exit $LASTEXITCODE
```
```diff
@@ -42,8 +42,8 @@ concretizer:
   # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
   # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
   strategy: minimal
-  # Option to specify compatiblity between operating systems for reuse of compilers and packages
+  # Option to specify compatibility between operating systems for reuse of compilers and packages
   # Specified as a key: [list] where the key is the os that is being targeted, and the list contains the OS's
   # it can reuse. Note this is a directional compatibility so mutual compatibility between two OS's
   # requires two entries i.e. os_compatible: {sonoma: [monterey], monterey: [sonoma]}
   os_compatible: {}
```
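Following the comment above, a populated `os_compatible` entry would look like this (macOS names taken from the example in the comment):

```yaml
concretizer:
  os_compatible:
    sonoma: [monterey]
    monterey: [sonoma]
```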
```diff
@@ -1,19 +0,0 @@
-# -------------------------------------------------------------------------
-# This file controls default concretization preferences for Spack.
-#
-# Settings here are versioned with Spack and are intended to provide
-# sensible defaults out of the box. Spack maintainers should edit this
-# file to keep it current.
-#
-# Users can override these settings by editing the following files.
-#
-# Per-spack-instance settings (overrides defaults):
-#   $SPACK_ROOT/etc/spack/packages.yaml
-#
-# Per-user settings (overrides default and site settings):
-#   ~/.spack/packages.yaml
-# -------------------------------------------------------------------------
-packages:
-  all:
-    providers:
-      iconv: [glibc, musl, libiconv]
```
```diff
@@ -1,19 +1,3 @@
-# -------------------------------------------------------------------------
-# This file controls default concretization preferences for Spack.
-#
-# Settings here are versioned with Spack and are intended to provide
-# sensible defaults out of the box. Spack maintainers should edit this
-# file to keep it current.
-#
-# Users can override these settings by editing the following files.
-#
-# Per-spack-instance settings (overrides defaults):
-#   $SPACK_ROOT/etc/spack/packages.yaml
-#
-# Per-user settings (overrides default and site settings):
-#   ~/.spack/packages.yaml
-# -------------------------------------------------------------------------
 packages:
-  all:
-    providers:
-      iconv: [glibc, musl, libiconv]
+  iconv:
+    require: [libiconv]
```
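As noted in the v0.22.1 release notes above, users who prefer the previous behavior can override this default in their own configuration; a minimal sketch:

```yaml
packages:
  iconv:
    require: [glibc]
```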
```diff
@@ -37,11 +37,10 @@ packages:
     jpeg: [libjpeg-turbo, libjpeg]
     lapack: [openblas, amdlibflame]
     libc: [glibc, musl]
-    libgfortran: [ gcc-runtime ]
-    libglx: [mesa+glx, mesa18+glx]
-    libifcore: [ intel-oneapi-runtime ]
+    libgfortran: [gcc-runtime]
+    libglx: [mesa+glx]
+    libifcore: [intel-oneapi-runtime]
     libllvm: [llvm]
-    libosmesa: [mesa+osmesa, mesa18+osmesa]
     lua-lang: [lua, lua-luajit-openresty, lua-luajit]
     luajit: [lua-luajit-openresty, lua-luajit]
     mariadb-client: [mariadb-c-client, mariadb]
```
```diff
@@ -5,9 +5,9 @@
 .. chain:

-============================
-Chaining Spack Installations
-============================
+=============================================
+Chaining Spack Installations (upstreams.yaml)
+=============================================

 You can point your Spack installation to another installation to use any
 packages that are installed there. To register the other Spack instance,
```
```diff
@@ -184,7 +184,7 @@ Style Tests

 Spack uses `Flake8 <http://flake8.pycqa.org/en/latest/>`_ to test for
 `PEP 8 <https://www.python.org/dev/peps/pep-0008/>`_ conformance and
-`mypy <https://mypy.readthedocs.io/en/stable/>` for type checking. PEP 8 is
+`mypy <https://mypy.readthedocs.io/en/stable/>`_ for type checking. PEP 8 is
 a series of style guides for Python that provide suggestions for everything
 from variable naming to indentation. In order to limit the number of PRs that
 were mostly style changes, we decided to enforce PEP 8 conformance. Your PR
```
```diff
@@ -716,27 +716,27 @@ Release branches
 ^^^^^^^^^^^^^^^^

 There are currently two types of Spack releases: :ref:`major releases
-<major-releases>` (``0.17.0``, ``0.18.0``, etc.) and :ref:`point releases
-<point-releases>` (``0.17.1``, ``0.17.2``, ``0.17.3``, etc.). Here is a
+<major-releases>` (``0.21.0``, ``0.22.0``, etc.) and :ref:`patch releases
+<patch-releases>` (``0.22.1``, ``0.22.2``, ``0.22.3``, etc.). Here is a
 diagram of how Spack release branches work::

-    o    branch: develop  (latest version, v0.19.0.dev0)
+    o    branch: develop  (latest version, v0.23.0.dev0)
     |
     o
-    | o  branch: releases/v0.18, tag: v0.18.1
+    | o  branch: releases/v0.22, tag: v0.22.1
     o |
-    | o  tag: v0.18.0
+    | o  tag: v0.22.0
     o |
     | o
     |/
     o
     |
     o
-    | o  branch: releases/v0.17, tag: v0.17.2
+    | o  branch: releases/v0.21, tag: v0.21.2
     o |
-    | o  tag: v0.17.1
+    | o  tag: v0.21.1
     o |
-    | o  tag: v0.17.0
+    | o  tag: v0.21.0
     o |
     | o
     |/
```
```diff
@@ -747,8 +747,8 @@ requests target ``develop``. The ``develop`` branch will report that its
 version is that of the next **major** release with a ``.dev0`` suffix.

 Each Spack release series also has a corresponding branch, e.g.
-``releases/v0.18`` has ``0.18.x`` versions of Spack, and
-``releases/v0.17`` has ``0.17.x`` versions. A major release is the first
+``releases/v0.22`` has ``v0.22.x`` versions of Spack, and
+``releases/v0.21`` has ``v0.21.x`` versions. A major release is the first
 tagged version on a release branch. Minor releases are back-ported from
 develop onto release branches. This is typically done by cherry-picking
 bugfix commits off of ``develop``.
```
```diff
@@ -778,27 +778,40 @@ for more details.
 Scheduling work for releases
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^

-We schedule work for releases by creating `GitHub projects
-<https://github.com/spack/spack/projects>`_. At any time, there may be
-several open release projects. For example, below are two releases (from
-some past version of the page linked above):
+We schedule work for **major releases** through `milestones
+<https://github.com/spack/spack/milestones>`_ and `GitHub Projects
+<https://github.com/spack/spack/projects>`_, while **patch releases** use `labels
+<https://github.com/spack/spack/labels>`_.

-.. image:: images/projects.png
+There is only one milestone open at a time. Its name corresponds to the next major version, for
+example ``v0.23``. Important issues and pull requests should be assigned to this milestone by
+core developers, so that they are not forgotten at the time of release. The milestone is closed
+when the release is made, and a new milestone is created for the next major release.

-This image shows one release in progress for ``0.15.1`` and another for
-``0.16.0``. Each of these releases has a project board containing issues
-and pull requests. GitHub shows a status bar with completed work in
-green, work in progress in purple, and work not started yet in gray, so
-it's fairly easy to see progress.
+Bug reports in GitHub issues are automatically labelled ``bug`` and ``triage``. Spack developers
+assign one of the labels ``impact-low``, ``impact-medium`` or ``impact-high``. This will make the
+issue appear in the `Triaged bugs <https://github.com/orgs/spack/projects/6>`_ project board.
+Important issues should be assigned to the next milestone as well, so they appear at the top of
+the project board.

-Spack's project boards are not firm commitments so we move work between
-releases frequently. If we need to make a release and some tasks are not
-yet done, we will simply move them to the next minor or major release, rather
-than delaying the release to complete them.
+Spack's milestones are not firm commitments so we move work between releases frequently. If we
+need to make a release and some tasks are not yet done, we will simply move them to the next major
+release milestone, rather than delaying the release to complete them.

-For more on using GitHub project boards, see `GitHub's documentation
-<https://docs.github.com/en/github/managing-your-work-on-github/about-project-boards>`_.
+^^^^^^^^^^^^^^^^^^^^^
+Backporting bug fixes
+^^^^^^^^^^^^^^^^^^^^^
+
+When a bug is fixed in the ``develop`` branch, it is often necessary to backport the fix to one
+(or more) of the ``release/vX.Y`` branches. Only the release manager is responsible for doing
+backports, but Spack maintainers are responsible for labelling pull requests (and issues if no bug
+fix is available yet) with ``vX.Y.Z`` labels. The label should correspond to the next patch version
+that the bug fix should be backported to.
+
+Backports are done publicly by the release manager using a pull request named ``Backports vX.Y.Z``.
+This pull request is opened from the ``backports/vX.Y.Z`` branch, targets the ``releases/vX.Y``
+branch and contains a (growing) list of cherry-picked commits from the ``develop`` branch.
+Typically there are one or two backport pull requests open at any given time.

 .. _major-releases:
```
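A sketch of the corresponding git commands, with placeholder version numbers and commit hashes:

```console
$ git checkout backports/vX.Y.Z           # the branch behind the "Backports vX.Y.Z" pull request
$ git cherry-pick <squashed-commit-sha>   # pick each labelled fix in the order it was merged
$ git push origin backports/vX.Y.Z        # regular push only; this branch is never force-pushed
```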
```diff
@@ -806,25 +819,21 @@ For more on using GitHub project boards, see `GitHub's documentation
 Making major releases
 ^^^^^^^^^^^^^^^^^^^^^

-Assuming a project board has already been created and all required work
-completed, the steps to make the major release are:
+Assuming all required work from the milestone is completed, the steps to make the major release
+are:

-#. Create two new project boards:
+#. `Create a new milestone <https://github.com/spack/spack/milestones>`_ for the next major
+   release.

-   * One for the next major release
-   * One for the next point release
+#. `Create a new label <https://github.com/spack/spack/labels>`_ for the next patch release.

-#. Move any optional tasks that are not done to one of the new project boards.
-
-   In general, small bugfixes should go to the next point release. Major
-   features, refactors, and changes that could affect concretization should
-   go in the next major release.
+#. Move any optional tasks that are not done to the next milestone.

 #. Create a branch for the release, based on ``develop``:

    .. code-block:: console

-      $ git checkout -b releases/v0.15 develop
+      $ git checkout -b releases/v0.23 develop

    For a version ``vX.Y.Z``, the branch's name should be
    ``releases/vX.Y``. That is, you should create a ``releases/vX.Y``
```
```diff
@@ -860,8 +869,8 @@ completed, the steps to make the major release are:

    Create a pull request targeting the ``develop`` branch, bumping the major
    version in ``lib/spack/spack/__init__.py`` with a ``dev0`` release segment.
-   For instance when you have just released ``v0.15.0``, set the version
-   to ``(0, 16, 0, 'dev0')`` on ``develop``.
+   For instance when you have just released ``v0.23.0``, set the version
+   to ``(0, 24, 0, 'dev0')`` on ``develop``.

 #. Follow the steps in :ref:`publishing-releases`.
```
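A sketch of the corresponding edit in `lib/spack/spack/__init__.py`; the variable names are assumed from context and may differ slightly in the real file:

```python
#: Spack version as a tuple; the trailing "dev0" segment marks a development version
spack_version_info = (0, 24, 0, "dev0")
#: Rendered string form, e.g. "0.24.0.dev0"
spack_version = ".".join(str(part) for part in spack_version_info)
```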
@@ -870,82 +879,52 @@ completed, the steps to make the major release are:

#. Follow the steps in :ref:`announcing-releases`.


- .. _point-releases:
+ .. _patch-releases:

^^^^^^^^^^^^^^^^^^^^^
- Making point releases
+ Making patch releases
^^^^^^^^^^^^^^^^^^^^^

- Assuming a project board has already been created and all required work
- completed, the steps to make the point release are:
+ To make the patch release process both efficient and transparent, we use a *backports pull request*
+ which contains cherry-picked commits from the ``develop`` branch. The majority of the work is to
+ cherry-pick the bug fixes, which ideally should be done as soon as they land on ``develop``:
+ this ensures cherry-picking happens in order, and makes conflicts easier to resolve since the
+ changes are fresh in the mind of the developer.

- #. Create a new project board for the next point release.
+ The backports pull request is always titled ``Backports vX.Y.Z`` and is labelled ``backports``. It
+ is opened from a branch named ``backports/vX.Y.Z`` and targets the ``releases/vX.Y`` branch.

- #. Move any optional tasks that are not done to the next project board.
+ Whenever a pull request labelled ``vX.Y.Z`` is merged, cherry-pick the associated squashed commit
+ on ``develop`` to the ``backports/vX.Y.Z`` branch. For pull requests that were rebased (or not
+ squashed), cherry-pick each associated commit individually. Never force push to the
+ ``backports/vX.Y.Z`` branch.

- #. Check out the release branch (it should already exist).
-
- For the ``X.Y.Z`` release, the release branch is called ``releases/vX.Y``.
- For ``v0.15.1``, you would check out ``releases/v0.15``:
-
- .. code-block:: console
-
- $ git checkout releases/v0.15
+ .. warning::
+
+ Sometimes you may **still** get merge conflicts even if you have
+ cherry-picked all the commits in order. This generally means there
+ is some other intervening pull request that the one you're trying
+ to pick depends on. In these cases, you'll need to make a judgment
+ call regarding those pull requests. Consider the number of affected
+ files and/or the resulting differences.
+
+ 1. If the changes are small, you might just cherry-pick it.
+
+ 2. If the changes are large, then you may decide that this fix is not
+ worth including in a patch release, in which case you should remove
+ the label from the pull request. Remember that large, manual backports
+ are seldom the right choice for a patch release.

- #. If a pull request to the release branch named ``Backports vX.Y.Z`` is not already
- in the project, create it. This pull request ought to be created as early as
- possible when working on a release project, so that we can build the release
- commits incrementally, and identify potential conflicts at an early stage.
+ When all commits are cherry-picked in the ``backports/vX.Y.Z`` branch, make the patch
+ release as follows:

- #. Cherry-pick each pull request in the ``Done`` column of the release
- project board onto the ``Backports vX.Y.Z`` pull request.
+ #. `Create a new label <https://github.com/spack/spack/labels>`_ ``vX.Y.{Z+1}`` for the next patch
+ release.

- This is **usually** fairly simple since we squash the commits from the
- vast majority of pull requests. That means there is only one commit
- per pull request to cherry-pick. For example, `this pull request
- <https://github.com/spack/spack/pull/15777>`_ has three commits, but
- they were squashed into a single commit on merge. You can see the
- commit that was created here:
+ #. Replace the label ``vX.Y.Z`` with ``vX.Y.{Z+1}`` for all PRs and issues that are not done.

- .. image:: images/pr-commit.png
-
- You can easily cherry pick it like this (assuming you already have the
- release branch checked out):
-
- .. code-block:: console
-
- $ git cherry-pick 7e46da7
-
- For pull requests that were rebased (or not squashed), you'll need to
- cherry-pick each associated commit individually.
-
- .. warning::
-
- It is important to cherry-pick commits in the order they happened,
- otherwise you can get conflicts while cherry-picking. When
- cherry-picking look at the merge date,
- **not** the number of the pull request or the date it was opened.
-
- Sometimes you may **still** get merge conflicts even if you have
- cherry-picked all the commits in order. This generally means there
- is some other intervening pull request that the one you're trying
- to pick depends on. In these cases, you'll need to make a judgment
- call regarding those pull requests. Consider the number of affected
- files and or the resulting differences.
-
- 1. If the dependency changes are small, you might just cherry-pick it,
- too. If you do this, add the task to the release board.
-
- 2. If the changes are large, then you may decide that this fix is not
- worth including in a point release, in which case you should remove
- the task from the release project.
-
- 3. You can always decide to manually back-port the fix to the release
- branch if neither of the above options makes sense, but this can
- require a lot of work. It's seldom the right choice.
-
- #. When all the commits from the project board are cherry-picked into
- the ``Backports vX.Y.Z`` pull request, you can push a commit to:
+ #. Manually push a single commit with commit message ``Set version to vX.Y.Z`` to the
+ ``backports/vX.Y.Z`` branch, that both bumps the Spack version number and updates the changelog:

1. Bump the version in ``lib/spack/spack/__init__.py``.
2. Update ``CHANGELOG.md`` with a list of the changes.
@@ -954,20 +933,22 @@ completed, the steps to make the point release are:

release branch. See `the changelog from 0.14.1
<https://github.com/spack/spack/commit/ff0abb9838121522321df2a054d18e54b566b44a>`_.

- #. Merge the ``Backports vX.Y.Z`` PR with the **Rebase and merge** strategy. This
- is needed to keep track in the release branch of all the commits that were
- cherry-picked.
-
- #. Make sure CI passes on the release branch, including:
+ #. Make sure CI passes on the **backports pull request**, including:

* Regular unit tests
* Build tests
* The E4S pipeline at `gitlab.spack.io <https://gitlab.spack.io>`_

- If CI does not pass, you'll need to figure out why, and make changes
- to the release branch until it does. You can make more commits, modify
- or remove cherry-picked commits, or cherry-pick **more** from
- ``develop`` to make this happen.
+ #. Merge the ``Backports vX.Y.Z`` PR with the **Rebase and merge** strategy. This
+ is needed to keep track in the release branch of all the commits that were
+ cherry-picked.
+
+ #. Make sure CI passes on the last commit of the **release branch**.
+
+ #. In the rare case you need to include additional commits in the patch release after the backports
+ PR is merged, it is best to delete the last commit ``Set version to vX.Y.Z`` from the release
+ branch with a single force push, open a new backports PR named ``Backports vX.Y.Z (2)``, and
+ repeat the process. Avoid repeated force pushes to the release branch.

#. Follow the steps in :ref:`publishing-releases`.

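As a rough illustration of the backports workflow described above, the following sketch shows how
a single squashed commit might be cherry-picked onto the backports branch. The remote name and
commit hash are hypothetical placeholders, not values taken from this release.

.. code-block:: console

   $ git fetch origin develop
   $ git checkout backports/v0.22.4
   $ git cherry-pick -x 1a2b3c4   # squashed commit of a PR labelled v0.22.4 on develop
   $ git push origin backports/v0.22.4

Cherry-picking with ``-x`` records the original commit hash in the message, which makes it easier
to audit what has already been backported.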
@@ -1042,25 +1023,31 @@ Updating `releases/latest`

If the new release is the **highest** Spack release yet, you should
also tag it as ``releases/latest``. For example, suppose the highest
- release is currently ``0.15.3``:
+ release is currently ``0.22.3``:

- * If you are releasing ``0.15.4`` or ``0.16.0``, then you should tag
- it with ``releases/latest``, as these are higher than ``0.15.3``.
+ * If you are releasing ``0.22.4`` or ``0.23.0``, then you should tag
+ it with ``releases/latest``, as these are higher than ``0.22.3``.

* If you are making a new release of an **older** major version of
- Spack, e.g. ``0.14.4``, then you should not tag it as
+ Spack, e.g. ``0.21.4``, then you should not tag it as
``releases/latest`` (as there are newer major versions).

- To tag ``releases/latest``, do this:
+ To do so, first fetch the latest tag created on GitHub, since you may not have it locally:

.. code-block:: console

- $ git checkout releases/vX.Y # vX.Y is the new release's branch
- $ git tag --force releases/latest
- $ git push --force --tags
+ $ git fetch --force git@github.com:spack/spack vX.Y.Z

- The ``--force`` argument to ``git tag`` makes ``git`` overwrite the existing
- ``releases/latest`` tag with the new one.
+ Then tag ``vX.Y.Z`` as ``releases/latest`` and push the individual tag to GitHub.
+
+ .. code-block:: console
+
+ $ git tag --force releases/latest vX.Y.Z
+ $ git push --force git@github.com:spack/spack releases/latest
+
+ The ``--force`` argument to ``git tag`` makes ``git`` overwrite the existing ``releases/latest``
+ tag with the new one. Do **not** use the ``--tags`` flag when pushing, since this will push *all*
+ local tags.


.. _announcing-releases:
@@ -460,6 +460,125 @@ Sourcing that file in Bash will make the environment available to the

user; and can be included in ``.bashrc`` files, etc. The ``loads``
file may also be copied out of the environment, renamed, etc.

+ .. _environment_include_concrete:
+
+ ------------------------------
+ Included Concrete Environments
+ ------------------------------
+
+ Spack environments can create an environment based off of information in already
+ established environments. You can think of it as a combination of existing
+ environments. It will gather information from the existing environment's
+ ``spack.lock`` and use that during the creation of this included concrete
+ environment. When an included concrete environment is created it will generate
+ a ``spack.lock`` file for the newly created environment.
+
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ Creating included environments
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+ To create a combined concrete environment, you must have at least one existing
+ concrete environment. You will use the command ``spack env create`` with the
+ argument ``--include-concrete`` followed by the name or path of the environment
+ you'd like to include. Here is an example of how to create a combined environment
+ from the command line.
+
+ .. code-block:: console
+
+ $ spack env create myenv
+ $ spack -e myenv add python
+ $ spack -e myenv concretize
+ $ spack env create --include-concrete myenv included_env
+
+ You can also include an environment directly in the ``spack.yaml`` file. It
+ involves adding the ``include_concrete`` heading in the yaml followed by the
+ absolute path to the independent environments.
+
+ .. code-block:: yaml
+
+ spack:
+ specs: []
+ concretizer:
+ unify: true
+ include_concrete:
+ - /absolute/path/to/environment1
+ - /absolute/path/to/environment2
+
+ Once the ``spack.yaml`` has been updated you must concretize the environment to
+ get the concrete specs from the included environments.
+
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ Updating an included environment
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+ If changes were made to the base environment and you want that reflected in the
+ included environment you will need to reconcretize both the base environment and the
+ included environment for the change to be implemented. For example:
+
+ .. code-block:: console
+
+ $ spack env create myenv
+ $ spack -e myenv add python
+ $ spack -e myenv concretize
+ $ spack env create --include-concrete myenv included_env
+
+ $ spack -e myenv find
+ ==> In environment myenv
+ ==> Root specs
+ python
+
+ ==> 0 installed packages
+
+ $ spack -e included_env find
+ ==> In environment included_env
+ ==> No root specs
+ ==> Included specs
+ python
+
+ ==> 0 installed packages
+
+ Here we see that ``included_env`` has access to the python package through
+ the ``myenv`` environment. But if we were to add another spec to ``myenv``,
+ ``included_env`` will not be able to access the new information.
+
+ .. code-block:: console
+
+ $ spack -e myenv add perl
+ $ spack -e myenv concretize
+ $ spack -e myenv find
+ ==> In environment myenv
+ ==> Root specs
+ perl python
+
+ ==> 0 installed packages
+
+ $ spack -e included_env find
+ ==> In environment included_env
+ ==> No root specs
+ ==> Included specs
+ python
+
+ ==> 0 installed packages
+
+ It isn't until you run the ``spack concretize`` command that the combined
+ environment will get the updated information from the reconcretized base environment.
+
+ .. code-block:: console
+
+ $ spack -e included_env concretize
+ $ spack -e included_env find
+ ==> In environment included_env
+ ==> No root specs
+ ==> Included specs
+ perl python
+
+ ==> 0 installed packages

.. _environment-configuration:

------------------------
@@ -811,6 +930,7 @@ For example, the following environment has three root packages:

This allows for a much-needed reduction in redundancy between packages
and constraints.


----------------
Filesystem Views
----------------

@@ -1044,7 +1164,7 @@ other targets to depend on the environment installation.

A typical workflow is as follows:

- .. code:: console
+ .. code-block:: console

spack env create -d .
spack -e . add perl

@@ -1137,7 +1257,7 @@ its dependencies. This can be useful when certain flags should only apply to

dependencies. Below we show a use case where a spec is installed with verbose
output (``spack install --verbose``) while its dependencies are installed silently:

- .. code:: console
+ .. code-block:: console

$ spack env depfile -o Makefile

@@ -1159,7 +1279,7 @@ This can be accomplished through the generated ``[<prefix>/]SPACK_PACKAGE_IDS``

variable. Assuming we have an active and concrete environment, we generate the
associated ``Makefile`` with a prefix ``example``:

- .. code:: console
+ .. code-block:: console

$ spack env depfile -o env.mk --make-prefix example

@@ -35,7 +35,7 @@ A build matrix showing which packages are working on which systems is shown belo

.. code-block:: console

apt update
- apt install build-essential ca-certificates coreutils curl environment-modules gfortran git gpg lsb-release python3 python3-distutils python3-venv unzip zip
+ apt install bzip2 ca-certificates file g++ gcc gfortran git gzip lsb-release patch python3 tar unzip xz-utils zstd

.. tab-item:: RHEL

@@ -43,14 +43,14 @@ A build matrix showing which packages are working on which systems is shown belo

dnf install epel-release
dnf group install "Development Tools"
- dnf install curl findutils gcc-gfortran gnupg2 hostname iproute redhat-lsb-core python3 python3-pip python3-setuptools unzip python3-boto3
+ dnf install gcc-gfortran redhat-lsb-core python3 unzip

.. tab-item:: macOS Brew

.. code-block:: console

brew update
- brew install curl gcc git gnupg zip
+ brew install gcc git zip

------------
Installation

[Two binary image files are removed in this diff and are not shown (previously 44 KiB and 68 KiB).]
@@ -12,10 +12,6 @@

Spack
===================

- .. epigraph::
-
- `These are docs for the Spack package manager. For sphere packing, see` `pyspack <https://pyspack.readthedocs.io>`_.

Spack is a package management tool designed to support multiple
versions and configurations of software on a wide variety of platforms
and environments. It was designed for large supercomputing centers,
@@ -2442,15 +2442,14 @@ with. For example, suppose that in the ``libdwarf`` package you write:

depends_on("libelf@0.8")

- Now ``libdwarf`` will require ``libelf`` at *exactly* version ``0.8``.
- You can also specify a requirement for a particular variant or for
- specific compiler flags:
+ Now ``libdwarf`` will require ``libelf`` in the range ``0.8``, which
+ includes patch versions ``0.8.1``, ``0.8.2``, etc. Apart from version
+ restrictions, you can also specify variants if this package requires
+ optional features of the dependency.

.. code-block:: python

- depends_on("libelf@0.8+debug")
- depends_on("libelf debug=True")
- depends_on("libelf cppflags='-fPIC'")
+ depends_on("libelf@0.8 +parser +pic")

Both users *and* package authors can use the same spec syntax to refer
to different package configurations. Users use the spec syntax on the

@@ -2458,46 +2457,82 @@ command line to find installed packages or to install packages with

particular constraints, and package authors can use specs to describe
relationships between packages.

- ^^^^^^^^^^^^^^
- Version ranges
- ^^^^^^^^^^^^^^
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ Specifying backward and forward compatibility
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

- Although some packages require a specific version for their dependencies,
- most can be built with a range of versions. For example, if you are
- writing a package for a legacy Python module that only works with Python
- 2.4 through 2.6, this would look like:
+ Packages are often compatible with a range of versions of their
+ dependencies. This is typically referred to as backward and forward
+ compatibility. Spack allows you to specify this in the ``depends_on``
+ directive using version ranges.
+
+ **Backwards compatibility** means that the package requires at least a
+ certain version of its dependency:

.. code-block:: python

- depends_on("python@2.4:2.6")
+ depends_on("python@3.10:")

- Version ranges in Spack are *inclusive*, so ``2.4:2.6`` means any version
- greater than or equal to ``2.4`` and up to and including any ``2.6.x``. If
- you want to specify that a package works with any version of Python 3 (or
- higher), this would look like:
+ In this case, the package requires Python 3.10 or newer.
+
+ Commonly, packages drop support for older versions of a dependency as
+ they release new versions. In Spack you can conveniently add every
+ backward compatibility rule as a separate line:

.. code-block:: python

- depends_on("python@3:")
+ # backward compatibility with Python
+ depends_on("python@3.8:")
+ depends_on("python@3.9:", when="@1.2:")
+ depends_on("python@3.10:", when="@1.4:")

- Here we leave out the upper bound. If you want to say that a package
- requires Python 2, you can similarly leave out the lower bound:
+ This means that in general we need Python 3.8 or newer; from version
+ 1.2 onwards we need Python 3.9 or newer; from version 1.4 onwards we
+ need Python 3.10 or newer. Notice that it's fine to have overlapping
+ ranges in the ``when`` clauses.
+
+ **Forward compatibility** means that the package requires at most a
+ certain version of its dependency. Forward compatibility rules are
+ necessary when there are breaking changes in the dependency that the
+ package cannot handle. In Spack we often add forward compatibility
+ bounds only at the time a new, breaking version of a dependency is
+ released. As with backward compatibility, it is typical to see a list
+ of forward compatibility bounds in a package file as separate lines:

.. code-block:: python

- depends_on("python@:2")
+ # forward compatibility with Python
+ depends_on("python@:3.12", when="@:1.10")
+ depends_on("python@:3.13", when="@:1.12")

- Notice that we didn't use ``@:3``. Version ranges are *inclusive*, so
- ``@:3`` means "up to and including any 3.x version".
+ Notice how the ``:`` now appears before the version number both in the
+ dependency and in the ``when`` clause. This tells Spack that in general
+ we need Python 3.13 or older up to version ``1.12.x``, and up to version
+ ``1.10.x`` we need Python 3.12 or older. Said differently, forward compatibility
+ with Python 3.13 was added in version 1.11, while version 1.13 added forward
+ compatibility with Python 3.14.

- You can also simply write
+ Notice that a version range ``@:3.12`` includes *any* patch version
+ number ``3.12.x``, which is often useful when specifying forward compatibility
+ bounds.
+
+ So far we have seen open-ended version ranges, which is by far the most
+ common use case. It is also possible to specify both a lower and an upper bound
+ on the version of a dependency, like this:

.. code-block:: python

- depends_on("python@2.7")
+ depends_on("python@3.10:3.12")

- to tell Spack that the package needs Python 2.7.x. This is equivalent to
- ``@2.7:2.7``.
+ There is short syntax to specify that a package is compatible with say any
+ ``3.x`` version:
+
+ .. code-block:: python
+
+ depends_on("python@3")
+
+ The above is equivalent to ``depends_on("python@3:3")``, which means at least
+ Python version 3 and at most any version ``3.x.y``.

In very rare cases, you may need to specify an exact version, for example
if you need to distinguish between ``3.2`` and ``3.2.1``:
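To make the pattern above concrete, here is a minimal sketch of how these backward and forward
compatibility bounds might sit together in a hypothetical ``package.py``. The class name and
version numbers are invented for the example and do not refer to an existing Spack package.

.. code-block:: python

   from spack.package import *


   class Example(Package):
       """Hypothetical package illustrating version-range bounds."""

       version("1.13")
       version("1.11")
       version("1.2")

       # backward compatibility: the minimum supported Python grows over time
       depends_on("python@3.8:")
       depends_on("python@3.9:", when="@1.2:")
       depends_on("python@3.10:", when="@1.4:")

       # forward compatibility: cap Python until a breaking release is handled
       depends_on("python@:3.12", when="@:1.10")
       depends_on("python@:3.13", when="@:1.12")

Overlapping ranges are fine: for ``example@1.13`` the constraints above collapse to
``python@3.10:``, while ``example@1.2`` resolves to ``python@3.9:3.12``.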
lib/spack/external/__init__.py (vendored, 2 changed lines)

@@ -18,7 +18,7 @@

* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
- * Version: 0.2.4 (commit 48b92512b9ce203ded0ebd1ac41b42593e931f7c)
+ * Version: 0.2.5 (commit 38ce485258ffc4fc6dd6688f8dc90cb269478c47)

astunparse
----------------
lib/spack/external/archspec/cpu/detect.py (vendored, 12 changed lines)

@@ -47,7 +47,11 @@ def decorator(factory):

def partial_uarch(
- name: str = "", vendor: str = "", features: Optional[Set[str]] = None, generation: int = 0
+ name: str = "",
+ vendor: str = "",
+ features: Optional[Set[str]] = None,
+ generation: int = 0,
+ cpu_part: str = "",
) -> Microarchitecture:
"""Construct a partial microarchitecture, from information gathered during system scan."""
return Microarchitecture(

@@ -57,6 +61,7 @@ def partial_uarch(

features=features or set(),
compilers={},
generation=generation,
+ cpu_part=cpu_part,
)

@@ -90,6 +95,7 @@ def proc_cpuinfo() -> Microarchitecture:

return partial_uarch(
vendor=_canonicalize_aarch64_vendor(data),
features=_feature_set(data, key="Features"),
+ cpu_part=data.get("CPU part", ""),
)

if architecture in (PPC64LE, PPC64):

@@ -345,6 +351,10 @@ def sorting_fn(item):

generic_candidates = [c for c in candidates if c.vendor == "generic"]
best_generic = max(generic_candidates, key=sorting_fn)

+ # Relevant for AArch64. Filter on "cpu_part" if we have any match
+ if info.cpu_part != "" and any(c for c in candidates if info.cpu_part == c.cpu_part):
+ candidates = [c for c in candidates if info.cpu_part == c.cpu_part]

# Filter the candidates to be descendant of the best generic candidate.
# This is to avoid that the lack of a niche feature that can be disabled
# from e.g. BIOS prevents detection of a reasonably performant architecture
@@ -2,9 +2,7 @@

# Archspec Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
- """Types and functions to manage information
- on CPU microarchitectures.
- """
+ """Types and functions to manage information on CPU microarchitectures."""
import functools
import platform
import re

@@ -65,23 +63,31 @@ class Microarchitecture:

passed in as argument above.
* versions: versions that support this micro-architecture.

- generation (int): generation of the micro-architecture, if
- relevant.
+ generation (int): generation of the micro-architecture, if relevant.
+ cpu_part (str): cpu part of the architecture, if relevant.
"""

- # pylint: disable=too-many-arguments
+ # pylint: disable=too-many-arguments,too-many-instance-attributes
#: Aliases for micro-architecture's features
feature_aliases = FEATURE_ALIASES

- def __init__(self, name, parents, vendor, features, compilers, generation=0):
+ def __init__(self, name, parents, vendor, features, compilers, generation=0, cpu_part=""):
self.name = name
self.parents = parents
self.vendor = vendor
self.features = features
self.compilers = compilers
+ # Only relevant for PowerPC
self.generation = generation
- # Cache the ancestor computation
+ # Only relevant for AArch64
+ self.cpu_part = cpu_part
+
+ # Cache the "ancestor" computation
self._ancestors = None
+ # Cache the "generic" computation
+ self._generic = None
+ # Cache the "family" computation
+ self._family = None

@property
def ancestors(self):

@@ -111,8 +117,12 @@ def __eq__(self, other):

and self.parents == other.parents # avoid ancestors here
and self.compilers == other.compilers
and self.generation == other.generation
+ and self.cpu_part == other.cpu_part
)

+ def __hash__(self):
+ return hash(self.name)

@coerce_target_names
def __ne__(self, other):
return not self == other

@@ -143,7 +153,8 @@ def __repr__(self):

cls_name = self.__class__.__name__
fmt = (
cls_name + "({0.name!r}, {0.parents!r}, {0.vendor!r}, "
- "{0.features!r}, {0.compilers!r}, {0.generation!r})"
+ "{0.features!r}, {0.compilers!r}, generation={0.generation!r}, "
+ "cpu_part={0.cpu_part!r})"
)
return fmt.format(self)

@@ -168,18 +179,22 @@ def __contains__(self, feature):

@property
def family(self):
"""Returns the architecture family a given target belongs to"""
- roots = [x for x in [self] + self.ancestors if not x.ancestors]
- msg = "a target is expected to belong to just one architecture family"
- msg += f"[found {', '.join(str(x) for x in roots)}]"
- assert len(roots) == 1, msg
-
- return roots.pop()
+ if self._family is None:
+ roots = [x for x in [self] + self.ancestors if not x.ancestors]
+ msg = "a target is expected to belong to just one architecture family"
+ msg += f"[found {', '.join(str(x) for x in roots)}]"
+ assert len(roots) == 1, msg
+ self._family = roots.pop()
+
+ return self._family

@property
def generic(self):
"""Returns the best generic architecture that is compatible with self"""
- generics = [x for x in [self] + self.ancestors if x.vendor == "generic"]
- return max(generics, key=lambda x: len(x.ancestors))
+ if self._generic is None:
+ generics = [x for x in [self] + self.ancestors if x.vendor == "generic"]
+ self._generic = max(generics, key=lambda x: len(x.ancestors))
+ return self._generic

def to_dict(self):
"""Returns a dictionary representation of this object."""

@@ -190,6 +205,7 @@ def to_dict(self):

"generation": self.generation,
"parents": [str(x) for x in self.parents],
"compilers": self.compilers,
+ "cpupart": self.cpu_part,
}

@staticmethod

@@ -202,6 +218,7 @@ def from_dict(data) -> "Microarchitecture":

features=set(data["features"]),
compilers=data.get("compilers", {}),
generation=data.get("generation", 0),
+ cpu_part=data.get("cpupart", ""),
)

def optimization_flags(self, compiler, version):

@@ -360,8 +377,11 @@ def fill_target_from_dict(name, data, targets):

features = set(values["features"])
compilers = values.get("compilers", {})
generation = values.get("generation", 0)
+ cpu_part = values.get("cpupart", "")

- targets[name] = Microarchitecture(name, parents, vendor, features, compilers, generation)
+ targets[name] = Microarchitecture(
+ name, parents, vendor, features, compilers, generation=generation, cpu_part=cpu_part
+ )

known_targets = {}
data = archspec.cpu.schema.TARGETS_JSON["microarchitectures"]
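A quick, hedged sketch of how the new ``cpu_part`` attribute surfaces through the public archspec
API. The target name is only an example; the exact ``cpupart`` value for each AArch64 target comes
from the ``microarchitectures.json`` entries shown further below.

.. code-block:: python

   import archspec.cpu

   uarch = archspec.cpu.TARGETS["neoverse_v2"]
   print(uarch.cpu_part)               # e.g. "0xd4f" for Neoverse V2
   print(uarch.to_dict()["cpupart"])   # serialized under the "cpupart" key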
|
@@ -1482,7 +1482,6 @@
|
|||||||
"cldemote",
|
"cldemote",
|
||||||
"movdir64b",
|
"movdir64b",
|
||||||
"movdiri",
|
"movdiri",
|
||||||
"pdcm",
|
|
||||||
"serialize",
|
"serialize",
|
||||||
"waitpkg"
|
"waitpkg"
|
||||||
],
|
],
|
||||||
@@ -2225,14 +2224,96 @@
|
|||||||
],
|
],
|
||||||
"nvhpc": [
|
"nvhpc": [
|
||||||
{
|
{
|
||||||
"versions": "21.11:",
|
"versions": "21.11:23.8",
|
||||||
"name": "zen3",
|
"name": "zen3",
|
||||||
"flags": "-tp {name}",
|
"flags": "-tp {name}",
|
||||||
"warnings": "zen4 is not fully supported by nvhpc yet, falling back to zen3"
|
"warnings": "zen4 is not fully supported by nvhpc versions < 23.9, falling back to zen3"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"versions": "23.9:",
|
||||||
|
"flags": "-tp {name}"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"zen5": {
|
||||||
|
"from": ["zen4"],
|
||||||
|
"vendor": "AuthenticAMD",
|
||||||
|
"features": [
|
||||||
|
"abm",
|
||||||
|
"aes",
|
||||||
|
"avx",
|
||||||
|
"avx2",
|
||||||
|
"avx512_bf16",
|
||||||
|
"avx512_bitalg",
|
||||||
|
"avx512bw",
|
||||||
|
"avx512cd",
|
||||||
|
"avx512dq",
|
||||||
|
"avx512f",
|
||||||
|
"avx512ifma",
|
||||||
|
"avx512vbmi",
|
||||||
|
"avx512_vbmi2",
|
||||||
|
"avx512vl",
|
||||||
|
"avx512_vnni",
|
||||||
|
"avx512_vp2intersect",
|
||||||
|
"avx512_vpopcntdq",
|
||||||
|
"avx_vnni",
|
||||||
|
"bmi1",
|
||||||
|
"bmi2",
|
||||||
|
"clflushopt",
|
||||||
|
"clwb",
|
||||||
|
"clzero",
|
||||||
|
"cppc",
|
||||||
|
"cx16",
|
||||||
|
"f16c",
|
||||||
|
"flush_l1d",
|
||||||
|
"fma",
|
||||||
|
"fsgsbase",
|
||||||
|
"gfni",
|
||||||
|
"ibrs_enhanced",
|
||||||
|
"mmx",
|
||||||
|
"movbe",
|
||||||
|
"movdir64b",
|
||||||
|
"movdiri",
|
||||||
|
"pclmulqdq",
|
||||||
|
"popcnt",
|
||||||
|
"rdseed",
|
||||||
|
"sse",
|
||||||
|
"sse2",
|
||||||
|
"sse4_1",
|
||||||
|
"sse4_2",
|
||||||
|
"sse4a",
|
||||||
|
"ssse3",
|
||||||
|
"tsc_adjust",
|
||||||
|
"vaes",
|
||||||
|
"vpclmulqdq",
|
||||||
|
"xsavec",
|
||||||
|
"xsaveopt"
|
||||||
|
],
|
||||||
|
"compilers": {
|
||||||
|
"gcc": [
|
||||||
|
{
|
||||||
|
"versions": "14.1:",
|
||||||
|
"name": "znver5",
|
||||||
|
"flags": "-march={name} -mtune={name}"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"aocc": [
|
||||||
|
{
|
||||||
|
"versions": "5.0:",
|
||||||
|
"name": "znver5",
|
||||||
|
"flags": "-march={name} -mtune={name}"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"clang": [
|
||||||
|
{
|
||||||
|
"versions": "19.1:",
|
||||||
|
"name": "znver5",
|
||||||
|
"flags": "-march={name} -mtune={name}"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
"ppc64": {
|
"ppc64": {
|
||||||
"from": [],
|
"from": [],
|
||||||
"vendor": "generic",
|
"vendor": "generic",
|
||||||
@@ -2711,7 +2792,8 @@
|
|||||||
"flags": "-mcpu=thunderx2t99"
|
"flags": "-mcpu=thunderx2t99"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
},
|
||||||
|
"cpupart": "0x0af"
|
||||||
},
|
},
|
||||||
"a64fx": {
|
"a64fx": {
|
||||||
"from": ["armv8.2a"],
|
"from": ["armv8.2a"],
|
||||||
@@ -2779,7 +2861,8 @@
|
|||||||
"flags": "-march=armv8.2-a+crc+crypto+fp16+sve"
|
"flags": "-march=armv8.2-a+crc+crypto+fp16+sve"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
},
|
||||||
|
"cpupart": "0x001"
|
||||||
},
|
},
|
||||||
"cortex_a72": {
|
"cortex_a72": {
|
||||||
"from": ["aarch64"],
|
"from": ["aarch64"],
|
||||||
@@ -2816,7 +2899,8 @@
|
|||||||
"flags" : "-mcpu=cortex-a72"
|
"flags" : "-mcpu=cortex-a72"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
},
|
||||||
|
"cpupart": "0xd08"
|
||||||
},
|
},
|
||||||
"neoverse_n1": {
|
"neoverse_n1": {
|
||||||
"from": ["cortex_a72", "armv8.2a"],
|
"from": ["cortex_a72", "armv8.2a"],
|
||||||
@@ -2837,8 +2921,7 @@
|
|||||||
"asimdrdm",
|
"asimdrdm",
|
||||||
"lrcpc",
|
"lrcpc",
|
||||||
"dcpop",
|
"dcpop",
|
||||||
"asimddp",
|
"asimddp"
|
||||||
"ssbs"
|
|
||||||
],
|
],
|
||||||
"compilers" : {
|
"compilers" : {
|
||||||
"gcc": [
|
"gcc": [
|
||||||
@@ -2902,7 +2985,8 @@
|
|||||||
"flags": "-tp {name}"
|
"flags": "-tp {name}"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
},
|
||||||
|
"cpupart": "0xd0c"
|
||||||
},
|
},
|
||||||
"neoverse_v1": {
|
"neoverse_v1": {
|
||||||
"from": ["neoverse_n1", "armv8.4a"],
|
"from": ["neoverse_n1", "armv8.4a"],
|
||||||
@@ -2926,8 +3010,6 @@
|
|||||||
"lrcpc",
|
"lrcpc",
|
||||||
"dcpop",
|
"dcpop",
|
||||||
"sha3",
|
"sha3",
|
||||||
"sm3",
|
|
||||||
"sm4",
|
|
||||||
"asimddp",
|
"asimddp",
|
||||||
"sha512",
|
"sha512",
|
||||||
"sve",
|
"sve",
|
||||||
@@ -2936,7 +3018,6 @@
|
|||||||
"uscat",
|
"uscat",
|
||||||
"ilrcpc",
|
"ilrcpc",
|
||||||
"flagm",
|
"flagm",
|
||||||
"ssbs",
|
|
||||||
"dcpodp",
|
"dcpodp",
|
||||||
"svei8mm",
|
"svei8mm",
|
||||||
"svebf16",
|
"svebf16",
|
||||||
@@ -3004,7 +3085,7 @@
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
"versions": "11:",
|
"versions": "11:",
|
||||||
"flags" : "-march=armv8.4-a+sve+ssbs+fp16+bf16+crypto+i8mm+rng"
|
"flags" : "-march=armv8.4-a+sve+fp16+bf16+crypto+i8mm+rng"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"versions": "12:",
|
"versions": "12:",
|
||||||
@@ -3028,7 +3109,8 @@
|
|||||||
"flags": "-tp {name}"
|
"flags": "-tp {name}"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
},
|
||||||
|
"cpupart": "0xd40"
|
||||||
},
|
},
|
||||||
"neoverse_v2": {
|
"neoverse_v2": {
|
||||||
"from": ["neoverse_n1", "armv9.0a"],
|
"from": ["neoverse_n1", "armv9.0a"],
|
||||||
@@ -3052,32 +3134,22 @@
|
|||||||
"lrcpc",
|
"lrcpc",
|
||||||
"dcpop",
|
"dcpop",
|
||||||
"sha3",
|
"sha3",
|
||||||
"sm3",
|
|
||||||
"sm4",
|
|
||||||
"asimddp",
|
"asimddp",
|
||||||
"sha512",
|
"sha512",
|
||||||
"sve",
|
"sve",
|
||||||
"asimdfhm",
|
"asimdfhm",
|
||||||
"dit",
|
|
||||||
"uscat",
|
"uscat",
|
||||||
"ilrcpc",
|
"ilrcpc",
|
||||||
"flagm",
|
"flagm",
|
||||||
"ssbs",
|
|
||||||
"sb",
|
"sb",
|
||||||
"dcpodp",
|
"dcpodp",
|
||||||
"sve2",
|
"sve2",
|
||||||
"sveaes",
|
|
||||||
"svepmull",
|
|
||||||
"svebitperm",
|
|
||||||
"svesha3",
|
|
||||||
"svesm4",
|
|
||||||
"flagm2",
|
"flagm2",
|
||||||
"frint",
|
"frint",
|
||||||
"svei8mm",
|
"svei8mm",
|
||||||
"svebf16",
|
"svebf16",
|
||||||
"i8mm",
|
"i8mm",
|
||||||
"bf16",
|
"bf16"
|
||||||
"dgh"
|
|
||||||
],
|
],
|
||||||
"compilers" : {
|
"compilers" : {
|
||||||
"gcc": [
|
"gcc": [
|
||||||
@@ -3102,15 +3174,19 @@
|
|||||||
"flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
|
"flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"versions": "10.0:11.99",
|
"versions": "10.0:11.3.99",
|
||||||
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
|
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"versions": "11.4:11.99",
|
||||||
|
"flags" : "-mcpu=neoverse-v2"
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"versions": "12.0:12.99",
|
"versions": "12.0:12.2.99",
|
||||||
"flags" : "-march=armv9-a+i8mm+bf16 -mtune=cortex-a710"
|
"flags" : "-march=armv9-a+i8mm+bf16 -mtune=cortex-a710"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"versions": "13.0:",
|
"versions": "12.3:",
|
||||||
"flags" : "-mcpu=neoverse-v2"
|
"flags" : "-mcpu=neoverse-v2"
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
@@ -3145,7 +3221,112 @@
|
|||||||
"flags": "-tp {name}"
|
"flags": "-tp {name}"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
},
|
||||||
|
"cpupart": "0xd4f"
|
||||||
|
},
|
||||||
|
"neoverse_n2": {
|
||||||
|
"from": ["neoverse_n1", "armv9.0a"],
|
||||||
|
"vendor": "ARM",
|
||||||
|
"features": [
|
||||||
|
"fp",
|
||||||
|
"asimd",
|
||||||
|
"evtstrm",
|
||||||
|
"aes",
|
||||||
|
"pmull",
|
||||||
|
"sha1",
|
||||||
|
"sha2",
|
||||||
|
"crc32",
|
||||||
|
"atomics",
|
||||||
|
"fphp",
|
||||||
|
"asimdhp",
|
||||||
|
"cpuid",
|
||||||
|
"asimdrdm",
|
||||||
|
"jscvt",
|
||||||
|
"fcma",
|
||||||
|
"lrcpc",
|
||||||
|
"dcpop",
|
||||||
|
"sha3",
|
||||||
|
"asimddp",
|
||||||
|
"sha512",
|
||||||
|
"sve",
|
||||||
|
"asimdfhm",
|
||||||
|
"uscat",
|
||||||
|
"ilrcpc",
|
||||||
|
"flagm",
|
||||||
|
"sb",
|
||||||
|
"dcpodp",
|
||||||
|
"sve2",
|
||||||
|
"flagm2",
|
||||||
|
"frint",
|
||||||
|
"svei8mm",
|
||||||
|
"svebf16",
|
||||||
|
"i8mm",
|
||||||
|
"bf16"
|
||||||
|
],
|
||||||
|
"compilers" : {
|
||||||
|
"gcc": [
|
||||||
|
{
|
||||||
|
"versions": "4.8:5.99",
|
||||||
|
"flags": "-march=armv8-a"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"versions": "6:6.99",
|
||||||
|
"flags" : "-march=armv8.1-a"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"versions": "7.0:7.99",
|
||||||
|
"flags" : "-march=armv8.2-a -mtune=cortex-a72"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"versions": "8.0:8.99",
|
||||||
|
"flags" : "-march=armv8.4-a+sve -mtune=cortex-a72"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"versions": "9.0:9.99",
|
||||||
|
"flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"versions": "10.0:10.99",
|
||||||
|
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"versions": "11.0:",
|
||||||
|
"flags" : "-mcpu=neoverse-n2"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"clang" : [
|
||||||
|
{
|
||||||
|
"versions": "9.0:10.99",
|
||||||
|
"flags" : "-march=armv8.5-a+sve"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"versions": "11.0:13.99",
|
||||||
|
"flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"versions": "14.0:15.99",
|
||||||
|
"flags" : "-march=armv9-a+i8mm+bf16"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"versions": "16.0:",
|
||||||
|
"flags" : "-mcpu=neoverse-n2"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"arm" : [
|
||||||
|
{
|
||||||
|
"versions": "23.04.0:",
|
||||||
|
"flags" : "-mcpu=neoverse-n2"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"nvhpc" : [
|
||||||
|
{
|
||||||
|
"versions": "23.3:",
|
||||||
|
"name": "neoverse-n1",
|
||||||
|
"flags": "-tp {name}"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"cpupart": "0xd49"
|
||||||
},
|
},
|
||||||
"m1": {
|
"m1": {
|
||||||
"from": ["armv8.4a"],
|
"from": ["armv8.4a"],
|
||||||
@@ -3211,7 +3392,8 @@
|
|||||||
"flags" : "-mcpu=apple-m1"
|
"flags" : "-mcpu=apple-m1"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
},
|
||||||
|
"cpupart": "0x022"
|
||||||
},
|
},
|
||||||
"m2": {
|
"m2": {
|
||||||
"from": ["m1", "armv8.5a"],
|
"from": ["m1", "armv8.5a"],
|
||||||
@@ -3289,7 +3471,8 @@
|
|||||||
"flags" : "-mcpu=apple-m2"
|
"flags" : "-mcpu=apple-m2"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
},
|
||||||
|
"cpupart": "0x032"
|
||||||
},
|
},
|
||||||
"arm": {
|
"arm": {
|
||||||
"from": [],
|
"from": [],
|
||||||
|
@@ -52,6 +52,9 @@

}
}
}
+ },
+ "cpupart": {
+ "type": "string"
}
},
"required": [

@@ -107,4 +110,4 @@

"additionalProperties": false
}
}
}
@@ -98,3 +98,10 @@ def path_filter_caller(*args, **kwargs):

if _func:
return holder_func(_func)
return holder_func

+ def sanitize_win_longpath(path: str) -> str:
+ """Strip Windows extended path prefix from strings
+ Returns sanitized string.
+ no-op if extended path prefix is not present"""
+ return path.lstrip("\\\\?\\")

@@ -187,12 +187,18 @@ def polite_filename(filename: str) -> str:

return _polite_antipattern().sub("_", filename)

- def getuid():
+ def getuid() -> Union[str, int]:
+ """Returns os getuid on non Windows
+ On Windows returns 0 for admin users, login string otherwise
+ This is in line with behavior from get_owner_uid which
+ always returns the login string on Windows
+ """
if sys.platform == "win32":
import ctypes

+ # If not admin, use the string name of the login as a unique ID
if ctypes.windll.shell32.IsUserAnAdmin() == 0:
- return 1
+ return os.getlogin()
return 0
else:
return os.getuid()

@@ -213,6 +219,15 @@ def _win_rename(src, dst):

os.replace(src, dst)

+ @system_path_filter
+ def msdos_escape_parens(path):
+ """MS-DOS interprets parens as grouping parameters even in a quoted string"""
+ if sys.platform == "win32":
+ return path.replace("(", "^(").replace(")", "^)")
+ else:
+ return path

@system_path_filter
def rename(src, dst):
# On Windows, os.rename will fail if the destination file already exists

@@ -553,7 +568,13 @@ def exploding_archive_handler(tarball_container, stage):

@system_path_filter(arg_slice=slice(1))
- def get_owner_uid(path, err_msg=None):
+ def get_owner_uid(path, err_msg=None) -> Union[str, int]:
+ """Returns owner UID of path destination
+ On non Windows this is the value of st_uid
+ On Windows this is the login string associated with the
+ owning user.
+ """
if not os.path.exists(path):
mkdirp(path, mode=stat.S_IRWXU)

@@ -822,7 +843,7 @@ def copy_tree(

if islink(s):
link_target = resolve_link_target_relative_to_the_link(s)
if symlinks:
- target = os.readlink(s)
+ target = readlink(s)
if os.path.isabs(target):

def escaped_path(path):

@@ -2429,9 +2450,10 @@ def add_library_dependent(self, *dest):

"""
for pth in dest:
if os.path.isfile(pth):
- self._additional_library_dependents.add(pathlib.Path(pth).parent)
+ new_pth = pathlib.Path(pth).parent
else:
- self._additional_library_dependents.add(pathlib.Path(pth))
+ new_pth = pathlib.Path(pth)
+ self._additional_library_dependents.add(new_pth)

@property
def rpaths(self):

@@ -2509,8 +2531,14 @@ def establish_link(self):

# for each binary install dir in self.pkg (i.e. pkg.prefix.bin, pkg.prefix.lib)
# install a symlink to each dependent library
- for library, lib_dir in itertools.product(self.rpaths, self.library_dependents):
- self._link(library, lib_dir)
+ # do not rpath for system libraries included in the dag
+ # we should not be modifying libraries managed by the Windows system
+ # as this will negatively impact linker behavior and can result in permission
+ # errors if those system libs are not modifiable by Spack
+ if "windows-system" not in getattr(self.pkg, "tags", []):
+ for library, lib_dir in itertools.product(self.rpaths, self.library_dependents):
+ self._link(library, lib_dir)

@system_path_filter
@@ -11,7 +11,7 @@

from llnl.util import lang, tty

- from ..path import system_path_filter
+ from ..path import sanitize_win_longpath, system_path_filter

if sys.platform == "win32":
from win32file import CreateHardLink

@@ -247,9 +247,9 @@ def _windows_create_junction(source: str, link: str):

out, err = proc.communicate()
tty.debug(out.decode())
if proc.returncode != 0:
- err = err.decode()
- tty.error(err)
- raise SymlinkError("Make junction command returned a non-zero return code.", err)
+ err_str = err.decode()
+ tty.error(err_str)
+ raise SymlinkError("Make junction command returned a non-zero return code.", err_str)


def _windows_create_hard_link(path: str, link: str):

@@ -269,14 +269,14 @@ def _windows_create_hard_link(path: str, link: str):

CreateHardLink(link, path)

- def readlink(path: str):
+ def readlink(path: str, *, dir_fd=None):
"""Spack utility to override of os.readlink method to work cross platform"""
if _windows_is_hardlink(path):
return _windows_read_hard_link(path)
elif _windows_is_junction(path):
return _windows_read_junction(path)
else:
- return os.readlink(path)
+ return sanitize_win_longpath(os.readlink(path, dir_fd=dir_fd))


def _windows_read_hard_link(link: str) -> str:
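A small, hedged sketch of how the updated ``readlink`` wrapper might be used; the paths are
hypothetical. On Windows the returned target is additionally stripped of the ``\\?\`` long-path
prefix, and the new keyword-only ``dir_fd`` argument mirrors ``os.readlink``.

.. code-block:: python

   import os
   from llnl.util.symlink import readlink

   os.symlink("/tmp/target", "/tmp/alias")   # hypothetical link for the example
   print(readlink("/tmp/alias"))             # -> "/tmp/target"

   # resolve a link name relative to an open directory descriptor
   fd = os.open("/tmp", os.O_RDONLY)
   print(readlink("alias", dir_fd=fd))
   os.close(fd)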
@@ -18,9 +18,10 @@
|
|||||||
import threading
|
import threading
|
||||||
import traceback
|
import traceback
|
||||||
from contextlib import contextmanager
|
from contextlib import contextmanager
|
||||||
|
from multiprocessing.connection import Connection
|
||||||
from threading import Thread
|
from threading import Thread
|
||||||
from types import ModuleType
|
from types import ModuleType
|
||||||
from typing import Optional
|
from typing import Callable, Optional
|
||||||
|
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
|
|
||||||
@@ -329,49 +330,6 @@ def close(self):
|
|||||||
self.file.close()
|
self.file.close()
|
||||||
|
|
||||||
|
|
||||||
class MultiProcessFd:
|
|
||||||
"""Return an object which stores a file descriptor and can be passed as an
|
|
||||||
argument to a function run with ``multiprocessing.Process``, such that
|
|
||||||
the file descriptor is available in the subprocess."""
|
|
||||||
|
|
||||||
def __init__(self, fd):
|
|
||||||
self._connection = None
|
|
||||||
self._fd = None
|
|
||||||
if sys.version_info >= (3, 8):
|
|
||||||
self._connection = multiprocessing.connection.Connection(fd)
|
|
||||||
else:
|
|
||||||
self._fd = fd
|
|
||||||
|
|
||||||
@property
|
|
||||||
def fd(self):
|
|
||||||
if self._connection:
|
|
||||||
return self._connection._handle
|
|
||||||
else:
|
|
||||||
return self._fd
|
|
||||||
|
|
||||||
def close(self):
|
|
||||||
if self._connection:
|
|
||||||
self._connection.close()
|
|
||||||
else:
|
|
||||||
os.close(self._fd)
|
|
||||||
|
|
||||||
|
|
||||||
def close_connection_and_file(multiprocess_fd, file):
|
|
||||||
# MultiprocessFd is intended to transmit a FD
|
|
||||||
# to a child process, this FD is then opened to a Python File object
|
|
||||||
# (using fdopen). In >= 3.8, MultiprocessFd encapsulates a
|
|
||||||
# multiprocessing.connection.Connection; Connection closes the FD
|
|
||||||
# when it is deleted, and prints a warning about duplicate closure if
|
|
||||||
# it is not explicitly closed. In < 3.8, MultiprocessFd encapsulates a
|
|
||||||
# simple FD; closing the FD here appears to conflict with
|
|
||||||
# closure of the File object (in < 3.8 that is). Therefore this needs
|
|
||||||
# to choose whether to close the File or the Connection.
|
|
||||||
if sys.version_info >= (3, 8):
|
|
||||||
multiprocess_fd.close()
|
|
||||||
else:
|
|
||||||
file.close()
|
|
||||||
|
|
||||||
|
|
||||||
@contextmanager
|
@contextmanager
|
||||||
def replace_environment(env):
|
def replace_environment(env):
|
||||||
"""Replace the current environment (`os.environ`) with `env`.
|
"""Replace the current environment (`os.environ`) with `env`.
|
||||||
@@ -529,22 +487,20 @@ def __enter__(self):
         # forcing debug output.
         self._saved_debug = tty._debug

-        # OS-level pipe for redirecting output to logger
-        read_fd, write_fd = os.pipe()
+        # Pipe for redirecting output to logger
+        read_fd, self.write_fd = multiprocessing.Pipe(duplex=False)

-        read_multiprocess_fd = MultiProcessFd(read_fd)
+        # Pipe for communication back from the daemon

-        # Multiprocessing pipe for communication back from the daemon
         # Currently only used to save echo value between uses
-        self.parent_pipe, child_pipe = multiprocessing.Pipe()
+        self.parent_pipe, child_pipe = multiprocessing.Pipe(duplex=False)

         # Sets a daemon that writes to file what it reads from a pipe
         try:
             # need to pass this b/c multiprocessing closes stdin in child.
-            input_multiprocess_fd = None
+            input_fd = None
             try:
                 if sys.stdin.isatty():
-                    input_multiprocess_fd = MultiProcessFd(os.dup(sys.stdin.fileno()))
+                    input_fd = Connection(os.dup(sys.stdin.fileno()))
             except BaseException:
                 # just don't forward input if this fails
                 pass
@@ -553,9 +509,9 @@ def __enter__(self):
             self.process = multiprocessing.Process(
                 target=_writer_daemon,
                 args=(
-                    input_multiprocess_fd,
-                    read_multiprocess_fd,
-                    write_fd,
+                    input_fd,
+                    read_fd,
+                    self.write_fd,
                     self.echo,
                     self.log_file,
                     child_pipe,
@@ -566,9 +522,9 @@ def __enter__(self):
             self.process.start()

         finally:
-            if input_multiprocess_fd:
-                input_multiprocess_fd.close()
-            read_multiprocess_fd.close()
+            if input_fd:
+                input_fd.close()
+            read_fd.close()

         # Flush immediately before redirecting so that anything buffered
         # goes to the original stream
@@ -586,9 +542,9 @@ def __enter__(self):
             self._saved_stderr = os.dup(sys.stderr.fileno())

             # redirect to the pipe we created above
-            os.dup2(write_fd, sys.stdout.fileno())
-            os.dup2(write_fd, sys.stderr.fileno())
-            os.close(write_fd)
+            os.dup2(self.write_fd.fileno(), sys.stdout.fileno())
+            os.dup2(self.write_fd.fileno(), sys.stderr.fileno())
+            self.write_fd.close()

         else:
             # Handle I/O the Python way. This won't redirect lower-level
@@ -601,7 +557,7 @@ def __enter__(self):
             self._saved_stderr = sys.stderr

             # create a file object for the pipe; redirect to it.
-            pipe_fd_out = os.fdopen(write_fd, "w")
+            pipe_fd_out = os.fdopen(self.write_fd.fileno(), "w", closefd=False)
             sys.stdout = pipe_fd_out
             sys.stderr = pipe_fd_out

@@ -637,6 +593,7 @@ def __exit__(self, exc_type, exc_val, exc_tb):
         else:
             sys.stdout = self._saved_stdout
             sys.stderr = self._saved_stderr
+            self.write_fd.close()

         # print log contents in parent if needed.
         if self.log_file.write_in_parent:
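The `__enter__` hunk above swaps the raw `os.pipe()` for the write end of a `multiprocessing.Pipe` but still performs the redirection at the file-descriptor level, so output from child build processes is captured too. A hedged, self-contained sketch of that dup2-based redirection (illustrative helper, not Spack API):

```python
import os
import sys


def redirect_stdout_to(target_fd: int):
    """Point fd 1 at target_fd and return a callable that restores the original stream."""
    sys.stdout.flush()                       # flush Python-level buffers first
    saved = os.dup(sys.stdout.fileno())      # keep a copy of the real stdout
    os.dup2(target_fd, sys.stdout.fileno())  # OS-level redirect: subprocesses inherit it

    def restore():
        sys.stdout.flush()
        os.dup2(saved, sys.stdout.fileno())
        os.close(saved)

    return restore
```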
@@ -850,14 +807,14 @@ def force_echo(self):


 def _writer_daemon(
-    stdin_multiprocess_fd,
-    read_multiprocess_fd,
-    write_fd,
-    echo,
-    log_file_wrapper,
-    control_pipe,
-    filter_fn,
-):
+    stdin_fd: Optional[Connection],
+    read_fd: Connection,
+    write_fd: Connection,
+    echo: bool,
+    log_file_wrapper: FileWrapper,
+    control_fd: Connection,
+    filter_fn: Optional[Callable[[str], str]],
+) -> None:
     """Daemon used by ``log_output`` to write to a log file and to ``stdout``.

     The daemon receives output from the parent process and writes it both
@@ -894,43 +851,37 @@ def _writer_daemon(
     ``StringIO`` in the parent. This is mainly for testing.

     Arguments:
-        stdin_multiprocess_fd (int): input from the terminal
-        read_multiprocess_fd (int): pipe for reading from parent's redirected
-            stdout
-        echo (bool): initial echo setting -- controlled by user and
-            preserved across multiple writer daemons
-        log_file_wrapper (FileWrapper): file to log all output
-        control_pipe (Pipe): multiprocessing pipe on which to send control
-            information to the parent
-        filter_fn (callable, optional): function to filter each line of output
+        stdin_fd: optional input from the terminal
+        read_fd: pipe for reading from parent's redirected stdout
+        echo: initial echo setting -- controlled by user and preserved across multiple writer
+            daemons
+        log_file_wrapper: file to log all output
+        control_pipe: multiprocessing pipe on which to send control information to the parent
+        filter_fn: optional function to filter each line of output

     """
-    # If this process was forked, then it will inherit file descriptors from
-    # the parent process. This process depends on closing all instances of
-    # write_fd to terminate the reading loop, so we close the file descriptor
-    # here. Forking is the process spawning method everywhere except Mac OS
-    # for Python >= 3.8 and on Windows
-    if sys.version_info < (3, 8) or sys.platform != "darwin":
-        os.close(write_fd)
+    # This process depends on closing all instances of write_pipe to terminate the reading loop
+    write_fd.close()

     # 1. Use line buffering (3rd param = 1) since Python 3 has a bug
     #    that prevents unbuffered text I/O.
     # 2. Python 3.x before 3.7 does not open with UTF-8 encoding by default
-    in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1, encoding="utf-8")
+    # 3. closefd=False because Connection has "ownership"
+    read_file = os.fdopen(read_fd.fileno(), "r", 1, encoding="utf-8", closefd=False)

-    if stdin_multiprocess_fd:
-        stdin = os.fdopen(stdin_multiprocess_fd.fd)
+    if stdin_fd:
+        stdin_file = os.fdopen(stdin_fd.fileno(), closefd=False)
     else:
-        stdin = None
+        stdin_file = None

     # list of streams to select from
-    istreams = [in_pipe, stdin] if stdin else [in_pipe]
+    istreams = [read_file, stdin_file] if stdin_file else [read_file]
     force_echo = False  # parent can force echo for certain output

     log_file = log_file_wrapper.unwrap()

     try:
-        with keyboard_input(stdin) as kb:
+        with keyboard_input(stdin_file) as kb:
             while True:
                 # fix the terminal settings if we recently came to
                 # the foreground
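The daemon now closes its own copy of the write `Connection` up front; once the parent closes its copy too, the reader sees EOF and the logging loop terminates. A reduced sketch of that lifecycle (illustrative names, POSIX fork semantics assumed):

```python
import multiprocessing
import os
from multiprocessing.connection import Connection


def drain(read_end: Connection, write_end: Connection) -> None:
    # EOF can only arrive once *every* copy of the write end is closed,
    # so the child drops its inherited copy immediately.
    write_end.close()
    with os.fdopen(read_end.fileno(), "r", 1, encoding="utf-8", closefd=False) as stream:
        for line in stream:            # ends when the parent closes its write end
            print("logged:", line.rstrip())
    read_end.close()


if __name__ == "__main__":
    read_end, write_end = multiprocessing.Pipe(duplex=False)
    proc = multiprocessing.Process(target=drain, args=(read_end, write_end))
    proc.start()
    read_end.close()
    os.write(write_end.fileno(), b"first line\nsecond line\n")
    write_end.close()                  # last writer gone -> the daemon's loop terminates
    proc.join()
```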
@@ -943,12 +894,12 @@ def _writer_daemon(
                 # Allow user to toggle echo with 'v' key.
                 # Currently ignores other chars.
                 # only read stdin if we're in the foreground
-                if stdin in rlist and not _is_background_tty(stdin):
+                if stdin_file and stdin_file in rlist and not _is_background_tty(stdin_file):
                     # it's possible to be backgrounded between the above
                     # check and the read, so we ignore SIGTTIN here.
                     with ignore_signal(signal.SIGTTIN):
                         try:
-                            if stdin.read(1) == "v":
+                            if stdin_file.read(1) == "v":
                                 echo = not echo
                         except IOError as e:
                             # If SIGTTIN is ignored, the system gives EIO
@@ -957,13 +908,13 @@ def _writer_daemon(
                             if e.errno != errno.EIO:
                                 raise

-                if in_pipe in rlist:
+                if read_file in rlist:
                     line_count = 0
                     try:
                         while line_count < 100:
                             # Handle output from the calling process.
                             try:
-                                line = _retry(in_pipe.readline)()
+                                line = _retry(read_file.readline)()
                             except UnicodeDecodeError:
                                 # installs like --test=root gpgme produce non-UTF8 logs
                                 line = "<line lost: output was not encoded as UTF-8>\n"
@@ -992,7 +943,7 @@ def _writer_daemon(
                             if xoff in controls:
                                 force_echo = False

-                            if not _input_available(in_pipe):
+                            if not _input_available(read_file):
                                 break
                     finally:
                         if line_count > 0:
@@ -1007,14 +958,14 @@ def _writer_daemon(
     finally:
         # send written data back to parent if we used a StringIO
         if isinstance(log_file, io.StringIO):
-            control_pipe.send(log_file.getvalue())
+            control_fd.send(log_file.getvalue())
         log_file_wrapper.close()
-        close_connection_and_file(read_multiprocess_fd, in_pipe)
-        if stdin_multiprocess_fd:
-            close_connection_and_file(stdin_multiprocess_fd, stdin)
+        read_fd.close()
+        if stdin_fd:
+            stdin_fd.close()

         # send echo value back to the parent so it can be preserved.
-        control_pipe.send(echo)
+        control_fd.send(echo)


 def _retry(function):
@@ -4,7 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 #: PEP440 canonical <major>.<minor>.<micro>.<devN> string
-__version__ = "0.22.0.dev0"
+__version__ = "0.22.4"
 spack_version = __version__

@@ -254,8 +254,8 @@ def _search_duplicate_specs_in_externals(error_cls):

 @config_packages
 def _deprecated_preferences(error_cls):
-    """Search package preferences deprecated in v0.21 (and slated for removal in v0.22)"""
-    # TODO (v0.22): remove this audit as the attributes will not be allowed in config
+    """Search package preferences deprecated in v0.21 (and slated for removal in v0.23)"""
+    # TODO (v0.23): remove this audit as the attributes will not be allowed in config
     errors = []
     packages_yaml = spack.config.CONFIG.get_config("packages")

@@ -779,7 +779,7 @@ def check_virtual_with_variants(spec, msg):
         return
     error = error_cls(
         f"{pkg_name}: {msg}",
-        f"remove variants from '{spec}' in depends_on directive in {filename}",
+        [f"remove variants from '{spec}' in depends_on directive in {filename}"],
     )
     errors.append(error)

@@ -23,12 +23,12 @@
 import warnings
 from contextlib import closing
 from typing import Dict, Iterable, List, NamedTuple, Optional, Set, Tuple
-from urllib.error import HTTPError, URLError

 import llnl.util.filesystem as fsys
 import llnl.util.lang
 import llnl.util.tty as tty
 from llnl.util.filesystem import BaseDirectoryVisitor, mkdirp, visit_directory_tree
+from llnl.util.symlink import readlink

 import spack.caches
 import spack.cmd
@@ -658,7 +658,7 @@ def get_buildfile_manifest(spec):
     #    2. paths are used as strings.
     for rel_path in visitor.symlinks:
         abs_path = os.path.join(root, rel_path)
-        link = os.readlink(abs_path)
+        link = readlink(abs_path)
         if os.path.isabs(link) and link.startswith(spack.store.STORE.layout.root):
             data["link_to_relocate"].append(rel_path)

@@ -898,9 +898,8 @@ def url_read_method(url):
         try:
             _, _, spec_file = web_util.read_from_url(url)
             contents = codecs.getreader("utf-8")(spec_file).read()
-        except (URLError, web_util.SpackWebError) as url_err:
-            tty.error("Error reading specfile: {0}".format(url))
-            tty.error(url_err)
+        except web_util.SpackWebError as e:
+            tty.error(f"Error reading specfile: {url}: {e}")
         return contents

     try:
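This and the following buildcache hunks collapse `(URLError, HTTPError, SpackWebError)` tuples into a single `SpackWebError` catch. The idea, sketched with a hypothetical error class rather than Spack's own: translate transport-level failures into one domain error at the lowest layer, so callers only ever need one `except` clause (note that `HTTPError` is already a subclass of `URLError`):

```python
import urllib.error
import urllib.request


class FetchError(Exception):
    """Single domain-level error; hypothetical stand-in for SpackWebError."""


def read_url(url: str) -> bytes:
    # Wrap every transport error at the boundary; callers just catch FetchError.
    try:
        with urllib.request.urlopen(url, timeout=10) as response:
            return response.read()
    except (TimeoutError, urllib.error.URLError) as e:
        raise FetchError(f"cannot read {url}") from e
```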
@@ -2001,6 +2000,7 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
     with spack.util.path.filter_padding():
         tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
         extract_tarball(spec, download_result, force)
+        spec.package.windows_establish_runtime_linkage()
         spack.hooks.post_install(spec, False)
         spack.store.STORE.db.add(spec, spack.store.STORE.layout)

@@ -2039,21 +2039,17 @@ def try_direct_fetch(spec, mirrors=None):
         try:
             _, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json)
             specfile_is_signed = True
-        except (URLError, web_util.SpackWebError, HTTPError) as url_err:
+        except web_util.SpackWebError as e1:
             try:
                 _, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
-            except (URLError, web_util.SpackWebError, HTTPError) as url_err_x:
+            except web_util.SpackWebError as e2:
                 tty.debug(
-                    "Did not find {0} on {1}".format(
-                        specfile_name, buildcache_fetch_url_signed_json
-                    ),
-                    url_err,
+                    f"Did not find {specfile_name} on {buildcache_fetch_url_signed_json}",
+                    e1,
                     level=2,
                 )
                 tty.debug(
-                    "Did not find {0} on {1}".format(specfile_name, buildcache_fetch_url_json),
-                    url_err_x,
-                    level=2,
+                    f"Did not find {specfile_name} on {buildcache_fetch_url_json}", e2, level=2
                 )
                 continue
         specfile_contents = codecs.getreader("utf-8")(fs).read()
@@ -2138,6 +2134,9 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
|
|||||||
|
|
||||||
for mirror in mirror_collection.values():
|
for mirror in mirror_collection.values():
|
||||||
fetch_url = mirror.fetch_url
|
fetch_url = mirror.fetch_url
|
||||||
|
# TODO: oci:// does not support signing.
|
||||||
|
if fetch_url.startswith("oci://"):
|
||||||
|
continue
|
||||||
keys_url = url_util.join(
|
keys_url = url_util.join(
|
||||||
fetch_url, BUILD_CACHE_RELATIVE_PATH, BUILD_CACHE_KEYS_RELATIVE_PATH
|
fetch_url, BUILD_CACHE_RELATIVE_PATH, BUILD_CACHE_KEYS_RELATIVE_PATH
|
||||||
)
|
)
|
||||||
@@ -2148,19 +2147,12 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
|
|||||||
try:
|
try:
|
||||||
_, _, json_file = web_util.read_from_url(keys_index)
|
_, _, json_file = web_util.read_from_url(keys_index)
|
||||||
json_index = sjson.load(codecs.getreader("utf-8")(json_file))
|
json_index = sjson.load(codecs.getreader("utf-8")(json_file))
|
||||||
except (URLError, web_util.SpackWebError) as url_err:
|
except web_util.SpackWebError as url_err:
|
||||||
if web_util.url_exists(keys_index):
|
if web_util.url_exists(keys_index):
|
||||||
err_msg = [
|
|
||||||
"Unable to find public keys in {0},",
|
|
||||||
" caught exception attempting to read from {1}.",
|
|
||||||
]
|
|
||||||
|
|
||||||
tty.error(
|
tty.error(
|
||||||
"".join(err_msg).format(
|
f"Unable to find public keys in {url_util.format(fetch_url)},"
|
||||||
url_util.format(fetch_url), url_util.format(keys_index)
|
f" caught exception attempting to read from {url_util.format(keys_index)}."
|
||||||
)
|
|
||||||
)
|
)
|
||||||
|
|
||||||
tty.debug(url_err)
|
tty.debug(url_err)
|
||||||
|
|
||||||
continue
|
continue
|
||||||
@@ -2440,7 +2432,7 @@ def get_remote_hash(self):
         url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json.hash")
         try:
             response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers))
-        except urllib.error.URLError:
+        except (TimeoutError, urllib.error.URLError):
             return None

         # Validate the hash
@@ -2462,7 +2454,7 @@ def conditional_fetch(self) -> FetchIndexResult:

         try:
             response = self.urlopen(urllib.request.Request(url_index, headers=self.headers))
-        except urllib.error.URLError as e:
+        except (TimeoutError, urllib.error.URLError) as e:
             raise FetchIndexError("Could not fetch index from {}".format(url_index), e) from e

         try:
@@ -2503,10 +2495,7 @@ def __init__(self, url, etag, urlopen=web_util.urlopen):
|
|||||||
def conditional_fetch(self) -> FetchIndexResult:
|
def conditional_fetch(self) -> FetchIndexResult:
|
||||||
# Just do a conditional fetch immediately
|
# Just do a conditional fetch immediately
|
||||||
url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
|
url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
|
||||||
headers = {
|
headers = {"User-Agent": web_util.SPACK_USER_AGENT, "If-None-Match": f'"{self.etag}"'}
|
||||||
"User-Agent": web_util.SPACK_USER_AGENT,
|
|
||||||
"If-None-Match": '"{}"'.format(self.etag),
|
|
||||||
}
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
response = self.urlopen(urllib.request.Request(url, headers=headers))
|
response = self.urlopen(urllib.request.Request(url, headers=headers))
|
||||||
@@ -2514,14 +2503,14 @@ def conditional_fetch(self) -> FetchIndexResult:
|
|||||||
if e.getcode() == 304:
|
if e.getcode() == 304:
|
||||||
# Not modified; that means fresh.
|
# Not modified; that means fresh.
|
||||||
return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
|
return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
|
||||||
raise FetchIndexError("Could not fetch index {}".format(url), e) from e
|
raise FetchIndexError(f"Could not fetch index {url}", e) from e
|
||||||
except urllib.error.URLError as e:
|
except (TimeoutError, urllib.error.URLError) as e:
|
||||||
raise FetchIndexError("Could not fetch index {}".format(url), e) from e
|
raise FetchIndexError(f"Could not fetch index {url}", e) from e
|
||||||
|
|
||||||
try:
|
try:
|
||||||
result = codecs.getreader("utf-8")(response).read()
|
result = codecs.getreader("utf-8")(response).read()
|
||||||
except ValueError as e:
|
except ValueError as e:
|
||||||
raise FetchIndexError("Remote index {} is invalid".format(url), e) from e
|
raise FetchIndexError(f"Remote index {url} is invalid", e) from e
|
||||||
|
|
||||||
headers = response.headers
|
headers = response.headers
|
||||||
etag_header_value = headers.get("Etag", None) or headers.get("etag", None)
|
etag_header_value = headers.get("Etag", None) or headers.get("etag", None)
|
||||||
@@ -2552,21 +2541,19 @@ def conditional_fetch(self) -> FetchIndexResult:
|
|||||||
headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
|
headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
except urllib.error.URLError as e:
|
except (TimeoutError, urllib.error.URLError) as e:
|
||||||
raise FetchIndexError(
|
raise FetchIndexError(f"Could not fetch manifest from {url_manifest}", e) from e
|
||||||
"Could not fetch manifest from {}".format(url_manifest), e
|
|
||||||
) from e
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
manifest = json.loads(response.read())
|
manifest = json.loads(response.read())
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e
|
raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e
|
||||||
|
|
||||||
# Get first blob hash, which should be the index.json
|
# Get first blob hash, which should be the index.json
|
||||||
try:
|
try:
|
||||||
index_digest = spack.oci.image.Digest.from_string(manifest["layers"][0]["digest"])
|
index_digest = spack.oci.image.Digest.from_string(manifest["layers"][0]["digest"])
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e
|
raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e
|
||||||
|
|
||||||
# Fresh?
|
# Fresh?
|
||||||
if index_digest.digest == self.local_hash:
|
if index_digest.digest == self.local_hash:
|
||||||
@@ -597,7 +597,10 @@ def bootstrapping_sources(scope: Optional[str] = None):
         current = copy.copy(entry)
         metadata_dir = spack.util.path.canonicalize_path(entry["metadata"])
         metadata_yaml = os.path.join(metadata_dir, METADATA_YAML_FILENAME)
-        with open(metadata_yaml, encoding="utf-8") as stream:
-            current.update(spack.util.spack_yaml.load(stream))
-        list_of_sources.append(current)
+        try:
+            with open(metadata_yaml, encoding="utf-8") as stream:
+                current.update(spack.util.spack_yaml.load(stream))
+            list_of_sources.append(current)
+        except OSError:
+            pass
     return list_of_sources
|
@@ -43,7 +43,8 @@
|
|||||||
from collections import defaultdict
|
from collections import defaultdict
|
||||||
from enum import Flag, auto
|
from enum import Flag, auto
|
||||||
from itertools import chain
|
from itertools import chain
|
||||||
from typing import List, Set, Tuple
|
from multiprocessing.connection import Connection
|
||||||
|
from typing import Callable, Dict, List, Optional, Set, Tuple
|
||||||
|
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
from llnl.string import plural
|
from llnl.string import plural
|
||||||
@@ -51,7 +52,6 @@
|
|||||||
from llnl.util.lang import dedupe, stable_partition
|
from llnl.util.lang import dedupe, stable_partition
|
||||||
from llnl.util.symlink import symlink
|
from llnl.util.symlink import symlink
|
||||||
from llnl.util.tty.color import cescape, colorize
|
from llnl.util.tty.color import cescape, colorize
|
||||||
from llnl.util.tty.log import MultiProcessFd
|
|
||||||
|
|
||||||
import spack.build_systems.cmake
|
import spack.build_systems.cmake
|
||||||
import spack.build_systems.meson
|
import spack.build_systems.meson
|
||||||
@@ -72,6 +72,7 @@
|
|||||||
import spack.store
|
import spack.store
|
||||||
import spack.subprocess_context
|
import spack.subprocess_context
|
||||||
import spack.user_environment
|
import spack.user_environment
|
||||||
|
import spack.util.executable
|
||||||
import spack.util.path
|
import spack.util.path
|
||||||
import spack.util.pattern
|
import spack.util.pattern
|
||||||
from spack import traverse
|
from spack import traverse
|
||||||
@@ -479,12 +480,12 @@ def set_wrapper_variables(pkg, env):
     env.set(SPACK_DEBUG_LOG_ID, pkg.spec.format("{name}-{hash:7}"))
     env.set(SPACK_DEBUG_LOG_DIR, spack.main.spack_working_dir)

-    # Find ccache binary and hand it to build environment
     if spack.config.get("config:ccache"):
-        ccache = Executable("ccache")
-        if not ccache:
-            raise RuntimeError("No ccache binary found in PATH")
-        env.set(SPACK_CCACHE_BINARY, ccache)
+        # Enable ccache in the compiler wrapper
+        env.set(SPACK_CCACHE_BINARY, spack.util.executable.which_string("ccache", required=True))
+    else:
+        # Avoid cache pollution if a build system forces `ccache <compiler wrapper invocation>`.
+        env.set("CCACHE_DISABLE", "1")

     # Gather information about various types of dependencies
     link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))
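The ccache hunk stops probing with `Executable("ccache")` (an object that is always truthy, so the old error branch could never fire) and instead resolves the binary path up front for the compiler wrapper, explicitly disabling ccache otherwise. A small sketch of the same logic, using `shutil.which` as a stand-in for Spack's `which_string` helper:

```python
import shutil
from typing import Dict


def setup_ccache(env: Dict[str, str], enabled: bool) -> None:
    if enabled:
        ccache = shutil.which("ccache")   # stand-in for which_string("ccache", required=True)
        if not ccache:
            raise RuntimeError("No ccache binary found in PATH")
        env["SPACK_CCACHE_BINARY"] = ccache
    else:
        # Keep a build system that hard-codes `ccache cc ...` from polluting the cache
        # with compiler-wrapper invocations.
        env["CCACHE_DISABLE"] = "1"
```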
@@ -730,12 +731,28 @@ def _static_to_shared_library(arch, compiler, static_lib, shared_lib=None, **kwa
     return compiler(*compiler_args, output=compiler_output)


-def get_rpath_deps(pkg):
-    """Return immediate or transitive RPATHs depending on the package."""
-    if pkg.transitive_rpaths:
-        return [d for d in pkg.spec.traverse(root=False, deptype=("link"))]
-    else:
-        return pkg.spec.dependencies(deptype="link")
+def _get_rpath_deps_from_spec(
+    spec: spack.spec.Spec, transitive_rpaths: bool
+) -> List[spack.spec.Spec]:
+    if not transitive_rpaths:
+        return spec.dependencies(deptype=dt.LINK)
+
+    by_name: Dict[str, spack.spec.Spec] = {}
+
+    for dep in spec.traverse(root=False, deptype=dt.LINK):
+        lookup = by_name.get(dep.name)
+        if lookup is None:
+            by_name[dep.name] = dep
+        elif lookup.version < dep.version:
+            by_name[dep.name] = dep
+
+    return list(by_name.values())
+
+
+def get_rpath_deps(pkg: spack.package_base.PackageBase) -> List[spack.spec.Spec]:
+    """Return immediate or transitive dependencies (depending on the package) that need to be
+    rpath'ed. If a package occurs multiple times, the newest version is kept."""
+    return _get_rpath_deps_from_spec(pkg.spec, pkg.transitive_rpaths)


 def get_rpaths(pkg):
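`_get_rpath_deps_from_spec` deduplicates transitive link dependencies by name, keeping the highest version when a package appears more than once in the DAG. The same bookkeeping in isolation, with plain tuples standing in for `Spec` objects:

```python
from typing import Dict, Iterable, List, Tuple

Dep = Tuple[str, Tuple[int, ...]]  # (name, version) -- simplified stand-in for a Spec


def dedupe_keep_newest(deps: Iterable[Dep]) -> List[Dep]:
    """Keep one entry per name, preferring the highest version, in first-seen order."""
    by_name: Dict[str, Dep] = {}
    for name, version in deps:
        current = by_name.get(name)
        if current is None or current[1] < version:
            by_name[name] = (name, version)
    return list(by_name.values())


print(dedupe_keep_newest([("zlib", (1, 2)), ("zlib", (1, 3)), ("bzip2", (1, 0))]))
# [('zlib', (1, 3)), ('bzip2', (1, 0))]
```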
@@ -1128,18 +1145,60 @@ def get_cmake_prefix_path(pkg):
|
|||||||
|
|
||||||
|
|
||||||
def _setup_pkg_and_run(
|
def _setup_pkg_and_run(
|
||||||
serialized_pkg, function, kwargs, write_pipe, input_multiprocess_fd, jsfd1, jsfd2
|
serialized_pkg: "spack.subprocess_context.PackageInstallContext",
|
||||||
|
function: Callable,
|
||||||
|
kwargs: Dict,
|
||||||
|
write_pipe: Connection,
|
||||||
|
input_pipe: Optional[Connection],
|
||||||
|
jsfd1: Optional[Connection],
|
||||||
|
jsfd2: Optional[Connection],
|
||||||
):
|
):
|
||||||
|
"""Main entry point in the child process for Spack builds.
|
||||||
|
|
||||||
|
``_setup_pkg_and_run`` is called by the child process created in
|
||||||
|
``start_build_process()``, and its main job is to run ``function()`` on behalf of
|
||||||
|
some Spack installation (see :ref:`spack.installer.PackageInstaller._install_task`).
|
||||||
|
|
||||||
|
The child process is passed a ``write_pipe``, on which it's expected to send one of
|
||||||
|
the following:
|
||||||
|
|
||||||
|
* ``StopPhase``: error raised by a build process indicating it's stopping at a
|
||||||
|
particular build phase.
|
||||||
|
|
||||||
|
* ``BaseException``: any exception raised by a child build process, which will be
|
||||||
|
wrapped in ``ChildError`` (which adds a bunch of debug info and log context) and
|
||||||
|
raised in the parent.
|
||||||
|
|
||||||
|
* The return value of ``function()``, which can be anything (except an exception).
|
||||||
|
This is returned to the caller.
|
||||||
|
|
||||||
|
Note: ``jsfd1`` and ``jsfd2`` are passed solely to ensure that the child process
|
||||||
|
does not close these file descriptors. Some ``multiprocessing`` backends will close
|
||||||
|
them automatically in the child if they are not passed at process creation time.
|
||||||
|
|
||||||
|
Arguments:
|
||||||
|
serialized_pkg: Spack package install context object (serialized form of the
|
||||||
|
package that we'll build in the child process).
|
||||||
|
function: function to call in the child process; serialized_pkg is passed to
|
||||||
|
this as the first argument.
|
||||||
|
kwargs: additional keyword arguments to pass to ``function()``.
|
||||||
|
write_pipe: multiprocessing ``Connection`` to the parent process, to which the
|
||||||
|
child *must* send a result (or an error) back to parent on.
|
||||||
|
input_multiprocess_fd: stdin from the parent (not passed currently on Windows)
|
||||||
|
jsfd1: gmake Jobserver file descriptor 1.
|
||||||
|
jsfd2: gmake Jobserver file descriptor 2.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
context: str = kwargs.get("context", "build")
|
context: str = kwargs.get("context", "build")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
# We are in the child process. Python sets sys.stdin to
|
# We are in the child process. Python sets sys.stdin to open(os.devnull) to prevent our
|
||||||
# open(os.devnull) to prevent our process and its parent from
|
# process and its parent from simultaneously reading from the original stdin. But, we
|
||||||
# simultaneously reading from the original stdin. But, we assume
|
# assume that the parent process is not going to read from it till we are done with the
|
||||||
# that the parent process is not going to read from it till we
|
# child, so we undo Python's precaution. closefd=False since Connection has ownership.
|
||||||
# are done with the child, so we undo Python's precaution.
|
if input_pipe is not None:
|
||||||
if input_multiprocess_fd is not None:
|
sys.stdin = os.fdopen(input_pipe.fileno(), closefd=False)
|
||||||
sys.stdin = os.fdopen(input_multiprocess_fd.fd)
|
|
||||||
|
|
||||||
pkg = serialized_pkg.restore()
|
pkg = serialized_pkg.restore()
|
||||||
|
|
||||||
@@ -1155,13 +1214,14 @@ def _setup_pkg_and_run(
|
|||||||
# Do not create a full ChildError from this, it's not an error
|
# Do not create a full ChildError from this, it's not an error
|
||||||
# it's a control statement.
|
# it's a control statement.
|
||||||
write_pipe.send(e)
|
write_pipe.send(e)
|
||||||
except BaseException:
|
except BaseException as e:
|
||||||
# catch ANYTHING that goes wrong in the child process
|
# catch ANYTHING that goes wrong in the child process
|
||||||
exc_type, exc, tb = sys.exc_info()
|
|
||||||
|
|
||||||
# Need to unwind the traceback in the child because traceback
|
# Need to unwind the traceback in the child because traceback
|
||||||
# objects can't be sent to the parent.
|
# objects can't be sent to the parent.
|
||||||
tb_string = traceback.format_exc()
|
exc_type = type(e)
|
||||||
|
tb = e.__traceback__
|
||||||
|
tb_string = "".join(traceback.format_exception(exc_type, e, tb))
|
||||||
|
|
||||||
# build up some context from the offending package so we can
|
# build up some context from the offending package so we can
|
||||||
# show that, too.
|
# show that, too.
|
||||||
@@ -1178,8 +1238,8 @@ def _setup_pkg_and_run(
|
|||||||
elif context == "test":
|
elif context == "test":
|
||||||
logfile = os.path.join(pkg.test_suite.stage, pkg.test_suite.test_log_name(pkg.spec))
|
logfile = os.path.join(pkg.test_suite.stage, pkg.test_suite.test_log_name(pkg.spec))
|
||||||
|
|
||||||
error_msg = str(exc)
|
error_msg = str(e)
|
||||||
if isinstance(exc, (spack.multimethod.NoSuchMethodError, AttributeError)):
|
if isinstance(e, (spack.multimethod.NoSuchMethodError, AttributeError)):
|
||||||
process = "test the installation" if context == "test" else "build from sources"
|
process = "test the installation" if context == "test" else "build from sources"
|
||||||
error_msg = (
|
error_msg = (
|
||||||
"The '{}' package cannot find an attribute while trying to {}. "
|
"The '{}' package cannot find an attribute while trying to {}. "
|
||||||
@@ -1189,7 +1249,7 @@ def _setup_pkg_and_run(
|
|||||||
"More information at https://spack.readthedocs.io/en/latest/packaging_guide.html#installation-procedure"
|
"More information at https://spack.readthedocs.io/en/latest/packaging_guide.html#installation-procedure"
|
||||||
).format(pkg.name, process, context)
|
).format(pkg.name, process, context)
|
||||||
error_msg = colorize("@*R{{{}}}".format(error_msg))
|
error_msg = colorize("@*R{{{}}}".format(error_msg))
|
||||||
error_msg = "{}\n\n{}".format(str(exc), error_msg)
|
error_msg = "{}\n\n{}".format(str(e), error_msg)
|
||||||
|
|
||||||
# make a pickleable exception to send to parent.
|
# make a pickleable exception to send to parent.
|
||||||
msg = "%s: %s" % (exc_type.__name__, error_msg)
|
msg = "%s: %s" % (exc_type.__name__, error_msg)
|
||||||
@@ -1207,8 +1267,8 @@ def _setup_pkg_and_run(
|
|||||||
|
|
||||||
finally:
|
finally:
|
||||||
write_pipe.close()
|
write_pipe.close()
|
||||||
if input_multiprocess_fd is not None:
|
if input_pipe is not None:
|
||||||
input_multiprocess_fd.close()
|
input_pipe.close()
|
||||||
|
|
||||||
|
|
||||||
def start_build_process(pkg, function, kwargs):
|
def start_build_process(pkg, function, kwargs):
|
||||||
@@ -1235,23 +1295,9 @@ def child_fun():
|
|||||||
If something goes wrong, the child process catches the error and
|
If something goes wrong, the child process catches the error and
|
||||||
passes it to the parent wrapped in a ChildError. The parent is
|
passes it to the parent wrapped in a ChildError. The parent is
|
||||||
expected to handle (or re-raise) the ChildError.
|
expected to handle (or re-raise) the ChildError.
|
||||||
|
|
||||||
This uses `multiprocessing.Process` to create the child process. The
|
|
||||||
mechanism used to create the process differs on different operating
|
|
||||||
systems and for different versions of Python. In some cases "fork"
|
|
||||||
is used (i.e. the "fork" system call) and some cases it starts an
|
|
||||||
entirely new Python interpreter process (in the docs this is referred
|
|
||||||
to as the "spawn" start method). Breaking it down by OS:
|
|
||||||
|
|
||||||
- Linux always uses fork.
|
|
||||||
- Mac OS uses fork before Python 3.8 and "spawn" for 3.8 and after.
|
|
||||||
- Windows always uses the "spawn" start method.
|
|
||||||
|
|
||||||
For more information on `multiprocessing` child process creation
|
|
||||||
mechanisms, see https://docs.python.org/3/library/multiprocessing.html#contexts-and-start-methods
|
|
||||||
"""
|
"""
|
||||||
read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
|
read_pipe, write_pipe = multiprocessing.Pipe(duplex=False)
|
||||||
input_multiprocess_fd = None
|
input_fd = None
|
||||||
jobserver_fd1 = None
|
jobserver_fd1 = None
|
||||||
jobserver_fd2 = None
|
jobserver_fd2 = None
|
||||||
|
|
||||||
@@ -1260,14 +1306,13 @@ def child_fun():
|
|||||||
try:
|
try:
|
||||||
# Forward sys.stdin when appropriate, to allow toggling verbosity
|
# Forward sys.stdin when appropriate, to allow toggling verbosity
|
||||||
if sys.platform != "win32" and sys.stdin.isatty() and hasattr(sys.stdin, "fileno"):
|
if sys.platform != "win32" and sys.stdin.isatty() and hasattr(sys.stdin, "fileno"):
|
||||||
input_fd = os.dup(sys.stdin.fileno())
|
input_fd = Connection(os.dup(sys.stdin.fileno()))
|
||||||
input_multiprocess_fd = MultiProcessFd(input_fd)
|
|
||||||
mflags = os.environ.get("MAKEFLAGS", False)
|
mflags = os.environ.get("MAKEFLAGS", False)
|
||||||
if mflags:
|
if mflags:
|
||||||
m = re.search(r"--jobserver-[^=]*=(\d),(\d)", mflags)
|
m = re.search(r"--jobserver-[^=]*=(\d),(\d)", mflags)
|
||||||
if m:
|
if m:
|
||||||
jobserver_fd1 = MultiProcessFd(int(m.group(1)))
|
jobserver_fd1 = Connection(int(m.group(1)))
|
||||||
jobserver_fd2 = MultiProcessFd(int(m.group(2)))
|
jobserver_fd2 = Connection(int(m.group(2)))
|
||||||
|
|
||||||
p = multiprocessing.Process(
|
p = multiprocessing.Process(
|
||||||
target=_setup_pkg_and_run,
|
target=_setup_pkg_and_run,
|
||||||
@@ -1276,7 +1321,7 @@ def child_fun():
|
|||||||
function,
|
function,
|
||||||
kwargs,
|
kwargs,
|
||||||
write_pipe,
|
write_pipe,
|
||||||
input_multiprocess_fd,
|
input_fd,
|
||||||
jobserver_fd1,
|
jobserver_fd1,
|
||||||
jobserver_fd2,
|
jobserver_fd2,
|
||||||
),
|
),
|
||||||
@@ -1296,8 +1341,8 @@ def child_fun():
|
|||||||
|
|
||||||
finally:
|
finally:
|
||||||
# Close the input stream in the parent process
|
# Close the input stream in the parent process
|
||||||
if input_multiprocess_fd is not None:
|
if input_fd is not None:
|
||||||
input_multiprocess_fd.close()
|
input_fd.close()
|
||||||
|
|
||||||
def exitcode_msg(p):
|
def exitcode_msg(p):
|
||||||
typ = "exit" if p.exitcode >= 0 else "signal"
|
typ = "exit" if p.exitcode >= 0 else "signal"
|
||||||
@@ -145,7 +145,7 @@ def install(self, pkg, spec, prefix):
         opts += self.nmake_install_args()
         if self.makefile_name:
             opts.append("/F{}".format(self.makefile_name))
-        opts.append(self.define("PREFIX", prefix))
+        opts.append(self.define("PREFIX", fs.windows_sfn(prefix)))
         with fs.working_dir(self.build_directory):
             inspect.getmodule(self.pkg).nmake(
                 *opts, *self.install_targets, ignore_quotes=self.ignore_quotes
|
@@ -120,12 +120,6 @@ def skip_modules(self) -> Iterable[str]:
|
|||||||
"""
|
"""
|
||||||
return []
|
return []
|
||||||
|
|
||||||
@property
|
|
||||||
def python_spec(self):
|
|
||||||
"""Get python-venv if it exists or python otherwise."""
|
|
||||||
python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
|
|
||||||
return python
|
|
||||||
|
|
||||||
def view_file_conflicts(self, view, merge_map):
|
def view_file_conflicts(self, view, merge_map):
|
||||||
"""Report all file conflicts, excepting special cases for python.
|
"""Report all file conflicts, excepting special cases for python.
|
||||||
Specifically, this does not report errors for duplicate
|
Specifically, this does not report errors for duplicate
|
||||||
@@ -146,8 +140,12 @@ def view_file_conflicts(self, view, merge_map):
|
|||||||
def add_files_to_view(self, view, merge_map, skip_if_exists=True):
|
def add_files_to_view(self, view, merge_map, skip_if_exists=True):
|
||||||
# Patch up shebangs if the package extends Python and we put a Python interpreter in the
|
# Patch up shebangs if the package extends Python and we put a Python interpreter in the
|
||||||
# view.
|
# view.
|
||||||
python = self.python_spec
|
if not self.extendee_spec:
|
||||||
if not self.extendee_spec or python.external:
|
return super().add_files_to_view(view, merge_map, skip_if_exists)
|
||||||
|
|
||||||
|
python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
|
||||||
|
|
||||||
|
if python.external:
|
||||||
return super().add_files_to_view(view, merge_map, skip_if_exists)
|
return super().add_files_to_view(view, merge_map, skip_if_exists)
|
||||||
|
|
||||||
# We only patch shebangs in the bin directory.
|
# We only patch shebangs in the bin directory.
|
||||||
@@ -368,6 +366,12 @@ def list_url(cls) -> Optional[str]: # type: ignore[override]
|
|||||||
return f"https://pypi.org/simple/{name}/"
|
return f"https://pypi.org/simple/{name}/"
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
@property
|
||||||
|
def python_spec(self):
|
||||||
|
"""Get python-venv if it exists or python otherwise."""
|
||||||
|
python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
|
||||||
|
return python
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def headers(self) -> HeaderList:
|
def headers(self) -> HeaderList:
|
||||||
"""Discover header files in platlib."""
|
"""Discover header files in platlib."""
|
||||||
|
@@ -22,6 +22,8 @@
|
|||||||
from urllib.parse import urlencode
|
from urllib.parse import urlencode
|
||||||
from urllib.request import HTTPHandler, Request, build_opener
|
from urllib.request import HTTPHandler, Request, build_opener
|
||||||
|
|
||||||
|
import ruamel.yaml
|
||||||
|
|
||||||
import llnl.util.filesystem as fs
|
import llnl.util.filesystem as fs
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
from llnl.util.lang import memoized
|
from llnl.util.lang import memoized
|
||||||
@@ -44,6 +46,7 @@
|
|||||||
from spack import traverse
|
from spack import traverse
|
||||||
from spack.error import SpackError
|
from spack.error import SpackError
|
||||||
from spack.reporters import CDash, CDashConfiguration
|
from spack.reporters import CDash, CDashConfiguration
|
||||||
|
from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
|
||||||
from spack.reporters.cdash import build_stamp as cdash_build_stamp
|
from spack.reporters.cdash import build_stamp as cdash_build_stamp
|
||||||
|
|
||||||
# See https://docs.gitlab.com/ee/ci/yaml/#retry for descriptions of conditions
|
# See https://docs.gitlab.com/ee/ci/yaml/#retry for descriptions of conditions
|
||||||
@@ -683,6 +686,22 @@ def generate_gitlab_ci_yaml(
             "instead.",
         )

+    def ensure_expected_target_path(path):
+        """Returns passed paths with all Windows path separators exchanged
+        for posix separators only if copy_only_pipeline is enabled
+
+        This is required as copy_only_pipelines are a unique scenario where
+        the generate job and child pipelines are run on different platforms.
+        To make this compatible w/ Windows, we cannot write Windows style path separators
+        that will be consumed on by the Posix copy job runner.
+
+        TODO (johnwparent): Refactor config + cli read/write to deal only in posix
+        style paths
+        """
+        if copy_only_pipeline and path:
+            path = path.replace("\\", "/")
+        return path
+
     pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
     deprecated_mirror_config = False
     buildcache_destination = None
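`ensure_expected_target_path` only rewrites separators when a copy-only pipeline makes the generating job and the consuming job run on different platforms. Stripped of the pipeline context, the transformation is just this (illustrative helper name, not Spack API):

```python
def to_posix_path(path: str, cross_platform: bool = True) -> str:
    """Rewrite Windows separators so a path produced on Windows is usable by a POSIX job."""
    if cross_platform and path:
        path = path.replace("\\", "/")
    return path


assert to_posix_path(r"share\spack\gitlab\stacks") == "share/spack/gitlab/stacks"
```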
@@ -806,7 +825,7 @@ def generate_gitlab_ci_yaml(
|
|||||||
if scope not in include_scopes and scope not in env_includes:
|
if scope not in include_scopes and scope not in env_includes:
|
||||||
include_scopes.insert(0, scope)
|
include_scopes.insert(0, scope)
|
||||||
env_includes.extend(include_scopes)
|
env_includes.extend(include_scopes)
|
||||||
env_yaml_root["spack"]["include"] = env_includes
|
env_yaml_root["spack"]["include"] = [ensure_expected_target_path(i) for i in env_includes]
|
||||||
|
|
||||||
if "gitlab-ci" in env_yaml_root["spack"] and "ci" not in env_yaml_root["spack"]:
|
if "gitlab-ci" in env_yaml_root["spack"] and "ci" not in env_yaml_root["spack"]:
|
||||||
env_yaml_root["spack"]["ci"] = env_yaml_root["spack"].pop("gitlab-ci")
|
env_yaml_root["spack"]["ci"] = env_yaml_root["spack"].pop("gitlab-ci")
|
||||||
@@ -1094,7 +1113,7 @@ def main_script_replacements(cmd):
     if cdash_handler and cdash_handler.auth_token:
         try:
             cdash_handler.populate_buildgroup(all_job_names)
-        except (SpackError, HTTPError, URLError) as err:
+        except (SpackError, HTTPError, URLError, TimeoutError) as err:
             tty.warn(f"Problem populating buildgroup: {err}")
     else:
         tty.warn("Unable to populate buildgroup without CDash credentials")
@@ -1227,6 +1246,9 @@ def main_script_replacements(cmd):
|
|||||||
"SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
|
"SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
|
||||||
"SPACK_REQUIRE_SIGNING": os.environ.get("SPACK_REQUIRE_SIGNING", "False"),
|
"SPACK_REQUIRE_SIGNING": os.environ.get("SPACK_REQUIRE_SIGNING", "False"),
|
||||||
}
|
}
|
||||||
|
output_vars = output_object["variables"]
|
||||||
|
for item, val in output_vars.items():
|
||||||
|
output_vars[item] = ensure_expected_target_path(val)
|
||||||
|
|
||||||
# TODO: Remove this block in Spack 0.23
|
# TODO: Remove this block in Spack 0.23
|
||||||
if deprecated_mirror_config and remote_mirror_override:
|
if deprecated_mirror_config and remote_mirror_override:
|
||||||
@@ -1283,7 +1305,6 @@ def main_script_replacements(cmd):
|
|||||||
sorted_output = {}
|
sorted_output = {}
|
||||||
for output_key, output_value in sorted(output_object.items()):
|
for output_key, output_value in sorted(output_object.items()):
|
||||||
sorted_output[output_key] = output_value
|
sorted_output[output_key] = output_value
|
||||||
|
|
||||||
if known_broken_specs_encountered:
|
if known_broken_specs_encountered:
|
||||||
tty.error("This pipeline generated hashes known to be broken on develop:")
|
tty.error("This pipeline generated hashes known to be broken on develop:")
|
||||||
display_broken_spec_messages(broken_specs_url, known_broken_specs_encountered)
|
display_broken_spec_messages(broken_specs_url, known_broken_specs_encountered)
|
||||||
@@ -1291,8 +1312,11 @@ def main_script_replacements(cmd):
         if not rebuild_everything:
             sys.exit(1)

-    with open(output_file, "w") as outf:
-        outf.write(syaml.dump(sorted_output, default_flow_style=True))
+    # Minimize yaml output size through use of anchors
+    syaml.anchorify(sorted_output)
+
+    with open(output_file, "w") as f:
+        ruamel.yaml.YAML().dump(sorted_output, f)


 def _url_encode_string(input_string):
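Dumping with ruamel.yaml lets repeated sub-structures be emitted once as an anchor and referenced through aliases, which is what `syaml.anchorify` appears to prepare by making equal subtrees share a single object. A hedged illustration of the anchor/alias behaviour with generic data (not Spack's generator output):

```python
import sys
import ruamel.yaml

shared = {"tags": ["spack", "x86_64"], "image": "ghcr.io/spack/ubuntu22.04"}
# Reusing the *same* object (not an equal copy) should let the dumper emit one
# anchor plus aliases instead of repeating the mapping for every job.
doc = {"job-a": shared, "job-b": shared, "job-c": shared}

ruamel.yaml.YAML().dump(doc, sys.stdout)
```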
@@ -1478,6 +1502,12 @@ def copy_test_logs_to_artifacts(test_stage, job_test_dir):
     copy_files_to_artifacts(os.path.join(test_stage, "*", "*.txt"), job_test_dir)


+def win_quote(quote_str: str) -> str:
+    if IS_WINDOWS:
+        quote_str = f'"{quote_str}"'
+    return quote_str
+
+
 def download_and_extract_artifacts(url, work_dir):
     """Look for gitlab artifacts.zip at the given url, and attempt to download
     and extract the contents into the given work_dir
@@ -1500,7 +1530,7 @@ def download_and_extract_artifacts(url, work_dir):
|
|||||||
request = Request(url, headers=headers)
|
request = Request(url, headers=headers)
|
||||||
request.get_method = lambda: "GET"
|
request.get_method = lambda: "GET"
|
||||||
|
|
||||||
response = opener.open(request)
|
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
|
||||||
response_code = response.getcode()
|
response_code = response.getcode()
|
||||||
|
|
||||||
if response_code != 200:
|
if response_code != 200:
|
||||||
@@ -1942,9 +1972,9 @@ def compose_command_err_handling(args):
     # but we need to handle EXEs (git, etc) ourselves
     catch_exe_failure = (
         """
-if ($LASTEXITCODE -ne 0){
-    throw "Command {} has failed"
-}
+if ($LASTEXITCODE -ne 0){{
+    throw 'Command {} has failed'
+}}
 """
         if IS_WINDOWS
         else ""
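The PowerShell snippet lives inside a `str.format` template, so its literal braces have to be doubled or they are parsed as replacement fields; the message also moves to single quotes so PowerShell does not interpolate anything inside the substituted command string. A two-line demonstration of the escaping rule:

```python
# Literal braces in a str.format template must be doubled; `{}` stays a placeholder.
template = """
if ($LASTEXITCODE -ne 0){{
    throw 'Command {} has failed'
}}
"""
print(template.format("git clone"))
# if ($LASTEXITCODE -ne 0){
#     throw 'Command git clone has failed'
# }
```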
@@ -2070,7 +2100,7 @@ def read_broken_spec(broken_spec_url):
|
|||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
_, _, fs = web_util.read_from_url(broken_spec_url)
|
_, _, fs = web_util.read_from_url(broken_spec_url)
|
||||||
except (URLError, web_util.SpackWebError, HTTPError):
|
except web_util.SpackWebError:
|
||||||
tty.warn(f"Unable to read broken spec from {broken_spec_url}")
|
tty.warn(f"Unable to read broken spec from {broken_spec_url}")
|
||||||
return None
|
return None
|
||||||
|
|
||||||
@@ -2176,13 +2206,13 @@ def __init__(self, ci_cdash):
|
|||||||
def args(self):
|
def args(self):
|
||||||
return [
|
return [
|
||||||
"--cdash-upload-url",
|
"--cdash-upload-url",
|
||||||
self.upload_url,
|
win_quote(self.upload_url),
|
||||||
"--cdash-build",
|
"--cdash-build",
|
||||||
self.build_name,
|
win_quote(self.build_name),
|
||||||
"--cdash-site",
|
"--cdash-site",
|
||||||
self.site,
|
win_quote(self.site),
|
||||||
"--cdash-buildstamp",
|
"--cdash-buildstamp",
|
||||||
self.build_stamp,
|
win_quote(self.build_stamp),
|
||||||
]
|
]
|
||||||
|
|
||||||
@property # type: ignore
|
@property # type: ignore
|
||||||
@@ -2248,7 +2278,7 @@ def create_buildgroup(self, opener, headers, url, group_name, group_type):
|
|||||||
|
|
||||||
request = Request(url, data=enc_data, headers=headers)
|
request = Request(url, data=enc_data, headers=headers)
|
||||||
|
|
||||||
response = opener.open(request)
|
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
|
||||||
response_code = response.getcode()
|
response_code = response.getcode()
|
||||||
|
|
||||||
if response_code not in [200, 201]:
|
if response_code not in [200, 201]:
|
||||||
@@ -2294,7 +2324,7 @@ def populate_buildgroup(self, job_names):
|
|||||||
request = Request(url, data=enc_data, headers=headers)
|
request = Request(url, data=enc_data, headers=headers)
|
||||||
request.get_method = lambda: "PUT"
|
request.get_method = lambda: "PUT"
|
||||||
|
|
||||||
response = opener.open(request)
|
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
|
||||||
response_code = response.getcode()
|
response_code = response.getcode()
|
||||||
|
|
||||||
if response_code != 200:
|
if response_code != 200:
|
||||||
|
@@ -13,7 +13,6 @@
|
|||||||
import shutil
|
import shutil
|
||||||
import sys
|
import sys
|
||||||
import tempfile
|
import tempfile
|
||||||
import urllib.request
|
|
||||||
from typing import Dict, List, Optional, Tuple, Union
|
from typing import Dict, List, Optional, Tuple, Union
|
||||||
|
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
@@ -54,6 +53,7 @@
|
|||||||
from spack.oci.oci import (
|
from spack.oci.oci import (
|
||||||
copy_missing_layers_with_retry,
|
copy_missing_layers_with_retry,
|
||||||
get_manifest_and_config_with_retry,
|
get_manifest_and_config_with_retry,
|
||||||
|
list_tags,
|
||||||
upload_blob_with_retry,
|
upload_blob_with_retry,
|
||||||
upload_manifest_with_retry,
|
upload_manifest_with_retry,
|
||||||
)
|
)
|
||||||
@@ -813,7 +813,7 @@ def _push_oci(
|
|||||||
|
|
||||||
def extra_config(spec: Spec):
|
def extra_config(spec: Spec):
|
||||||
spec_dict = spec.to_dict(hash=ht.dag_hash)
|
spec_dict = spec.to_dict(hash=ht.dag_hash)
|
||||||
spec_dict["buildcache_layout_version"] = 1
|
spec_dict["buildcache_layout_version"] = bindist.CURRENT_BUILD_CACHE_LAYOUT_VERSION
|
||||||
spec_dict["binary_cache_checksum"] = {
|
spec_dict["binary_cache_checksum"] = {
|
||||||
"hash_algorithm": "sha256",
|
"hash_algorithm": "sha256",
|
||||||
"hash": checksums[spec.dag_hash()].compressed_digest.digest,
|
"hash": checksums[spec.dag_hash()].compressed_digest.digest,
|
||||||
@@ -856,10 +856,7 @@ def _config_from_tag(image_ref: ImageReference, tag: str) -> Optional[dict]:
|
|||||||
|
|
||||||
|
|
||||||
def _update_index_oci(image_ref: ImageReference, tmpdir: str, pool: MaybePool) -> None:
|
def _update_index_oci(image_ref: ImageReference, tmpdir: str, pool: MaybePool) -> None:
|
||||||
request = urllib.request.Request(url=image_ref.tags_url())
|
tags = list_tags(image_ref)
|
||||||
response = spack.oci.opener.urlopen(request)
|
|
||||||
spack.oci.opener.ensure_status(request, response, 200)
|
|
||||||
tags = json.load(response)["tags"]
|
|
||||||
|
|
||||||
# Fetch all image config files in parallel
|
# Fetch all image config files in parallel
|
||||||
spec_dicts = pool.starmap(
|
spec_dicts = pool.starmap(
|
||||||
|
@@ -31,7 +31,6 @@
|
|||||||
level = "long"
|
level = "long"
|
||||||
|
|
||||||
SPACK_COMMAND = "spack"
|
SPACK_COMMAND = "spack"
|
||||||
MAKE_COMMAND = "make"
|
|
||||||
INSTALL_FAIL_CODE = 1
|
INSTALL_FAIL_CODE = 1
|
||||||
FAILED_CREATE_BUILDCACHE_CODE = 100
|
FAILED_CREATE_BUILDCACHE_CODE = 100
|
||||||
|
|
||||||
@@ -40,6 +39,12 @@ def deindent(desc):
     return desc.replace(" ", "")


+def unicode_escape(path: str) -> str:
+    """Returns transformed path with any unicode
+    characters replaced with their corresponding escapes"""
+    return path.encode("unicode-escape").decode("utf-8")
+
+
 def setup_parser(subparser):
     setup_parser.parser = subparser
     subparsers = subparser.add_subparsers(help="CI sub-commands")
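`unicode_escape` is used further down in `ci_rebuild`, where the environment path is spliced into the generated bootstrap command; escaping non-ASCII characters keeps that splice ASCII-safe. The stdlib behaviour it relies on:

```python
path = "stage/Überbau"  # illustrative path containing a non-ASCII character
escaped = path.encode("unicode-escape").decode("utf-8")
print(escaped)  # stage/\xdcberbau -- plain ASCII, safe to embed in a command line
```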
@@ -551,75 +556,35 @@ def ci_rebuild(args):
 # No hash match anywhere means we need to rebuild spec

 # Start with spack arguments
-spack_cmd = [SPACK_COMMAND, "--color=always", "--backtrace", "--verbose"]
+spack_cmd = [SPACK_COMMAND, "--color=always", "--backtrace", "--verbose", "install"]

 config = cfg.get("config")
 if not config["verify_ssl"]:
 spack_cmd.append("-k")

-install_args = []
+install_args = [f'--use-buildcache={spack_ci.win_quote("package:never,dependencies:only")}']

 can_verify = spack_ci.can_verify_binaries()
 verify_binaries = can_verify and spack_is_pr_pipeline is False
 if not verify_binaries:
 install_args.append("--no-check-signature")

-slash_hash = "/{}".format(job_spec.dag_hash())
+slash_hash = spack_ci.win_quote("/" + job_spec.dag_hash())

-# Arguments when installing dependencies from cache
-deps_install_args = install_args

 # Arguments when installing the root from sources
-root_install_args = install_args + [
+deps_install_args = install_args + ["--only=dependencies"]
-"--keep-stage",
+root_install_args = install_args + ["--keep-stage", "--only=package"]
-"--only=package",
-"--use-buildcache=package:never,dependencies:only",
-]
 if cdash_handler:
 # Add additional arguments to `spack install` for CDash reporting.
 root_install_args.extend(cdash_handler.args())
-root_install_args.append(slash_hash)

-# ["x", "y"] -> "'x' 'y'"
-args_to_string = lambda args: " ".join("'{}'".format(arg) for arg in args)

 commands = [
 # apparently there's a race when spack bootstraps? do it up front once
-[SPACK_COMMAND, "-e", env.path, "bootstrap", "now"],
+[SPACK_COMMAND, "-e", unicode_escape(env.path), "bootstrap", "now"],
-[
+spack_cmd + deps_install_args + [slash_hash],
-SPACK_COMMAND,
+spack_cmd + root_install_args + [slash_hash],
-"-e",
-env.path,
-"env",
-"depfile",
-"-o",
-"Makefile",
-"--use-buildcache=package:never,dependencies:only",
-slash_hash, # limit to spec we're building
-],
-[
-# --output-sync requires GNU make 4.x.
-# Old make errors when you pass it a flag it doesn't recognize,
-# but it doesn't error or warn when you set unrecognized flags in
-# this variable.
-"export",
-"GNUMAKEFLAGS=--output-sync=recurse",
-],
-[
-MAKE_COMMAND,
-"SPACK={}".format(args_to_string(spack_cmd)),
-"SPACK_COLOR=always",
-"SPACK_INSTALL_FLAGS={}".format(args_to_string(deps_install_args)),
-"-j$(nproc)",
-"install-deps/{}".format(
-spack.environment.depfile.MakefileSpec(job_spec).safe_format(
-"{name}-{version}-{hash}"
-)
-),
-],
-spack_cmd + ["install"] + root_install_args,
 ]

 tty.debug("Installing {0} from source".format(job_spec.name))
 install_exit_code = spack_ci.process_command("install", commands, repro_dir)

@@ -11,7 +11,6 @@
 from argparse import ArgumentParser, Namespace
 from typing import IO, Any, Callable, Dict, Iterable, List, Optional, Sequence, Set, Tuple, Union

-import llnl.util.filesystem as fs
 import llnl.util.tty as tty
 from llnl.util.argparsewriter import ArgparseRstWriter, ArgparseWriter, Command
 from llnl.util.tty.colify import colify
@@ -867,9 +866,6 @@ def _commands(parser: ArgumentParser, args: Namespace) -> None:
 prepend_header(args, f)
 formatter(args, f)

-if args.update_completion:
-fs.set_executable(args.update)

 else:
 prepend_header(args, sys.stdout)
 formatter(args, sys.stdout)
@@ -661,34 +661,32 @@ def mirror_name_or_url(m):
 # accidentally to a dir in the current working directory.

 # If there's a \ or / in the name, it's interpreted as a path or url.
-if "/" in m or "\\" in m:
+if "/" in m or "\\" in m or m in (".", ".."):
 return spack.mirror.Mirror(m)

 # Otherwise, the named mirror is required to exist.
 try:
 return spack.mirror.require_mirror_name(m)
 except ValueError as e:
-raise argparse.ArgumentTypeError(
+raise argparse.ArgumentTypeError(f"{e}. Did you mean {os.path.join('.', m)}?") from e
-str(e) + ". Did you mean {}?".format(os.path.join(".", m))
-)


 def mirror_url(url):
 try:
 return spack.mirror.Mirror.from_url(url)
 except ValueError as e:
-raise argparse.ArgumentTypeError(str(e))
+raise argparse.ArgumentTypeError(str(e)) from e


 def mirror_directory(path):
 try:
 return spack.mirror.Mirror.from_local_path(path)
 except ValueError as e:
-raise argparse.ArgumentTypeError(str(e))
+raise argparse.ArgumentTypeError(str(e)) from e


 def mirror_name(name):
 try:
 return spack.mirror.require_mirror_name(name)
 except ValueError as e:
-raise argparse.ArgumentTypeError(str(e))
+raise argparse.ArgumentTypeError(str(e)) from e
@@ -10,7 +10,7 @@
 import sys
 import tempfile
 from pathlib import Path
-from typing import Optional
+from typing import List, Optional

 import llnl.string as string
 import llnl.util.filesystem as fs
@@ -87,6 +87,9 @@ def env_create_setup_parser(subparser):
 default=None,
 help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
 )
+subparser.add_argument(
+"--include-concrete", action="append", help="name of old environment to copy specs from"
+)


 def env_create(args):
@@ -104,12 +107,17 @@ def env_create(args):
 # the environment should not include a view.
 with_view = None

+include_concrete = None
+if hasattr(args, "include_concrete"):
+include_concrete = args.include_concrete

 env = _env_create(
 args.env_name,
 init_file=args.envfile,
 dir=args.dir or os.path.sep in args.env_name or args.env_name in (".", ".."),
 with_view=with_view,
 keep_relative=args.keep_relative,
+include_concrete=include_concrete,
 )

 # Generate views, only really useful for environments created from spack.lock files.
@@ -123,31 +131,43 @@ def _env_create(
 dir: bool = False,
 with_view: Optional[str] = None,
 keep_relative: bool = False,
+include_concrete: Optional[List[str]] = None,
 ):
 """Create a new environment, with an optional yaml description.

 Arguments:
-name_or_path: name of the environment to create, or path to it
+name_or_path (str): name of the environment to create, or path to it
-init_file: optional initialization file -- can be a JSON lockfile (*.lock, *.json) or YAML
+init_file (str or file): optional initialization file -- can be
-manifest file
+a JSON lockfile (*.lock, *.json) or YAML manifest file
-dir: if True, create an environment in a directory instead of a managed environment
+dir (bool): if True, create an environment in a directory instead
-keep_relative: if True, develop paths are copied verbatim into the new environment file,
+of a named environment
-otherwise they may be made absolute if the new environment is in a different location
+keep_relative (bool): if True, develop paths are copied verbatim into
+the new environment file, otherwise they may be made absolute if the
+new environment is in a different location
+include_concrete (list): list of the included concrete environments
 """
 if not dir:
 env = ev.create(
-name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
+name_or_path,
+init_file=init_file,
+with_view=with_view,
+keep_relative=keep_relative,
+include_concrete=include_concrete,
 )
 tty.msg(
 colorize(
-f"Created environment @c{{{cescape(env.name)}}} in: @c{{{cescape(env.path)}}}"
+f"Created environment @c{{{cescape(name_or_path)}}} in: @c{{{cescape(env.path)}}}"
 )
 )
 else:
 env = ev.create_in_dir(
-name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
+name_or_path,
+init_file=init_file,
+with_view=with_view,
+keep_relative=keep_relative,
+include_concrete=include_concrete,
 )
-tty.msg(colorize(f"Created anonymous environment in: @c{{{cescape(env.path)}}}"))
+tty.msg(colorize(f"Created independent environment in: @c{{{cescape(env.path)}}}"))
 tty.msg(f"Activate with: {colorize(f'@c{{spack env activate {cescape(name_or_path)}}}')}")
 return env

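For orientation, a minimal sketch of the environment API that the new --include-concrete flag feeds into; the environment names are hypothetical and the included environment is assumed to be concretized already:

import spack.environment as ev

# hypothetical names: pull the concrete specs of "upstream-env" into a new managed env
combined = ev.create("combined-env", include_concrete=["upstream-env"])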
@@ -434,6 +454,12 @@ def env_remove_setup_parser(subparser):
 """remove an existing environment"""
 subparser.add_argument("rm_env", metavar="env", nargs="+", help="environment(s) to remove")
 arguments.add_common_arguments(subparser, ["yes_to_all"])
+subparser.add_argument(
+"-f",
+"--force",
+action="store_true",
+help="remove the environment even if it is included in another environment",
+)


 def env_remove(args):
@@ -442,14 +468,34 @@ def env_remove(args):
 This removes an environment managed by Spack. Directory environments
 and manifests embedded in repositories should be removed manually.
 """
-read_envs = []
+remove_envs = []
+valid_envs = []
 bad_envs = []
-for env_name in args.rm_env:
+for env_name in ev.all_environment_names():
 try:
 env = ev.read(env_name)
-read_envs.append(env)
+valid_envs.append(env)

+if env_name in args.rm_env:
+remove_envs.append(env)
 except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
-bad_envs.append(env_name)
+if env_name in args.rm_env:
+bad_envs.append(env_name)

+# Check if remove_env is included from another env before trying to remove
+for env in valid_envs:
+for remove_env in remove_envs:
+# don't check if environment is included to itself
+if env.name == remove_env.name:
+continue

+if remove_env.path in env.included_concrete_envs:
+msg = f'Environment "{remove_env.name}" is being used by environment "{env.name}"'
+if args.force:
+tty.warn(msg)
+else:
+tty.die(msg)

 if not args.yes_to_all:
 environments = string.plural(len(args.rm_env), "environment", show_n=False)
@@ -458,7 +504,7 @@ def env_remove(args):
 if not answer:
 tty.die("Will not remove any environments")

-for env in read_envs:
+for env in remove_envs:
 name = env.name
 if env.active:
 tty.die(f"Environment {name} can't be removed while activated.")
@@ -3,6 +3,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

+import copy
 import sys

 import llnl.util.lang
@@ -168,9 +169,9 @@ def query_arguments(args):
 if (args.missing or args.only_missing) and not args.only_deprecated:
 installed.append(InstallStatuses.MISSING)

-known = any
+predicate_fn = None
 if args.unknown:
-known = False
+predicate_fn = lambda x: not spack.repo.PATH.exists(x.spec.name)

 explicit = any
 if args.explicit:
@@ -178,7 +179,7 @@ def query_arguments(args):
 if args.implicit:
 explicit = False

-q_args = {"installed": installed, "known": known, "explicit": explicit}
+q_args = {"installed": installed, "predicate_fn": predicate_fn, "explicit": explicit}

 install_tree = args.install_tree
 upstreams = spack.config.get("upstreams", {})
@@ -271,6 +272,27 @@ def root_decorator(spec, string):

 print()

+if env.included_concrete_envs:
+tty.msg("Included specs")

+# Root specs cannot be displayed with prefixes, since those are not
+# set for abstract specs. Same for hashes
+root_args = copy.copy(args)
+root_args.paths = False

+# Roots are displayed with variants, etc. so that we can see
+# specifically what the user asked for.
+cmd.display_specs(
+env.included_user_specs,
+root_args,
+decorator=lambda s, f: color.colorize("@*{%s}" % f),
+namespace=True,
+show_flags=True,
+show_full_compiler=True,
+variants=True,
+)
+print()

 if args.show_concretized:
 tty.msg("Concretized roots")
 cmd.display_specs(env.specs_by_hash.values(), args, decorator=decorator)
@@ -61,7 +61,6 @@ def install_kwargs_from_args(args):
 "dependencies_use_cache": cache_opt(args.use_cache, dep_use_bc),
 "dependencies_cache_only": cache_opt(args.cache_only, dep_use_bc),
 "include_build_deps": args.include_build_deps,
-"explicit": True, # Use true as a default for install command
 "stop_at": args.until,
 "unsigned": args.unsigned,
 "install_deps": ("dependencies" in args.things_to_install),
@@ -473,6 +472,7 @@ def install_without_active_env(args, install_kwargs, reporter_factory):
 require_user_confirmation_for_overwrite(concrete_specs, args)
 install_kwargs["overwrite"] = [spec.dag_hash() for spec in concrete_specs]

-installs = [(s.package, install_kwargs) for s in concrete_specs]
+installs = [s.package for s in concrete_specs]
-builder = PackageInstaller(installs)
+install_kwargs["explicit"] = [s.dag_hash() for s in concrete_specs]
+builder = PackageInstaller(installs, install_kwargs)
 builder.install()
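For orientation, a minimal sketch of the installer call shape used above, assuming concrete_specs is a list of already-concretized specs:

from spack.installer import PackageInstaller

install_kwargs = {"explicit": [s.dag_hash() for s in concrete_specs]}
builder = PackageInstaller([s.package for s in concrete_specs], install_kwargs)
builder.install()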
@@ -3,7 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import datetime
 import os
 import re
 from collections import defaultdict
@@ -97,7 +96,7 @@ def list_files(args):
 OLD_LICENSE, SPDX_MISMATCH, GENERAL_MISMATCH = range(1, 4)

 #: Latest year that copyright applies. UPDATE THIS when bumping copyright.
-latest_year = datetime.date.today().year
+latest_year = 2024 # year of 0.22 release
 strict_date = r"Copyright 2013-%s" % latest_year

 #: regexes for valid license lines at tops of files
@@ -101,8 +101,9 @@ def do_mark(specs, explicit):
 specs (list): list of specs to be marked
 explicit (bool): whether to mark specs as explicitly installed
 """
-for spec in specs:
+with spack.store.STORE.db.write_transaction():
-spack.store.STORE.db.update_explicit(spec, explicit)
+for spec in specs:
+spack.store.STORE.db.mark(spec, "explicit", explicit)


 def mark_specs(args, specs):
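A minimal sketch of the same marking pattern, assuming specs is an iterable of installed specs; wrapping the loop in a single write transaction avoids re-locking the database for every spec:

import spack.store

with spack.store.STORE.db.write_transaction():
    for spec in specs:
        spack.store.STORE.db.mark(spec, "explicit", True)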
@@ -377,7 +377,10 @@ def refresh(module_type, specs, args):
 def modules_cmd(parser, args, module_type, callbacks=callbacks):
 # Qualifiers to be used when querying the db for specs
 constraint_qualifiers = {
-"refresh": {"installed": True, "known": lambda x: not spack.repo.PATH.exists(x)}
+"refresh": {
+"installed": True,
+"predicate_fn": lambda x: spack.repo.PATH.exists(x.spec.name),
+}
 }
 query_args = constraint_qualifiers.get(args.subparser_name, {})

@@ -23,7 +23,7 @@


 # tutorial configuration parameters
-tutorial_branch = "releases/v0.21"
+tutorial_branch = "releases/v0.22"
 tutorial_mirror = "file:///mirror"
 tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")

@@ -38,10 +38,10 @@

 import spack.cmd
 import spack.environment as ev
+import spack.filesystem_view as fsv
 import spack.schema.projections
 import spack.store
 from spack.config import validate
-from spack.filesystem_view import YamlFilesystemView, view_func_parser
 from spack.util import spack_yaml as s_yaml

 description = "project packages to a compact naming scheme on the filesystem"
@@ -193,17 +193,13 @@ def view(parser, args):
 ordered_projections = {}

 # What method are we using for this view
-if args.action in actions_link:
+link_type = args.action if args.action in actions_link else "symlink"
-link_fn = view_func_parser(args.action)
+view = fsv.YamlFilesystemView(
-else:
-link_fn = view_func_parser("symlink")

-view = YamlFilesystemView(
 path,
 spack.store.STORE.layout,
 projections=ordered_projections,
 ignore_conflicts=getattr(args, "ignore_conflicts", False),
-link=link_fn,
+link_type=link_type,
 verbose=args.verbose,
 )

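A minimal sketch of building a view with the string-based link type used above (the view root is hypothetical; "symlink" is the default the command falls back to):

import spack.filesystem_view as fsv
import spack.store

view = fsv.YamlFilesystemView(
    "/tmp/demo-view",  # hypothetical view root
    spack.store.STORE.layout,
    link_type="symlink",  # plain string instead of a link function
)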
@@ -290,7 +290,7 @@ def __init__(
 operating_system,
 target,
 paths,
-modules=None,
+modules: Optional[List[str]] = None,
 alias=None,
 environment=None,
 extra_rpaths=None,
@@ -220,10 +220,10 @@ def _compiler_config_from_external(config):
 operating_system = host_platform.operating_system("default_os")
 target = host_platform.target("default_target").microarchitecture
 else:
-target = spec.target
+target = spec.architecture.target
 if not target:
-host_platform = spack.platforms.host()
+target = spack.platforms.host().target("default_target")
-target = host_platform.target("default_target").microarchitecture
+target = target.microarchitecture

 operating_system = spec.os
 if not operating_system:
|
@@ -283,12 +283,9 @@ def __reduce__(self):
|
|||||||
database. If it is a spec, we'll evaluate
|
database. If it is a spec, we'll evaluate
|
||||||
``spec.satisfies(query_spec)``
|
``spec.satisfies(query_spec)``
|
||||||
|
|
||||||
known (bool or None): Specs that are "known" are those
|
predicate_fn: optional predicate taking an InstallRecord as argument, and returning
|
||||||
for which Spack can locate a ``package.py`` file -- i.e.,
|
whether that record is selected for the query. It can be used to craft criteria
|
||||||
Spack "knows" how to install them. Specs that are unknown may
|
that need some data for selection not provided by the Database itself.
|
||||||
represent packages that existed in a previous version of
|
|
||||||
Spack, but have since either changed their name or
|
|
||||||
been removed
|
|
||||||
|
|
||||||
installed (bool or InstallStatus or typing.Iterable or None):
|
installed (bool or InstallStatus or typing.Iterable or None):
|
||||||
if ``True``, includes only installed
|
if ``True``, includes only installed
|
||||||
@@ -588,6 +585,9 @@ def _path(self, spec: "spack.spec.Spec") -> pathlib.Path:
|
|||||||
return self.dir / f"{spec.name}-{spec.dag_hash()}"
|
return self.dir / f"{spec.name}-{spec.dag_hash()}"
|
||||||
|
|
||||||
|
|
||||||
|
SelectType = Callable[[InstallRecord], bool]
|
||||||
|
|
||||||
|
|
||||||
class Database:
|
class Database:
|
||||||
#: Fields written for each install record
|
#: Fields written for each install record
|
||||||
record_fields: Tuple[str, ...] = DEFAULT_INSTALL_RECORD_FIELDS
|
record_fields: Tuple[str, ...] = DEFAULT_INSTALL_RECORD_FIELDS
|
||||||
@@ -1367,7 +1367,7 @@ def _deprecate(self, spec, deprecator):
|
|||||||
self._data[spec_key] = spec_rec
|
self._data[spec_key] = spec_rec
|
||||||
|
|
||||||
@_autospec
|
@_autospec
|
||||||
def mark(self, spec, key, value):
|
def mark(self, spec: "spack.spec.Spec", key: str, value: Any) -> None:
|
||||||
"""Mark an arbitrary record on a spec."""
|
"""Mark an arbitrary record on a spec."""
|
||||||
with self.write_transaction():
|
with self.write_transaction():
|
||||||
return self._mark(spec, key, value)
|
return self._mark(spec, key, value)
|
||||||
@@ -1516,7 +1516,7 @@ def get_by_hash(self, dag_hash, default=None, installed=any):
|
|||||||
def _query(
|
def _query(
|
||||||
self,
|
self,
|
||||||
query_spec=any,
|
query_spec=any,
|
||||||
known=any,
|
predicate_fn: Optional[SelectType] = None,
|
||||||
installed=True,
|
installed=True,
|
||||||
explicit=any,
|
explicit=any,
|
||||||
start_date=None,
|
start_date=None,
|
||||||
@@ -1524,7 +1524,7 @@ def _query(
|
|||||||
hashes=None,
|
hashes=None,
|
||||||
in_buildcache=any,
|
in_buildcache=any,
|
||||||
origin=None,
|
origin=None,
|
||||||
):
|
) -> List["spack.spec.Spec"]:
|
||||||
"""Run a query on the database."""
|
"""Run a query on the database."""
|
||||||
|
|
||||||
# TODO: Specs are a lot like queries. Should there be a
|
# TODO: Specs are a lot like queries. Should there be a
|
||||||
@@ -1570,7 +1570,7 @@ def _query(
|
|||||||
if explicit is not any and rec.explicit != explicit:
|
if explicit is not any and rec.explicit != explicit:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if known is not any and known(rec.spec.name):
|
if predicate_fn is not None and not predicate_fn(rec):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if start_date or end_date:
|
if start_date or end_date:
|
||||||
@@ -1655,14 +1655,14 @@ def query(self, *args, **kwargs):
|
|||||||
query.__doc__ = ""
|
query.__doc__ = ""
|
||||||
query.__doc__ += _QUERY_DOCSTRING
|
query.__doc__ += _QUERY_DOCSTRING
|
||||||
|
|
||||||
def query_one(self, query_spec, known=any, installed=True):
|
def query_one(self, query_spec, predicate_fn=None, installed=True):
|
||||||
"""Query for exactly one spec that matches the query spec.
|
"""Query for exactly one spec that matches the query spec.
|
||||||
|
|
||||||
Raises an assertion error if more than one spec matches the
|
Raises an assertion error if more than one spec matches the
|
||||||
query. Returns None if no installed package matches.
|
query. Returns None if no installed package matches.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
concrete_specs = self.query(query_spec, known=known, installed=installed)
|
concrete_specs = self.query(query_spec, predicate_fn=predicate_fn, installed=installed)
|
||||||
assert len(concrete_specs) <= 1
|
assert len(concrete_specs) <= 1
|
||||||
return concrete_specs[0] if concrete_specs else None
|
return concrete_specs[0] if concrete_specs else None
|
||||||
|
|
||||||
@@ -1709,24 +1709,6 @@ def root(key, record):
|
|||||||
if id(rec.spec) not in needed and rec.installed
|
if id(rec.spec) not in needed and rec.installed
|
||||||
]
|
]
|
||||||
|
|
||||||
def update_explicit(self, spec, explicit):
|
|
||||||
"""
|
|
||||||
Update the spec's explicit state in the database.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
spec (spack.spec.Spec): the spec whose install record is being updated
|
|
||||||
explicit (bool): ``True`` if the package was requested explicitly
|
|
||||||
by the user, ``False`` if it was pulled in as a dependency of
|
|
||||||
an explicit package.
|
|
||||||
"""
|
|
||||||
rec = self.get_record(spec)
|
|
||||||
if explicit != rec.explicit:
|
|
||||||
with self.write_transaction():
|
|
||||||
message = "{s.name}@{s.version} : marking the package {0}"
|
|
||||||
status = "explicit" if explicit else "implicit"
|
|
||||||
tty.debug(message.format(status, s=spec))
|
|
||||||
rec.explicit = explicit
|
|
||||||
|
|
||||||
|
|
||||||
class UpstreamDatabaseLockingError(SpackError):
|
class UpstreamDatabaseLockingError(SpackError):
|
||||||
"""Raised when an operation would need to lock an upstream database"""
|
"""Raised when an operation would need to lock an upstream database"""
|
||||||
|
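For orientation, a minimal sketch of a query using the record-level predicate introduced above; the lambda mirrors the one used by spack find --unknown and selects records whose package recipe can no longer be located:

import spack.repo
import spack.store

unknown = spack.store.STORE.db.query(
    predicate_fn=lambda rec: not spack.repo.PATH.exists(rec.spec.name)
)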
@@ -97,7 +97,7 @@ class OpenMpi(Package):
 PatchesType = Optional[Union[Patcher, str, List[Union[Patcher, str]]]]


-SUPPORTED_LANGUAGES = ("fortran", "cxx")
+SUPPORTED_LANGUAGES = ("fortran", "cxx", "c")


 def _make_when_spec(value: WhenType) -> Optional["spack.spec.Spec"]:
@@ -15,6 +15,7 @@

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
+from llnl.util.symlink import readlink

 import spack.config
 import spack.hash_types as ht
@@ -181,7 +182,7 @@ def deprecated_file_path(self, deprecated_spec, deprecator_spec=None):
 base_dir = (
 self.path_for_spec(deprecator_spec)
 if deprecator_spec
-else os.readlink(deprecated_spec.prefix)
+else readlink(deprecated_spec.prefix)
 )

 yaml_path = os.path.join(
@@ -34,6 +34,9 @@
 * ``spec``: a string representation of the abstract spec that was concretized

 4. ``concrete_specs``: a dictionary containing the specs in the environment.
+5. ``include_concrete`` (dictionary): an optional dictionary that includes the roots
+and concrete specs from the included environments, keyed by the path to that
+environment

 Compatibility
 -------------
@@ -50,26 +53,37 @@
 - ``v2``
 - ``v3``
 - ``v4``
+- ``v5``
 * - ``v0.12:0.14``
 - ✅
 -
 -
 -
+-
 * - ``v0.15:0.16``
 - ✅
 - ✅
 -
 -
+-
 * - ``v0.17``
 - ✅
 - ✅
 - ✅
 -
+-
 * - ``v0.18:``
 - ✅
 - ✅
 - ✅
 - ✅
+-
+* - ``v0.22:``
+- ✅
+- ✅
+- ✅
+- ✅
+- ✅

 Version 1
 ---------
@@ -334,6 +348,118 @@
 }
 }
 }


+Version 5
+---------

+Version 5 doesn't change the top-level lockfile format, but an optional dictionary is
+added. The dictionary has the ``root`` and ``concrete_specs`` of the included
+environments, which are keyed by the path to that environment. Since this is optional
+if the environment does not have any included environments ``include_concrete`` will
+not be a part of the lockfile.

+.. code-block:: json

+{
+"_meta": {
+"file-type": "spack-lockfile",
+"lockfile-version": 5,
+"specfile-version": 3
+},
+"roots": [
+{
+"hash": "<dag_hash 1>",
+"spec": "<abstract spec 1>"
+},
+{
+"hash": "<dag_hash 2>",
+"spec": "<abstract spec 2>"
+}
+],
+"concrete_specs": {
+"<dag_hash 1>": {
+"... <spec dict attributes> ...": { },
+"dependencies": [
+{
+"name": "depname_1",
+"hash": "<dag_hash for depname_1>",
+"type": ["build", "link"]
+},
+{
+"name": "depname_2",
+"hash": "<dag_hash for depname_2>",
+"type": ["build", "link"]
+}
+],
+"hash": "<dag_hash 1>",
+},
+"<daghash 2>": {
+"... <spec dict attributes> ...": { },
+"dependencies": [
+{
+"name": "depname_3",
+"hash": "<dag_hash for depname_3>",
+"type": ["build", "link"]
+},
+{
+"name": "depname_4",
+"hash": "<dag_hash for depname_4>",
+"type": ["build", "link"]
+}
+],
+"hash": "<dag_hash 2>"
+}
+}
+"include_concrete": {
+"<path to environment>": {
+"roots": [
+{
+"hash": "<dag_hash 1>",
+"spec": "<abstract spec 1>"
+},
+{
+"hash": "<dag_hash 2>",
+"spec": "<abstract spec 2>"
+}
+],
+"concrete_specs": {
+"<dag_hash 1>": {
+"... <spec dict attributes> ...": { },
+"dependencies": [
+{
+"name": "depname_1",
+"hash": "<dag_hash for depname_1>",
+"type": ["build", "link"]
+},
+{
+"name": "depname_2",
+"hash": "<dag_hash for depname_2>",
+"type": ["build", "link"]
+}
+],
+"hash": "<dag_hash 1>",
+},
+"<daghash 2>": {
+"... <spec dict attributes> ...": { },
+"dependencies": [
+{
+"name": "depname_3",
+"hash": "<dag_hash for depname_3>",
+"type": ["build", "link"]
+},
+{
+"name": "depname_4",
+"hash": "<dag_hash for depname_4>",
+"type": ["build", "link"]
+}
+],
+"hash": "<dag_hash 2>"
+}
+}
+}
+}
+}
 """

 from .environment import (
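As a small illustration of the version 5 layout documented above, a sketch that walks the optional include_concrete section of a lockfile (the file path is hypothetical; keys follow the format shown):

import json

with open("spack.lock") as f:  # hypothetical lockfile path
    lock = json.load(f)

# absent when the environment includes no other environments
for env_path, data in lock.get("include_concrete", {}).items():
    roots = [r["spec"] for r in data["roots"]]
    print(env_path, roots)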
@@ -9,11 +9,13 @@

 import os
 import re
+import shlex
 from enum import Enum
 from typing import List, Optional

 import spack.deptypes as dt
 import spack.environment.environment as ev
+import spack.paths
 import spack.spec
 import spack.traverse as traverse

@@ -226,6 +228,7 @@ def to_dict(self):
 "install_deps_target": self._target("install-deps"),
 "any_hash_target": self._target("%"),
 "jobserver_support": self.jobserver_support,
+"spack_script": shlex.quote(spack.paths.spack_script),
 "adjacency_list": self.make_adjacency_list,
 "phony_convenience_targets": " ".join(self.phony_convenience_targets),
 "pkg_ids_variable": self.pkg_identifier_variable,
@@ -16,13 +16,13 @@
 import urllib.parse
 import urllib.request
 import warnings
-from typing import Dict, Iterable, List, Optional, Set, Tuple, Union
+from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Union

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
 import llnl.util.tty.color as clr
 from llnl.util.link_tree import ConflictingSpecsError
-from llnl.util.symlink import symlink
+from llnl.util.symlink import readlink, symlink

 import spack.compilers
 import spack.concretize
@@ -30,6 +30,7 @@
 import spack.deptypes as dt
 import spack.error
 import spack.fetch_strategy
+import spack.filesystem_view as fsv
 import spack.hash_types as ht
 import spack.hooks
 import spack.main
@@ -52,7 +53,6 @@
 import spack.util.url
 import spack.version
 from spack import traverse
-from spack.filesystem_view import SimpleFilesystemView, inverse_view_func_parser, view_func_parser
 from spack.installer import PackageInstaller
 from spack.schema.env import TOP_LEVEL_KEY
 from spack.spec import Spec
@@ -159,6 +159,8 @@ def default_manifest_yaml():
 default_view_name = "default"
 # Default behavior to link all packages into views (vs. only root packages)
 default_view_link = "all"
+# The name for any included concrete specs
+included_concrete_name = "include_concrete"


 def installed_specs():
@@ -293,6 +295,7 @@ def create(
 init_file: Optional[Union[str, pathlib.Path]] = None,
 with_view: Optional[Union[str, pathlib.Path, bool]] = None,
 keep_relative: bool = False,
+include_concrete: Optional[List[str]] = None,
 ) -> "Environment":
 """Create a managed environment in Spack and returns it.

@@ -309,10 +312,15 @@ def create(
 string, it specifies the path to the view
 keep_relative: if True, develop paths are copied verbatim into the new environment file,
 otherwise they are made absolute
+include_concrete: list of concrete environment names/paths to be included
 """
 environment_dir = environment_dir_from_name(name, exists_ok=False)
 return create_in_dir(
-environment_dir, init_file=init_file, with_view=with_view, keep_relative=keep_relative
+environment_dir,
+init_file=init_file,
+with_view=with_view,
+keep_relative=keep_relative,
+include_concrete=include_concrete,
 )


@@ -321,6 +329,7 @@ def create_in_dir(
 init_file: Optional[Union[str, pathlib.Path]] = None,
 with_view: Optional[Union[str, pathlib.Path, bool]] = None,
 keep_relative: bool = False,
+include_concrete: Optional[List[str]] = None,
 ) -> "Environment":
 """Create an environment in the directory passed as input and returns it.

@@ -334,6 +343,7 @@ def create_in_dir(
 string, it specifies the path to the view
 keep_relative: if True, develop paths are copied verbatim into the new environment file,
 otherwise they are made absolute
+include_concrete: concrete environment names/paths to be included
 """
 initialize_environment_dir(root, envfile=init_file)

@@ -346,6 +356,12 @@ def create_in_dir(
 if with_view is not None:
 manifest.set_default_view(with_view)

+if include_concrete is not None:
+set_included_envs_to_env_paths(include_concrete)
+validate_included_envs_exists(include_concrete)
+validate_included_envs_concrete(include_concrete)
+manifest.set_include_concrete(include_concrete)

 manifest.flush()

 except (spack.config.ConfigFormatError, SpackEnvironmentConfigError) as e:
@@ -419,6 +435,67 @@ def ensure_env_root_path_exists():
 fs.mkdirp(env_root_path())


+def set_included_envs_to_env_paths(include_concrete: List[str]) -> None:
+"""If the included environment(s) is the environment name
+it is replaced by the path to the environment

+Args:
+include_concrete: list of env name or path to env"""

+for i, env_name in enumerate(include_concrete):
+if is_env_dir(env_name):
+include_concrete[i] = env_name
+elif exists(env_name):
+include_concrete[i] = root(env_name)


+def validate_included_envs_exists(include_concrete: List[str]) -> None:
+"""Checks that all of the included environments exist

+Args:
+include_concrete: list of already existing concrete environments to include

+Raises:
+SpackEnvironmentError: if any of the included environments do not exist
+"""

+missing_envs = set()

+for i, env_name in enumerate(include_concrete):
+if not is_env_dir(env_name):
+missing_envs.add(env_name)

+if missing_envs:
+msg = "The following environment(s) are missing: {0}".format(", ".join(missing_envs))
+raise SpackEnvironmentError(msg)


+def validate_included_envs_concrete(include_concrete: List[str]) -> None:
+"""Checks that all of the included environments are concrete

+Args:
+include_concrete: list of already existing concrete environments to include

+Raises:
+SpackEnvironmentError: if any of the included environments are not concrete
+"""

+non_concrete_envs = set()

+for env_path in include_concrete:
+if not os.path.exists(Environment(env_path).lock_path):
+non_concrete_envs.add(Environment(env_path).name)

+if non_concrete_envs:
+msg = "The following environment(s) are not concrete: {0}\n" "Please run:".format(
+", ".join(non_concrete_envs)
+)
+for env in non_concrete_envs:
+msg += f"\n\t`spack -e {env} concretize`"

+raise SpackEnvironmentError(msg)


 def all_environment_names():
 """List the names of environments that currently exist."""
 # just return empty if the env path does not exist. A read-only
@@ -529,7 +606,7 @@ def __init__(
 self.projections = projections
 self.select = select
 self.exclude = exclude
-self.link_type = view_func_parser(link_type)
+self.link_type = fsv.canonicalize_link_type(link_type)
 self.link = link

 def select_fn(self, spec):
@@ -563,7 +640,7 @@ def to_dict(self):
 if self.exclude:
 ret["exclude"] = self.exclude
 if self.link_type:
-ret["link_type"] = inverse_view_func_parser(self.link_type)
+ret["link_type"] = self.link_type
 if self.link != default_view_link:
 ret["link"] = self.link
 return ret
@@ -585,7 +662,7 @@ def _current_root(self):
 if not os.path.islink(self.root):
 return None

-root = os.readlink(self.root)
+root = readlink(self.root)
 if os.path.isabs(root):
 return root

@@ -613,7 +690,7 @@ def get_projection_for_spec(self, spec):
 to exist on the filesystem."""
 return self._view(self.root).get_projection_for_spec(spec)

-def view(self, new: Optional[str] = None) -> SimpleFilesystemView:
+def view(self, new: Optional[str] = None) -> fsv.SimpleFilesystemView:
 """
 Returns a view object for the *underlying* view directory. This means that the
 self.root symlink is followed, and that the view has to exist on the filesystem
@@ -633,14 +710,14 @@ def view(self, new: Optional[str] = None) -> SimpleFilesystemView:
 )
 return self._view(path)

-def _view(self, root: str) -> SimpleFilesystemView:
+def _view(self, root: str) -> fsv.SimpleFilesystemView:
 """Returns a view object for a given root dir."""
-return SimpleFilesystemView(
+return fsv.SimpleFilesystemView(
 root,
 spack.store.STORE.layout,
 ignore_conflicts=True,
 projections=self.projections,
-link=self.link_type,
+link_type=self.link_type,
 )

 def __contains__(self, spec):
@@ -821,6 +898,18 @@ def __init__(self, manifest_dir: Union[str, pathlib.Path]) -> None:
 self.specs_by_hash: Dict[str, Spec] = {}
 #: Repository for this environment (memoized)
 self._repo = None

+#: Environment paths for concrete (lockfile) included environments
+self.included_concrete_envs: List[str] = []
+#: First-level included concretized spec data from/to the lockfile.
+self.included_concrete_spec_data: Dict[str, Dict[str, List[str]]] = {}
+#: User specs from included environments from the last concretization
+self.included_concretized_user_specs: Dict[str, List[Spec]] = {}
+#: Roots from included environments with the last concretization, in order
+self.included_concretized_order: Dict[str, List[str]] = {}
+#: Concretized specs by hash from the included environments
+self.included_specs_by_hash: Dict[str, Dict[str, Spec]] = {}

 #: Previously active environment
 self._previous_active = None
 self._dev_specs = None
@@ -858,7 +947,7 @@ def _read(self):

 if os.path.exists(self.lock_path):
 with open(self.lock_path) as f:
-read_lock_version = self._read_lockfile(f)
+read_lock_version = self._read_lockfile(f)["_meta"]["lockfile-version"]

 if read_lock_version == 1:
 tty.debug(f"Storing backup of {self.lock_path} at {self._lock_backup_v1_path}")
@@ -926,6 +1015,20 @@ def add_view(name, values):
 if self.views == dict():
 self.views[default_view_name] = ViewDescriptor(self.path, self.view_path_default)

+def _process_concrete_includes(self):
+"""Extract and load into memory included concrete spec data."""
+self.included_concrete_envs = self.manifest[TOP_LEVEL_KEY].get(included_concrete_name, [])

+if self.included_concrete_envs:
+if os.path.exists(self.lock_path):
+with open(self.lock_path) as f:
+data = self._read_lockfile(f)

+if included_concrete_name in data:
+self.included_concrete_spec_data = data[included_concrete_name]
+else:
+self.include_concrete_envs()

 def _construct_state_from_manifest(self):
 """Set up user specs and views from the manifest file."""
 self.spec_lists = collections.OrderedDict()
@@ -942,6 +1045,31 @@ def _construct_state_from_manifest(self):
 self.spec_lists[user_speclist_name] = user_specs

 self._process_view(spack.config.get("view", True))
+self._process_concrete_includes()

+def all_concretized_user_specs(self) -> List[Spec]:
+"""Returns all of the concretized user specs of the environment and
+its included environment(s)."""
+concretized_user_specs = self.concretized_user_specs[:]
+for included_specs in self.included_concretized_user_specs.values():
+for included in included_specs:
+# Don't duplicate included spec(s)
+if included not in concretized_user_specs:
+concretized_user_specs.append(included)

+return concretized_user_specs

+def all_concretized_orders(self) -> List[str]:
+"""Returns all of the concretized order of the environment and
+its included environment(s)."""
+concretized_order = self.concretized_order[:]
+for included_concretized_order in self.included_concretized_order.values():
+for included in included_concretized_order:
+# Don't duplicate included spec(s)
+if included not in concretized_order:
+concretized_order.append(included)

+return concretized_order

 @property
 def user_specs(self):
@@ -966,6 +1094,26 @@ def _read_dev_specs(self):
 dev_specs[name] = local_entry
 return dev_specs

+@property
+def included_user_specs(self) -> SpecList:
+"""Included concrete user (or root) specs from last concretization."""
+spec_list = SpecList()

+if not self.included_concrete_envs:
+return spec_list

+def add_root_specs(included_concrete_specs):
+# add specs from the include *and* any nested includes it may have
+for env, info in included_concrete_specs.items():
+for root_list in info["roots"]:
+spec_list.add(root_list["spec"])

+if "include_concrete" in info:
+add_root_specs(info["include_concrete"])

+add_root_specs(self.included_concrete_spec_data)
+return spec_list

 def clear(self, re_read=False):
 """Clear the contents of the environment

@@ -977,9 +1125,15 @@ def clear(self, re_read=False):
|
|||||||
self.spec_lists[user_speclist_name] = SpecList()
|
self.spec_lists[user_speclist_name] = SpecList()
|
||||||
|
|
||||||
self._dev_specs = {}
|
self._dev_specs = {}
|
||||||
self.concretized_user_specs = [] # user specs from last concretize
|
|
||||||
self.concretized_order = [] # roots of last concretize, in order
|
self.concretized_order = [] # roots of last concretize, in order
|
||||||
|
self.concretized_user_specs = [] # user specs from last concretize
|
||||||
self.specs_by_hash = {} # concretized specs by hash
|
self.specs_by_hash = {} # concretized specs by hash
|
||||||
|
|
||||||
|
self.included_concrete_spec_data = {} # concretized specs from lockfile of included envs
|
||||||
|
self.included_concretized_order = {} # root specs of the included envs, keyed by env path
|
||||||
|
self.included_concretized_user_specs = {} # user specs from last concretize's included env
|
||||||
|
self.included_specs_by_hash = {} # concretized specs by hash from the included envs
|
||||||
|
|
||||||
self.invalidate_repository_cache()
|
self.invalidate_repository_cache()
|
||||||
self._previous_active = None # previously active environment
|
self._previous_active = None # previously active environment
|
||||||
if not re_read:
|
if not re_read:
|
||||||
@@ -1033,6 +1187,43 @@ def scope_name(self):
         """Name of the config scope of this environment's manifest file."""
         return self.manifest.scope_name

+    def include_concrete_envs(self):
+        """Copy and save the included envs' specs internally"""
+
+        root_hash_seen = set()
+        concrete_hash_seen = set()
+        self.included_concrete_spec_data = {}
+
+        for env_path in self.included_concrete_envs:
+            # Check that environment exists
+            if not is_env_dir(env_path):
+                raise SpackEnvironmentError(f"Unable to find env at {env_path}")
+
+            env = Environment(env_path)
+            self.included_concrete_spec_data[env_path] = {"roots": [], "concrete_specs": {}}
+
+            # Copy unique root specs from env
+            for root_dict in env._concrete_roots_dict():
+                if root_dict["hash"] not in root_hash_seen:
+                    self.included_concrete_spec_data[env_path]["roots"].append(root_dict)
+                    root_hash_seen.add(root_dict["hash"])
+
+            # Copy unique concrete specs from env
+            for dag_hash, spec_details in env._concrete_specs_dict().items():
+                if dag_hash not in concrete_hash_seen:
+                    self.included_concrete_spec_data[env_path]["concrete_specs"].update(
+                        {dag_hash: spec_details}
+                    )
+                    concrete_hash_seen.add(dag_hash)
+
+            # Copy transitive include data
+            transitive = env.included_concrete_spec_data
+            if transitive:
+                self.included_concrete_spec_data[env_path]["include_concrete"] = transitive
+
+        self._read_lockfile_dict(self._to_lockfile_dict())
+        self.write()
+
     def destroy(self):
         """Remove this environment from Spack entirely."""
         shutil.rmtree(self.path)
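For orientation, the data gathered by include_concrete_envs() for each included environment ends up shaped roughly as in the sketch below. This is a hand-written illustration, not output copied from Spack; the path and hash values are invented:

# Hypothetical shape of self.included_concrete_spec_data for one included env
{
    "/path/to/included/env": {
        "roots": [{"hash": "abc123", "spec": "zlib@1.3"}],
        "concrete_specs": {"abc123": {"name": "zlib", "...": "..."}},
        # only present when the included env itself includes other concrete envs
        "include_concrete": {"/path/to/nested/env": {"roots": [], "concrete_specs": {}}},
    }
}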
@@ -1232,6 +1423,10 @@ def concretize(self, force=False, tests=False):
         for spec in set(self.concretized_user_specs) - set(self.user_specs):
             self.deconcretize(spec, concrete=False)

+        # If a combined env, check updated spec is in the linked envs
+        if self.included_concrete_envs:
+            self.include_concrete_envs()
+
         # Pick the right concretization strategy
         if self.unify == "when_possible":
             return self._concretize_together_where_possible(tests=tests)
@@ -1704,8 +1899,14 @@ def _partition_roots_by_install_status(self):
         of per spec."""
         installed, uninstalled = [], []
         with spack.store.STORE.db.read_transaction():
-            for concretized_hash in self.concretized_order:
-                spec = self.specs_by_hash[concretized_hash]
+            for concretized_hash in self.all_concretized_orders():
+                if concretized_hash in self.specs_by_hash:
+                    spec = self.specs_by_hash[concretized_hash]
+                else:
+                    for env_path in self.included_specs_by_hash.keys():
+                        if concretized_hash in self.included_specs_by_hash[env_path]:
+                            spec = self.included_specs_by_hash[env_path][concretized_hash]
+                            break
                 if not spec.installed or (
                     spec.satisfies("dev_path=*") or spec.satisfies("^dev_path=*")
                 ):
@@ -1735,13 +1936,18 @@ def install_specs(self, specs: Optional[List[Spec]] = None, **install_args):
         specs = specs if specs is not None else roots

         # Extend the set of specs to overwrite with modified dev specs and their parents
-        install_args["overwrite"] = (
-            install_args.get("overwrite", []) + self._dev_specs_that_need_overwrite()
-        )
-        installs = [(spec.package, {**install_args, "explicit": spec in roots}) for spec in specs]
-        PackageInstaller(installs).install()
+        install_args["overwrite"] = {
+            *install_args.get("overwrite", ()),
+            *self._dev_specs_that_need_overwrite(),
+        }
+
+        # Only environment roots are marked explicit
+        install_args["explicit"] = {
+            *install_args.get("explicit", ()),
+            *(s.dag_hash() for s in roots),
+        }
+
+        PackageInstaller([spec.package for spec in specs], install_args).install()

     def all_specs_generator(self) -> Iterable[Spec]:
         """Returns a generator for all concrete specs"""
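The PackageInstaller call above also changes its calling convention: callers now pass a list of packages plus one shared install_args dict, and explicitness is expressed as a set of DAG hashes rather than a per-package flag. A minimal sketch of the new style (variable names are illustrative):

# assumes `roots` is an iterable of concrete Specs for the environment roots
install_args = {"explicit": {s.dag_hash() for s in roots}}
PackageInstaller([s.package for s in roots], install_args).install()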
@@ -1785,8 +1991,14 @@ def added_specs(self):

     def concretized_specs(self):
         """Tuples of (user spec, concrete spec) for all concrete specs."""
-        for s, h in zip(self.concretized_user_specs, self.concretized_order):
-            yield (s, self.specs_by_hash[h])
+        for s, h in zip(self.all_concretized_user_specs(), self.all_concretized_orders()):
+            if h in self.specs_by_hash:
+                yield (s, self.specs_by_hash[h])
+            else:
+                for env_path in self.included_specs_by_hash.keys():
+                    if h in self.included_specs_by_hash[env_path]:
+                        yield (s, self.included_specs_by_hash[env_path][h])
+                        break

     def concrete_roots(self):
         """Same as concretized_specs, except it returns the list of concrete
@@ -1915,8 +2127,7 @@ def _get_environment_specs(self, recurse_dependencies=True):
         If these specs appear under different user_specs, only one copy
         is added to the list returned.
         """
-        specs = [self.specs_by_hash[h] for h in self.concretized_order]
-
+        specs = [self.specs_by_hash[h] for h in self.all_concretized_orders()]
         if recurse_dependencies:
             specs.extend(
                 traverse.traverse_nodes(
@@ -1926,16 +2137,23 @@ def _get_environment_specs(self, recurse_dependencies=True):

         return specs

-    def _to_lockfile_dict(self):
-        """Create a dictionary to store a lockfile for this environment."""
+    def _concrete_specs_dict(self):
         concrete_specs = {}
         for s in traverse.traverse_nodes(self.specs_by_hash.values(), key=traverse.by_dag_hash):
             spec_dict = s.node_dict_with_hashes(hash=ht.dag_hash)
             # Assumes no legacy formats, since this was just created.
             spec_dict[ht.dag_hash.name] = s.dag_hash()
             concrete_specs[s.dag_hash()] = spec_dict
+        return concrete_specs
+
+    def _concrete_roots_dict(self):
         hash_spec_list = zip(self.concretized_order, self.concretized_user_specs)
+        return [{"hash": h, "spec": str(s)} for h, s in hash_spec_list]
+
+    def _to_lockfile_dict(self):
+        """Create a dictionary to store a lockfile for this environment."""
+        concrete_specs = self._concrete_specs_dict()
+        root_specs = self._concrete_roots_dict()
+
         spack_dict = {"version": spack.spack_version}
         spack_commit = spack.main.get_spack_commit()
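Taken together, _concrete_specs_dict() and _concrete_roots_dict() let _to_lockfile_dict() assemble a lockfile whose top level looks roughly like the sketch below; all values are placeholders, and the include_concrete entry only appears for combined environments:

# Hand-written sketch of the lockfile dictionary layout (placeholder values)
{
    "_meta": {"lockfile-version": "<int>"},
    "spack": {"version": "<spack version>"},
    "roots": [{"hash": "<dag hash>", "spec": "<user spec string>"}],
    "concrete_specs": {"<dag hash>": {"name": "<pkg>", "...": "..."}},
    "include_concrete": {"<env path>": {"roots": [], "concrete_specs": {}}},
}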
@@ -1956,36 +2174,81 @@ def _to_lockfile_dict(self):
             # spack version information
             "spack": spack_dict,
             # users specs + hashes are the 'roots' of the environment
-            "roots": [{"hash": h, "spec": str(s)} for h, s in hash_spec_list],
+            "roots": root_specs,
             # Concrete specs by hash, including dependencies
             "concrete_specs": concrete_specs,
         }
+
+        if self.included_concrete_envs:
+            data[included_concrete_name] = self.included_concrete_spec_data
+
         return data

     def _read_lockfile(self, file_or_json):
         """Read a lockfile from a file or from a raw string."""
         lockfile_dict = sjson.load(file_or_json)
         self._read_lockfile_dict(lockfile_dict)
-        return lockfile_dict["_meta"]["lockfile-version"]
+        return lockfile_dict
+
+    def set_included_concretized_user_specs(
+        self,
+        env_name: str,
+        env_info: Dict[str, Dict[str, Any]],
+        included_json_specs_by_hash: Dict[str, Dict[str, Any]],
+    ) -> Dict[str, Dict[str, Any]]:
+        """Sets all of the concretized user specs from included environments
+        to include those from nested included environments.
+
+        Args:
+            env_name: the name (technically the path) of the included environment
+            env_info: included concrete environment data
+            included_json_specs_by_hash: concrete spec data keyed by hash
+
+        Returns: updated specs_by_hash
+        """
+        self.included_concretized_order[env_name] = []
+        self.included_concretized_user_specs[env_name] = []
+
+        def add_specs(name, info, specs_by_hash):
+            # Add specs from the environment as well as any of its nested
+            # environments.
+            for root_info in info["roots"]:
+                self.included_concretized_order[name].append(root_info["hash"])
+                self.included_concretized_user_specs[name].append(Spec(root_info["spec"]))
+            if "concrete_specs" in info:
+                specs_by_hash.update(info["concrete_specs"])
+
+            if included_concrete_name in info:
+                for included_name, included_info in info[included_concrete_name].items():
+                    if included_name not in self.included_concretized_order:
+                        self.included_concretized_order[included_name] = []
+                        self.included_concretized_user_specs[included_name] = []
+                    add_specs(included_name, included_info, specs_by_hash)
+
+        add_specs(env_name, env_info, included_json_specs_by_hash)
+        return included_json_specs_by_hash
+
     def _read_lockfile_dict(self, d):
         """Read a lockfile dictionary into this environment."""
         self.specs_by_hash = {}
+        self.included_specs_by_hash = {}
+        self.included_concretized_user_specs = {}
+        self.included_concretized_order = {}
+
         roots = d["roots"]
         self.concretized_user_specs = [Spec(r["spec"]) for r in roots]
         self.concretized_order = [r["hash"] for r in roots]
         json_specs_by_hash = d["concrete_specs"]
-
-        # Track specs by their lockfile key. Currently spack uses the finest
-        # grained hash as the lockfile key, while older formats used the build
-        # hash or a previous incarnation of the DAG hash (one that did not
-        # include build deps or package hash).
-        specs_by_hash = {}
-
-        # Track specs by their DAG hash, allows handling DAG hash collisions
-        first_seen = {}
+        included_json_specs_by_hash = {}
+
+        if included_concrete_name in d:
+            for env_name, env_info in d[included_concrete_name].items():
+                included_json_specs_by_hash.update(
+                    self.set_included_concretized_user_specs(
+                        env_name, env_info, included_json_specs_by_hash
+                    )
+                )
+
         current_lockfile_format = d["_meta"]["lockfile-version"]
         try:
             reader = READER_CLS[current_lockfile_format]
@@ -1998,6 +2261,39 @@ def _read_lockfile_dict(self, d):
             msg += " You need to use a newer Spack version."
             raise SpackEnvironmentError(msg)
+
+        first_seen, self.concretized_order = self.filter_specs(
+            reader, json_specs_by_hash, self.concretized_order
+        )
+
+        for spec_dag_hash in self.concretized_order:
+            self.specs_by_hash[spec_dag_hash] = first_seen[spec_dag_hash]
+
+        if any(self.included_concretized_order.values()):
+            first_seen = {}
+
+            for env_name, concretized_order in self.included_concretized_order.items():
+                filtered_spec, self.included_concretized_order[env_name] = self.filter_specs(
+                    reader, included_json_specs_by_hash, concretized_order
+                )
+                first_seen.update(filtered_spec)
+
+            for env_path, spec_hashes in self.included_concretized_order.items():
+                self.included_specs_by_hash[env_path] = {}
+                for spec_dag_hash in spec_hashes:
+                    self.included_specs_by_hash[env_path].update(
+                        {spec_dag_hash: first_seen[spec_dag_hash]}
+                    )
+
+    def filter_specs(self, reader, json_specs_by_hash, order_concretized):
+        # Track specs by their lockfile key. Currently spack uses the finest
+        # grained hash as the lockfile key, while older formats used the build
+        # hash or a previous incarnation of the DAG hash (one that did not
+        # include build deps or package hash).
+        specs_by_hash = {}
+
+        # Track specs by their DAG hash, allows handling DAG hash collisions
+        first_seen = {}
+
         # First pass: Put each spec in the map ignoring dependencies
         for lockfile_key, node_dict in json_specs_by_hash.items():
             spec = reader.from_node_dict(node_dict)

@@ -2020,7 +2316,8 @@ def _read_lockfile_dict(self, d):
         # keep. This is only required as long as we support older lockfile
         # formats where the mapping from DAG hash to lockfile key is possibly
         # one-to-many.
-        for lockfile_key in self.concretized_order:
+
+        for lockfile_key in order_concretized:
             for s in specs_by_hash[lockfile_key].traverse():
                 if s.dag_hash() not in first_seen:
                     first_seen[s.dag_hash()] = s

@@ -2028,12 +2325,10 @@ def _read_lockfile_dict(self, d):
         # Now make sure concretized_order and our internal specs dict
         # contains the keys used by modern spack (i.e. the dag_hash
         # that includes build deps and package hash).
-        self.concretized_order = [
-            specs_by_hash[h_key].dag_hash() for h_key in self.concretized_order
-        ]
-
-        for spec_dag_hash in self.concretized_order:
-            self.specs_by_hash[spec_dag_hash] = first_seen[spec_dag_hash]
+        order_concretized = [specs_by_hash[h_key].dag_hash() for h_key in order_concretized]
+
+        return first_seen, order_concretized

     def write(self, regenerate: bool = True) -> None:
         """Writes an in-memory environment to its location on disk.
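The point of factoring out filter_specs() is that an older lockfile may key its specs by an outdated hash, so the returned order is re-expressed in modern DAG hashes. An illustrative sketch of the call, reusing the names from the diff above:

# json_specs_by_hash: node dicts as read from an (possibly old-format) lockfile
# order_concretized: root hashes in that same legacy keying
first_seen, order_concretized = env.filter_specs(reader, json_specs_by_hash, order_concretized)
# order_concretized now holds current DAG hashes, and first_seen maps each of
# those hashes to its reconstructed Spec object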
@@ -2046,7 +2341,7 @@ def write(self, regenerate: bool = True) -> None:
             regenerate: regenerate views and run post-write hooks as well as writing if True.
         """
         self.manifest_uptodate_or_warn()
-        if self.specs_by_hash:
+        if self.specs_by_hash or self.included_concrete_envs:
             self.ensure_env_directory_exists(dot_env=True)
             self.update_environment_repository()
             self.manifest.flush()
@@ -2545,6 +2840,19 @@ def override_user_spec(self, user_spec: str, idx: int) -> None:
             raise SpackEnvironmentError(msg) from e
         self.changed = True

+    def set_include_concrete(self, include_concrete: List[str]) -> None:
+        """Sets the included concrete environments in the manifest to the value(s) passed as input.
+
+        Args:
+            include_concrete: list of already existing concrete environments to include
+        """
+        self.pristine_configuration[included_concrete_name] = []
+
+        for env_path in include_concrete:
+            self.pristine_configuration[included_concrete_name].append(env_path)
+
+        self.changed = True
+
     def add_definition(self, user_spec: str, list_name: str) -> None:
         """Appends a user spec to the first active definition matching the name passed as argument.

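set_include_concrete() only touches the manifest configuration; after the call the pristine configuration carries an entry like the following sketch (paths invented, and include_concrete is the manifest key that included_concrete_name refers to elsewhere in this diff):

pristine_configuration["include_concrete"] = [
    "/path/to/first/concrete/env",
    "/path/to/second/concrete/env",
]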
@@ -2728,54 +3036,56 @@ def included_config_scopes(self) -> List[spack.config.ConfigScope]:
|
|||||||
for i, config_path in enumerate(reversed(includes)):
|
for i, config_path in enumerate(reversed(includes)):
|
||||||
# allow paths to contain spack config/environment variables, etc.
|
# allow paths to contain spack config/environment variables, etc.
|
||||||
config_path = substitute_path_variables(config_path)
|
config_path = substitute_path_variables(config_path)
|
||||||
|
|
||||||
include_url = urllib.parse.urlparse(config_path)
|
include_url = urllib.parse.urlparse(config_path)
|
||||||
|
|
||||||
# Transform file:// URLs to direct includes.
|
# If scheme is not valid, config_path is not a url
|
||||||
if include_url.scheme == "file":
|
# of a type Spack is generally aware
|
||||||
config_path = urllib.request.url2pathname(include_url.path)
|
if spack.util.url.validate_scheme(include_url.scheme):
|
||||||
|
# Transform file:// URLs to direct includes.
|
||||||
|
if include_url.scheme == "file":
|
||||||
|
config_path = urllib.request.url2pathname(include_url.path)
|
||||||
|
|
||||||
# Any other URL should be fetched.
|
# Any other URL should be fetched.
|
||||||
elif include_url.scheme in ("http", "https", "ftp"):
|
elif include_url.scheme in ("http", "https", "ftp"):
|
||||||
# Stage any remote configuration file(s)
|
# Stage any remote configuration file(s)
|
||||||
staged_configs = (
|
staged_configs = (
|
||||||
os.listdir(self.config_stage_dir)
|
os.listdir(self.config_stage_dir)
|
||||||
if os.path.exists(self.config_stage_dir)
|
if os.path.exists(self.config_stage_dir)
|
||||||
else []
|
else []
|
||||||
)
|
|
||||||
remote_path = urllib.request.url2pathname(include_url.path)
|
|
||||||
basename = os.path.basename(remote_path)
|
|
||||||
if basename in staged_configs:
|
|
||||||
# Do NOT re-stage configuration files over existing
|
|
||||||
# ones with the same name since there is a risk of
|
|
||||||
# losing changes (e.g., from 'spack config update').
|
|
||||||
tty.warn(
|
|
||||||
"Will not re-stage configuration from {0} to avoid "
|
|
||||||
"losing changes to the already staged file of the "
|
|
||||||
"same name.".format(remote_path)
|
|
||||||
)
|
)
|
||||||
|
remote_path = urllib.request.url2pathname(include_url.path)
|
||||||
# Recognize the configuration stage directory
|
basename = os.path.basename(remote_path)
|
||||||
# is flattened to ensure a single copy of each
|
if basename in staged_configs:
|
||||||
# configuration file.
|
# Do NOT re-stage configuration files over existing
|
||||||
config_path = self.config_stage_dir
|
# ones with the same name since there is a risk of
|
||||||
if basename.endswith(".yaml"):
|
# losing changes (e.g., from 'spack config update').
|
||||||
config_path = os.path.join(config_path, basename)
|
tty.warn(
|
||||||
else:
|
"Will not re-stage configuration from {0} to avoid "
|
||||||
staged_path = spack.config.fetch_remote_configs(
|
"losing changes to the already staged file of the "
|
||||||
config_path, str(self.config_stage_dir), skip_existing=True
|
"same name.".format(remote_path)
|
||||||
)
|
|
||||||
if not staged_path:
|
|
||||||
raise SpackEnvironmentError(
|
|
||||||
"Unable to fetch remote configuration {0}".format(config_path)
|
|
||||||
)
|
)
|
||||||
config_path = staged_path
|
|
||||||
|
|
||||||
elif include_url.scheme:
|
# Recognize the configuration stage directory
|
||||||
raise ValueError(
|
# is flattened to ensure a single copy of each
|
||||||
f"Unsupported URL scheme ({include_url.scheme}) for "
|
# configuration file.
|
||||||
f"environment include: {config_path}"
|
config_path = self.config_stage_dir
|
||||||
)
|
if basename.endswith(".yaml"):
|
||||||
|
config_path = os.path.join(config_path, basename)
|
||||||
|
else:
|
||||||
|
staged_path = spack.config.fetch_remote_configs(
|
||||||
|
config_path, str(self.config_stage_dir), skip_existing=True
|
||||||
|
)
|
||||||
|
if not staged_path:
|
||||||
|
raise SpackEnvironmentError(
|
||||||
|
"Unable to fetch remote configuration {0}".format(config_path)
|
||||||
|
)
|
||||||
|
config_path = staged_path
|
||||||
|
|
||||||
|
elif include_url.scheme:
|
||||||
|
raise ValueError(
|
||||||
|
f"Unsupported URL scheme ({include_url.scheme}) for "
|
||||||
|
f"environment include: {config_path}"
|
||||||
|
)
|
||||||
|
|
||||||
# treat relative paths as relative to the environment
|
# treat relative paths as relative to the environment
|
||||||
if not os.path.isabs(config_path):
|
if not os.path.isabs(config_path):
|
||||||
|
@@ -30,6 +30,7 @@
|
|||||||
import shutil
|
import shutil
|
||||||
import urllib.error
|
import urllib.error
|
||||||
import urllib.parse
|
import urllib.parse
|
||||||
|
import urllib.request
|
||||||
from pathlib import PurePath
|
from pathlib import PurePath
|
||||||
from typing import List, Optional
|
from typing import List, Optional
|
||||||
|
|
||||||
@@ -273,10 +274,7 @@ def __init__(self, url=None, checksum=None, **kwargs):
|
|||||||
@property
|
@property
|
||||||
def curl(self):
|
def curl(self):
|
||||||
if not self._curl:
|
if not self._curl:
|
||||||
try:
|
self._curl = web_util.require_curl()
|
||||||
self._curl = which("curl", required=True)
|
|
||||||
except CommandNotFoundError as exc:
|
|
||||||
tty.error(str(exc))
|
|
||||||
return self._curl
|
return self._curl
|
||||||
|
|
||||||
def source_id(self):
|
def source_id(self):
|
||||||
@@ -297,27 +295,23 @@ def candidate_urls(self):
     @_needs_stage
     def fetch(self):
         if self.archive_file:
-            tty.debug("Already downloaded {0}".format(self.archive_file))
+            tty.debug(f"Already downloaded {self.archive_file}")
             return

-        url = None
-        errors = []
+        errors: List[Exception] = []
         for url in self.candidate_urls:
-            if not web_util.url_exists(url):
-                tty.debug("URL does not exist: " + url)
-                continue
-
             try:
                 self._fetch_from_url(url)
                 break
             except FailedDownloadError as e:
-                errors.append(str(e))
-
-        for msg in errors:
-            tty.debug(msg)
+                errors.extend(e.exceptions)
+        else:
+            raise FailedDownloadError(*errors)

         if not self.archive_file:
-            raise FailedDownloadError(url)
+            raise FailedDownloadError(
+                RuntimeError(f"Missing archive {self.archive_file} after fetching")
+            )

     def _fetch_from_url(self, url):
         if spack.config.get("config:url_fetch_method") == "curl":
@@ -336,19 +330,20 @@ def _check_headers(self, headers):
|
|||||||
@_needs_stage
|
@_needs_stage
|
||||||
def _fetch_urllib(self, url):
|
def _fetch_urllib(self, url):
|
||||||
save_file = self.stage.save_filename
|
save_file = self.stage.save_filename
|
||||||
tty.msg("Fetching {0}".format(url))
|
|
||||||
|
|
||||||
# Run urllib but grab the mime type from the http headers
|
request = urllib.request.Request(url, headers={"User-Agent": web_util.SPACK_USER_AGENT})
|
||||||
|
|
||||||
try:
|
try:
|
||||||
url, headers, response = web_util.read_from_url(url)
|
response = web_util.urlopen(request)
|
||||||
except web_util.SpackWebError as e:
|
except (TimeoutError, urllib.error.URLError) as e:
|
||||||
# clean up archive on failure.
|
# clean up archive on failure.
|
||||||
if self.archive_file:
|
if self.archive_file:
|
||||||
os.remove(self.archive_file)
|
os.remove(self.archive_file)
|
||||||
if os.path.lexists(save_file):
|
if os.path.lexists(save_file):
|
||||||
os.remove(save_file)
|
os.remove(save_file)
|
||||||
msg = "urllib failed to fetch with error {0}".format(e)
|
raise FailedDownloadError(e) from e
|
||||||
raise FailedDownloadError(url, msg)
|
|
||||||
|
tty.msg(f"Fetching {url}")
|
||||||
|
|
||||||
if os.path.lexists(save_file):
|
if os.path.lexists(save_file):
|
||||||
os.remove(save_file)
|
os.remove(save_file)
|
||||||
@@ -356,7 +351,7 @@ def _fetch_urllib(self, url):
|
|||||||
with open(save_file, "wb") as _open_file:
|
with open(save_file, "wb") as _open_file:
|
||||||
shutil.copyfileobj(response, _open_file)
|
shutil.copyfileobj(response, _open_file)
|
||||||
|
|
||||||
self._check_headers(str(headers))
|
self._check_headers(str(response.headers))
|
||||||
|
|
||||||
@_needs_stage
|
@_needs_stage
|
||||||
def _fetch_curl(self, url):
|
def _fetch_curl(self, url):
|
||||||
@@ -365,7 +360,7 @@ def _fetch_curl(self, url):
|
|||||||
if self.stage.save_filename:
|
if self.stage.save_filename:
|
||||||
save_file = self.stage.save_filename
|
save_file = self.stage.save_filename
|
||||||
partial_file = self.stage.save_filename + ".part"
|
partial_file = self.stage.save_filename + ".part"
|
||||||
tty.msg("Fetching {0}".format(url))
|
tty.msg(f"Fetching {url}")
|
||||||
if partial_file:
|
if partial_file:
|
||||||
save_args = [
|
save_args = [
|
||||||
"-C",
|
"-C",
|
||||||
@@ -405,8 +400,8 @@ def _fetch_curl(self, url):
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
web_util.check_curl_code(curl.returncode)
|
web_util.check_curl_code(curl.returncode)
|
||||||
except spack.error.FetchError as err:
|
except spack.error.FetchError as e:
|
||||||
raise spack.fetch_strategy.FailedDownloadError(url, str(err))
|
raise FailedDownloadError(e) from e
|
||||||
|
|
||||||
self._check_headers(headers)
|
self._check_headers(headers)
|
||||||
|
|
||||||
@@ -554,13 +549,13 @@ def fetch(self):
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
response = self._urlopen(self.url)
|
response = self._urlopen(self.url)
|
||||||
except urllib.error.URLError as e:
|
except (TimeoutError, urllib.error.URLError) as e:
|
||||||
# clean up archive on failure.
|
# clean up archive on failure.
|
||||||
if self.archive_file:
|
if self.archive_file:
|
||||||
os.remove(self.archive_file)
|
os.remove(self.archive_file)
|
||||||
if os.path.lexists(file):
|
if os.path.lexists(file):
|
||||||
os.remove(file)
|
os.remove(file)
|
||||||
raise FailedDownloadError(self.url, f"Failed to fetch {self.url}: {e}") from e
|
raise FailedDownloadError(e) from e
|
||||||
|
|
||||||
if os.path.lexists(file):
|
if os.path.lexists(file):
|
||||||
os.remove(file)
|
os.remove(file)
|
||||||
@@ -1312,35 +1307,41 @@ def __init__(self, *args, **kwargs):
|
|||||||
@_needs_stage
|
@_needs_stage
|
||||||
def fetch(self):
|
def fetch(self):
|
||||||
if self.archive_file:
|
if self.archive_file:
|
||||||
tty.debug("Already downloaded {0}".format(self.archive_file))
|
tty.debug(f"Already downloaded {self.archive_file}")
|
||||||
return
|
return
|
||||||
|
|
||||||
parsed_url = urllib.parse.urlparse(self.url)
|
parsed_url = urllib.parse.urlparse(self.url)
|
||||||
if parsed_url.scheme != "s3":
|
if parsed_url.scheme != "s3":
|
||||||
raise spack.error.FetchError("S3FetchStrategy can only fetch from s3:// urls.")
|
raise spack.error.FetchError("S3FetchStrategy can only fetch from s3:// urls.")
|
||||||
|
|
||||||
tty.debug("Fetching {0}".format(self.url))
|
|
||||||
|
|
||||||
basename = os.path.basename(parsed_url.path)
|
basename = os.path.basename(parsed_url.path)
|
||||||
|
request = urllib.request.Request(
|
||||||
|
self.url, headers={"User-Agent": web_util.SPACK_USER_AGENT}
|
||||||
|
)
|
||||||
|
|
||||||
with working_dir(self.stage.path):
|
with working_dir(self.stage.path):
|
||||||
_, headers, stream = web_util.read_from_url(self.url)
|
try:
|
||||||
|
response = web_util.urlopen(request)
|
||||||
|
except (TimeoutError, urllib.error.URLError) as e:
|
||||||
|
raise FailedDownloadError(e) from e
|
||||||
|
|
||||||
|
tty.debug(f"Fetching {self.url}")
|
||||||
|
|
||||||
with open(basename, "wb") as f:
|
with open(basename, "wb") as f:
|
||||||
shutil.copyfileobj(stream, f)
|
shutil.copyfileobj(response, f)
|
||||||
|
|
||||||
content_type = web_util.get_header(headers, "Content-type")
|
content_type = web_util.get_header(response.headers, "Content-type")
|
||||||
|
|
||||||
if content_type == "text/html":
|
if content_type == "text/html":
|
||||||
warn_content_type_mismatch(self.archive_file or "the archive")
|
warn_content_type_mismatch(self.archive_file or "the archive")
|
||||||
|
|
||||||
if self.stage.save_filename:
|
if self.stage.save_filename:
|
||||||
llnl.util.filesystem.rename(
|
fs.rename(os.path.join(self.stage.path, basename), self.stage.save_filename)
|
||||||
os.path.join(self.stage.path, basename), self.stage.save_filename
|
|
||||||
)
|
|
||||||
|
|
||||||
if not self.archive_file:
|
if not self.archive_file:
|
||||||
raise FailedDownloadError(self.url)
|
raise FailedDownloadError(
|
||||||
|
RuntimeError(f"Missing archive {self.archive_file} after fetching")
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@fetcher
|
@fetcher
|
||||||
@@ -1366,17 +1367,23 @@ def fetch(self):
|
|||||||
if parsed_url.scheme != "gs":
|
if parsed_url.scheme != "gs":
|
||||||
raise spack.error.FetchError("GCSFetchStrategy can only fetch from gs:// urls.")
|
raise spack.error.FetchError("GCSFetchStrategy can only fetch from gs:// urls.")
|
||||||
|
|
||||||
tty.debug("Fetching {0}".format(self.url))
|
|
||||||
|
|
||||||
basename = os.path.basename(parsed_url.path)
|
basename = os.path.basename(parsed_url.path)
|
||||||
|
request = urllib.request.Request(
|
||||||
|
self.url, headers={"User-Agent": web_util.SPACK_USER_AGENT}
|
||||||
|
)
|
||||||
|
|
||||||
with working_dir(self.stage.path):
|
with working_dir(self.stage.path):
|
||||||
_, headers, stream = web_util.read_from_url(self.url)
|
try:
|
||||||
|
response = web_util.urlopen(request)
|
||||||
|
except (TimeoutError, urllib.error.URLError) as e:
|
||||||
|
raise FailedDownloadError(e) from e
|
||||||
|
|
||||||
|
tty.debug(f"Fetching {self.url}")
|
||||||
|
|
||||||
with open(basename, "wb") as f:
|
with open(basename, "wb") as f:
|
||||||
shutil.copyfileobj(stream, f)
|
shutil.copyfileobj(response, f)
|
||||||
|
|
||||||
content_type = web_util.get_header(headers, "Content-type")
|
content_type = web_util.get_header(response.headers, "Content-type")
|
||||||
|
|
||||||
if content_type == "text/html":
|
if content_type == "text/html":
|
||||||
warn_content_type_mismatch(self.archive_file or "the archive")
|
warn_content_type_mismatch(self.archive_file or "the archive")
|
||||||
@@ -1385,7 +1392,9 @@ def fetch(self):
|
|||||||
os.rename(os.path.join(self.stage.path, basename), self.stage.save_filename)
|
os.rename(os.path.join(self.stage.path, basename), self.stage.save_filename)
|
||||||
|
|
||||||
if not self.archive_file:
|
if not self.archive_file:
|
||||||
raise FailedDownloadError(self.url)
|
raise FailedDownloadError(
|
||||||
|
RuntimeError(f"Missing archive {self.archive_file} after fetching")
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@fetcher
|
@fetcher
|
||||||
@@ -1722,9 +1731,9 @@ class NoCacheError(spack.error.FetchError):
 class FailedDownloadError(spack.error.FetchError):
     """Raised when a download fails."""

-    def __init__(self, url, msg=""):
-        super().__init__("Failed to fetch file from URL: %s" % url, msg)
-        self.url = url
+    def __init__(self, *exceptions: Exception):
+        super().__init__("Failed to download")
+        self.exceptions = exceptions


 class NoArchiveFileError(spack.error.FetchError):
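With this change FailedDownloadError carries the underlying exceptions instead of a URL string, which is what lets fetch() earlier in this diff collect per-URL failures and re-raise them in one go. A small sketch of the consuming side (the fetcher variable is hypothetical):

try:
    fetcher.fetch()
except FailedDownloadError as e:
    # one entry per attempted URL / failure cause
    for underlying in e.exceptions:
        tty.debug(f"download attempt failed: {underlying}")
    raise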
@@ -10,8 +10,9 @@
|
|||||||
import shutil
|
import shutil
|
||||||
import stat
|
import stat
|
||||||
import sys
|
import sys
|
||||||
from typing import Optional
|
from typing import Callable, Dict, Optional
|
||||||
|
|
||||||
|
from llnl.string import comma_or
|
||||||
from llnl.util import tty
|
from llnl.util import tty
|
||||||
from llnl.util.filesystem import (
|
from llnl.util.filesystem import (
|
||||||
mkdirp,
|
mkdirp,
|
||||||
@@ -32,6 +33,7 @@
|
|||||||
from llnl.util.tty.color import colorize
|
from llnl.util.tty.color import colorize
|
||||||
|
|
||||||
import spack.config
|
import spack.config
|
||||||
|
import spack.directory_layout
|
||||||
import spack.paths
|
import spack.paths
|
||||||
import spack.projections
|
import spack.projections
|
||||||
import spack.relocate
|
import spack.relocate
|
||||||
@@ -49,19 +51,20 @@
|
|||||||
_projections_path = ".spack/projections.yaml"
|
_projections_path = ".spack/projections.yaml"
|
||||||
|
|
||||||
|
|
||||||
def view_symlink(src, dst, **kwargs):
|
LinkCallbackType = Callable[[str, str, "FilesystemView", Optional[spack.spec.Spec]], None]
|
||||||
# keyword arguments are irrelevant
|
|
||||||
# here to fit required call signature
|
|
||||||
|
def view_symlink(src: str, dst: str, *args, **kwargs) -> None:
|
||||||
symlink(src, dst)
|
symlink(src, dst)
|
||||||
|
|
||||||
|
|
||||||
def view_hardlink(src, dst, **kwargs):
|
def view_hardlink(src: str, dst: str, *args, **kwargs) -> None:
|
||||||
# keyword arguments are irrelevant
|
|
||||||
# here to fit required call signature
|
|
||||||
os.link(src, dst)
|
os.link(src, dst)
|
||||||
|
|
||||||
|
|
||||||
def view_copy(src: str, dst: str, view, spec: Optional[spack.spec.Spec] = None):
|
def view_copy(
|
||||||
|
src: str, dst: str, view: "FilesystemView", spec: Optional[spack.spec.Spec] = None
|
||||||
|
) -> None:
|
||||||
"""
|
"""
|
||||||
Copy a file from src to dst.
|
Copy a file from src to dst.
|
||||||
|
|
||||||
@@ -104,27 +107,40 @@ def view_copy(src: str, dst: str, view, spec: Optional[spack.spec.Spec] = None):
|
|||||||
tty.debug(f"Can't change the permissions for {dst}")
|
tty.debug(f"Can't change the permissions for {dst}")
|
||||||
|
|
||||||
|
|
||||||
def view_func_parser(parsed_name):
|
#: supported string values for `link_type` in an env, mapped to canonical values
|
||||||
# What method are we using for this view
|
_LINK_TYPES = {
|
||||||
if parsed_name in ("hardlink", "hard"):
|
"hardlink": "hardlink",
|
||||||
|
"hard": "hardlink",
|
||||||
|
"copy": "copy",
|
||||||
|
"relocate": "copy",
|
||||||
|
"add": "symlink",
|
||||||
|
"symlink": "symlink",
|
||||||
|
"soft": "symlink",
|
||||||
|
}
|
||||||
|
|
||||||
|
_VALID_LINK_TYPES = sorted(set(_LINK_TYPES.values()))
|
||||||
|
|
||||||
|
|
||||||
|
def canonicalize_link_type(link_type: str) -> str:
|
||||||
|
"""Return canonical"""
|
||||||
|
canonical = _LINK_TYPES.get(link_type)
|
||||||
|
if not canonical:
|
||||||
|
raise ValueError(
|
||||||
|
f"Invalid link type: '{link_type}. Must be one of {comma_or(_VALID_LINK_TYPES)}'"
|
||||||
|
)
|
||||||
|
return canonical
|
||||||
|
|
||||||
|
|
||||||
|
def function_for_link_type(link_type: str) -> LinkCallbackType:
|
||||||
|
link_type = canonicalize_link_type(link_type)
|
||||||
|
if link_type == "hardlink":
|
||||||
return view_hardlink
|
return view_hardlink
|
||||||
elif parsed_name in ("copy", "relocate"):
|
elif link_type == "symlink":
|
||||||
return view_copy
|
|
||||||
elif parsed_name in ("add", "symlink", "soft"):
|
|
||||||
return view_symlink
|
return view_symlink
|
||||||
else:
|
elif link_type == "copy":
|
||||||
raise ValueError(f"invalid link type for view: '{parsed_name}'")
|
return view_copy
|
||||||
|
|
||||||
|
assert False, "invalid link type" # need mypy Literal values
|
||||||
def inverse_view_func_parser(view_type):
|
|
||||||
# get string based on view type
|
|
||||||
if view_type is view_hardlink:
|
|
||||||
link_name = "hardlink"
|
|
||||||
elif view_type is view_copy:
|
|
||||||
link_name = "copy"
|
|
||||||
else:
|
|
||||||
link_name = "symlink"
|
|
||||||
return link_name
|
|
||||||
|
|
||||||
|
|
||||||
class FilesystemView:
|
class FilesystemView:
|
||||||
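The view changes above replace view_func_parser/inverse_view_func_parser with an explicit _LINK_TYPES mapping plus canonicalize_link_type() and function_for_link_type(). Based on the mapping shown in the diff, the expected behaviour is:

canonicalize_link_type("hard")      # "hardlink"
canonicalize_link_type("relocate")  # "copy"
canonicalize_link_type("add")       # "symlink"
canonicalize_link_type("bogus")     # raises ValueError naming the valid types

function_for_link_type("hardlink") is view_hardlink  # True
function_for_link_type("soft") is view_symlink       # True
function_for_link_type("copy") is view_copy          # True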
@@ -140,7 +156,16 @@ class FilesystemView:
|
|||||||
directory structure.
|
directory structure.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, root, layout, **kwargs):
|
def __init__(
|
||||||
|
self,
|
||||||
|
root: str,
|
||||||
|
layout: spack.directory_layout.DirectoryLayout,
|
||||||
|
*,
|
||||||
|
projections: Optional[Dict] = None,
|
||||||
|
ignore_conflicts: bool = False,
|
||||||
|
verbose: bool = False,
|
||||||
|
link_type: str = "symlink",
|
||||||
|
):
|
||||||
"""
|
"""
|
||||||
Initialize a filesystem view under the given `root` directory with
|
Initialize a filesystem view under the given `root` directory with
|
||||||
corresponding directory `layout`.
|
corresponding directory `layout`.
|
||||||
@@ -149,15 +174,17 @@ def __init__(self, root, layout, **kwargs):
|
|||||||
"""
|
"""
|
||||||
self._root = root
|
self._root = root
|
||||||
self.layout = layout
|
self.layout = layout
|
||||||
|
self.projections = {} if projections is None else projections
|
||||||
|
|
||||||
self.projections = kwargs.get("projections", {})
|
self.ignore_conflicts = ignore_conflicts
|
||||||
|
self.verbose = verbose
|
||||||
self.ignore_conflicts = kwargs.get("ignore_conflicts", False)
|
|
||||||
self.verbose = kwargs.get("verbose", False)
|
|
||||||
|
|
||||||
# Setup link function to include view
|
# Setup link function to include view
|
||||||
link_func = kwargs.get("link", view_symlink)
|
self.link_type = link_type
|
||||||
self.link = ft.partial(link_func, view=self)
|
self._link = function_for_link_type(link_type)
|
||||||
|
|
||||||
|
def link(self, src: str, dst: str, spec: Optional[spack.spec.Spec] = None) -> None:
|
||||||
|
self._link(src, dst, self, spec)
|
||||||
|
|
||||||
def add_specs(self, *specs, **kwargs):
|
def add_specs(self, *specs, **kwargs):
|
||||||
"""
|
"""
|
||||||
@@ -255,8 +282,24 @@ class YamlFilesystemView(FilesystemView):
|
|||||||
Filesystem view to work with a yaml based directory layout.
|
Filesystem view to work with a yaml based directory layout.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, root, layout, **kwargs):
|
def __init__(
|
||||||
super().__init__(root, layout, **kwargs)
|
self,
|
||||||
|
root: str,
|
||||||
|
layout: spack.directory_layout.DirectoryLayout,
|
||||||
|
*,
|
||||||
|
projections: Optional[Dict] = None,
|
||||||
|
ignore_conflicts: bool = False,
|
||||||
|
verbose: bool = False,
|
||||||
|
link_type: str = "symlink",
|
||||||
|
):
|
||||||
|
super().__init__(
|
||||||
|
root,
|
||||||
|
layout,
|
||||||
|
projections=projections,
|
||||||
|
ignore_conflicts=ignore_conflicts,
|
||||||
|
verbose=verbose,
|
||||||
|
link_type=link_type,
|
||||||
|
)
|
||||||
|
|
||||||
# Super class gets projections from the kwargs
|
# Super class gets projections from the kwargs
|
||||||
# YAML specific to get projections from YAML file
|
# YAML specific to get projections from YAML file
|
||||||
@@ -638,9 +681,6 @@ class SimpleFilesystemView(FilesystemView):
|
|||||||
"""A simple and partial implementation of FilesystemView focused on performance and immutable
|
"""A simple and partial implementation of FilesystemView focused on performance and immutable
|
||||||
views, where specs cannot be removed after they were added."""
|
views, where specs cannot be removed after they were added."""
|
||||||
|
|
||||||
def __init__(self, root, layout, **kwargs):
|
|
||||||
super().__init__(root, layout, **kwargs)
|
|
||||||
|
|
||||||
def _sanity_check_view_projection(self, specs):
|
def _sanity_check_view_projection(self, specs):
|
||||||
"""A very common issue is that we end up with two specs of the same package, that project
|
"""A very common issue is that we end up with two specs of the same package, that project
|
||||||
to the same prefix. We want to catch that as early as possible and give a sensible error to
|
to the same prefix. We want to catch that as early as possible and give a sensible error to
|
||||||
|
@@ -41,8 +41,9 @@ def _populate_hooks(cls):

     relative_names = list(list_modules(spack.paths.hooks_path))

-    # Ensure that write_install_manifest comes last
-    ensure_last(relative_names, "absolutify_elf_sonames", "write_install_manifest")
+    # write_install_manifest should come after any mutation of the install prefix, and
+    # autopush should include the install manifest.
+    ensure_last(relative_names, "absolutify_elf_sonames", "write_install_manifest", "autopush")

     for name in relative_names:
         module_name = __name__ + "." + name
@@ -440,7 +440,7 @@ def _process_external_package(pkg: "spack.package_base.PackageBase", explicit: b
         tty.debug(f"{pre} already registered in DB")
         record = spack.store.STORE.db.get_record(spec)
         if explicit and not record.explicit:
-            spack.store.STORE.db.update_explicit(spec, explicit)
+            spack.store.STORE.db.mark(spec, "explicit", True)

     except KeyError:
         # If not, register it and generate the module file.
@@ -488,6 +488,7 @@ def _process_binary_cache_tarball(

     with timer.measure("install"), spack.util.path.filter_padding():
         binary_distribution.extract_tarball(pkg.spec, download_result, force=False, timer=timer)
+        pkg.windows_establish_runtime_linkage()

         if hasattr(pkg, "_post_buildcache_install_hook"):
             pkg._post_buildcache_install_hook()
@@ -760,12 +761,8 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
         if not self.pkg.spec.concrete:
             raise ValueError(f"{self.pkg.name} must have a concrete spec")

-        # Cache the package phase options with the explicit package,
-        # popping the options to ensure installation of associated
-        # dependencies is NOT affected by these options.
-        self.pkg.stop_before_phase = install_args.pop("stop_before", None)  # type: ignore[attr-defined] # noqa: E501
-        self.pkg.last_phase = install_args.pop("stop_at", None)  # type: ignore[attr-defined]
+        self.pkg.stop_before_phase = install_args.get("stop_before")  # type: ignore[attr-defined] # noqa: E501
+        self.pkg.last_phase = install_args.get("stop_at")  # type: ignore[attr-defined]

         # Cache the package id for convenience
         self.pkg_id = package_id(pkg.spec)
@@ -1075,19 +1072,17 @@ def flag_installed(self, installed: List[str]) -> None:

     @property
     def explicit(self) -> bool:
-        """The package was explicitly requested by the user."""
-        return self.is_root and self.request.install_args.get("explicit", True)
+        return self.pkg.spec.dag_hash() in self.request.install_args.get("explicit", [])

     @property
-    def is_root(self) -> bool:
-        """The package was requested directly, but may or may not be explicit
-        in an environment."""
+    def is_build_request(self) -> bool:
+        """The package was requested directly"""
         return self.pkg == self.request.pkg

     @property
     def use_cache(self) -> bool:
         _use_cache = True
-        if self.is_root:
+        if self.is_build_request:
             return self.request.install_args.get("package_use_cache", _use_cache)
         else:
             return self.request.install_args.get("dependencies_use_cache", _use_cache)
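Explicitness is now data driven: a task is explicit exactly when its spec's DAG hash was placed in install_args["explicit"] by the requester (for example by Environment.install_specs earlier in this diff). A short sketch:

# requester side (names illustrative)
install_args = {"explicit": {root.dag_hash() for root in env_roots}}
# installer side
explicit = pkg.spec.dag_hash() in install_args.get("explicit", [])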
@@ -1095,7 +1090,7 @@ def use_cache(self) -> bool:
     @property
     def cache_only(self) -> bool:
         _cache_only = False
-        if self.is_root:
+        if self.is_build_request:
             return self.request.install_args.get("package_cache_only", _cache_only)
         else:
             return self.request.install_args.get("dependencies_cache_only", _cache_only)
@@ -1121,24 +1116,17 @@ def priority(self):

 class PackageInstaller:
     """
-    Class for managing the install process for a Spack instance based on a
-    bottom-up DAG approach.
+    Class for managing the install process for a Spack instance based on a bottom-up DAG approach.

-    This installer can coordinate concurrent batch and interactive, local
-    and distributed (on a shared file system) builds for the same Spack
-    instance.
+    This installer can coordinate concurrent batch and interactive, local and distributed (on a
+    shared file system) builds for the same Spack instance.
     """

-    def __init__(self, installs: List[Tuple["spack.package_base.PackageBase", dict]] = []) -> None:
-        """Initialize the installer.
-
-        Args:
-            installs (list): list of tuples, where each
-                tuple consists of a package (PackageBase) and its associated
-                install arguments (dict)
-        """
+    def __init__(
+        self, packages: List["spack.package_base.PackageBase"], install_args: dict
+    ) -> None:
         # List of build requests
-        self.build_requests = [BuildRequest(pkg, install_args) for pkg, install_args in installs]
+        self.build_requests = [BuildRequest(pkg, install_args) for pkg in packages]

         # Priority queue of build tasks
         self.build_pq: List[Tuple[Tuple[int, int], BuildTask]] = []
@@ -1375,8 +1363,8 @@ def _prepare_for_install(self, task: BuildTask) -> None:
|
|||||||
self._update_installed(task)
|
self._update_installed(task)
|
||||||
|
|
||||||
# Only update the explicit entry once for the explicit package
|
# Only update the explicit entry once for the explicit package
|
||||||
if task.explicit:
|
if task.explicit and not rec.explicit:
|
||||||
spack.store.STORE.db.update_explicit(task.pkg.spec, True)
|
spack.store.STORE.db.mark(task.pkg.spec, "explicit", True)
|
||||||
|
|
||||||
def _cleanup_all_tasks(self) -> None:
|
def _cleanup_all_tasks(self) -> None:
|
||||||
"""Cleanup all build tasks to include releasing their locks."""
|
"""Cleanup all build tasks to include releasing their locks."""
|
||||||
@@ -1556,17 +1544,6 @@ def _add_tasks(self, request: BuildRequest, all_deps):
|
|||||||
tty.warn(f"Installation request refused: {str(err)}")
|
tty.warn(f"Installation request refused: {str(err)}")
|
||||||
return
|
return
|
||||||
|
|
||||||
# Skip out early if the spec is not being installed locally (i.e., if
|
|
||||||
# external or upstream).
|
|
||||||
#
|
|
||||||
# External and upstream packages need to get flagged as installed to
|
|
||||||
# ensure proper status tracking for environment build.
|
|
||||||
explicit = request.install_args.get("explicit", True)
|
|
||||||
not_local = _handle_external_and_upstream(request.pkg, explicit)
|
|
||||||
if not_local:
|
|
||||||
self._flag_installed(request.pkg)
|
|
||||||
return
|
|
||||||
|
|
||||||
install_compilers = spack.config.get("config:install_missing_compilers", False)
|
install_compilers = spack.config.get("config:install_missing_compilers", False)
|
||||||
|
|
||||||
install_deps = request.install_args.get("install_deps")
|
install_deps = request.install_args.get("install_deps")
|
||||||
@@ -1682,10 +1659,6 @@ def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
|
|||||||
if not pkg.unit_test_check():
|
if not pkg.unit_test_check():
|
||||||
return
|
return
|
||||||
|
|
||||||
# Injecting information to know if this installation request is the root one
|
|
||||||
# to determine in BuildProcessInstaller whether installation is explicit or not
|
|
||||||
install_args["is_root"] = task.is_root
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
self._setup_install_dir(pkg)
|
self._setup_install_dir(pkg)
|
||||||
|
|
||||||
@@ -1997,8 +1970,8 @@ def install(self) -> None:
|
|||||||
|
|
||||||
self._init_queue()
|
self._init_queue()
|
||||||
fail_fast_err = "Terminating after first install failure"
|
fail_fast_err = "Terminating after first install failure"
|
||||||
single_explicit_spec = len(self.build_requests) == 1
|
single_requested_spec = len(self.build_requests) == 1
|
||||||
failed_explicits = []
|
failed_build_requests = []
|
||||||
|
|
||||||
install_status = InstallStatus(len(self.build_pq))
|
install_status = InstallStatus(len(self.build_pq))
|
||||||
|
|
||||||
@@ -2047,11 +2020,10 @@ def install(self) -> None:
|
|||||||
# Skip the installation if the spec is not being installed locally
|
# Skip the installation if the spec is not being installed locally
|
||||||
# (i.e., if external or upstream) BUT flag it as installed since
|
# (i.e., if external or upstream) BUT flag it as installed since
|
||||||
# some package likely depends on it.
|
# some package likely depends on it.
|
||||||
if not task.explicit:
|
if _handle_external_and_upstream(pkg, task.explicit):
|
||||||
if _handle_external_and_upstream(pkg, False):
|
term_status.clear()
|
||||||
term_status.clear()
|
self._flag_installed(pkg, task.dependents)
|
||||||
self._flag_installed(pkg, task.dependents)
|
continue
|
||||||
continue
|
|
||||||
|
|
||||||
# Flag a failed spec. Do not need an (install) prefix lock since
|
# Flag a failed spec. Do not need an (install) prefix lock since
|
||||||
# assume using a separate (failed) prefix lock file.
|
# assume using a separate (failed) prefix lock file.
|
||||||
@@ -2196,14 +2168,11 @@ def install(self) -> None:
|
|||||||
if self.fail_fast:
|
if self.fail_fast:
|
||||||
raise InstallError(f"{fail_fast_err}: {str(exc)}", pkg=pkg)
|
raise InstallError(f"{fail_fast_err}: {str(exc)}", pkg=pkg)
|
||||||
|
|
||||||
# Terminate at this point if the single explicit spec has
|
# Terminate when a single build request has failed, or summarize errors later.
|
||||||
# failed to install.
|
if task.is_build_request:
|
||||||
if single_explicit_spec and task.explicit:
|
if single_requested_spec:
|
||||||
raise
|
raise
|
||||||
|
failed_build_requests.append((pkg, pkg_id, str(exc)))
|
||||||
# Track explicit spec id and error to summarize when done
|
|
||||||
if task.explicit:
|
|
||||||
failed_explicits.append((pkg, pkg_id, str(exc)))
|
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
# Remove the install prefix if anything went wrong during
|
# Remove the install prefix if anything went wrong during
|
||||||
@@ -2226,16 +2195,16 @@ def install(self) -> None:
|
|||||||
if request.install_args.get("install_package") and request.pkg_id not in self.installed
|
if request.install_args.get("install_package") and request.pkg_id not in self.installed
|
||||||
]
|
]
|
||||||
|
|
||||||
if failed_explicits or missing:
|
if failed_build_requests or missing:
|
||||||
for _, pkg_id, err in failed_explicits:
|
for _, pkg_id, err in failed_build_requests:
|
||||||
tty.error(f"{pkg_id}: {err}")
|
tty.error(f"{pkg_id}: {err}")
|
||||||
|
|
||||||
for _, pkg_id in missing:
|
for _, pkg_id in missing:
|
||||||
tty.error(f"{pkg_id}: Package was not installed")
|
tty.error(f"{pkg_id}: Package was not installed")
|
||||||
|
|
||||||
if len(failed_explicits) > 0:
|
if len(failed_build_requests) > 0:
|
||||||
pkg = failed_explicits[0][0]
|
pkg = failed_build_requests[0][0]
|
||||||
ids = [pkg_id for _, pkg_id, _ in failed_explicits]
|
ids = [pkg_id for _, pkg_id, _ in failed_build_requests]
|
||||||
tty.debug(
|
tty.debug(
|
||||||
"Associating installation failure with first failed "
|
"Associating installation failure with first failed "
|
||||||
f"explicit package ({ids[0]}) from {', '.join(ids)}"
|
f"explicit package ({ids[0]}) from {', '.join(ids)}"
|
||||||
@@ -2294,7 +2263,7 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
         self.verbose = bool(install_args.get("verbose", False))

         # whether installation was explicitly requested by the user
-        self.explicit = install_args.get("is_root", False) and install_args.get("explicit", True)
+        self.explicit = pkg.spec.dag_hash() in install_args.get("explicit", [])

         # env before starting installation
         self.unmodified_env = install_args.get("unmodified_env", {})
@@ -87,9 +87,8 @@ def from_url(url: str):
|
|||||||
"""Create an anonymous mirror by URL. This method validates the URL."""
|
"""Create an anonymous mirror by URL. This method validates the URL."""
|
||||||
if not urllib.parse.urlparse(url).scheme in supported_url_schemes:
|
if not urllib.parse.urlparse(url).scheme in supported_url_schemes:
|
||||||
raise ValueError(
|
raise ValueError(
|
||||||
'"{}" is not a valid mirror URL. Scheme must be once of {}.'.format(
|
f'"{url}" is not a valid mirror URL. '
|
||||||
url, ", ".join(supported_url_schemes)
|
f"Scheme must be one of {supported_url_schemes}."
|
||||||
)
|
|
||||||
)
|
)
|
||||||
return Mirror(url)
|
return Mirror(url)
|
||||||
|
|
||||||
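Editorial note, not part of the diff: the from_url change only swaps the old str.format message (with its "once of" typo) for f-strings; the scheme check itself is unchanged. A minimal standalone sketch of that check, using a hypothetical supported_url_schemes tuple rather than Spack's actual value:

    import urllib.parse

    # Hypothetical stand-in for the module-level supported_url_schemes value
    supported_url_schemes = ("file", "http", "https", "ftp", "s3", "gs", "oci")

    def validate_mirror_url(url: str) -> str:
        """Raise ValueError when the URL scheme is not one of the supported schemes."""
        if urllib.parse.urlparse(url).scheme not in supported_url_schemes:
            raise ValueError(
                f'"{url}" is not a valid mirror URL. '
                f"Scheme must be one of {supported_url_schemes}."
            )
        return url

    # validate_mirror_url("s3://bucket/mirror") passes;
    # validate_mirror_url("mailto:user@example.com") raises ValueError.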
@@ -734,7 +733,7 @@ def require_mirror_name(mirror_name):
 """Find a mirror by name and raise if it does not exist"""
 mirror = spack.mirror.MirrorCollection().get(mirror_name)
 if not mirror:
-raise ValueError('no mirror named "{0}"'.format(mirror_name))
+raise ValueError(f'no mirror named "{mirror_name}"')
 return mirror

@@ -11,7 +11,7 @@
 import urllib.parse
 import urllib.request
 from http.client import HTTPResponse
-from typing import NamedTuple, Tuple
+from typing import List, NamedTuple, Tuple
 from urllib.request import Request

 import llnl.util.tty as tty

@@ -27,6 +27,7 @@
 import spack.stage
 import spack.traverse
 import spack.util.crypto
+import spack.util.url

 from .image import Digest, ImageReference

@@ -69,6 +70,42 @@ def with_query_param(url: str, param: str, value: str) -> str:
 )


+def list_tags(ref: ImageReference, _urlopen: spack.oci.opener.MaybeOpen = None) -> List[str]:
+"""Retrieves the list of tags associated with an image, handling pagination."""
+_urlopen = _urlopen or spack.oci.opener.urlopen
+tags = set()
+fetch_url = ref.tags_url()
+
+while True:
+# Fetch tags
+request = Request(url=fetch_url)
+response = _urlopen(request)
+spack.oci.opener.ensure_status(request, response, 200)
+tags.update(json.load(response)["tags"])
+
+# Check for pagination
+link_header = response.headers["Link"]
+
+if link_header is None:
+break
+
+tty.debug(f"OCI tag pagination: {link_header}")
+
+rel_next_value = spack.util.url.parse_link_rel_next(link_header)
+
+if rel_next_value is None:
+break
+
+rel_next = urllib.parse.urlparse(rel_next_value)
+
+if rel_next.scheme not in ("https", ""):
+break
+
+fetch_url = ref.endpoint(rel_next_value)
+
+return sorted(tags)
+
+
 def upload_blob(
 ref: ImageReference,
 file: str,
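Editorial note: the added list_tags follows the registry's Link header until no rel="next" page remains. A rough standalone sketch of the same pagination loop against a generic OCI/Docker registry, with a simplified Link-header parser standing in for spack.util.url.parse_link_rel_next (names and endpoint layout here are illustrative assumptions):

    import json
    import re
    import urllib.parse
    import urllib.request

    def parse_link_rel_next(link_header: str):
        """Return the rel="next" target from an HTTP Link header, or None.
        Simplified stand-in for a full RFC 5988 parser."""
        for part in link_header.split(","):
            match = re.match(r'\s*<([^>]+)>\s*;\s*rel="?next"?', part)
            if match:
                return match.group(1)
        return None

    def list_registry_tags(base_url: str, repository: str):
        """Collect all tags for a repository, following Link-header pagination."""
        tags = set()
        fetch_url = f"{base_url}/v2/{repository}/tags/list"
        while True:
            with urllib.request.urlopen(fetch_url) as response:
                tags.update(json.load(response)["tags"])
                link_header = response.headers.get("Link")
            if link_header is None:
                break
            next_ref = parse_link_rel_next(link_header)
            if next_ref is None:
                break
            # Relative next-page URLs are resolved against the registry endpoint.
            fetch_url = urllib.parse.urljoin(base_url, next_ref)
        return sorted(tags)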
@@ -418,18 +418,27 @@ def ensure_status(request: urllib.request.Request, response: HTTPResponse, statu
 )


-def default_retry(f, retries: int = 3, sleep=None):
+def default_retry(f, retries: int = 5, sleep=None):
 sleep = sleep or time.sleep

 def wrapper(*args, **kwargs):
 for i in range(retries):
 try:
 return f(*args, **kwargs)
-except urllib.error.HTTPError as e:
+except (urllib.error.URLError, TimeoutError) as e:
 # Retry on internal server errors, and rate limit errors
 # Potentially this could take into account the Retry-After header
 # if registries support it
-if i + 1 != retries and (500 <= e.code < 600 or e.code == 429):
+if i + 1 != retries and (
+(
+isinstance(e, urllib.error.HTTPError)
+and (500 <= e.code < 600 or e.code == 429)
+)
+or (
+isinstance(e, urllib.error.URLError) and isinstance(e.reason, TimeoutError)
+)
+or isinstance(e, TimeoutError)
+):
 # Exponential backoff
 sleep(2**i)
 continue
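Editorial note: the reworked default_retry now also retries on timeouts; because urllib.error.HTTPError subclasses URLError, catching (URLError, TimeoutError) still covers HTTP status errors. A condensed sketch of the same backoff logic as a standalone wrapper (the final raise/return outside the shown hunk is an assumption):

    import time
    import urllib.error

    def default_retry(f, retries: int = 5, sleep=time.sleep):
        """Retry wrapper: exponential backoff on 5xx/429 responses and on timeouts."""
        def wrapper(*args, **kwargs):
            for i in range(retries):
                try:
                    return f(*args, **kwargs)
                except (urllib.error.URLError, TimeoutError) as e:
                    retryable = (
                        (isinstance(e, urllib.error.HTTPError) and (500 <= e.code < 600 or e.code == 429))
                        or (isinstance(e, urllib.error.URLError) and isinstance(e.reason, TimeoutError))
                        or isinstance(e, TimeoutError)
                    )
                    if i + 1 != retries and retryable:
                        sleep(2**i)  # 1s, 2s, 4s, ... between attempts
                        continue
                    raise
        return wrapper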
@@ -143,6 +143,7 @@ def __init__(self):
 "12": "monterey",
 "13": "ventura",
 "14": "sonoma",
+"15": "sequoia",
 }

 version = macos_version()

@@ -161,7 +161,11 @@ def windows_establish_runtime_linkage(self):

 Performs symlinking to incorporate rpath dependencies to Windows runtime search paths
 """
-if sys.platform == "win32":
+# If spec is an external, we should not be modifying its bin directory, as we would
+# be doing in this method
+# Spack should in general not modify things it has not installed
+# we can reasonably expect externals to have their link interface properly established
+if sys.platform == "win32" and not self.spec.external:
 self.win_rpath.add_library_dependent(*self.win_add_library_dependent())
 self.win_rpath.add_rpath(*self.win_add_rpath())
 self.win_rpath.establish_link()

@@ -195,10 +199,10 @@ def __init__(cls, name, bases, attr_dict):
 # assumed to be detectable
 if hasattr(cls, "executables") or hasattr(cls, "libraries"):
 # Append a tag to each detectable package, so that finding them is faster
-if hasattr(cls, "tags"):
-getattr(cls, "tags").append(DetectablePackageMeta.TAG)
-else:
+if not hasattr(cls, "tags"):
 setattr(cls, "tags", [DetectablePackageMeta.TAG])
+elif DetectablePackageMeta.TAG not in cls.tags:
+cls.tags.append(DetectablePackageMeta.TAG)

 @classmethod
 def platform_executables(cls):
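Editorial note: the metaclass fix above avoids appending the detectable tag a second time when a class already carries it, for example through inheritance. A toy sketch of the corrected branch order, with made-up class names:

    DETECTABLE_TAG = "detectable"  # stand-in for DetectablePackageMeta.TAG

    class DetectableMeta(type):
        """Tag classes that look auto-detectable, without duplicating the tag."""
        def __init__(cls, name, bases, attr_dict):
            super().__init__(name, bases, attr_dict)
            if hasattr(cls, "executables") or hasattr(cls, "libraries"):
                if not hasattr(cls, "tags"):
                    cls.tags = [DETECTABLE_TAG]
                elif DETECTABLE_TAG not in cls.tags:
                    cls.tags.append(DETECTABLE_TAG)

    class CMake(metaclass=DetectableMeta):
        executables = ["cmake"]
        tags = ["build-tools"]

    class CMakeFork(CMake):
        executables = ["cmake"]

    print(CMake.tags)      # ['build-tools', 'detectable']
    print(CMakeFork.tags)  # still ['build-tools', 'detectable'] -- no duplicate appended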
@@ -1115,10 +1119,9 @@ def _make_stage(self):
 if not link_format:
 link_format = "build-{arch}-{hash:7}"
 stage_link = self.spec.format_path(link_format)
-return DevelopStage(compute_stage_name(self.spec), dev_path, stage_link)
-# To fetch the current version
-source_stage = self._make_root_stage(self.fetcher)
+source_stage = DevelopStage(compute_stage_name(self.spec), dev_path, stage_link)
+else:
+source_stage = self._make_root_stage(self.fetcher)

 # all_stages is source + resources + patches
 all_stages = StageComposite()

@@ -1240,7 +1243,7 @@ def install_test_root(self):
 """Return the install test root directory."""
 tty.warn(
 "The 'pkg.install_test_root' property is deprecated with removal "
-"expected v0.22. Use 'install_test_root(pkg)' instead."
+"expected v0.23. Use 'install_test_root(pkg)' instead."
 )
 return install_test_root(self)

@@ -1447,10 +1450,8 @@ def do_fetch(self, mirror_only=False):
 return

 checksum = spack.config.get("config:checksum")
-fetch = self.stage.needs_fetching
 if (
 checksum
-and fetch
 and (self.version not in self.versions)
 and (not isinstance(self.version, GitVersion))
 ):

@@ -1557,13 +1558,11 @@ def do_patch(self):
 tty.debug("Patching failed last time. Restaging.")
 self.stage.restage()
 else:
-# develop specs/ DIYStages may have patch failures but
-# should never be restaged
-msg = (
-"A patch failure was detected in %s." % self.name
-+ " Build errors may occur due to this."
+# develop specs may have patch failures but should never be restaged
+tty.warn(
+f"A patch failure was detected in {self.name}."
+" Build errors may occur due to this."
 )
-tty.warn(msg)
 return

 # If this file exists, then we already applied all the patches.

@@ -1877,7 +1876,10 @@ def do_install(self, **kwargs):
 verbose (bool): Display verbose build output (by default,
 suppresses it)
 """
-PackageInstaller([(self, kwargs)]).install()
+explicit = kwargs.get("explicit", True)
+if isinstance(explicit, bool):
+kwargs["explicit"] = {self.spec.dag_hash()} if explicit else set()
+PackageInstaller([self], kwargs).install()

 # TODO (post-34236): Update tests and all packages that use this as a
 # TODO (post-34236): package method to the routine made available to
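Editorial note: do_install now normalizes a boolean explicit flag into a set of DAG hashes before constructing the installer. A small sketch of that normalization with a hypothetical helper:

    # Illustrative only: mirrors how a boolean `explicit` flag can be normalized
    # into a set of spec hashes before handing work to an installer.
    def normalize_explicit(kwargs: dict, dag_hash: str) -> dict:
        explicit = kwargs.get("explicit", True)
        if isinstance(explicit, bool):
            # True -> "this spec was explicitly requested"; False -> nothing explicit.
            kwargs["explicit"] = {dag_hash} if explicit else set()
        return kwargs

    print(normalize_explicit({}, "abc123"))                       # {'explicit': {'abc123'}}
    print(normalize_explicit({"explicit": False}, "abc123"))      # {'explicit': set()}
    print(normalize_explicit({"explicit": {"def456"}}, "abc123")) # left untouched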
@@ -1898,7 +1900,7 @@ def cache_extra_test_sources(self, srcs):
 """
 msg = (
 "'pkg.cache_extra_test_sources(srcs) is deprecated with removal "
-"expected in v0.22. Use 'cache_extra_test_sources(pkg, srcs)' "
+"expected in v0.23. Use 'cache_extra_test_sources(pkg, srcs)' "
 "instead."
 )
 warnings.warn(msg)

@@ -2446,9 +2448,18 @@ def rpath(self):

 # on Windows, libraries of runtime interest are typically
 # stored in the bin directory
+# Do not include Windows system libraries in the rpath interface
+# these libraries are handled automatically by VS/VCVARS and adding
+# Spack derived system libs into the link path or address space of a program
+# can result in conflicting versions, which makes Spack packages less useable
 if sys.platform == "win32":
 rpaths = [self.prefix.bin]
-rpaths.extend(d.prefix.bin for d in deps if os.path.isdir(d.prefix.bin))
+rpaths.extend(
+d.prefix.bin
+for d in deps
+if os.path.isdir(d.prefix.bin)
+and "windows-system" not in getattr(d.package, "tags", [])
+)
 else:
 rpaths = [self.prefix.lib, self.prefix.lib64]
 rpaths.extend(d.prefix.lib for d in deps if os.path.isdir(d.prefix.lib))

@@ -10,6 +10,7 @@
 import archspec.cpu

 import llnl.util.tty as tty
+from llnl.util.symlink import readlink

 import spack.target
 import spack.version

@@ -133,7 +134,7 @@ def craype_type_and_version(cls):
 # Take the default version from known symlink path
 default_path = os.path.join(craype_dir, "default")
 if os.path.islink(default_path):
-version = spack.version.Version(os.readlink(default_path))
+version = spack.version.Version(readlink(default_path))
 return (craype_type, version)

 # If no default version, sort available versions and return latest

@@ -16,7 +16,7 @@
 import llnl.util.lang
 import llnl.util.tty as tty
 from llnl.util.lang import memoized
-from llnl.util.symlink import symlink
+from llnl.util.symlink import readlink, symlink

 import spack.paths
 import spack.platforms

@@ -25,6 +25,7 @@
 import spack.store
 import spack.util.elf as elf
 import spack.util.executable as executable
+import spack.util.path

 from .relocate_text import BinaryFilePrefixReplacer, TextFilePrefixReplacer

@@ -565,7 +566,7 @@ def make_link_relative(new_links, orig_links):
 orig_links (list): original links
 """
 for new_link, orig_link in zip(new_links, orig_links):
-target = os.readlink(orig_link)
+target = readlink(orig_link)
 relative_target = os.path.relpath(target, os.path.dirname(orig_link))
 os.unlink(new_link)
 symlink(relative_target, new_link)

@@ -613,7 +614,7 @@ def relocate_links(links, prefix_to_prefix):
 """Relocate links to a new install prefix."""
 regex = re.compile("|".join(re.escape(p) for p in prefix_to_prefix.keys()))
 for link in links:
-old_target = os.readlink(link)
+old_target = readlink(link)
 match = regex.match(old_target)

 # No match.

@@ -241,7 +241,7 @@ def get_all_package_diffs(type, rev1="HEAD^1", rev2="HEAD"):

 Arguments:

-type (str): String containing one or more of 'A', 'B', 'C'
+type (str): String containing one or more of 'A', 'R', 'C'
 rev1 (str): Revision to compare against, default is 'HEAD^'
 rev2 (str): Revision to compare to rev1, default is 'HEAD'

@@ -264,7 +264,7 @@ def get_all_package_diffs(type, rev1="HEAD^1", rev2="HEAD"):
 lines = [] if not out else re.split(r"\s+", out)
 changed = set()
 for path in lines:
-pkg_name, _, _ = path.partition(os.sep)
+pkg_name, _, _ = path.partition("/")
 if pkg_name not in added and pkg_name not in removed:
 changed.add(pkg_name)

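Editorial note: the partition change above matters because git prints repository paths with forward slashes on every platform, so splitting on os.sep would return the whole path on Windows instead of the leading package directory. A tiny illustration:

    # git always reports paths like "openmpi/package.py", regardless of platform.
    def first_component(path: str) -> str:
        pkg_name, _, _ = path.partition("/")
        return pkg_name

    print(first_component("openmpi/package.py"))  # 'openmpi'
    # On Windows, "openmpi/package.py".partition(os.sep) would yield the whole string.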
@@ -58,7 +58,8 @@
 # Initialize data structures common to each phase's report.
 CDASH_PHASES = set(MAP_PHASES_TO_CDASH.values())
 CDASH_PHASES.add("update")
+# CDash request timeout in seconds
+SPACK_CDASH_TIMEOUT = 45

 CDashConfiguration = collections.namedtuple(
 "CDashConfiguration", ["upload_url", "packages", "build", "site", "buildstamp", "track"]

@@ -447,7 +448,7 @@ def upload(self, filename):
 # By default, urllib2 only support GET and POST.
 # CDash expects this file to be uploaded via PUT.
 request.get_method = lambda: "PUT"
-response = opener.open(request)
+response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
 if self.current_package_name not in self.buildIds:
 resp_value = response.read()
 if isinstance(resp_value, bytes):
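Editorial note: the reporter change threads a 45-second timeout into opener.open so a stalled CDash server cannot hang uploads indefinitely. A hedged sketch of a PUT upload with an explicit timeout using only the standard library (URL and helper name are illustrative):

    import urllib.request

    SPACK_CDASH_TIMEOUT = 45  # seconds; same value as the constant above

    def put_file(url: str, filename: str) -> bytes:
        """Upload a file via HTTP PUT with an explicit socket timeout."""
        with open(filename, "rb") as f:
            request = urllib.request.Request(url, data=f.read(), method="PUT")
        opener = urllib.request.build_opener()
        # Without `timeout`, the default is to wait indefinitely for the response.
        with opener.open(request, timeout=SPACK_CDASH_TIMEOUT) as response:
            return response.read()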
@@ -9,7 +9,7 @@
 import tempfile
 from collections import OrderedDict

-from llnl.util.symlink import symlink
+from llnl.util.symlink import readlink, symlink

 import spack.binary_distribution as bindist
 import spack.error

@@ -26,7 +26,7 @@ def _relocate_spliced_links(links, orig_prefix, new_prefix):
 in our case. This still needs to be called after the copy to destination
 because it expects the new directory structure to be in place."""
 for link in links:
-link_target = os.readlink(os.path.join(orig_prefix, link))
+link_target = readlink(os.path.join(orig_prefix, link))
 link_target = re.sub("^" + orig_prefix, new_prefix, link_target)
 new_link_path = os.path.join(new_prefix, link)
 os.unlink(new_link_path)

@@ -52,7 +52,10 @@
 "target": {"type": "string"},
 "alias": {"anyOf": [{"type": "string"}, {"type": "null"}]},
 "modules": {
-"anyOf": [{"type": "string"}, {"type": "null"}, {"type": "array"}]
+"anyOf": [
+{"type": "null"},
+{"type": "array", "items": {"type": "string"}},
+]
 },
 "implicit_rpaths": {
 "anyOf": [

@@ -13,6 +13,7 @@
 r"\w[\w-]*": {
 "type": "object",
 "additionalProperties": False,
+"required": ["spec"],
 "properties": {"spec": {"type": "string"}, "path": {"type": "string"}},
 }
 },

@@ -35,6 +35,7 @@
 {
 "include": {"type": "array", "default": [], "items": {"type": "string"}},
 "specs": spec_list_schema,
+"include_concrete": {"type": "array", "default": [], "items": {"type": "string"}},
 },
 ),
 }

@@ -141,7 +141,7 @@
 "deprecatedProperties": {
 "properties": ["version"],
 "message": "setting version preferences in the 'all' section of packages.yaml "
-"is deprecated and will be removed in v0.22\n\n\tThese preferences "
+"is deprecated and will be removed in v0.23\n\n\tThese preferences "
 "will be ignored by Spack. You can set them only in package-specific sections "
 "of the same file.\n",
 "error": False,

@@ -197,7 +197,7 @@
 "properties": ["target", "compiler", "providers"],
 "message": "setting 'compiler:', 'target:' or 'provider:' preferences in "
 "a package-specific section of packages.yaml is deprecated, and will be "
-"removed in v0.22.\n\n\tThese preferences will be ignored by Spack, and "
+"removed in v0.23.\n\n\tThese preferences will be ignored by Spack, and "
 "can be set only in the 'all' section of the same file. "
 "You can run:\n\n\t\t$ spack audit configs\n\n\tto get better diagnostics, "
 "including files:lines where the deprecated attributes are used.\n\n"

@@ -314,6 +314,10 @@ def using_libc_compatibility() -> bool:
 return spack.platforms.host().name == "linux"


+def c_compiler_runs(compiler: spack.compiler.Compiler) -> bool:
+return compiler.compiler_verbose_output is not None


 def extend_flag_list(flag_list, new_flags):
 """Extend a list of flags, preserving order and precedence.

@@ -840,8 +844,6 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre
 parent_dir = os.path.dirname(__file__)
 self.control.load(os.path.join(parent_dir, "concretize.lp"))
 self.control.load(os.path.join(parent_dir, "heuristic.lp"))
-if spack.config.CONFIG.get("concretizer:duplicates:strategy", "none") != "none":
-self.control.load(os.path.join(parent_dir, "heuristic_separate.lp"))
 self.control.load(os.path.join(parent_dir, "display.lp"))
 if not setup.concretize_everything:
 self.control.load(os.path.join(parent_dir, "when_possible.lp"))

@@ -1431,16 +1433,14 @@ def condition(
 # caller, we won't emit partial facts.

 condition_id = next(self._id_counter)
-self.gen.fact(fn.pkg_fact(required_spec.name, fn.condition(condition_id)))
-self.gen.fact(fn.condition_reason(condition_id, msg))

 trigger_id = self._get_condition_id(
 required_spec, cache=self._trigger_cache, body=True, transform=transform_required
 )
+self.gen.fact(fn.pkg_fact(required_spec.name, fn.condition(condition_id)))
+self.gen.fact(fn.condition_reason(condition_id, msg))
 self.gen.fact(
 fn.pkg_fact(required_spec.name, fn.condition_trigger(condition_id, trigger_id))
 )

 if not imposed_spec:
 return condition_id

@@ -1649,11 +1649,15 @@ def external_packages(self):
 if isinstance(reuse_yaml, typing.Mapping):
 default_include = reuse_yaml.get("include", [])
 default_exclude = reuse_yaml.get("exclude", [])
+libc_externals = list(all_libcs())
 for source in reuse_yaml.get("from", []):
 if source["type"] != "external":
 continue

 include = source.get("include", default_include)
+if include:
+# Since libcs are implicit externals, we need to implicitly include them
+include = include + libc_externals
 exclude = source.get("exclude", default_exclude)
 spec_filters.append(
 SpecFilter(

@@ -1685,19 +1689,43 @@ def external_packages(self):
 spack.spec.parse_with_version_concrete(x["spec"]) for x in externals
 ]

-external_specs = []
+selected_externals = set()
 if spec_filters:
 for current_filter in spec_filters:
 current_filter.factory = lambda: candidate_specs
-external_specs.extend(current_filter.selected_specs())
-else:
-external_specs.extend(candidate_specs)
+selected_externals.update(current_filter.selected_specs())
+# Emit facts for externals specs. Note that "local_idx" is the index of the spec
+# in packages:<pkg_name>:externals. This means:
+#
+# packages:<pkg_name>:externals[local_idx].spec == spec
+external_versions = []
+for local_idx, spec in enumerate(candidate_specs):
+msg = f"{spec.name} available as external when satisfying {spec}"
+
+if spec_filters and spec not in selected_externals:
+continue
+
+if not spec.versions.concrete:
+warnings.warn(f"cannot use the external spec {spec}: needs a concrete version")
+continue
+
+def external_imposition(input_spec, requirements):
+return requirements + [
+fn.attr("external_conditions_hold", input_spec.name, local_idx)
+]
+
+try:
+self.condition(spec, spec, msg=msg, transform_imposed=external_imposition)
+except (spack.error.SpecError, RuntimeError) as e:
+warnings.warn(f"while setting up external spec {spec}: {e}")
+continue
+external_versions.append((spec.version, local_idx))
+self.possible_versions[spec.name].add(spec.version)
+self.gen.newline()

 # Order the external versions to prefer more recent versions
 # even if specs in packages.yaml are not ordered that way
-external_versions = [
-(x.version, external_id) for external_id, x in enumerate(external_specs)
-]
 external_versions = [
 (v, idx, external_id)
 for idx, (v, external_id) in enumerate(sorted(external_versions, reverse=True))
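Editorial note: the rewritten loop filters candidate externals, skips specs without a concrete version, and remembers each spec's index into packages:&lt;pkg_name&gt;:externals. A simplified sketch of that select-then-record flow, using plain tuples and callables instead of Spack specs and SpecFilter objects (all names here are illustrative):

    import warnings

    # Each external is (name, version); a filter is a callable returning the subset it accepts.
    def collect_external_versions(candidates, spec_filters=()):
        selected = set()
        for current_filter in spec_filters:
            selected.update(current_filter(candidates))

        external_versions = []
        for local_idx, (name, version) in enumerate(candidates):
            if spec_filters and (name, version) not in selected:
                continue  # excluded by include/exclude filters
            if version is None:
                warnings.warn(f"cannot use the external spec {name}: needs a concrete version")
                continue
            # local_idx is the position in the original externals list, so later
            # facts can point back at packages:<name>:externals[local_idx]
            external_versions.append((version, local_idx))
        return external_versions

    candidates = [("cmake", "3.27.4"), ("cmake", None), ("perl", "5.38.0")]
    print(collect_external_versions(candidates))  # [('3.27.4', 0), ('5.38.0', 2)]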
@@ -1707,19 +1735,6 @@ def external_packages(self):
 DeclaredVersion(version=version, idx=idx, origin=Provenance.EXTERNAL)
 )

-# Declare external conditions with a local index into packages.yaml
-for local_idx, spec in enumerate(external_specs):
-msg = "%s available as external when satisfying %s" % (spec.name, spec)
-
-def external_imposition(input_spec, requirements):
-return requirements + [
-fn.attr("external_conditions_hold", input_spec.name, local_idx)
-]
-
-self.condition(spec, spec, msg=msg, transform_imposed=external_imposition)
-self.possible_versions[spec.name].add(spec.version)
-self.gen.newline()

 self.trigger_rules()
 self.effect_rules()

@@ -1872,11 +1887,8 @@ def _spec_clauses(
 )

 clauses.append(f.variant_value(spec.name, vname, value))

 if variant.propagate:
-clauses.append(
-f.variant_propagation_candidate(spec.name, vname, value, spec.name)
-)
+clauses.append(f.propagate(spec.name, fn.variant_value(vname, value)))

 # Tell the concretizer that this is a possible value for the
 # variant, to account for things like int/str values where we

@@ -1931,6 +1943,11 @@ def _spec_clauses(
 for virtual in virtuals:
 clauses.append(fn.attr("virtual_on_incoming_edges", spec.name, virtual))

+# If the spec is external and concrete, we allow all the libcs on the system
+if spec.external and spec.concrete and using_libc_compatibility():
+for libc in self.libcs:
+clauses.append(fn.attr("compatible_libc", spec.name, libc.name, libc.version))

 # add all clauses from dependencies
 if transitive:
 # TODO: Eventually distinguish 2 deps on the same pkg (build and link)

@@ -2726,7 +2743,7 @@ class _Head:
 node_flag = fn.attr("node_flag_set")
 node_flag_source = fn.attr("node_flag_source")
 node_flag_propagate = fn.attr("node_flag_propagate")
-variant_propagation_candidate = fn.attr("variant_propagation_candidate")
+propagate = fn.attr("propagate")


 class _Body:

@@ -2743,7 +2760,7 @@ class _Body:
 node_flag = fn.attr("node_flag")
 node_flag_source = fn.attr("node_flag_source")
 node_flag_propagate = fn.attr("node_flag_propagate")
-variant_propagation_candidate = fn.attr("variant_propagation_candidate")
+propagate = fn.attr("propagate")


 class ProblemInstanceBuilder:

@@ -2971,6 +2988,13 @@ class CompilerParser:
 def __init__(self, configuration) -> None:
 self.compilers: Set[KnownCompiler] = set()
 for c in all_compilers_in_config(configuration):
+if using_libc_compatibility() and not c_compiler_runs(c):
+tty.debug(
+f"the C compiler {c.cc} does not exist, or does not run correctly."
+f" The compiler {c.spec} will not be used during concretization."
+)
+continue

 if using_libc_compatibility() and not c.default_libc:
 warnings.warn(
 f"cannot detect libc from {c.spec}. The compiler will not be used "

@@ -3210,6 +3234,39 @@ def requires(self, impose: str, *, when: str):
 self.runtime_conditions.add((imposed_spec, when_spec))
 self.reset()

+def propagate(self, constraint_str: str, *, when: str):
+msg = "the 'propagate' method can be called only with pkg('*')"
+assert self.current_package == "*", msg
+
+when_spec = spack.spec.Spec(when)
+assert when_spec.name is None, "only anonymous when specs are accepted"
+
+placeholder = "XXX"
+node_variable = "node(ID, Package)"
+when_spec.name = placeholder
+
+body_clauses = self._setup.spec_clauses(when_spec, body=True)
+body_str = (
+f" {f',{os.linesep} '.join(str(x) for x in body_clauses)},\n"
+f" not external({node_variable}),\n"
+f" not runtime(Package)"
+).replace(f'"{placeholder}"', f"{node_variable}")
+
+constraint_spec = spack.spec.Spec(constraint_str)
+assert constraint_spec.name is None, "only anonymous constraint specs are accepted"
+
+constraint_spec.name = placeholder
+constraint_clauses = self._setup.spec_clauses(constraint_spec, body=False)
+for clause in constraint_clauses:
+if clause.args[0] == "node_compiler_version_satisfies":
+self._setup.compiler_version_constraints.add(constraint_spec.compiler)
+args = f'"{constraint_spec.compiler.name}", "{constraint_spec.compiler.versions}"'
+head_str = f"propagate({node_variable}, node_compiler_version_satisfies({args}))"
+rule = f"{head_str} :-\n{body_str}.\n\n"
+self.rules.append(rule)
+
+self.reset()

 def consume_facts(self):
 """Consume the facts collected by this object, and emits rules and
 facts for the runtimes.
@@ -811,37 +811,6 @@ node_has_variant(node(ID, Package), Variant) :-
 pkg_fact(Package, variant(Variant)),
 attr("node", node(ID, Package)).

-% Variant propagation is forwarded to dependencies
-attr("variant_propagation_candidate", PackageNode, Variant, Value, Source) :-
-attr("node", PackageNode),
-depends_on(ParentNode, PackageNode),
-attr("variant_value", node(_, Source), Variant, Value),
-attr("variant_propagation_candidate", ParentNode, Variant, _, Source).
-
-% If the node is a candidate, and it has the variant and value,
-% then those variant and value should be propagated
-attr("variant_propagate", node(ID, Package), Variant, Value, Source) :-
-attr("variant_propagation_candidate", node(ID, Package), Variant, Value, Source),
-node_has_variant(node(ID, Package), Variant),
-pkg_fact(Package, variant_possible_value(Variant, Value)),
-not attr("variant_set", node(ID, Package), Variant).
-
-% Propagate the value, if there is the corresponding attribute
-attr("variant_value", PackageNode, Variant, Value) :- attr("variant_propagate", PackageNode, Variant, Value, _).
-
-% If a variant is propagated, we cannot have extraneous values (this is for multi valued variants)
-variant_is_propagated(PackageNode, Variant) :- attr("variant_propagate", PackageNode, Variant, _, _).
-:- variant_is_propagated(PackageNode, Variant),
-attr("variant_value", PackageNode, Variant, Value),
-not attr("variant_propagate", PackageNode, Variant, Value, _).
-
-% Cannot receive different values from different sources on the same variant
-error(100, "{0} and {1} cannot both propagate variant '{2}' to package {3} with values '{4}' and '{5}'", Source1, Source2, Variant, Package, Value1, Value2) :-
-attr("variant_propagate", node(X, Package), Variant, Value1, Source1),
-attr("variant_propagate", node(X, Package), Variant, Value2, Source2),
-node_has_variant(node(X, Package), Variant),
-Value1 < Value2, Source1 < Source2.

 % a variant cannot be set if it is not a variant on the package
 error(100, "Cannot set variant '{0}' for package '{1}' because the variant condition cannot be satisfied for the given spec", Variant, Package)
 :- attr("variant_set", node(X, Package), Variant),

@@ -919,7 +888,7 @@ variant_not_default(node(ID, Package), Variant, Value)
 % variants set explicitly on the CLI don't count as non-default
 not attr("variant_set", node(ID, Package), Variant, Value),
 % variant values forced by propagation don't count as non-default
-not attr("variant_propagate", node(ID, Package), Variant, Value, _),
+not propagate(node(ID, Package), variant_value(Variant, Value)),
 % variants set on externals that we could use don't count as non-default
 % this makes spack prefer to use an external over rebuilding with the
 % default configuration

@@ -932,7 +901,7 @@ variant_default_not_used(node(ID, Package), Variant, Value)
 :- variant_default_value(Package, Variant, Value),
 node_has_variant(node(ID, Package), Variant),
 not attr("variant_value", node(ID, Package), Variant, Value),
-not attr("variant_propagate", node(ID, Package), Variant, _, _),
+not propagate(node(ID, Package), variant_value(Variant, _)),
 attr("node", node(ID, Package)).

 % The variant is set in an external spec

@@ -989,6 +958,67 @@ pkg_fact(Package, variant_single_value("dev_path"))
 #defined variant_default_value/3.
 #defined variant_default_value_from_packages_yaml/3.

+%-----------------------------------------------------------------------------
+% Propagation semantics
+%-----------------------------------------------------------------------------
+
+% Propagation roots have a corresponding attr("propagate", ...)
+propagate(RootNode, PropagatedAttribute) :- attr("propagate", RootNode, PropagatedAttribute).
+
+% Propagate an attribute along edges to child nodes
+propagate(ChildNode, PropagatedAttribute) :-
+propagate(ParentNode, PropagatedAttribute),
+depends_on(ParentNode, ChildNode).
+
+%-----------------------------------------------------------------------------
+% Activation of propagated values
+%-----------------------------------------------------------------------------
+
+%----
+% Variants
+%----
+
+% If a variant is propagated, and can be accepted, set its value
+attr("variant_value", node(ID, Package), Variant, Value) :-
+propagate(node(ID, Package), variant_value(Variant, Value)),
+node_has_variant(node(ID, Package), Variant),
+pkg_fact(Package, variant_possible_value(Variant, Value)),
+not attr("variant_set", node(ID, Package), Variant).
+
+% If a variant is propagated, we cannot have extraneous values
+variant_is_propagated(PackageNode, Variant) :-
+attr("variant_value", PackageNode, Variant, Value),
+propagate(PackageNode, variant_value(Variant, Value)),
+not attr("variant_set", PackageNode, Variant).
+
+:- variant_is_propagated(PackageNode, Variant),
+attr("variant_value", PackageNode, Variant, Value),
+not propagate(PackageNode, variant_value(Variant, Value)).
+
+%----
+% Compiler constraints
+%----
+
+attr("node_compiler_version_satisfies", node(ID, Package), Compiler, Version) :-
+propagate(node(ID, Package), node_compiler_version_satisfies(Compiler, Version)),
+node_compiler(node(ID, Package), CompilerID),
+compiler_name(CompilerID, Compiler),
+not runtime(Package),
+not external(Package).
+
+%-----------------------------------------------------------------------------
+% Runtimes
+%-----------------------------------------------------------------------------
+
+% Check whether the DAG has any built package
+has_built_packages() :- build(X), not external(X).
+
+% If we build packages, the runtime nodes must use an available compiler
+1 { node_compiler(PackageNode, CompilerID) : build(PackageNode), not external(PackageNode) } :-
+has_built_packages(),
+runtime(RuntimePackage),
+node_compiler(node(_, RuntimePackage), CompilerID).
+
 %-----------------------------------------------------------------------------
 % Platform semantics
 %-----------------------------------------------------------------------------
@@ -1090,10 +1120,18 @@ attr("node_target", PackageNode, Target)
 :- attr("node", PackageNode), attr("node_target_set", PackageNode, Target).

 % each node has the weight of its assigned target
-node_target_weight(node(ID, Package), Weight)
-:- attr("node", node(ID, Package)),
-attr("node_target", node(ID, Package), Target),
-target_weight(Target, Weight).
+target_weight(Target, 0)
+:- attr("node", PackageNode),
+attr("node_target", PackageNode, Target),
+attr("node_target_set", PackageNode, Target).

+node_target_weight(PackageNode, MinWeight)
+:- attr("node", PackageNode),
+attr("node_target", PackageNode, Target),
+target(Target),
+MinWeight = #min { Weight : target_weight(Target, Weight) }.
+
+:- attr("node_target", PackageNode, Target), not node_target_weight(PackageNode, _).

 % compatibility rules for targets among nodes
 node_target_match(ParentNode, DependencyNode)

@@ -1155,12 +1193,12 @@ error(10, "No valid compiler for {0} satisfies '%{1}'", Package, Compiler)

 % If the compiler of a node must satisfy a constraint, then its version
 % must be chosen among the ones that satisfy said constraint
-error(100, "No valid version for '{0}' compiler '{1}' satisfies '@{2}'", Package, Compiler, Constraint)
+error(100, "Package {0} cannot satisfy '%{1}@{2}'", Package, Compiler, Constraint)
 :- attr("node", node(X, Package)),
 attr("node_compiler_version_satisfies", node(X, Package), Compiler, Constraint),
 not compiler_version_satisfies(Compiler, Constraint, _).

-error(100, "No valid version for '{0}' compiler '{1}' satisfies '@{2}'", Package, Compiler, Constraint)
+error(100, "Package {0} cannot satisfy '%{1}@{2}'", Package, Compiler, Constraint)
 :- attr("node", node(X, Package)),
 attr("node_compiler_version_satisfies", node(X, Package), Compiler, Constraint),
 not compiler_version_satisfies(Compiler, Constraint, ID),

@@ -1345,8 +1383,10 @@ build(PackageNode) :- not attr("hash", PackageNode, _), attr("node", PackageNode
 % topmost-priority criterion to reuse what is installed.
 %
 % The priority ranges are:
-% 200+ Shifted priorities for build nodes; correspond to priorities 0 - 99.
-% 100 - 199 Unshifted priorities. Currently only includes minimizing #builds.
+% 1000+ Optimizations for concretization errors
+% 300 - 1000 Highest priority optimizations for valid solutions
+% 200 - 299 Shifted priorities for build nodes; correspond to priorities 0 - 99.
+% 100 - 199 Unshifted priorities. Currently only includes minimizing #builds and minimizing dupes.
 % 0 - 99 Priorities for non-built nodes.
 build_priority(PackageNode, 200) :- build(PackageNode), attr("node", PackageNode).
 build_priority(PackageNode, 0) :- not build(PackageNode), attr("node", PackageNode).

@@ -1394,6 +1434,16 @@ build_priority(PackageNode, 0) :- not build(PackageNode), attr("node", Package
 % 2. a `#minimize{ 0@2 : #true }.` statement that ensures the criterion
 % is displayed (clingo doesn't display sums over empty sets by default)

+% A condition group specifies one or more specs that must be satisfied.
+% Specs declared first are preferred, so we assign increasing weights and
+% minimize the weights.
+opt_criterion(310, "requirement weight").
+#minimize{ 0@310: #true }.
+#minimize {
+Weight@310,PackageNode,Group
+: requirement_weight(PackageNode, Group, Weight)
+}.
+
 % Try hard to reuse installed packages (i.e., minimize the number built)
 opt_criterion(110, "number of packages to build (vs. reuse)").
 #minimize { 0@110: #true }.

@@ -1405,18 +1455,6 @@ opt_criterion(100, "number of nodes from the same package").
 #minimize { ID@100,Package : attr("virtual_node", node(ID, Package)) }.
 #defined optimize_for_reuse/0.

-% A condition group specifies one or more specs that must be satisfied.
-% Specs declared first are preferred, so we assign increasing weights and
-% minimize the weights.
-opt_criterion(75, "requirement weight").
-#minimize{ 0@275: #true }.
-#minimize{ 0@75: #true }.
-#minimize {
-Weight@75+Priority,PackageNode,Group
-: requirement_weight(PackageNode, Group, Weight),
-build_priority(PackageNode, Priority)
-}.

 % Minimize the number of deprecated versions being used
 opt_criterion(73, "deprecated versions used").
 #minimize{ 0@273: #true }.

@@ -1424,6 +1462,7 @@ opt_criterion(73, "deprecated versions used").
 #minimize{
 1@73+Priority,PackageNode
 : attr("deprecated", PackageNode, _),
+not external(PackageNode),
 build_priority(PackageNode, Priority)
 }.

@@ -1431,11 +1470,11 @@ opt_criterion(73, "deprecated versions used").
 % 1. Version weight
 % 2. Number of variants with a non default value, if not set
 % for the root package.
-opt_criterion(70, "version weight").
+opt_criterion(70, "version badness (roots)").
 #minimize{ 0@270: #true }.
 #minimize{ 0@70: #true }.
 #minimize {
-Weight@70+Priority
+Weight@70+Priority,PackageNode
 : attr("root", PackageNode),
 version_weight(PackageNode, Weight),
 build_priority(PackageNode, Priority)

@@ -1495,7 +1534,7 @@ opt_criterion(45, "preferred providers (non-roots)").
 }.

 % Try to minimize the number of compiler mismatches in the DAG.
-opt_criterion(40, "compiler mismatches that are not from CLI").
+opt_criterion(40, "compiler mismatches that are not required").
 #minimize{ 0@240: #true }.
 #minimize{ 0@40: #true }.
 #minimize{

@@ -1505,7 +1544,7 @@ opt_criterion(40, "compiler mismatches that are not from CLI").
 not runtime(Dependency)
 }.

-opt_criterion(39, "compiler mismatches that are not from CLI").
+opt_criterion(39, "compiler mismatches that are required").
 #minimize{ 0@239: #true }.
 #minimize{ 0@39: #true }.
 #minimize{

@@ -1525,13 +1564,14 @@ opt_criterion(30, "non-preferred OS's").
 }.

 % Choose more recent versions for nodes
-opt_criterion(25, "version badness").
+opt_criterion(25, "version badness (non roots)").
 #minimize{ 0@225: #true }.
 #minimize{ 0@25: #true }.
 #minimize{
 Weight@25+Priority,node(X, Package)
 : version_weight(node(X, Package), Weight),
 build_priority(node(X, Package), Priority),
+not attr("root", node(X, Package)),
 not runtime(Package)
 }.

@@ -4,21 +4,35 @@
 % SPDX-License-Identifier: (Apache-2.0 OR MIT)

 %=============================================================================
-% Heuristic to speed-up solves (node with ID 0)
+% Heuristic to speed-up solves
 %=============================================================================

+% No duplicates by default (most of them will be true)
+#heuristic attr("node", node(PackageID, Package)). [100, init]
+#heuristic attr("node", node(PackageID, Package)). [ 2, factor]
+#heuristic attr("virtual_node", node(VirtualID, Virtual)). [100, init]
+#heuristic attr("node", node(1..X-1, Package)) : max_dupes(Package, X), not virtual(Package), X > 1. [-1, sign]
+#heuristic attr("virtual_node", node(1..X-1, Package)) : max_dupes(Package, X), virtual(Package) , X > 1. [-1, sign]

-%-----------------
-% Domain heuristic
-%-----------------
+% Pick preferred version
+#heuristic attr("version", node(PackageID, Package), Version) : pkg_fact(Package, version_declared(Version, Weight)), attr("node", node(PackageID, Package)). [40, init]
+#heuristic version_weight(node(PackageID, Package), 0) : pkg_fact(Package, version_declared(Version, 0 )), attr("node", node(PackageID, Package)). [ 1, sign]
+#heuristic attr("version", node(PackageID, Package), Version) : pkg_fact(Package, version_declared(Version, 0 )), attr("node", node(PackageID, Package)). [ 1, sign]
+#heuristic attr("version", node(PackageID, Package), Version) : pkg_fact(Package, version_declared(Version, Weight)), attr("node", node(PackageID, Package)), Weight > 0. [-1, sign]

-% Root node
-#heuristic attr("version", node(0, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("root", node(0, Package)). [35, true]
-#heuristic version_weight(node(0, Package), 0) : pkg_fact(Package, version_declared(Version, 0)), attr("root", node(0, Package)). [35, true]
-#heuristic attr("variant_value", node(0, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("root", node(0, Package)). [35, true]
-#heuristic attr("node_target", node(0, Package), Target) : target_weight(Target, 0), attr("root", node(0, Package)). [35, true]
-#heuristic node_target_weight(node(0, Package), 0) : attr("root", node(0, Package)). [35, true]
-#heuristic node_compiler(node(0, Package), CompilerID) : compiler_weight(ID, 0), compiler_id(ID), attr("root", node(0, Package)). [35, true]
+% Use default variants
+#heuristic attr("variant_value", node(PackageID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(PackageID, Package)). [40, true]
+#heuristic attr("variant_value", node(PackageID, Package), Variant, Value) : not variant_default_value(Package, Variant, Value), attr("node", node(PackageID, Package)). [40, false]

-% Providers
-#heuristic attr("node", node(0, Package)) : default_provider_preference(Virtual, Package, 0), possible_in_link_run(Package). [30, true]
+% Use default operating system and platform
+#heuristic attr("node_os", node(PackageID, Package), OS) : os(OS, 0), attr("root", node(PackageID, Package)). [40, true]
+#heuristic attr("node_platform", node(PackageID, Package), Platform) : allowed_platform(Platform), attr("root", node(PackageID, Package)). [40, true]

+% Use default targets
+#heuristic attr("node_target", node(PackageID, Package), Target) : target_weight(Target, Weight), attr("node", node(PackageID, Package)). [30, init]
+#heuristic attr("node_target", node(PackageID, Package), Target) : target_weight(Target, Weight), attr("node", node(PackageID, Package)). [ 2, factor]
+#heuristic attr("node_target", node(PackageID, Package), Target) : target_weight(Target, 0), attr("node", node(PackageID, Package)). [ 1, sign]
+#heuristic attr("node_target", node(PackageID, Package), Target) : target_weight(Target, Weight), attr("node", node(PackageID, Package)), Weight > 0. [-1, sign]

+% Use the default compilers
+#heuristic node_compiler(node(PackageID, Package), ID) : compiler_weight(ID, 0), compiler_id(ID), attr("node", node(PackageID, Package)). [30, init]

@@ -1,24 +0,0 @@
-% Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
-% Spack Project Developers. See the top-level COPYRIGHT file for details.
-%
-% SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-%=============================================================================
-% Heuristic to speed-up solves (node with ID > 0)
-%=============================================================================
-
-% node(ID, _)
-#heuristic attr("version", node(ID, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
-#heuristic version_weight(node(ID, Package), 0) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
-#heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
-#heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
-#heuristic node_target_weight(node(ID, Package), 0) : attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
-#heuristic node_compiler(node(ID, Package), CompilerID) : compiler_weight(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), ID > 0. [25-5*ID, true]
-
-% node(ID, _), split build dependencies
-#heuristic attr("version", node(ID, Package), Version) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
-#heuristic version_weight(node(ID, Package), 0) : pkg_fact(Package, version_declared(Version, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
-#heuristic attr("variant_value", node(ID, Package), Variant, Value) : variant_default_value(Package, Variant, Value), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
-#heuristic attr("node_target", node(ID, Package), Target) : pkg_fact(Package, target_weight(Target, 0)), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
-#heuristic node_target_weight(node(ID, Package), 0) : attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]
-#heuristic node_compiler(node(ID, Package), CompilerID) : compiler_weight(CompilerID, 0), compiler_id(CompilerID), attr("node", node(ID, Package)), multiple_unification_sets(Package), ID > 0. [25, true]

@@ -10,15 +10,13 @@
 %=============================================================================

 % A package cannot be reused if the libc is not compatible with it
+error(100, "Cannot reuse {0} since we cannot determine libc compatibility", ReusedPackage)
 :- provider(node(X, LibcPackage), node(0, "libc")),
    attr("version", node(X, LibcPackage), LibcVersion),
    attr("hash", node(R, ReusedPackage), Hash),
    % Libc packages can be reused without the "compatible_libc" attribute
    ReusedPackage != LibcPackage,
    not attr("compatible_libc", node(R, ReusedPackage), LibcPackage, LibcVersion).

-% Check whether the DAG has any built package
-has_built_packages() :- build(X), not external(X).
-
 % A libc is needed in the DAG
 :- has_built_packages(), not provider(_, node(0, "libc")).

@@ -12,6 +12,7 @@
 %=============================================================================

 % macOS
+os_compatible("sequoia", "sonoma").
 os_compatible("sonoma", "ventura").
 os_compatible("ventura", "monterey").
 os_compatible("monterey", "bigsur").

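
For illustration, a small standalone sketch in plain Python (hypothetical helper name, not code from this changeset) of what the pairwise os_compatible facts above express, assuming the intent is that compatibility chains transitively down the list of releases:

    # Pairwise facts mirroring the macOS entries above; "sequoia" is the newly added one.
    OS_COMPATIBLE = {
        ("sequoia", "sonoma"),
        ("sonoma", "ventura"),
        ("ventura", "monterey"),
        ("monterey", "bigsur"),
    }

    def os_compatible(newer: str, older: str) -> bool:
        """Return True if `newer` may reuse binaries built for `older` (transitive closure)."""
        if newer == older or (newer, older) in OS_COMPATIBLE:
            return True
        return any(a == newer and os_compatible(b, older) for (a, b) in OS_COMPATIBLE)

    assert os_compatible("sequoia", "bigsur")      # chained through sonoma/ventura/monterey
    assert not os_compatible("bigsur", "sequoia")  # compatibility is one-directional
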
@@ -2045,6 +2045,18 @@ def to_node_dict(self, hash=ht.dag_hash):
         if params:
             d["parameters"] = params

+        if params and not self.concrete:
+            flag_names = [
+                name
+                for name, flags in self.compiler_flags.items()
+                if any(x.propagate for x in flags)
+            ]
+            d["propagate"] = sorted(
+                itertools.chain(
+                    [v.name for v in self.variants.values() if v.propagate], flag_names
+                )
+            )
+
         if self.external:
             d["external"] = syaml.syaml_dict(
                 [

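
For illustration, a minimal sketch (hypothetical spec and variant/flag names, not Spack's serialization code) of the shape the node dictionary takes after this change: a non-concrete spec records which variants and compiler flags are marked for propagation.

    node = {
        "name": "example-pkg",
        "parameters": {"shared": True, "cflags": ["-O2"]},
        # written only when the spec is not concrete and something propagates
        "propagate": sorted(["shared", "cflags"]),
    }
    assert node["propagate"] == ["cflags", "shared"]
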
@@ -2217,16 +2229,10 @@ def node_dict_with_hashes(self, hash=ht.dag_hash):
         spec is concrete, the full hash is added as well. If 'build' is in
         the hash_type, the build hash is also added."""
         node = self.to_node_dict(hash)
+        # All specs have at least a DAG hash
         node[ht.dag_hash.name] = self.dag_hash()

-        # dag_hash is lazily computed -- but if we write a spec out, we want it
-        # to be included. This is effectively the last chance we get to compute
-        # it accurately.
-        if self.concrete:
-            # all specs have at least a DAG hash
-            node[ht.dag_hash.name] = self.dag_hash()
-
-        else:
+        if not self.concrete:
             node["concrete"] = False

         # we can also give them other hash types if we want

@@ -4164,29 +4170,21 @@ def __getitem__(self, name: str):
             csv = query_parameters.pop().strip()
             query_parameters = re.split(r"\s*,\s*", csv)

-        # In some cases a package appears multiple times in the same DAG for *distinct*
-        # specs. For example, a build-type dependency may itself depend on a package
-        # the current spec depends on, but their specs may differ. Therefore we iterate
-        # in an order here that prioritizes the build, test and runtime dependencies;
-        # only when we don't find the package do we consider the full DAG.
         order = lambda: itertools.chain(
-            self.traverse(deptype="link"),
-            self.dependencies(deptype=dt.BUILD | dt.RUN | dt.TEST),
-            self.traverse(), # fall back to a full search
+            self.traverse_edges(deptype=dt.LINK, order="breadth", cover="edges"),
+            self.edges_to_dependencies(depflag=dt.BUILD | dt.RUN | dt.TEST),
+            self.traverse_edges(deptype=dt.ALL, order="breadth", cover="edges"),
         )

+        # Consider runtime dependencies and direct build/test deps before transitive dependencies,
+        # and prefer matches closest to the root.
         try:
             child: Spec = next(
-                itertools.chain(
-                    # Regular specs
-                    (x for x in order() if x.name == name),
-                    (
-                        x
-                        for x in order()
-                        if (not x.virtual)
-                        and any(name in edge.virtuals for edge in x.edges_from_dependents())
-                    ),
-                    (x for x in order() if (not x.virtual) and x.package.provides(name)),
+                e.spec
+                for e in itertools.chain(
+                    (e for e in order() if e.spec.name == name or name in e.virtuals),
+                    # for historical reasons
+                    (e for e in order() if e.spec.concrete and e.spec.package.provides(name)),
                 )
             )
         except StopIteration:

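
For illustration, a self-contained sketch of the lookup pattern used above (hypothetical names, plain strings standing in for dependency edges): several lazily evaluated candidate streams are chained, the first hit wins, and StopIteration is translated into a lookup error.

    import itertools

    def first_match(name, *streams):
        try:
            return next(itertools.chain(*streams))
        except StopIteration:
            raise KeyError(f"no dependency named {name!r}") from None

    link_deps = ["zlib", "openssl"]
    all_deps = ["zlib", "openssl", "cmake", "python"]

    exact = (d for d in link_deps if d == "cmake")    # preferred, cheaper stream
    fallback = (d for d in all_deps if d == "cmake")  # full search as a fallback
    assert first_match("cmake", exact, fallback) == "cmake"
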
@@ -4428,9 +4426,12 @@ def format_attribute(match_object: Match) -> str:
                 if part.startswith("_"):
                     raise SpecFormatStringError("Attempted to format private attribute")
                 else:
-                    if part == "variants" and isinstance(current, vt.VariantMap):
+                    if isinstance(current, vt.VariantMap):
                         # subscript instead of getattr for variant names
-                        current = current[part]
+                        try:
+                            current = current[part]
+                        except KeyError:
+                            raise SpecFormatStringError(f"Variant '{part}' does not exist")
                     else:
                         # aliases
                         if part == "arch":

@@ -5004,13 +5005,17 @@ def from_node_dict(cls, node):
         else:
             spec.compiler = None

+        propagated_names = node.get("propagate", [])
         for name, values in node.get("parameters", {}).items():
+            propagate = name in propagated_names
             if name in _valid_compiler_flags:
                 spec.compiler_flags[name] = []
                 for val in values:
-                    spec.compiler_flags.add_flag(name, val, False)
+                    spec.compiler_flags.add_flag(name, val, propagate)
             else:
-                spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, values)
+                spec.variants[name] = vt.MultiValuedVariant.from_node_dict(
+                    name, values, propagate=propagate
+                )

         spec.external_path = None
         spec.external_modules = None

@@ -13,7 +13,7 @@
 import stat
 import sys
 import tempfile
-from typing import Callable, Dict, Iterable, Optional, Set
+from typing import Callable, Dict, Iterable, List, Optional, Set

 import llnl.string
 import llnl.util.lang
@@ -40,6 +40,7 @@
 import spack.resource
 import spack.spec
 import spack.stage
+import spack.util.crypto
 import spack.util.lock
 import spack.util.path as sup
 import spack.util.pattern as pattern

@@ -346,8 +347,6 @@ class Stage(LockableStagingDir):
     similar, and are intended to persist for only one run of spack.
     """

-    #: Most staging is managed by Spack. DIYStage is one exception.
-    needs_fetching = True
     requires_patch_success = True

     def __init__(

@@ -536,32 +535,29 @@ def generate_fetchers():
             for fetcher in dynamic_fetchers:
                 yield fetcher

-        def print_errors(errors):
-            for msg in errors:
-                tty.debug(msg)
+        errors: List[str] = []

-        errors = []
         for fetcher in generate_fetchers():
             try:
                 fetcher.stage = self
                 self.fetcher = fetcher
                 self.fetcher.fetch()
                 break
-            except spack.fetch_strategy.NoCacheError:
+            except fs.NoCacheError:
                 # Don't bother reporting when something is not cached.
                 continue
+            except fs.FailedDownloadError as f:
+                errors.extend(f"{fetcher}: {e.__class__.__name__}: {e}" for e in f.exceptions)
+                continue
             except spack.error.SpackError as e:
-                errors.append("Fetching from {0} failed.".format(fetcher))
-                tty.debug(e)
+                errors.append(f"{fetcher}: {e.__class__.__name__}: {e}")
                 continue
         else:
-            print_errors(errors)
-
             self.fetcher = self.default_fetcher
-            default_msg = "All fetchers failed for {0}".format(self.name)
-            raise spack.error.FetchError(err_msg or default_msg, None)
-
-            print_errors(errors)
+            if err_msg:
+                raise spack.error.FetchError(err_msg)
+            raise spack.error.FetchError(
+                f"All fetchers failed for {self.name}", "\n".join(f" {e}" for e in errors)
+            )

     def steal_source(self, dest):
         """Copy the source_path directory in its entirety to directory dest

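
For illustration, a standalone sketch (hypothetical fetchers, not Spack's fetch strategies) of the pattern used above: try each source in turn, remember why each attempt failed, and raise a single combined error only if every source failed.

    class FetchError(Exception):
        pass

    def fetch_first(fetchers):
        errors = []
        for fetcher in fetchers:
            try:
                return fetcher()
            except Exception as e:
                errors.append(f"{fetcher.__name__}: {e.__class__.__name__}: {e}")
                continue
        raise FetchError("All fetchers failed", "\n".join(f" {e}" for e in errors))

    def from_mirror():   # always fails in this sketch
        raise OSError("connection refused")

    def from_source():   # succeeds
        return "pkg-1.0.tar.gz"

    assert fetch_first([from_mirror, from_source]) == "pkg-1.0.tar.gz"
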
@@ -772,8 +768,6 @@ def __init__(self):
             "cache_mirror",
             "steal_source",
             "disable_mirrors",
-            "needs_fetching",
-            "requires_patch_success",
         ]
     )

@@ -812,6 +806,10 @@ def path(self):
     def archive_file(self):
         return self[0].archive_file

+    @property
+    def requires_patch_success(self):
+        return self[0].requires_patch_success
+
     @property
     def keep(self):
         return self[0].keep

@@ -822,64 +820,7 @@ def keep(self, value):
             item.keep = value


-class DIYStage:
-    """
-    Simple class that allows any directory to be a spack stage. Consequently,
-    it does not expect or require that the source path adhere to the standard
-    directory naming convention.
-    """
-
-    needs_fetching = False
-    requires_patch_success = False
-
-    def __init__(self, path):
-        if path is None:
-            raise ValueError("Cannot construct DIYStage without a path.")
-        elif not os.path.isdir(path):
-            raise StagePathError("The stage path directory does not exist:", path)
-
-        self.archive_file = None
-        self.path = path
-        self.source_path = path
-        self.created = True
-
-    # DIY stages do nothing as context managers.
-    def __enter__(self):
-        pass
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        pass
-
-    def fetch(self, *args, **kwargs):
-        tty.debug("No need to fetch for DIY.")
-
-    def check(self):
-        tty.debug("No checksum needed for DIY.")
-
-    def expand_archive(self):
-        tty.debug("Using source directory: {0}".format(self.source_path))
-
-    @property
-    def expanded(self):
-        """Returns True since the source_path must exist."""
-        return True
-
-    def restage(self):
-        raise RestageError("Cannot restage a DIY stage.")
-
-    def create(self):
-        self.created = True
-
-    def destroy(self):
-        # No need to destroy DIY stage.
-        pass
-
-    def cache_local(self):
-        tty.debug("Sources for DIY stages are not cached")
-
-
 class DevelopStage(LockableStagingDir):
-    needs_fetching = False
     requires_patch_success = False

     def __init__(self, name, dev_path, reference_link):

@@ -1245,7 +1186,7 @@ def _fetch_and_checksum(url, options, keep_stage, action_fn=None):
             # Checksum the archive and add it to the list
             checksum = spack.util.crypto.checksum(hashlib.sha256, stage.archive_file)
             return checksum, None
-    except FailedDownloadError:
+    except fs.FailedDownloadError:
         return None, f"[WORKER] Failed to fetch {url}"
     except Exception as e:
         return None, f"[WORKER] Something failed on {url}, skipping. ({e})"
@@ -1265,7 +1206,3 @@ class RestageError(StageError):

 class VersionFetchError(StageError):
     """Raised when we can't determine a URL to fetch a package."""
-
-
-# Keep this in namespace for convenience
-FailedDownloadError = fs.FailedDownloadError

@@ -371,7 +371,6 @@ def use_store(
     data.update(extra_data)

     # Swap the store with the one just constructed and return it
-    ensure_singleton_created()
    spack.config.CONFIG.push_scope(
        spack.config.InternalConfigScope(name=scope_name, data={"config": {"install_tree": data}})
    )

@@ -218,10 +218,12 @@ def test_satisfy_strict_constraint_when_not_concrete(architecture_tuple, constra
    str(archspec.cpu.host().family) != "x86_64", reason="tests are for x86_64 uarch ranges"
 )
 def test_concretize_target_ranges(root_target_range, dep_target_range, result, monkeypatch):
-    spec = Spec(f"a %gcc@10 foobar=bar target={root_target_range} ^b target={dep_target_range}")
+    spec = Spec(
+        f"pkg-a %gcc@10 foobar=bar target={root_target_range} ^pkg-b target={dep_target_range}"
+    )
     with spack.concretize.disable_compiler_existence_check():
         spec.concretize()
-    assert spec.target == spec["b"].target == result
+    assert spec.target == spec["pkg-b"].target == result


 @pytest.mark.parametrize(

@@ -22,6 +22,7 @@
 import archspec.cpu

 from llnl.util.filesystem import join_path, visit_directory_tree
+from llnl.util.symlink import readlink

 import spack.binary_distribution as bindist
 import spack.caches
@@ -1062,10 +1063,10 @@ def test_tarball_common_prefix(dummy_prefix, tmpdir):
         assert set(os.listdir(os.path.join("prefix2", "share"))) == {"file"}

         # Relative symlink should still be correct
-        assert os.readlink(os.path.join("prefix2", "bin", "relative_app_link")) == "app"
+        assert readlink(os.path.join("prefix2", "bin", "relative_app_link")) == "app"

         # Absolute symlink should remain absolute -- this is for relocation to fix up.
-        assert os.readlink(os.path.join("prefix2", "bin", "absolute_app_link")) == os.path.join(
+        assert readlink(os.path.join("prefix2", "bin", "absolute_app_link")) == os.path.join(
             dummy_prefix, "bin", "app"
         )

@@ -228,3 +228,25 @@ def test_source_is_disabled(mutable_config):
     spack.config.add("bootstrap:trusted:{0}:{1}".format(conf["name"], False))
     with pytest.raises(ValueError):
         spack.bootstrap.core.source_is_enabled_or_raise(conf)
+
+
+@pytest.mark.regression("45247")
+def test_use_store_does_not_try_writing_outside_root(tmp_path, monkeypatch, mutable_config):
+    """Tests that when we use the 'use_store' context manager, there is no attempt at creating
+    a Store outside the given root.
+    """
+    initial_store = mutable_config.get("config:install_tree:root")
+    user_store = tmp_path / "store"
+
+    fn = spack.store.Store.__init__
+
+    def _checked_init(self, root, *args, **kwargs):
+        fn(self, root, *args, **kwargs)
+        assert self.root == str(user_store)
+
+    monkeypatch.setattr(spack.store.Store, "__init__", _checked_init)
+
+    spack.store.reinitialize()
+    with spack.store.use_store(user_store):
+        assert spack.config.CONFIG.get("config:install_tree:root") == str(user_store)
+    assert spack.config.CONFIG.get("config:install_tree:root") == initial_store

@@ -14,6 +14,7 @@

 import spack.build_environment
 import spack.config
+import spack.deptypes as dt
 import spack.package_base
 import spack.spec
 import spack.util.spack_yaml as syaml
@@ -456,14 +457,14 @@ def test_parallel_false_is_not_propagating(default_mock_concretization):
     # a foobar=bar (parallel = False)
     # |
     # b (parallel =True)
-    s = default_mock_concretization("a foobar=bar")
+    s = default_mock_concretization("pkg-a foobar=bar")

     spack.build_environment.set_package_py_globals(s.package, context=Context.BUILD)
-    assert s["a"].package.module.make_jobs == 1
+    assert s["pkg-a"].package.module.make_jobs == 1

-    spack.build_environment.set_package_py_globals(s["b"].package, context=Context.BUILD)
-    assert s["b"].package.module.make_jobs == spack.build_environment.determine_number_of_jobs(
-        parallel=s["b"].package.parallel
+    spack.build_environment.set_package_py_globals(s["pkg-b"].package, context=Context.BUILD)
+    assert s["pkg-b"].package.module.make_jobs == spack.build_environment.determine_number_of_jobs(
+        parallel=s["pkg-b"].package.parallel
     )

@@ -559,7 +560,7 @@ def test_dirty_disable_module_unload(config, mock_packages, working_env, mock_mo
|
|||||||
"""Test that on CRAY platform 'module unload' is not called if the 'dirty'
|
"""Test that on CRAY platform 'module unload' is not called if the 'dirty'
|
||||||
option is on.
|
option is on.
|
||||||
"""
|
"""
|
||||||
s = spack.spec.Spec("a").concretized()
|
s = spack.spec.Spec("pkg-a").concretized()
|
||||||
|
|
||||||
# If called with "dirty" we don't unload modules, so no calls to the
|
# If called with "dirty" we don't unload modules, so no calls to the
|
||||||
# `module` function on Cray
|
# `module` function on Cray
|
||||||
@@ -716,3 +717,21 @@ def test_build_system_globals_only_set_on_root_during_build(default_mock_concret
|
|||||||
for depth, spec in root.traverse(depth=True, root=True):
|
for depth, spec in root.traverse(depth=True, root=True):
|
||||||
for variable in build_variables:
|
for variable in build_variables:
|
||||||
assert hasattr(spec.package.module, variable) == should_be_set(depth)
|
assert hasattr(spec.package.module, variable) == should_be_set(depth)
|
||||||
|
|
||||||
|
|
||||||
|
def test_rpath_with_duplicate_link_deps():
|
||||||
|
"""If we have two instances of one package in the same link sub-dag, only the newest version is
|
||||||
|
rpath'ed. This is for runtime support without splicing."""
|
||||||
|
runtime_1 = spack.spec.Spec("runtime@=1.0")
|
||||||
|
runtime_2 = spack.spec.Spec("runtime@=2.0")
|
||||||
|
child = spack.spec.Spec("child@=1.0")
|
||||||
|
root = spack.spec.Spec("root@=1.0")
|
||||||
|
|
||||||
|
root.add_dependency_edge(child, depflag=dt.LINK, virtuals=())
|
||||||
|
root.add_dependency_edge(runtime_2, depflag=dt.LINK, virtuals=())
|
||||||
|
child.add_dependency_edge(runtime_1, depflag=dt.LINK, virtuals=())
|
||||||
|
|
||||||
|
rpath_deps = spack.build_environment._get_rpath_deps_from_spec(root, transitive_rpaths=True)
|
||||||
|
assert child in rpath_deps
|
||||||
|
assert runtime_2 in rpath_deps
|
||||||
|
assert runtime_1 not in rpath_deps
|
||||||
|
@@ -97,7 +97,7 @@ def test_negative_ninja_check(self, input_dir, test_dir, concretize_and_setup):
|
|||||||
@pytest.mark.usefixtures("config", "mock_packages")
|
@pytest.mark.usefixtures("config", "mock_packages")
|
||||||
class TestAutotoolsPackage:
|
class TestAutotoolsPackage:
|
||||||
def test_with_or_without(self, default_mock_concretization):
|
def test_with_or_without(self, default_mock_concretization):
|
||||||
s = default_mock_concretization("a")
|
s = default_mock_concretization("pkg-a")
|
||||||
options = s.package.with_or_without("foo")
|
options = s.package.with_or_without("foo")
|
||||||
|
|
||||||
# Ensure that values that are not representing a feature
|
# Ensure that values that are not representing a feature
|
||||||
@@ -129,7 +129,7 @@ def activate(value):
|
|||||||
assert "--without-lorem-ipsum" in options
|
assert "--without-lorem-ipsum" in options
|
||||||
|
|
||||||
def test_none_is_allowed(self, default_mock_concretization):
|
def test_none_is_allowed(self, default_mock_concretization):
|
||||||
s = default_mock_concretization("a foo=none")
|
s = default_mock_concretization("pkg-a foo=none")
|
||||||
options = s.package.with_or_without("foo")
|
options = s.package.with_or_without("foo")
|
||||||
|
|
||||||
# Ensure that values that are not representing a feature
|
# Ensure that values that are not representing a feature
|
||||||
|
@@ -12,21 +12,21 @@
|
|||||||
|
|
||||||
def test_build_task_errors(install_mockery):
|
def test_build_task_errors(install_mockery):
|
||||||
with pytest.raises(ValueError, match="must be a package"):
|
with pytest.raises(ValueError, match="must be a package"):
|
||||||
inst.BuildTask("abc", None, False, 0, 0, 0, [])
|
inst.BuildTask("abc", None, False, 0, 0, 0, set())
|
||||||
|
|
||||||
spec = spack.spec.Spec("trivial-install-test-package")
|
spec = spack.spec.Spec("trivial-install-test-package")
|
||||||
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
|
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
|
||||||
with pytest.raises(ValueError, match="must have a concrete spec"):
|
with pytest.raises(ValueError, match="must have a concrete spec"):
|
||||||
inst.BuildTask(pkg_cls(spec), None, False, 0, 0, 0, [])
|
inst.BuildTask(pkg_cls(spec), None, False, 0, 0, 0, set())
|
||||||
|
|
||||||
spec.concretize()
|
spec.concretize()
|
||||||
assert spec.concrete
|
assert spec.concrete
|
||||||
with pytest.raises(ValueError, match="must have a build request"):
|
with pytest.raises(ValueError, match="must have a build request"):
|
||||||
inst.BuildTask(spec.package, None, False, 0, 0, 0, [])
|
inst.BuildTask(spec.package, None, False, 0, 0, 0, set())
|
||||||
|
|
||||||
request = inst.BuildRequest(spec.package, {})
|
request = inst.BuildRequest(spec.package, {})
|
||||||
with pytest.raises(inst.InstallError, match="Cannot create a build task"):
|
with pytest.raises(inst.InstallError, match="Cannot create a build task"):
|
||||||
inst.BuildTask(spec.package, request, False, 0, 0, inst.STATUS_REMOVED, [])
|
inst.BuildTask(spec.package, request, False, 0, 0, inst.STATUS_REMOVED, set())
|
||||||
|
|
||||||
|
|
||||||
def test_build_task_basics(install_mockery):
|
def test_build_task_basics(install_mockery):
|
||||||
@@ -36,8 +36,8 @@ def test_build_task_basics(install_mockery):
|
|||||||
|
|
||||||
# Ensure key properties match expectations
|
# Ensure key properties match expectations
|
||||||
request = inst.BuildRequest(spec.package, {})
|
request = inst.BuildRequest(spec.package, {})
|
||||||
task = inst.BuildTask(spec.package, request, False, 0, 0, inst.STATUS_ADDED, [])
|
task = inst.BuildTask(spec.package, request, False, 0, 0, inst.STATUS_ADDED, set())
|
||||||
assert task.explicit # package was "explicitly" requested
|
assert not task.explicit
|
||||||
assert task.priority == len(task.uninstalled_deps)
|
assert task.priority == len(task.uninstalled_deps)
|
||||||
assert task.key == (task.priority, task.sequence)
|
assert task.key == (task.priority, task.sequence)
|
||||||
|
|
||||||
@@ -58,7 +58,7 @@ def test_build_task_strings(install_mockery):
|
|||||||
|
|
||||||
# Ensure key properties match expectations
|
# Ensure key properties match expectations
|
||||||
request = inst.BuildRequest(spec.package, {})
|
request = inst.BuildRequest(spec.package, {})
|
||||||
task = inst.BuildTask(spec.package, request, False, 0, 0, inst.STATUS_ADDED, [])
|
task = inst.BuildTask(spec.package, request, False, 0, 0, inst.STATUS_ADDED, set())
|
||||||
|
|
||||||
# Cover __repr__
|
# Cover __repr__
|
||||||
irep = task.__repr__()
|
irep = task.__repr__()
|
||||||
|
@@ -51,7 +51,7 @@ def __init__(self, response_code=200, content_to_read=[]):
|
|||||||
self._content = content_to_read
|
self._content = content_to_read
|
||||||
self._read = [False for c in content_to_read]
|
self._read = [False for c in content_to_read]
|
||||||
|
|
||||||
def open(self, request):
|
def open(self, request, data=None, timeout=object()):
|
||||||
return self
|
return self
|
||||||
|
|
||||||
def getcode(self):
|
def getcode(self):
|
||||||
|
@@ -106,24 +106,24 @@ def test_specs_staging(config, tmpdir):
|
|||||||
|
|
||||||
"""
|
"""
|
||||||
builder = repo.MockRepositoryBuilder(tmpdir)
|
builder = repo.MockRepositoryBuilder(tmpdir)
|
||||||
builder.add_package("g")
|
builder.add_package("pkg-g")
|
||||||
builder.add_package("f")
|
builder.add_package("pkg-f")
|
||||||
builder.add_package("e")
|
builder.add_package("pkg-e")
|
||||||
builder.add_package("d", dependencies=[("f", None, None), ("g", None, None)])
|
builder.add_package("pkg-d", dependencies=[("pkg-f", None, None), ("pkg-g", None, None)])
|
||||||
builder.add_package("c")
|
builder.add_package("pkg-c")
|
||||||
builder.add_package("b", dependencies=[("d", None, None), ("e", None, None)])
|
builder.add_package("pkg-b", dependencies=[("pkg-d", None, None), ("pkg-e", None, None)])
|
||||||
builder.add_package("a", dependencies=[("b", None, None), ("c", None, None)])
|
builder.add_package("pkg-a", dependencies=[("pkg-b", None, None), ("pkg-c", None, None)])
|
||||||
|
|
||||||
with repo.use_repositories(builder.root):
|
with repo.use_repositories(builder.root):
|
||||||
spec_a = Spec("a").concretized()
|
spec_a = Spec("pkg-a").concretized()
|
||||||
|
|
||||||
spec_a_label = ci._spec_ci_label(spec_a)
|
spec_a_label = ci._spec_ci_label(spec_a)
|
||||||
spec_b_label = ci._spec_ci_label(spec_a["b"])
|
spec_b_label = ci._spec_ci_label(spec_a["pkg-b"])
|
||||||
spec_c_label = ci._spec_ci_label(spec_a["c"])
|
spec_c_label = ci._spec_ci_label(spec_a["pkg-c"])
|
||||||
spec_d_label = ci._spec_ci_label(spec_a["d"])
|
spec_d_label = ci._spec_ci_label(spec_a["pkg-d"])
|
||||||
spec_e_label = ci._spec_ci_label(spec_a["e"])
|
spec_e_label = ci._spec_ci_label(spec_a["pkg-e"])
|
||||||
spec_f_label = ci._spec_ci_label(spec_a["f"])
|
spec_f_label = ci._spec_ci_label(spec_a["pkg-f"])
|
||||||
spec_g_label = ci._spec_ci_label(spec_a["g"])
|
spec_g_label = ci._spec_ci_label(spec_a["pkg-g"])
|
||||||
|
|
||||||
spec_labels, dependencies, stages = ci.stage_spec_jobs([spec_a])
|
spec_labels, dependencies, stages = ci.stage_spec_jobs([spec_a])
|
||||||
|
|
||||||
@@ -760,7 +760,6 @@ def test_ci_rebuild_mock_success(
|
|||||||
rebuild_env = create_rebuild_env(tmpdir, pkg_name, broken_tests)
|
rebuild_env = create_rebuild_env(tmpdir, pkg_name, broken_tests)
|
||||||
|
|
||||||
monkeypatch.setattr(spack.cmd.ci, "SPACK_COMMAND", "echo")
|
monkeypatch.setattr(spack.cmd.ci, "SPACK_COMMAND", "echo")
|
||||||
monkeypatch.setattr(spack.cmd.ci, "MAKE_COMMAND", "echo")
|
|
||||||
|
|
||||||
with rebuild_env.env_dir.as_cwd():
|
with rebuild_env.env_dir.as_cwd():
|
||||||
activate_rebuild_env(tmpdir, pkg_name, rebuild_env)
|
activate_rebuild_env(tmpdir, pkg_name, rebuild_env)
|
||||||
@@ -843,7 +842,6 @@ def test_ci_rebuild(
|
|||||||
ci_cmd("rebuild", "--tests", fail_on_error=False)
|
ci_cmd("rebuild", "--tests", fail_on_error=False)
|
||||||
|
|
||||||
monkeypatch.setattr(spack.cmd.ci, "SPACK_COMMAND", "notcommand")
|
monkeypatch.setattr(spack.cmd.ci, "SPACK_COMMAND", "notcommand")
|
||||||
monkeypatch.setattr(spack.cmd.ci, "MAKE_COMMAND", "notcommand")
|
|
||||||
monkeypatch.setattr(spack.cmd.ci, "INSTALL_FAIL_CODE", 127)
|
monkeypatch.setattr(spack.cmd.ci, "INSTALL_FAIL_CODE", 127)
|
||||||
|
|
||||||
with rebuild_env.env_dir.as_cwd():
|
with rebuild_env.env_dir.as_cwd():
|
||||||
@@ -1292,7 +1290,7 @@ def test_ci_generate_override_runner_attrs(
|
|||||||
spack:
|
spack:
|
||||||
specs:
|
specs:
|
||||||
- flatten-deps
|
- flatten-deps
|
||||||
- a
|
- pkg-a
|
||||||
mirrors:
|
mirrors:
|
||||||
some-mirror: https://my.fake.mirror
|
some-mirror: https://my.fake.mirror
|
||||||
ci:
|
ci:
|
||||||
@@ -1309,12 +1307,12 @@ def test_ci_generate_override_runner_attrs(
|
|||||||
- match:
|
- match:
|
||||||
- dependency-install
|
- dependency-install
|
||||||
- match:
|
- match:
|
||||||
- a
|
- pkg-a
|
||||||
build-job:
|
build-job:
|
||||||
tags:
|
tags:
|
||||||
- specific-a-2
|
- specific-a-2
|
||||||
- match:
|
- match:
|
||||||
- a
|
- pkg-a
|
||||||
build-job-remove:
|
build-job-remove:
|
||||||
tags:
|
tags:
|
||||||
- toplevel2
|
- toplevel2
|
||||||
@@ -1374,8 +1372,8 @@ def test_ci_generate_override_runner_attrs(
|
|||||||
assert global_vars["SPACK_CHECKOUT_VERSION"] == git_version or "v0.20.0.test0"
|
assert global_vars["SPACK_CHECKOUT_VERSION"] == git_version or "v0.20.0.test0"
|
||||||
|
|
||||||
for ci_key in yaml_contents.keys():
|
for ci_key in yaml_contents.keys():
|
||||||
if ci_key.startswith("a"):
|
if ci_key.startswith("pkg-a"):
|
||||||
# Make sure a's attributes override variables, and all the
|
# Make sure pkg-a's attributes override variables, and all the
|
||||||
# scripts. Also, make sure the 'toplevel' tag doesn't
|
# scripts. Also, make sure the 'toplevel' tag doesn't
|
||||||
# appear twice, but that a's specific extra tag does appear
|
# appear twice, but that a's specific extra tag does appear
|
||||||
the_elt = yaml_contents[ci_key]
|
the_elt = yaml_contents[ci_key]
|
||||||
@@ -1832,7 +1830,7 @@ def test_ci_generate_read_broken_specs_url(
|
|||||||
tmpdir, mutable_mock_env_path, install_mockery, mock_packages, monkeypatch, ci_base_environment
|
tmpdir, mutable_mock_env_path, install_mockery, mock_packages, monkeypatch, ci_base_environment
|
||||||
):
|
):
|
||||||
"""Verify that `broken-specs-url` works as intended"""
|
"""Verify that `broken-specs-url` works as intended"""
|
||||||
spec_a = Spec("a")
|
spec_a = Spec("pkg-a")
|
||||||
spec_a.concretize()
|
spec_a.concretize()
|
||||||
a_dag_hash = spec_a.dag_hash()
|
a_dag_hash = spec_a.dag_hash()
|
||||||
|
|
||||||
@@ -1858,7 +1856,7 @@ def test_ci_generate_read_broken_specs_url(
|
|||||||
spack:
|
spack:
|
||||||
specs:
|
specs:
|
||||||
- flatten-deps
|
- flatten-deps
|
||||||
- a
|
- pkg-a
|
||||||
mirrors:
|
mirrors:
|
||||||
some-mirror: https://my.fake.mirror
|
some-mirror: https://my.fake.mirror
|
||||||
ci:
|
ci:
|
||||||
@@ -1866,9 +1864,9 @@ def test_ci_generate_read_broken_specs_url(
|
|||||||
pipeline-gen:
|
pipeline-gen:
|
||||||
- submapping:
|
- submapping:
|
||||||
- match:
|
- match:
|
||||||
- a
|
- pkg-a
|
||||||
- flatten-deps
|
- flatten-deps
|
||||||
- b
|
- pkg-b
|
||||||
- dependency-install
|
- dependency-install
|
||||||
build-job:
|
build-job:
|
||||||
tags:
|
tags:
|
||||||
|
@@ -81,14 +81,14 @@ def test_match_spec_env(mock_packages, mutable_mock_env_path):
|
|||||||
"""
|
"""
|
||||||
# Initial sanity check: we are planning on choosing a non-default
|
# Initial sanity check: we are planning on choosing a non-default
|
||||||
# value, so make sure that is in fact not the default.
|
# value, so make sure that is in fact not the default.
|
||||||
check_defaults = spack.cmd.parse_specs(["a"], concretize=True)[0]
|
check_defaults = spack.cmd.parse_specs(["pkg-a"], concretize=True)[0]
|
||||||
assert not check_defaults.satisfies("foobar=baz")
|
assert not check_defaults.satisfies("foobar=baz")
|
||||||
|
|
||||||
e = ev.create("test")
|
e = ev.create("test")
|
||||||
e.add("a foobar=baz")
|
e.add("pkg-a foobar=baz")
|
||||||
e.concretize()
|
e.concretize()
|
||||||
with e:
|
with e:
|
||||||
env_spec = spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(["a"])[0])
|
env_spec = spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(["pkg-a"])[0])
|
||||||
assert env_spec.satisfies("foobar=baz")
|
assert env_spec.satisfies("foobar=baz")
|
||||||
assert env_spec.concrete
|
assert env_spec.concrete
|
||||||
|
|
||||||
@@ -96,12 +96,12 @@ def test_match_spec_env(mock_packages, mutable_mock_env_path):
|
|||||||
@pytest.mark.usefixtures("config")
|
@pytest.mark.usefixtures("config")
|
||||||
def test_multiple_env_match_raises_error(mock_packages, mutable_mock_env_path):
|
def test_multiple_env_match_raises_error(mock_packages, mutable_mock_env_path):
|
||||||
e = ev.create("test")
|
e = ev.create("test")
|
||||||
e.add("a foobar=baz")
|
e.add("pkg-a foobar=baz")
|
||||||
e.add("a foobar=fee")
|
e.add("pkg-a foobar=fee")
|
||||||
e.concretize()
|
e.concretize()
|
||||||
with e:
|
with e:
|
||||||
with pytest.raises(ev.SpackEnvironmentError) as exc_info:
|
with pytest.raises(ev.SpackEnvironmentError) as exc_info:
|
||||||
spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(["a"])[0])
|
spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(["pkg-a"])[0])
|
||||||
|
|
||||||
assert "matches multiple specs" in exc_info.value.message
|
assert "matches multiple specs" in exc_info.value.message
|
||||||
|
|
||||||
@@ -109,16 +109,16 @@ def test_multiple_env_match_raises_error(mock_packages, mutable_mock_env_path):
|
|||||||
@pytest.mark.usefixtures("config")
|
@pytest.mark.usefixtures("config")
|
||||||
def test_root_and_dep_match_returns_root(mock_packages, mutable_mock_env_path):
|
def test_root_and_dep_match_returns_root(mock_packages, mutable_mock_env_path):
|
||||||
e = ev.create("test")
|
e = ev.create("test")
|
||||||
e.add("b@0.9")
|
e.add("pkg-b@0.9")
|
||||||
e.add("a foobar=bar") # Depends on b, should choose b@1.0
|
e.add("pkg-a foobar=bar") # Depends on b, should choose b@1.0
|
||||||
e.concretize()
|
e.concretize()
|
||||||
with e:
|
with e:
|
||||||
# This query matches the root b and b as a dependency of a. In that
|
# This query matches the root b and b as a dependency of a. In that
|
||||||
# case the root instance should be preferred.
|
# case the root instance should be preferred.
|
||||||
env_spec1 = spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(["b"])[0])
|
env_spec1 = spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(["pkg-b"])[0])
|
||||||
assert env_spec1.satisfies("@0.9")
|
assert env_spec1.satisfies("@0.9")
|
||||||
|
|
||||||
env_spec2 = spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(["b@1.0"])[0])
|
env_spec2 = spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(["pkg-b@1.0"])[0])
|
||||||
assert env_spec2
|
assert env_spec2
|
||||||
|
|
||||||
|
|
||||||
|
@@ -51,8 +51,8 @@ def test_concretize_root_test_dependencies_are_concretized(unify, mutable_mock_e
|
|||||||
|
|
||||||
with ev.read("test") as e:
|
with ev.read("test") as e:
|
||||||
e.unify = unify
|
e.unify = unify
|
||||||
add("a")
|
add("pkg-a")
|
||||||
add("b")
|
add("pkg-b")
|
||||||
concretize("--test", "root")
|
concretize("--test", "root")
|
||||||
assert e.matching_spec("test-dependency")
|
assert e.matching_spec("test-dependency")
|
||||||
|
|
||||||
|
@@ -15,26 +15,26 @@
|
|||||||
def test_env(mutable_mock_env_path, config, mock_packages):
|
def test_env(mutable_mock_env_path, config, mock_packages):
|
||||||
ev.create("test")
|
ev.create("test")
|
||||||
with ev.read("test") as e:
|
with ev.read("test") as e:
|
||||||
e.add("a@2.0 foobar=bar ^b@1.0")
|
e.add("pkg-a@2.0 foobar=bar ^pkg-b@1.0")
|
||||||
e.add("a@1.0 foobar=bar ^b@0.9")
|
e.add("pkg-a@1.0 foobar=bar ^pkg-b@0.9")
|
||||||
e.concretize()
|
e.concretize()
|
||||||
e.write()
|
e.write()
|
||||||
|
|
||||||
|
|
||||||
def test_deconcretize_dep(test_env):
|
def test_deconcretize_dep(test_env):
|
||||||
with ev.read("test") as e:
|
with ev.read("test") as e:
|
||||||
deconcretize("-y", "b@1.0")
|
deconcretize("-y", "pkg-b@1.0")
|
||||||
specs = [s for s, _ in e.concretized_specs()]
|
specs = [s for s, _ in e.concretized_specs()]
|
||||||
|
|
||||||
assert len(specs) == 1
|
assert len(specs) == 1
|
||||||
assert specs[0].satisfies("a@1.0")
|
assert specs[0].satisfies("pkg-a@1.0")
|
||||||
|
|
||||||
|
|
||||||
def test_deconcretize_all_dep(test_env):
|
def test_deconcretize_all_dep(test_env):
|
||||||
with ev.read("test") as e:
|
with ev.read("test") as e:
|
||||||
with pytest.raises(SpackCommandError):
|
with pytest.raises(SpackCommandError):
|
||||||
deconcretize("-y", "b")
|
deconcretize("-y", "pkg-b")
|
||||||
deconcretize("-y", "--all", "b")
|
deconcretize("-y", "--all", "pkg-b")
|
||||||
specs = [s for s, _ in e.concretized_specs()]
|
specs = [s for s, _ in e.concretized_specs()]
|
||||||
|
|
||||||
assert len(specs) == 0
|
assert len(specs) == 0
|
||||||
@@ -42,27 +42,27 @@ def test_deconcretize_all_dep(test_env):
|
|||||||
|
|
||||||
def test_deconcretize_root(test_env):
|
def test_deconcretize_root(test_env):
|
||||||
with ev.read("test") as e:
|
with ev.read("test") as e:
|
||||||
output = deconcretize("-y", "--root", "b@1.0")
|
output = deconcretize("-y", "--root", "pkg-b@1.0")
|
||||||
assert "No matching specs to deconcretize" in output
|
assert "No matching specs to deconcretize" in output
|
||||||
assert len(e.concretized_order) == 2
|
assert len(e.concretized_order) == 2
|
||||||
|
|
||||||
deconcretize("-y", "--root", "a@2.0")
|
deconcretize("-y", "--root", "pkg-a@2.0")
|
||||||
specs = [s for s, _ in e.concretized_specs()]
|
specs = [s for s, _ in e.concretized_specs()]
|
||||||
|
|
||||||
assert len(specs) == 1
|
assert len(specs) == 1
|
||||||
assert specs[0].satisfies("a@1.0")
|
assert specs[0].satisfies("pkg-a@1.0")
|
||||||
|
|
||||||
|
|
||||||
def test_deconcretize_all_root(test_env):
|
def test_deconcretize_all_root(test_env):
|
||||||
with ev.read("test") as e:
|
with ev.read("test") as e:
|
||||||
with pytest.raises(SpackCommandError):
|
with pytest.raises(SpackCommandError):
|
||||||
deconcretize("-y", "--root", "a")
|
deconcretize("-y", "--root", "pkg-a")
|
||||||
|
|
||||||
output = deconcretize("-y", "--root", "--all", "b")
|
output = deconcretize("-y", "--root", "--all", "pkg-b")
|
||||||
assert "No matching specs to deconcretize" in output
|
assert "No matching specs to deconcretize" in output
|
||||||
assert len(e.concretized_order) == 2
|
assert len(e.concretized_order) == 2
|
||||||
|
|
||||||
deconcretize("-y", "--root", "--all", "a")
|
deconcretize("-y", "--root", "--all", "pkg-a")
|
||||||
specs = [s for s, _ in e.concretized_specs()]
|
specs = [s for s, _ in e.concretized_specs()]
|
||||||
|
|
||||||
assert len(specs) == 0
|
assert len(specs) == 0
|
||||||
|
@@ -15,6 +15,7 @@
|
|||||||
import llnl.util.filesystem as fs
|
import llnl.util.filesystem as fs
|
||||||
import llnl.util.link_tree
|
import llnl.util.link_tree
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
|
from llnl.util.symlink import readlink
|
||||||
|
|
||||||
import spack.cmd.env
|
import spack.cmd.env
|
||||||
import spack.config
|
import spack.config
|
||||||
@@ -27,7 +28,9 @@
|
|||||||
import spack.package_base
|
import spack.package_base
|
||||||
import spack.paths
|
import spack.paths
|
||||||
import spack.repo
|
import spack.repo
|
||||||
|
import spack.store
|
||||||
import spack.util.spack_json as sjson
|
import spack.util.spack_json as sjson
|
||||||
|
import spack.util.spack_yaml
|
||||||
from spack.cmd.env import _env_create
|
from spack.cmd.env import _env_create
|
||||||
from spack.main import SpackCommand, SpackCommandError
|
from spack.main import SpackCommand, SpackCommandError
|
||||||
from spack.spec import Spec
|
from spack.spec import Spec
|
||||||
@@ -60,6 +63,27 @@
|
|||||||
sep = os.sep
|
sep = os.sep
|
||||||
|
|
||||||
|
|
||||||
|
def setup_combined_multiple_env():
|
||||||
|
env("create", "test1")
|
||||||
|
test1 = ev.read("test1")
|
||||||
|
with test1:
|
||||||
|
add("zlib")
|
||||||
|
test1.concretize()
|
||||||
|
test1.write()
|
||||||
|
|
||||||
|
env("create", "test2")
|
||||||
|
test2 = ev.read("test2")
|
||||||
|
with test2:
|
||||||
|
add("libelf")
|
||||||
|
test2.concretize()
|
||||||
|
test2.write()
|
||||||
|
|
||||||
|
env("create", "--include-concrete", "test1", "--include-concrete", "test2", "combined_env")
|
||||||
|
combined = ev.read("combined_env")
|
||||||
|
|
||||||
|
return test1, test2, combined
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture()
|
@pytest.fixture()
|
||||||
def environment_from_manifest(tmp_path):
|
def environment_from_manifest(tmp_path):
|
||||||
"""Returns a new environment named 'test' from the content of a manifest file."""
|
"""Returns a new environment named 'test' from the content of a manifest file."""
|
||||||
@@ -369,6 +393,29 @@ def test_env_install_single_spec(install_mockery, mock_fetch):
|
|||||||
assert e.specs_by_hash[e.concretized_order[0]].name == "cmake-client"
|
assert e.specs_by_hash[e.concretized_order[0]].name == "cmake-client"
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("unify", [True, False, "when_possible"])
|
||||||
|
def test_env_install_include_concrete_env(unify, install_mockery, mock_fetch):
|
||||||
|
test1, test2, combined = setup_combined_multiple_env()
|
||||||
|
|
||||||
|
combined.concretize()
|
||||||
|
combined.write()
|
||||||
|
|
||||||
|
combined.unify = unify
|
||||||
|
|
||||||
|
with combined:
|
||||||
|
install()
|
||||||
|
|
||||||
|
test1_roots = test1.concretized_order
|
||||||
|
test2_roots = test2.concretized_order
|
||||||
|
combined_included_roots = combined.included_concretized_order
|
||||||
|
|
||||||
|
for spec in combined.all_specs():
|
||||||
|
assert spec.installed
|
||||||
|
|
||||||
|
assert test1_roots == combined_included_roots[test1.path]
|
||||||
|
assert test2_roots == combined_included_roots[test2.path]
|
||||||
|
|
||||||
|
|
||||||
def test_env_roots_marked_explicit(install_mockery, mock_fetch):
|
def test_env_roots_marked_explicit(install_mockery, mock_fetch):
|
||||||
install = SpackCommand("install")
|
install = SpackCommand("install")
|
||||||
install("dependent-install")
|
install("dependent-install")
|
||||||
@@ -456,7 +503,7 @@ def test_env_install_two_specs_same_dep(install_mockery, mock_fetch, tmpdir, cap
|
|||||||
"""\
|
"""\
|
||||||
spack:
|
spack:
|
||||||
specs:
|
specs:
|
||||||
- a
|
- pkg-a
|
||||||
- depb
|
- depb
|
||||||
"""
|
"""
|
||||||
)
|
)
|
||||||
@@ -475,8 +522,8 @@ def test_env_install_two_specs_same_dep(install_mockery, mock_fetch, tmpdir, cap
|
|||||||
depb = spack.store.STORE.db.query_one("depb", installed=True)
|
depb = spack.store.STORE.db.query_one("depb", installed=True)
|
||||||
assert depb, "Expected depb to be installed"
|
assert depb, "Expected depb to be installed"
|
||||||
|
|
||||||
a = spack.store.STORE.db.query_one("a", installed=True)
|
a = spack.store.STORE.db.query_one("pkg-a", installed=True)
|
||||||
assert a, "Expected a to be installed"
|
assert a, "Expected pkg-a to be installed"
|
||||||
|
|
||||||
|
|
||||||
def test_remove_after_concretize():
|
def test_remove_after_concretize():
|
||||||
@@ -557,6 +604,41 @@ def test_remove_command():
|
|||||||
assert "mpileaks@" not in find("--show-concretized")
|
assert "mpileaks@" not in find("--show-concretized")
|
||||||
|
|
||||||
|
|
||||||
|
def test_bad_remove_included_env():
|
||||||
|
env("create", "test")
|
||||||
|
test = ev.read("test")
|
||||||
|
|
||||||
|
with test:
|
||||||
|
add("mpileaks")
|
||||||
|
|
||||||
|
test.concretize()
|
||||||
|
test.write()
|
||||||
|
|
||||||
|
env("create", "--include-concrete", "test", "combined_env")
|
||||||
|
|
||||||
|
with pytest.raises(SpackCommandError):
|
||||||
|
env("remove", "test")
|
||||||
|
|
||||||
|
|
||||||
|
def test_force_remove_included_env():
|
||||||
|
env("create", "test")
|
||||||
|
test = ev.read("test")
|
||||||
|
|
||||||
|
with test:
|
||||||
|
add("mpileaks")
|
||||||
|
|
||||||
|
test.concretize()
|
||||||
|
test.write()
|
||||||
|
|
||||||
|
env("create", "--include-concrete", "test", "combined_env")
|
||||||
|
|
||||||
|
rm_output = env("remove", "-f", "-y", "test")
|
||||||
|
list_output = env("list")
|
||||||
|
|
||||||
|
assert '"test" is being used by environment "combined_env"' in rm_output
|
||||||
|
assert "test" not in list_output
|
||||||
|
|
||||||
|
|
||||||
def test_environment_status(capsys, tmpdir):
|
def test_environment_status(capsys, tmpdir):
|
||||||
with tmpdir.as_cwd():
|
with tmpdir.as_cwd():
|
||||||
with capsys.disabled():
|
with capsys.disabled():
|
||||||
@@ -745,7 +827,7 @@ def test_env_view_external_prefix(tmp_path, mutable_database, mock_packages):
|
|||||||
"""\
|
"""\
|
||||||
spack:
|
spack:
|
||||||
specs:
|
specs:
|
||||||
- a
|
- pkg-a
|
||||||
view: true
|
view: true
|
||||||
"""
|
"""
|
||||||
)
|
)
|
||||||
@@ -753,9 +835,9 @@ def test_env_view_external_prefix(tmp_path, mutable_database, mock_packages):
|
|||||||
external_config = io.StringIO(
|
external_config = io.StringIO(
|
||||||
"""\
|
"""\
|
||||||
packages:
|
packages:
|
||||||
a:
|
pkg-a:
|
||||||
externals:
|
externals:
|
||||||
- spec: a@2.0
|
- spec: pkg-a@2.0
|
||||||
prefix: {a_prefix}
|
prefix: {a_prefix}
|
||||||
buildable: false
|
buildable: false
|
||||||
""".format(
|
""".format(
|
||||||
@@ -1636,6 +1718,286 @@ def test_env_without_view_install(tmpdir, mock_stage, mock_fetch, install_mocker
     check_mpileaks_and_deps_in_view(view_dir)


+@pytest.mark.parametrize("env_name", [True, False])
+def test_env_include_concrete_env_yaml(env_name):
+    env("create", "test")
+    test = ev.read("test")
+
+    with test:
+        add("mpileaks")
+    test.concretize()
+    test.write()
+
+    environ = "test" if env_name else test.path
+
+    env("create", "--include-concrete", environ, "combined_env")
+
+    combined = ev.read("combined_env")
+    combined_yaml = combined.manifest["spack"]
+
+    assert "include_concrete" in combined_yaml
+    assert test.path in combined_yaml["include_concrete"]
+
+
+@pytest.mark.regression("45766")
+@pytest.mark.parametrize("format", ["v1", "v2", "v3"])
+def test_env_include_concrete_old_env(format, tmpdir):
+    lockfile = os.path.join(spack.paths.test_path, "data", "legacy_env", f"{format}.lock")
+    # create an env from old .lock file -- this does not update the format
+    env("create", "old-env", lockfile)
+    env("create", "--include-concrete", "old-env", "test")
+
+    assert ev.read("old-env").all_specs() == ev.read("test").all_specs()
+
+
+def test_env_bad_include_concrete_env():
+    with pytest.raises(ev.SpackEnvironmentError):
+        env("create", "--include-concrete", "nonexistant_env", "combined_env")
+
+
+def test_env_not_concrete_include_concrete_env():
+    env("create", "test")
+    test = ev.read("test")
+
+    with test:
+        add("mpileaks")
+
+    with pytest.raises(ev.SpackEnvironmentError):
+        env("create", "--include-concrete", "test", "combined_env")
+
+
+def test_env_multiple_include_concrete_envs():
+    test1, test2, combined = setup_combined_multiple_env()
+
+    combined_yaml = combined.manifest["spack"]
+
+    assert test1.path in combined_yaml["include_concrete"][0]
+    assert test2.path in combined_yaml["include_concrete"][1]
+
+    # No local specs in the combined env
+    assert not combined_yaml["specs"]
+
+
+def test_env_include_concrete_envs_lockfile():
+    test1, test2, combined = setup_combined_multiple_env()
+
+    combined_yaml = combined.manifest["spack"]
+
+    assert "include_concrete" in combined_yaml
+    assert test1.path in combined_yaml["include_concrete"]
+
+    with open(combined.lock_path) as f:
+        lockfile_as_dict = combined._read_lockfile(f)
+
+    assert set(
+        entry["hash"] for entry in lockfile_as_dict["include_concrete"][test1.path]["roots"]
+    ) == set(test1.specs_by_hash)
+    assert set(
+        entry["hash"] for entry in lockfile_as_dict["include_concrete"][test2.path]["roots"]
+    ) == set(test2.specs_by_hash)
+
+
+def test_env_include_concrete_add_env():
+    test1, test2, combined = setup_combined_multiple_env()
+
+    # create new env & concretize
+    env("create", "new")
+    new_env = ev.read("new")
+    with new_env:
+        add("mpileaks")
+
+    new_env.concretize()
+    new_env.write()
+
+    # add new env to combined
+    combined.included_concrete_envs.append(new_env.path)
+
+    # assert things haven't changed yet
+    with open(combined.lock_path) as f:
+        lockfile_as_dict = combined._read_lockfile(f)
+
+    assert new_env.path not in lockfile_as_dict["include_concrete"].keys()
+
+    # concretize combined env with new env
+    combined.concretize()
+    combined.write()
+
+    # assert changes
+    with open(combined.lock_path) as f:
+        lockfile_as_dict = combined._read_lockfile(f)
+
+    assert new_env.path in lockfile_as_dict["include_concrete"].keys()
+
+
+def test_env_include_concrete_remove_env():
+    test1, test2, combined = setup_combined_multiple_env()
+
+    # remove test2 from combined
+    combined.included_concrete_envs = [test1.path]
+
+    # assert test2 is still in combined's lockfile
+    with open(combined.lock_path) as f:
+        lockfile_as_dict = combined._read_lockfile(f)
+
+    assert test2.path in lockfile_as_dict["include_concrete"].keys()
+
+    # reconcretize combined
+    combined.concretize()
+    combined.write()
+
+    # assert test2 is not in combined's lockfile
+    with open(combined.lock_path) as f:
+        lockfile_as_dict = combined._read_lockfile(f)
+
+    assert test2.path not in lockfile_as_dict["include_concrete"].keys()
+
+
+@pytest.mark.parametrize("unify", [True, False, "when_possible"])
+def test_env_include_concrete_env_reconcretized(unify):
+    """Double check to make sure that concrete_specs for the local specs is empty
+    after reconcretizing.
+    """
+    _, _, combined = setup_combined_multiple_env()
+
+    combined.unify = unify
+
+    with open(combined.lock_path) as f:
+        lockfile_as_dict = combined._read_lockfile(f)
+
+    assert not lockfile_as_dict["roots"]
+    assert not lockfile_as_dict["concrete_specs"]
+
+    combined.concretize()
+    combined.write()
+
+    with open(combined.lock_path) as f:
+        lockfile_as_dict = combined._read_lockfile(f)
+
+    assert not lockfile_as_dict["roots"]
+    assert not lockfile_as_dict["concrete_specs"]
+
+
+def test_concretize_include_concrete_env():
+    test1, _, combined = setup_combined_multiple_env()
+
+    with test1:
+        add("mpileaks")
+    test1.concretize()
+    test1.write()
+
+    assert Spec("mpileaks") in test1.concretized_user_specs
+    assert Spec("mpileaks") not in combined.included_concretized_user_specs[test1.path]
+
+    combined.concretize()
+    combined.write()
+
+    assert Spec("mpileaks") in combined.included_concretized_user_specs[test1.path]
+
+
+def test_concretize_nested_include_concrete_envs():
+    env("create", "test1")
+    test1 = ev.read("test1")
+    with test1:
+        add("zlib")
+    test1.concretize()
+    test1.write()
+
+    env("create", "--include-concrete", "test1", "test2")
+    test2 = ev.read("test2")
+    with test2:
+        add("libelf")
+    test2.concretize()
+    test2.write()
+
+    env("create", "--include-concrete", "test2", "test3")
+    test3 = ev.read("test3")
+
+    with open(test3.lock_path) as f:
+        lockfile_as_dict = test3._read_lockfile(f)
+
+    assert test2.path in lockfile_as_dict["include_concrete"]
+    assert test1.path in lockfile_as_dict["include_concrete"][test2.path]["include_concrete"]
+
+    assert Spec("zlib") in test3.included_concretized_user_specs[test1.path]
+
+
+def test_concretize_nested_included_concrete():
+    """Confirm that nested included environments use specs concretized at
+    environment creation time and change with reconcretization."""
+    env("create", "test1")
+    test1 = ev.read("test1")
+    with test1:
+        add("zlib")
+    test1.concretize()
+    test1.write()
+
+    # test2 should include test1 with zlib
+    env("create", "--include-concrete", "test1", "test2")
+    test2 = ev.read("test2")
+    with test2:
+        add("libelf")
+    test2.concretize()
+    test2.write()
+
+    assert Spec("zlib") in test2.included_concretized_user_specs[test1.path]
+
+    # Modify/re-concretize test1 to replace zlib with mpileaks
+    with test1:
+        remove("zlib")
+        add("mpileaks")
+    test1.concretize()
+    test1.write()
+
+    # test3 should include the latest concretization of test1
+    env("create", "--include-concrete", "test1", "test3")
+    test3 = ev.read("test3")
+    with test3:
+        add("callpath")
+    test3.concretize()
+    test3.write()
+
+    included_specs = test3.included_concretized_user_specs[test1.path]
+    assert len(included_specs) == 1
+    assert Spec("mpileaks") in included_specs
+
+    # The last concretization of test4's included environments should have test2
+    # with the original concretized test1 spec and test3 with the re-concretized
+    # test1 spec.
+    env("create", "--include-concrete", "test2", "--include-concrete", "test3", "test4")
+    test4 = ev.read("test4")
+
+    def included_included_spec(path1, path2):
+        included_path1 = test4.included_concrete_spec_data[path1]
+        included_path2 = included_path1["include_concrete"][path2]
+        return included_path2["roots"][0]["spec"]
+
+    included_test2_test1 = included_included_spec(test2.path, test1.path)
+    assert "zlib" in included_test2_test1
+
+    included_test3_test1 = included_included_spec(test3.path, test1.path)
+    assert "mpileaks" in included_test3_test1
+
+    # test4's concretized specs should reflect the original concretization.
+    concrete_specs = [s for s, _ in test4.concretized_specs()]
+    expected = [Spec(s) for s in ["libelf", "zlib", "mpileaks", "callpath"]]
+    assert all(s in concrete_specs for s in expected)
+
+    # Re-concretize test2 to reflect the new concretization of included test1
+    # to remove zlib and write it out so it can be picked up by test4.
+    # Re-concretize test4 to reflect the re-concretization of included test2
+    # and ensure that its included specs are up-to-date
+    test2.concretize()
+    test2.write()
+    test4.concretize()
+
+    concrete_specs = [s for s, _ in test4.concretized_specs()]
+    assert Spec("zlib") not in concrete_specs
+
+    # Expecting mpileaks to appear only once
+    expected = [Spec(s) for s in ["libelf", "mpileaks", "callpath"]]
+    assert len(concrete_specs) == 3 and all(s in concrete_specs for s in expected)
+
+
 def test_env_config_view_default(
     environment_from_manifest, mock_stage, mock_fetch, install_mockery
 ):
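The tests added in the hunk above exercise the new `--include-concrete` option of `spack env create` through the test harness. A condensed sketch of that workflow, assuming the same fixtures and helpers the test module already uses (`env` and `add` as SpackCommand wrappers, `ev` for spack.environment, `Spec` from spack.spec) and with "src"/"combined" as placeholder environment names:

    # Build and concretize a source environment.
    env("create", "src")
    src = ev.read("src")
    with src:
        add("mpileaks")
    src.concretize()
    src.write()

    # Create a second environment that records the source environment's
    # concrete specs instead of re-concretizing them itself.
    env("create", "--include-concrete", "src", "combined")
    combined = ev.read("combined")

    # The manifest stores the included environment by path, and its concrete
    # roots become visible to the combined environment.
    assert src.path in combined.manifest["spack"]["include_concrete"]
    assert Spec("mpileaks") in combined.included_concretized_user_specs[src.path]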
@@ -3625,7 +3987,7 @@ def test_environment_depfile_makefile(depfile_flags, expected_installs, tmpdir,
     )

     # Do make dry run.
-    out = make("-n", "-f", makefile, output=str)
+    out = make("-n", "-f", makefile, "SPACK=spack", output=str)

     specs_that_make_would_install = _parse_dry_run_package_installs(out)

@@ -3663,7 +4025,7 @@ def test_depfile_works_with_gitversions(tmpdir, mock_packages, monkeypatch):
     env("depfile", "-o", makefile, "--make-disable-jobserver", "--make-prefix=prefix")

     # Do a dry run on the generated depfile
-    out = make("-n", "-f", makefile, output=str)
+    out = make("-n", "-f", makefile, "SPACK=spack", output=str)

     # Check that all specs are there (without duplicates)
     specs_that_make_would_install = _parse_dry_run_package_installs(out)
@@ -3725,7 +4087,12 @@ def test_depfile_phony_convenience_targets(

         # Phony install/* target should install picked package and all its deps
         specs_that_make_would_install = _parse_dry_run_package_installs(
-            make("-n", picked_spec.format("install/{name}-{version}-{hash}"), output=str)
+            make(
+                "-n",
+                picked_spec.format("install/{name}-{version}-{hash}"),
+                "SPACK=spack",
+                output=str,
+            )
         )

         assert set(specs_that_make_would_install) == set(expected_installs)
@@ -3733,7 +4100,12 @@ def test_depfile_phony_convenience_targets(

         # Phony install-deps/* target shouldn't install picked package
         specs_that_make_would_install = _parse_dry_run_package_installs(
-            make("-n", picked_spec.format("install-deps/{name}-{version}-{hash}"), output=str)
+            make(
+                "-n",
+                picked_spec.format("install-deps/{name}-{version}-{hash}"),
+                "SPACK=spack",
+                output=str,
+            )
         )

         assert set(specs_that_make_would_install) == set(expected_installs) - {picked_package}
@@ -3793,7 +4165,7 @@ def test_spack_package_ids_variable(tmpdir, mock_packages):
     make = Executable("make")

     # Do dry run.
-    out = make("-n", "-C", str(tmpdir), output=str)
+    out = make("-n", "-C", str(tmpdir), "SPACK=spack", output=str)

     # post-install: <hash> should've been executed
     with ev.read("test") as test:
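The depfile-related hunks above all add `SPACK=spack` to the `make` dry runs. A minimal sketch of the pattern, assuming the same `Executable` wrapper, a previously generated `makefile` path, and this test module's `_parse_dry_run_package_installs` helper:

    make = Executable("make")
    # "-n" requests a dry run; "SPACK=spack" overrides the SPACK make variable
    # used by the generated makefile, so the planned install commands are
    # parsed against a fixed command name.
    out = make("-n", "-f", makefile, "SPACK=spack", output=str)
    specs_that_make_would_install = _parse_dry_run_package_installs(out)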
@@ -4066,8 +4438,8 @@ def test_env_view_resolves_identical_file_conflicts(tmp_path, install_mockery, m
     # view-file/bin/
     #   x  # expect this x to be linked

-    assert os.readlink(tmp_path / "view" / "bin" / "x") == bottom.bin.x
-    assert os.readlink(tmp_path / "view" / "bin" / "y") == top.bin.y
+    assert readlink(tmp_path / "view" / "bin" / "x") == bottom.bin.x
+    assert readlink(tmp_path / "view" / "bin" / "y") == top.bin.y


 def test_env_view_ignores_different_file_conflicts(tmp_path, install_mockery, mock_fetch):
@@ -4078,4 +4450,4 @@ def test_env_view_ignores_different_file_conflicts(tmp_path, install_mockery, mo
     install()
     prefix_dependent = e.matching_spec("view-ignore-conflict").prefix
     # The dependent's file is linked into the view
-    assert os.readlink(tmp_path / "view" / "bin" / "x") == prefix_dependent.bin.x
+    assert readlink(tmp_path / "view" / "bin" / "x") == prefix_dependent.bin.x
@@ -69,10 +69,10 @@ def test_query_arguments():

     q_args = query_arguments(args)
     assert "installed" in q_args
-    assert "known" in q_args
+    assert "predicate_fn" in q_args
     assert "explicit" in q_args
     assert q_args["installed"] == ["installed"]
-    assert q_args["known"] is any
+    assert q_args["predicate_fn"] is None
     assert q_args["explicit"] is any
     assert "start_date" in q_args
     assert "end_date" not in q_args
@@ -349,6 +349,87 @@ def test_find_prefix_in_env(
     # Would throw error on regression


+def test_find_specs_include_concrete_env(mutable_mock_env_path, config, mutable_mock_repo, tmpdir):
+    path = tmpdir.join("spack.yaml")
+
+    with tmpdir.as_cwd():
+        with open(str(path), "w") as f:
+            f.write(
+                """\
+spack:
+  specs:
+  - mpileaks
+"""
+            )
+        env("create", "test1", "spack.yaml")
+
+    test1 = ev.read("test1")
+    test1.concretize()
+    test1.write()
+
+    with tmpdir.as_cwd():
+        with open(str(path), "w") as f:
+            f.write(
+                """\
+spack:
+  specs:
+  - libelf
+"""
+            )
+        env("create", "test2", "spack.yaml")
+
+    test2 = ev.read("test2")
+    test2.concretize()
+    test2.write()
+
+    env("create", "--include-concrete", "test1", "--include-concrete", "test2", "combined_env")
+
+    with ev.read("combined_env"):
+        output = find()
+
+    assert "No root specs" in output
+    assert "Included specs" in output
+    assert "mpileaks" in output
+    assert "libelf" in output
+
+
+def test_find_specs_nested_include_concrete_env(
+    mutable_mock_env_path, config, mutable_mock_repo, tmpdir
+):
+    path = tmpdir.join("spack.yaml")
+
+    with tmpdir.as_cwd():
+        with open(str(path), "w") as f:
+            f.write(
+                """\
+spack:
+  specs:
+  - mpileaks
+"""
+            )
+        env("create", "test1", "spack.yaml")
+
+    test1 = ev.read("test1")
+    test1.concretize()
+    test1.write()
+
+    env("create", "--include-concrete", "test1", "test2")
+    test2 = ev.read("test2")
+    test2.add("libelf")
+    test2.concretize()
+    test2.write()
+
+    env("create", "--include-concrete", "test2", "test3")
+
+    with ev.read("test3"):
+        output = find()
+
+    assert "No root specs" in output
+    assert "Included specs" in output
+    assert "mpileaks" in output
+    assert "libelf" in output
+
+
 def test_find_loaded(database, working_env):
     output = find("--loaded", "--group")
     assert output == ""
@@ -89,7 +89,7 @@ def check(pkg):
         assert pkg.run_tests

     monkeypatch.setattr(spack.package_base.PackageBase, "unit_test_check", check)
-    install("--test=all", "a")
+    install("--test=all", "pkg-a")


 def test_install_package_already_installed(
@@ -570,61 +570,58 @@ def test_cdash_upload_build_error(tmpdir, mock_fetch, install_mockery, capfd):
 @pytest.mark.disable_clean_stage_check
 def test_cdash_upload_clean_build(tmpdir, mock_fetch, install_mockery, capfd):
     # capfd interferes with Spack's capturing of e.g., Build.xml output
-    with capfd.disabled():
-        with tmpdir.as_cwd():
-            install("--log-file=cdash_reports", "--log-format=cdash", "a")
-            report_dir = tmpdir.join("cdash_reports")
-            assert report_dir in tmpdir.listdir()
-            report_file = report_dir.join("a_Build.xml")
-            assert report_file in report_dir.listdir()
-            content = report_file.open().read()
-            assert "</Build>" in content
-            assert "<Text>" not in content
+    with capfd.disabled(), tmpdir.as_cwd():
+        install("--log-file=cdash_reports", "--log-format=cdash", "pkg-a")
+        report_dir = tmpdir.join("cdash_reports")
+        assert report_dir in tmpdir.listdir()
+        report_file = report_dir.join("pkg-a_Build.xml")
+        assert report_file in report_dir.listdir()
+        content = report_file.open().read()
+        assert "</Build>" in content
+        assert "<Text>" not in content


 @pytest.mark.disable_clean_stage_check
 def test_cdash_upload_extra_params(tmpdir, mock_fetch, install_mockery, capfd):
     # capfd interferes with Spack's capture of e.g., Build.xml output
-    with capfd.disabled():
-        with tmpdir.as_cwd():
-            install(
-                "--log-file=cdash_reports",
-                "--log-format=cdash",
-                "--cdash-build=my_custom_build",
-                "--cdash-site=my_custom_site",
-                "--cdash-track=my_custom_track",
-                "a",
-            )
-            report_dir = tmpdir.join("cdash_reports")
-            assert report_dir in tmpdir.listdir()
-            report_file = report_dir.join("a_Build.xml")
-            assert report_file in report_dir.listdir()
-            content = report_file.open().read()
-            assert 'Site BuildName="my_custom_build - a"' in content
-            assert 'Name="my_custom_site"' in content
-            assert "-my_custom_track" in content
+    with capfd.disabled(), tmpdir.as_cwd():
+        install(
+            "--log-file=cdash_reports",
+            "--log-format=cdash",
+            "--cdash-build=my_custom_build",
+            "--cdash-site=my_custom_site",
+            "--cdash-track=my_custom_track",
+            "pkg-a",
+        )
+        report_dir = tmpdir.join("cdash_reports")
+        assert report_dir in tmpdir.listdir()
+        report_file = report_dir.join("pkg-a_Build.xml")
+        assert report_file in report_dir.listdir()
+        content = report_file.open().read()
+        assert 'Site BuildName="my_custom_build - pkg-a"' in content
+        assert 'Name="my_custom_site"' in content
+        assert "-my_custom_track" in content


 @pytest.mark.disable_clean_stage_check
 def test_cdash_buildstamp_param(tmpdir, mock_fetch, install_mockery, capfd):
     # capfd interferes with Spack's capture of e.g., Build.xml output
-    with capfd.disabled():
-        with tmpdir.as_cwd():
-            cdash_track = "some_mocked_track"
-            buildstamp_format = "%Y%m%d-%H%M-{0}".format(cdash_track)
-            buildstamp = time.strftime(buildstamp_format, time.localtime(int(time.time())))
-            install(
-                "--log-file=cdash_reports",
-                "--log-format=cdash",
-                "--cdash-buildstamp={0}".format(buildstamp),
-                "a",
-            )
-            report_dir = tmpdir.join("cdash_reports")
-            assert report_dir in tmpdir.listdir()
-            report_file = report_dir.join("a_Build.xml")
-            assert report_file in report_dir.listdir()
-            content = report_file.open().read()
-            assert buildstamp in content
+    with capfd.disabled(), tmpdir.as_cwd():
+        cdash_track = "some_mocked_track"
+        buildstamp_format = "%Y%m%d-%H%M-{0}".format(cdash_track)
+        buildstamp = time.strftime(buildstamp_format, time.localtime(int(time.time())))
+        install(
+            "--log-file=cdash_reports",
+            "--log-format=cdash",
+            "--cdash-buildstamp={0}".format(buildstamp),
+            "pkg-a",
+        )
+        report_dir = tmpdir.join("cdash_reports")
+        assert report_dir in tmpdir.listdir()
+        report_file = report_dir.join("pkg-a_Build.xml")
+        assert report_file in report_dir.listdir()
+        content = report_file.open().read()
+        assert buildstamp in content


 @pytest.mark.disable_clean_stage_check
@@ -632,38 +629,37 @@ def test_cdash_install_from_spec_json(
     tmpdir, mock_fetch, install_mockery, capfd, mock_packages, mock_archive, config
 ):
     # capfd interferes with Spack's capturing
-    with capfd.disabled():
-        with tmpdir.as_cwd():
-            spec_json_path = str(tmpdir.join("spec.json"))
-
-            pkg_spec = Spec("a")
-            pkg_spec.concretize()
-
-            with open(spec_json_path, "w") as fd:
-                fd.write(pkg_spec.to_json(hash=ht.dag_hash))
-
-            install(
-                "--log-format=cdash",
-                "--log-file=cdash_reports",
-                "--cdash-build=my_custom_build",
-                "--cdash-site=my_custom_site",
-                "--cdash-track=my_custom_track",
-                "-f",
-                spec_json_path,
-            )
-
-            report_dir = tmpdir.join("cdash_reports")
-            assert report_dir in tmpdir.listdir()
-            report_file = report_dir.join("a_Configure.xml")
-            assert report_file in report_dir.listdir()
-            content = report_file.open().read()
-            install_command_regex = re.compile(
-                r"<ConfigureCommand>(.+)</ConfigureCommand>", re.MULTILINE | re.DOTALL
-            )
-            m = install_command_regex.search(content)
-            assert m
-            install_command = m.group(1)
-            assert "a@" in install_command
+    with capfd.disabled(), tmpdir.as_cwd():
+        spec_json_path = str(tmpdir.join("spec.json"))
+
+        pkg_spec = Spec("pkg-a")
+        pkg_spec.concretize()
+
+        with open(spec_json_path, "w") as fd:
+            fd.write(pkg_spec.to_json(hash=ht.dag_hash))
+
+        install(
+            "--log-format=cdash",
+            "--log-file=cdash_reports",
+            "--cdash-build=my_custom_build",
+            "--cdash-site=my_custom_site",
+            "--cdash-track=my_custom_track",
+            "-f",
+            spec_json_path,
+        )
+
+        report_dir = tmpdir.join("cdash_reports")
+        assert report_dir in tmpdir.listdir()
+        report_file = report_dir.join("pkg-a_Configure.xml")
+        assert report_file in report_dir.listdir()
+        content = report_file.open().read()
+        install_command_regex = re.compile(
+            r"<ConfigureCommand>(.+)</ConfigureCommand>", re.MULTILINE | re.DOTALL
+        )
+        m = install_command_regex.search(content)
+        assert m
+        install_command = m.group(1)
+        assert "pkg-a@" in install_command


 @pytest.mark.disable_clean_stage_check
@@ -795,15 +791,15 @@ def test_install_no_add_in_env(tmpdir, mock_fetch, install_mockery, mutable_mock
     # ^libdwarf
     # ^mpich
     # libelf@0.8.10
-    # a~bvv
-    # ^b
-    # a
-    # ^b
+    # pkg-a~bvv
+    # ^pkg-b
+    # pkg-a
+    # ^pkg-b
     e = ev.create("test", with_view=False)
     e.add("mpileaks")
     e.add("libelf@0.8.10")  # so env has both root and dep libelf specs
-    e.add("a")
-    e.add("a ~bvv")
+    e.add("pkg-a")
+    e.add("pkg-a ~bvv")
     e.concretize()
     e.write()
     env_specs = e.all_specs()
@@ -814,9 +810,9 @@ def test_install_no_add_in_env(tmpdir, mock_fetch, install_mockery, mutable_mock

     # First find and remember some target concrete specs in the environment
     for e_spec in env_specs:
-        if e_spec.satisfies(Spec("a ~bvv")):
+        if e_spec.satisfies(Spec("pkg-a ~bvv")):
             a_spec = e_spec
-        elif e_spec.name == "b":
+        elif e_spec.name == "pkg-b":
             b_spec = e_spec
         elif e_spec.satisfies(Spec("mpi")):
             mpi_spec = e_spec
@@ -839,8 +835,8 @@ def test_install_no_add_in_env(tmpdir, mock_fetch, install_mockery, mutable_mock
     assert "You can add specs to the environment with 'spack add " in inst_out

     # Without --add, ensure that two packages "a" get installed
-    inst_out = install("a", output=str)
-    assert len([x for x in e.all_specs() if x.installed and x.name == "a"]) == 2
+    inst_out = install("pkg-a", output=str)
+    assert len([x for x in e.all_specs() if x.installed and x.name == "pkg-a"]) == 2

     # Install an unambiguous dependency spec (that already exists as a dep
     # in the environment) and make sure it gets installed (w/ deps),
@@ -873,7 +869,7 @@ def test_install_no_add_in_env(tmpdir, mock_fetch, install_mockery, mutable_mock
     # root of the environment as well as installed.
     assert b_spec not in e.roots()

-    install("--add", "b")
+    install("--add", "pkg-b")

     assert b_spec in e.roots()
     assert b_spec not in e.uninstalled_specs()
@@ -908,7 +904,7 @@ def test_cdash_auth_token(tmpdir, mock_fetch, install_mockery, monkeypatch, capf
     # capfd interferes with Spack's capturing
     with tmpdir.as_cwd(), capfd.disabled():
         monkeypatch.setenv("SPACK_CDASH_AUTH_TOKEN", "asdf")
-        out = install("-v", "--log-file=cdash_reports", "--log-format=cdash", "a")
+        out = install("-v", "--log-file=cdash_reports", "--log-format=cdash", "pkg-a")
         assert "Using CDash auth token from environment" in out

@@ -916,26 +912,25 @@ def test_cdash_auth_token(tmpdir, mock_fetch, install_mockery, monkeypatch, capf
 @pytest.mark.disable_clean_stage_check
 def test_cdash_configure_warning(tmpdir, mock_fetch, install_mockery, capfd):
     # capfd interferes with Spack's capturing of e.g., Build.xml output
-    with capfd.disabled():
-        with tmpdir.as_cwd():
-            # Test would fail if install raised an error.
-
-            # Ensure that even on non-x86_64 architectures, there are no
-            # dependencies installed
-            spec = spack.spec.Spec("configure-warning").concretized()
-            spec.clear_dependencies()
-            specfile = "./spec.json"
-            with open(specfile, "w") as f:
-                f.write(spec.to_json())
-
-            install("--log-file=cdash_reports", "--log-format=cdash", specfile)
-            # Verify Configure.xml exists with expected contents.
-            report_dir = tmpdir.join("cdash_reports")
-            assert report_dir in tmpdir.listdir()
-            report_file = report_dir.join("Configure.xml")
-            assert report_file in report_dir.listdir()
-            content = report_file.open().read()
-            assert "foo: No such file or directory" in content
+    with capfd.disabled(), tmpdir.as_cwd():
+        # Test would fail if install raised an error.
+
+        # Ensure that even on non-x86_64 architectures, there are no
+        # dependencies installed
+        spec = Spec("configure-warning").concretized()
+        spec.clear_dependencies()
+        specfile = "./spec.json"
+        with open(specfile, "w") as f:
+            f.write(spec.to_json())
+
+        install("--log-file=cdash_reports", "--log-format=cdash", specfile)
+        # Verify Configure.xml exists with expected contents.
+        report_dir = tmpdir.join("cdash_reports")
+        assert report_dir in tmpdir.listdir()
+        report_file = report_dir.join("Configure.xml")
+        assert report_file in report_dir.listdir()
+        content = report_file.open().read()
+        assert "foo: No such file or directory" in content


 @pytest.mark.not_on_windows("ArchSpec gives test platform debian rather than windows")
@@ -952,7 +947,7 @@ def test_compiler_bootstrap(
     assert CompilerSpec("gcc@=12.0") not in compilers.all_compiler_specs()

     # Test succeeds if it does not raise an error
-    install("a%gcc@=12.0")
+    install("pkg-a%gcc@=12.0")


 @pytest.mark.not_on_windows("Binary mirrors not supported on windows")
@@ -992,8 +987,8 @@ def test_compiler_bootstrap_from_binary_mirror(
     # Now make sure that when the compiler is installed from binary mirror,
     # it also gets configured as a compiler. Test succeeds if it does not
     # raise an error
-    install("--no-check-signature", "--cache-only", "--only", "dependencies", "b%gcc@=10.2.0")
-    install("--no-cache", "--only", "package", "b%gcc@10.2.0")
+    install("--no-check-signature", "--cache-only", "--only", "dependencies", "pkg-b%gcc@=10.2.0")
+    install("--no-cache", "--only", "package", "pkg-b%gcc@10.2.0")


 @pytest.mark.not_on_windows("ArchSpec gives test platform debian rather than windows")
@@ -1013,7 +1008,7 @@ def test_compiler_bootstrap_already_installed(

     # Test succeeds if it does not raise an error
     install("gcc@=12.0")
-    install("a%gcc@=12.0")
+    install("pkg-a%gcc@=12.0")


 def test_install_fails_no_args(tmpdir):
@@ -1195,7 +1190,7 @@ def test_report_filename_for_cdash(install_mockery_mutable_config, mock_fetch):
     parser = argparse.ArgumentParser()
     spack.cmd.install.setup_parser(parser)
     args = parser.parse_args(
-        ["--cdash-upload-url", "https://blahblah/submit.php?project=debugging", "a"]
+        ["--cdash-upload-url", "https://blahblah/submit.php?project=debugging", "pkg-a"]
     )
     specs = spack.cmd.install.concrete_specs_from_cli(args, {})
     filename = spack.cmd.install.report_filename(args, specs)
@@ -121,7 +121,7 @@ def test_maintainers_list_packages(mock_packages, capfd):


 def test_maintainers_list_fails(mock_packages, capfd):
-    out = maintainers("a", fail_on_error=False)
+    out = maintainers("pkg-a", fail_on_error=False)
     assert not out
     assert maintainers.returncode == 1

@@ -11,6 +11,7 @@
 import spack.config
 import spack.main
 import spack.modules
+import spack.spec
 import spack.store

 module = spack.main.SpackCommand("module")
@@ -178,8 +179,8 @@ def test_setdefault_command(mutable_database, mutable_config):
         }
     }
     spack.config.set("modules", data)
-    # Install two different versions of a package
-    other_spec, preferred = "a@1.0", "a@2.0"
+    # Install two different versions of pkg-a
+    other_spec, preferred = "pkg-a@1.0", "pkg-a@2.0"

     spack.spec.Spec(other_spec).concretized().package.do_install(fake=True)
     spack.spec.Spec(preferred).concretized().package.do_install(fake=True)
Some files were not shown because too many files have changed in this diff