Compare commits
320 Commits
packages/r ... develop-20
Commit SHAs:
5f9c6299d1, 541e40e252, ddc8790896, 7d6b643b58, 6f08db4631, 7086d6f1ac, 7a313295ac, 3bdaaaf47c, a3fa54812f, afc9615abf,
19352af10b, e4f8cff286, 76df9de26a, 983e7427f7, d9df520e85, b51fd9f5a6, 8058cd34e4, 2f488b9329, 78a4d3e7d2, 2b9a621d19,
fa5f4f1cab, 4042afaa99, 7fdf1029b7, 814f4f20c0, 7c473937ba, 9d754c127a, 9a8bff01ad, 434a703bcf, a42108438d, 1be9b7f53c,
6b05a80745, 4d36b0a5ef, 636843f330, d4378b6e09, 69b54d9039, 618866b35c, 47bd8a6b26, 93d31225db, c3a1d1f91e, 33621a9860,
055eb3cd94, c4d18671fe, 5d9f0cf44e, 02faa7b97e, d37749cedd, 7e5e6f2833, 37ea9657cf, 2107a88514, 1a4b07e730, c98045e028,
bc5456a791, 656720a387, 9604c0a9b3, ee96194486, ab21fc1daf, d593ad0c06, 254fe6ed6e, 7e20874f54, cd4c40fdbd, c13e8e49fe,
35a2a0b3d0, 22d69724f4, f6e3f6eec7, 866c440f0c, fe6f5b87dc, 3df3f40984, 33f4a40df4, 7ba0132f66, 744f034dfb, d41fb3d542,
9b077a360e, 5c297d8322, 9e18e63053, 069286bda7, 1679b5e141, 1f935ac356, e66e572656, 17d3d17d46, c814fb5fe6, fe2d06399f,
b5dec35113, 20565ba8ab, c47a3ee05b, aaa7469b92, 1f8a6d8e8b, 654bf45c01, daa42be47f, ca179deb8e, 8f6092bf83, 6b649ccf4f,
d463d4566d, f79be3022b, 7327e731b4, ab6d494fe4, e5aa74e7cb, 728f8e2654, 9a58a6da0d, 395491815a, fd98ebed9d, f7de621d0c,
a5f404cff5, 8100b0d575, b38ab54028, 412f22b76a, d226ef31bd, ae32af927d, 400dd40492, 04bdff33ad, 017e3dd417, f7e3902ca8,
89da8d4c84, 8cac74699b, db311eef46, 1427735876, f88ca8cc1f, bf1f4e15ee, dd756d53de, 1c1970e727, c283fce487, 199cbce5ef,
82dea7e6ce, bd71ce5856, 73fc86cbc3, 3589edcc6d, cade66d842, c5766aa757, c3e9bd4fbf, 05357052ac, dc3f5cd606, 9a16927993,
093b273f5c, b6ff126494, fe8f631b7d, f9065f0c7e, 699735016f, ec85bbe3f7, 7e1ad8f321, 0eb8f4f0f9, ee27dc5d45, ec0a57cba4,
4c91e6245c, 6a1dfbda97, 6b0011c8f1, 8b5521ec0a, b9e4e98f15, 85487f23bc, fb4811ec3f, 202e64872a, 25ba3124bd, df57e1ceb3,
59b4b785e0, b1b21a4d02, b1af32cb60, 9d8f94a7c8, 1297673a70, 0fee2c234e, 229cf49c71, 394e6159d6, cbe18d9cbc, 2d83707f84,
9a91f021a7, 297e43b097, 900765901d, 680d1f2e58, 76957f19f9, c7001efeb8, a60d1084b1, 497e19f0e3, cd6ee96398, 904d85b53b,
199653dd31, fdfb4e9893, afa76ebbdc, e5c045cc1c, 8c92836c39, a782e6bc33, 4ede0ae5e3, 986325eb0d, 8bcd64ce6c, f079ad3690,
8c1d6188e3, 1d70ab934c, fa704e867c, 85939b26ae, a5436b3962, a8e25193e0, 480d6f9911, 02f329a8af, 2de712b35f, aa49b3d8ce,
eccecba39a, 94c99fc5d4, 1f1021a47f, 296e5308a7, 47e79c32fd, 906799eec5, dcdcab7b2c, 86050decb9, fff8165f2f, 5a9dbcc0c4,
bd627465f3, f96e8757b8, b8cbbb8e2e, d40f847497, ed34dfca96, 3ee6a5b96f, 88bcfddbbb, c49269f9dd, ef45c392e0, 8b811171c7,
823a2c1e4b, ead25b1e9e, d5eefcba87, 1bcb1fcebc, f19b657235, 8e1bd9a403, ba56622574, 836be2364c, b623f58782, 182bc87fe1,
f93595ba2f, aa5b17ceb5, eb5a1d3b4c, 84f680239e, c7b693a0df, 7d5ad18573, 2921e04353, 33464a7038, d3cdb2a344, 34df21b62c,
e8a13642a0, dc3c96dd2f, c29652580a, d714a9b223, f596a8cdad, 3699a0ec9b, 6c268234ba, c1736077bb, 85905959dc, 2ae5596e92,
9d0b9f086f, da079ed06f, a69c5b3e32, e3cce2bd96, 0d668e4e92, ad6c7380c5, c064a30765, 4a4f156d99, cb8878aaf4, d49f3a0960,
15413c7258, de754c7a47, ac9398ed21, 57769fac7d, c65fd7e12d, c71d778875, a7313dc407, 31477d5dc7, 382ba0d041, 886c950423,
3798b16a29, 796617054d, 78fc25ec12, 6de51fdc58, 430ba496d1, e1ede9c04b, 856dd3417b, e49c6f68bc, eed7a1af24, 22e40541c7,
8561c89c25, 6501705de2, 0b3e1fd412, c260da5127, f63261dc65, 1c081611ea, 428b4e340a, 20bf239a6a, cd682613cf, c1852e3706,
855a8476e4, d4a892f200, 66e2836ba1, 52ab0c66fe, f316068b27, 553cc3b70a, f0f9a16e4f, 9ec8eaa0d3, 00182b19dc, cc7a29c55a,
61b0f4f84d, fe3bfa482e, e5f53a6250, a7e8080784, f5e934f2dc, 54b57c5d1e, 725ef8f5c8, f51a9a9107, 4f0e336ed0, 64774f3015,
4e9fbca033, a2fd26bbcc, 067da09b46, b1b0c108bb, c624088a7b, a965c7c5c8, 904d43f0e6, 10b6d7282a, 7112a49d1e, b11bd6b745,
4d0b04cf34, 165c171659, aa3c62d936, cba2fe914c, 1b82779087, 55b1b0f3f0, 4606c8ed68, dd53eeb322, f42486b684, 44ecea3813
.github/workflows/audit.yaml (2 changes, vendored)

@@ -29,7 +29,7 @@ jobs:
 shell: ${{ matrix.system.shell }}
 steps:
 - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+ - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
 with:
 python-version: ${{inputs.python_version}}
 - name: Install Python packages
.github/workflows/bootstrap.yml (45 changes, vendored)

@@ -63,7 +63,7 @@ jobs:
 uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
 with:
 fetch-depth: 0
- - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+ - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
 with:
 python-version: "3.12"
 - name: Bootstrap clingo

@@ -112,10 +112,10 @@ jobs:
 runs-on: ${{ matrix.runner }}
 strategy:
 matrix:
- runner: ['macos-13', 'macos-14', "ubuntu-latest"]
+ runner: ['macos-13', 'macos-14', "ubuntu-latest", "windows-latest"]
 steps:
 - name: Setup macOS
- if: ${{ matrix.runner != 'ubuntu-latest' }}
+ if: ${{ matrix.runner != 'ubuntu-latest' && matrix.runner != 'windows-latest'}}
 run: |
 brew install tree
 # Remove GnuPG since we want to bootstrap it

@@ -124,11 +124,16 @@ jobs:
 if: ${{ matrix.runner == 'ubuntu-latest' }}
 run: |
 sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
+ - name: Setup Windows
+ if: ${{ matrix.runner == 'windows-latest' }}
+ run: |
+ Remove-Item -Path (Get-Command gpg).Path
+ Remove-Item -Path (Get-Command file).Path
 - name: Checkout
 uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
 with:
 fetch-depth: 0
- - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+ - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
 with:
 python-version: |
 3.8

@@ -137,11 +142,20 @@ jobs:
 3.11
 3.12
 - name: Set bootstrap sources
+ env:
+ SETUP_SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
+ SETUP_SCRIPT_SOURCE: ${{ matrix.runner == 'windows-latest' && './' || 'source ' }}
 run: |
+ ${{ env.SETUP_SCRIPT_SOURCE }}share/spack/setup-env.${{ env.SETUP_SCRIPT_EXT }}
+ spack bootstrap disable github-actions-v0.4
+ - name: Disable from source bootstrap
+ if: ${{ matrix.runner != 'windows-latest' }}
+ run: |
 source share/spack/setup-env.sh
- spack bootstrap disable github-actions-v0.4
 spack bootstrap disable spack-install
 - name: Bootstrap clingo
+ # No binary clingo on Windows yet
+ if: ${{ matrix.runner != 'windows-latest' }}
 run: |
 set -e
 for ver in '3.8' '3.9' '3.10' '3.11' '3.12' ; do

@@ -164,7 +178,24 @@ jobs:
 fi
 done
 - name: Bootstrap GnuPG
+ env:
+ SETUP_SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
+ SETUP_SCRIPT_SOURCE: ${{ matrix.runner == 'windows-latest' && './' || 'source ' }}
+ USER_SCOPE_PARENT_DIR: ${{ matrix.runner == 'windows-latest' && '$env:userprofile' || '$HOME' }}
+ VALIDATE_LAST_EXIT: ${{ matrix.runner == 'windows-latest' && './share/spack/qa/validate_last_exit.ps1' || '' }}
 run: |
- source share/spack/setup-env.sh
+ ${{ env.SETUP_SCRIPT_SOURCE }}share/spack/setup-env.${{ env.SETUP_SCRIPT_EXT }}
 spack -d gpg list
- tree ~/.spack/bootstrap/store/
+ ${{ env.VALIDATE_LAST_EXIT }}
+ tree ${{ env.USER_SCOPE_PARENT_DIR }}/.spack/bootstrap/store/
+ - name: Bootstrap File
+ env:
+ SETUP_SCRIPT_EXT: ${{ matrix.runner == 'windows-latest' && 'ps1' || 'sh' }}
+ SETUP_SCRIPT_SOURCE: ${{ matrix.runner == 'windows-latest' && './' || 'source ' }}
+ USER_SCOPE_PARENT_DIR: ${{ matrix.runner == 'windows-latest' && '$env:userprofile' || '$HOME' }}
+ VALIDATE_LAST_EXIT: ${{ matrix.runner == 'windows-latest' && './share/spack/qa/validate_last_exit.ps1' || '' }}
+ run: |
+ ${{ env.SETUP_SCRIPT_SOURCE }}share/spack/setup-env.${{ env.SETUP_SCRIPT_EXT }}
+ spack -d python share/spack/qa/bootstrap-file.py
+ ${{ env.VALIDATE_LAST_EXIT }}
+ tree ${{ env.USER_SCOPE_PARENT_DIR }}/.spack/bootstrap/store/
.github/workflows/build-containers.yml (4 changes, vendored)

@@ -87,7 +87,7 @@ jobs:
 fi

 - name: Upload Dockerfile
- uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a
+ uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874
 with:
 name: dockerfiles_${{ matrix.dockerfile[0] }}
 path: dockerfiles

@@ -126,7 +126,7 @@ jobs:
 needs: deploy-images
 steps:
 - name: Merge Artifacts
- uses: actions/upload-artifact/merge@834a144ee995460fba8ed112a2fc961b36a5ec5a
+ uses: actions/upload-artifact/merge@50769540e7f4bd5e21e526ee35c689e35e0d6874
 with:
 name: dockerfiles
 pattern: dockerfiles_*
.github/workflows/nightly-win-builds.yml (2 changes, vendored)

@@ -17,7 +17,7 @@ jobs:
 - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
 with:
 fetch-depth: 0
- - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+ - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
 with:
 python-version: 3.9
 - name: Install Python packages
.github/workflows/unit_tests.yaml (10 changes, vendored)

@@ -43,7 +43,7 @@ jobs:
 - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
 with:
 fetch-depth: 0
- - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+ - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
 with:
 python-version: ${{ matrix.python-version }}
 - name: Install System packages

@@ -91,7 +91,7 @@ jobs:
 - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
 with:
 fetch-depth: 0
- - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+ - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
 with:
 python-version: '3.11'
 - name: Install System packages

@@ -151,7 +151,7 @@ jobs:
 - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
 with:
 fetch-depth: 0
- - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+ - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
 with:
 python-version: '3.11'
 - name: Install System packages

@@ -188,7 +188,7 @@ jobs:
 - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
 with:
 fetch-depth: 0
- - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+ - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
 with:
 python-version: ${{ matrix.python-version }}
 - name: Install Python packages

@@ -225,7 +225,7 @@ jobs:
 - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
 with:
 fetch-depth: 0
- - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+ - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
 with:
 python-version: 3.9
 - name: Install Python packages
.github/workflows/valid-style.yml (4 changes, vendored)

@@ -19,7 +19,7 @@ jobs:
 runs-on: ubuntu-latest
 steps:
 - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
- - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+ - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
 with:
 python-version: '3.11'
 cache: 'pip'

@@ -38,7 +38,7 @@ jobs:
 - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332
 with:
 fetch-depth: 0
- - uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f
+ - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
 with:
 python-version: '3.11'
 cache: 'pip'
@@ -72,3 +72,13 @@ packages:
 permissions:
 read: world
 write: user
+ cray-mpich:
+ buildable: false
+ cray-mvapich2:
+ buildable: false
+ fujitsu-mpi:
+ buildable: false
+ hpcx-mpi:
+ buildable: false
+ spectrum-mpi:
+ buildable: false
@@ -863,7 +863,7 @@ named list ``compilers`` is ``['%gcc', '%clang', '%intel']`` on
 spack:
 definitions:
 - compilers: ['%gcc', '%clang']
- - when: arch.satisfies('x86_64:')
+ - when: arch.satisfies('target=x86_64:')
 compilers: ['%intel']

 .. note::

@@ -893,8 +893,9 @@ The valid variables for a ``when`` clause are:

 #. ``env``. The user environment (usually ``os.environ`` in Python).

- #. ``hostname``. The hostname of the system (if ``hostname`` is an
-    executable in the user's PATH).
+ #. ``hostname``. The hostname of the system.

 #. ``full_hostname``. The fully qualified hostname of the system.

 ^^^^^^^^^^^^^^^^^^^^^^^^
 SpecLists as Constraints
@@ -1263,6 +1263,11 @@ Git fetching supports the following parameters to ``version``:
 option ``--depth 1`` will be used if the version of git and the specified
 transport protocol support it, and ``--single-branch`` will be used if the
 version of git supports it.
+ * ``git_sparse_paths``: Use ``sparse-checkout`` to only clone these relative paths.
+   This feature requires ``git`` to be version ``2.25.0`` or later but is useful for
+   large repositories that have separate portions that can be built independently.
+   If the paths provided are directories, then all the subdirectories and associated files
+   will also be cloned.

 Only one of ``tag``, ``branch``, or ``commit`` can be used at a time.

@@ -1361,6 +1366,41 @@ Submodules
 For more information about git submodules see the manpage of git: ``man
 git-submodule``.

+ Sparse-Checkout
+   You can supply ``git_sparse_paths`` at the package or version level to utilize git's
+   sparse-checkout feature. This will only clone the paths that are specified in the
+   ``git_sparse_paths`` attribute for the package, along with the files in the top-level directory.
+   This feature allows you to only clone what you need from a large repository.
+   Note that this is a newer feature in git and requires git ``2.25.0`` or greater.
+   If ``git_sparse_paths`` is supplied and the git version is too old,
+   then a warning will be issued and that package will use the standard cloning operations instead.
+   ``git_sparse_paths`` should be supplied as a list of paths, a callable function for versions,
+   or a more complex package attribute using the ``@property`` decorator. The return value should be
+   a list for a callable implementation of ``git_sparse_paths``.
+
+   .. code-block:: python
+
+      def sparse_path_function(package):
+          """A callable function that can be used inside a version."""
+          # Paths can be directories or files; all subdirectories and files are included.
+          paths = ["doe", "rae", "me/file.cpp"]
+          if package.spec.version > Version("1.2.0"):
+              paths.extend(["fae"])
+          return paths
+
+      class MyPackage(Package):
+          # Can also be a package attribute, used if not specified in a version.
+          git_sparse_paths = ["doe", "rae"]
+
+          # use the package attribute
+          version("1.0.0")
+          version("1.1.0")
+          # use the function
+          version("1.1.5", git_sparse_paths=sparse_path_function)
+          version("1.2.0", git_sparse_paths=sparse_path_function)
+          version("1.2.5", git_sparse_paths=sparse_path_function)

 .. _github-fetch:

 ^^^^^^
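The simplest form of the new parameter is a literal list on a single `version` directive. A minimal sketch, assuming a hypothetical package (the class name, URL, and paths below are illustrative, not taken from the diff):

```python
class BigRepo(Package):
    """Hypothetical package needing only two subtrees of a large repository."""

    git = "https://example.com/big-repo.git"

    # Clone only these paths (top-level files are always included); with a
    # git older than 2.25.0, Spack warns and falls back to a full clone.
    version("1.0", tag="v1.0", git_sparse_paths=["docs", "src/core"])
```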
lib/spack/external/__init__.py (2 changes, vendored)

@@ -18,7 +18,7 @@

 * Homepage: https://pypi.python.org/pypi/archspec
 * Usage: Labeling, comparison and detection of microarchitectures
- * Version: 0.2.4 (commit 48b92512b9ce203ded0ebd1ac41b42593e931f7c)
+ * Version: 0.2.5-dev (commit 7e6740012b897ae4a950f0bba7e9726b767e921f)

 astunparse
 ----------------
lib/spack/external/_vendoring/distro/distro.py (24 changes, vendored)

@@ -1265,27 +1265,29 @@ def _distro_release_info(self) -> Dict[str, str]:
 match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
 else:
 try:
- basenames = [
- basename
- for basename in os.listdir(self.etc_dir)
- if basename not in _DISTRO_RELEASE_IGNORE_BASENAMES
- and os.path.isfile(os.path.join(self.etc_dir, basename))
- ]
+ with os.scandir(self.etc_dir) as it:
+ etc_files = [
+ p.path for p in it
+ if p.is_file() and p.name not in _DISTRO_RELEASE_IGNORE_BASENAMES
+ ]
 # We sort for repeatability in cases where there are multiple
 # distro specific files; e.g. CentOS, Oracle, Enterprise all
 # containing `redhat-release` on top of their own.
- basenames.sort()
+ etc_files.sort()
 except OSError:
 # This may occur when /etc is not readable but we can't be
 # sure about the *-release files. Check common entries of
 # /etc for information. If they turn out to not be there the
 # error is handled in `_parse_distro_release_file()`.
- basenames = _DISTRO_RELEASE_BASENAMES
- for basename in basenames:
- match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
+ etc_files = [
+ os.path.join(self.etc_dir, basename)
+ for basename in _DISTRO_RELEASE_BASENAMES
+ ]
+
+ for filepath in etc_files:
+ match = _DISTRO_RELEASE_BASENAME_PATTERN.match(os.path.basename(filepath))
 if match is None:
 continue
- filepath = os.path.join(self.etc_dir, basename)
 distro_info = self._parse_distro_release_file(filepath)
 # The name is always present if the pattern matches.
 if "name" not in distro_info:
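The distro.py change above swaps an `os.listdir` plus per-name `os.path.isfile` loop for a single `os.scandir` pass, which reuses the file-type information returned by the directory scan itself. A standalone sketch of the same pattern (the directory default and ignore set are illustrative):

```python
import os

IGNORE = {"os-release", "lsb-release"}  # illustrative ignore set


def release_files(etc_dir: str = "/etc") -> list:
    # One scandir() pass: DirEntry.is_file() uses data cached by the scan
    # instead of issuing a separate stat() call for every name.
    with os.scandir(etc_dir) as it:
        files = [p.path for p in it if p.is_file() and p.name not in IGNORE]
    files.sort()  # sort for repeatable results across runs
    return files
```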
lib/spack/external/_vendoring/jsonschema/_format.py (173 changes, vendored)

@@ -231,96 +231,6 @@ def is_host_name(instance):
 return True


- try:
- # The built-in `idna` codec only implements RFC 3890, so we go elsewhere.
- import idna
- except ImportError:
- pass
- else:
- @_checks_drafts(draft7="idn-hostname", raises=idna.IDNAError)
- def is_idn_host_name(instance):
- if not isinstance(instance, str_types):
- return True
- idna.encode(instance)
- return True
-
-
- try:
- import rfc3987
- except ImportError:
- try:
- from rfc3986_validator import validate_rfc3986
- except ImportError:
- pass
- else:
- @_checks_drafts(name="uri")
- def is_uri(instance):
- if not isinstance(instance, str_types):
- return True
- return validate_rfc3986(instance, rule="URI")
-
- @_checks_drafts(
- draft6="uri-reference",
- draft7="uri-reference",
- raises=ValueError,
- )
- def is_uri_reference(instance):
- if not isinstance(instance, str_types):
- return True
- return validate_rfc3986(instance, rule="URI_reference")
-
- else:
- @_checks_drafts(draft7="iri", raises=ValueError)
- def is_iri(instance):
- if not isinstance(instance, str_types):
- return True
- return rfc3987.parse(instance, rule="IRI")
-
- @_checks_drafts(draft7="iri-reference", raises=ValueError)
- def is_iri_reference(instance):
- if not isinstance(instance, str_types):
- return True
- return rfc3987.parse(instance, rule="IRI_reference")
-
- @_checks_drafts(name="uri", raises=ValueError)
- def is_uri(instance):
- if not isinstance(instance, str_types):
- return True
- return rfc3987.parse(instance, rule="URI")
-
- @_checks_drafts(
- draft6="uri-reference",
- draft7="uri-reference",
- raises=ValueError,
- )
- def is_uri_reference(instance):
- if not isinstance(instance, str_types):
- return True
- return rfc3987.parse(instance, rule="URI_reference")
-
-
- try:
- from strict_rfc3339 import validate_rfc3339
- except ImportError:
- try:
- from rfc3339_validator import validate_rfc3339
- except ImportError:
- validate_rfc3339 = None
-
- if validate_rfc3339:
- @_checks_drafts(name="date-time")
- def is_datetime(instance):
- if not isinstance(instance, str_types):
- return True
- return validate_rfc3339(instance)
-
- @_checks_drafts(draft7="time")
- def is_time(instance):
- if not isinstance(instance, str_types):
- return True
- return is_datetime("1970-01-01T" + instance)
-
-
 @_checks_drafts(name="regex", raises=re.error)
 def is_regex(instance):
 if not isinstance(instance, str_types):

@@ -340,86 +250,3 @@ def is_draft3_time(instance):
 if not isinstance(instance, str_types):
 return True
 return datetime.datetime.strptime(instance, "%H:%M:%S")
-
-
- try:
- import webcolors
- except ImportError:
- pass
- else:
- def is_css_color_code(instance):
- return webcolors.normalize_hex(instance)
-
- @_checks_drafts(draft3="color", raises=(ValueError, TypeError))
- def is_css21_color(instance):
- if (
- not isinstance(instance, str_types) or
- instance.lower() in webcolors.css21_names_to_hex
- ):
- return True
- return is_css_color_code(instance)
-
- def is_css3_color(instance):
- if instance.lower() in webcolors.css3_names_to_hex:
- return True
- return is_css_color_code(instance)
-
-
- try:
- import jsonpointer
- except ImportError:
- pass
- else:
- @_checks_drafts(
- draft6="json-pointer",
- draft7="json-pointer",
- raises=jsonpointer.JsonPointerException,
- )
- def is_json_pointer(instance):
- if not isinstance(instance, str_types):
- return True
- return jsonpointer.JsonPointer(instance)
-
- # TODO: I don't want to maintain this, so it
- # needs to go either into jsonpointer (pending
- # https://github.com/stefankoegl/python-json-pointer/issues/34) or
- # into a new external library.
- @_checks_drafts(
- draft7="relative-json-pointer",
- raises=jsonpointer.JsonPointerException,
- )
- def is_relative_json_pointer(instance):
- # Definition taken from:
- # https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
- if not isinstance(instance, str_types):
- return True
- non_negative_integer, rest = [], ""
- for i, character in enumerate(instance):
- if character.isdigit():
- non_negative_integer.append(character)
- continue
-
- if not non_negative_integer:
- return False
-
- rest = instance[i:]
- break
- return (rest == "#") or jsonpointer.JsonPointer(rest)
-
-
- try:
- import uritemplate.exceptions
- except ImportError:
- pass
- else:
- @_checks_drafts(
- draft6="uri-template",
- draft7="uri-template",
- raises=uritemplate.exceptions.InvalidTemplate,
- )
- def is_uri_template(
- instance,
- template_validator=uritemplate.Validator().force_balanced_braces(),
- ):
- template = uritemplate.URITemplate(instance)
- return template_validator.validate(template)
lib/spack/external/archspec/cpu/detect.py (12 changes, vendored)

@@ -47,7 +47,11 @@ def decorator(factory):


 def partial_uarch(
- name: str = "", vendor: str = "", features: Optional[Set[str]] = None, generation: int = 0
+ name: str = "",
+ vendor: str = "",
+ features: Optional[Set[str]] = None,
+ generation: int = 0,
+ cpu_part: str = "",
 ) -> Microarchitecture:
 """Construct a partial microarchitecture, from information gathered during system scan."""
 return Microarchitecture(

@@ -57,6 +61,7 @@ def partial_uarch(
 features=features or set(),
 compilers={},
 generation=generation,
+ cpu_part=cpu_part,
 )


@@ -90,6 +95,7 @@ def proc_cpuinfo() -> Microarchitecture:
 return partial_uarch(
 vendor=_canonicalize_aarch64_vendor(data),
 features=_feature_set(data, key="Features"),
+ cpu_part=data.get("CPU part", ""),
 )

 if architecture in (PPC64LE, PPC64):

@@ -345,6 +351,10 @@ def sorting_fn(item):
 generic_candidates = [c for c in candidates if c.vendor == "generic"]
 best_generic = max(generic_candidates, key=sorting_fn)

+ # Relevant for AArch64. Filter on "cpu_part" if we have any match
+ if info.cpu_part != "" and any(c for c in candidates if info.cpu_part == c.cpu_part):
+ candidates = [c for c in candidates if info.cpu_part == c.cpu_part]
+
 # Filter the candidates to be descendant of the best generic candidate.
 # This is to avoid that the lack of a niche feature that can be disabled
 # from e.g. BIOS prevents detection of a reasonably performant architecture
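The detection change above narrows AArch64 candidates by the `CPU part` field from `/proc/cpuinfo`, but only when at least one candidate carries a matching `cpu_part`, so targets without that metadata are never filtered away. A minimal sketch with illustrative data (the real code operates on `Microarchitecture` objects, not dicts):

```python
candidates = [
    {"name": "neoverse_v2", "cpu_part": "0xd4f"},
    {"name": "neoverse_n1", "cpu_part": "0xd0c"},
    {"name": "armv8.2a", "cpu_part": ""},
]
detected = "0xd4f"  # e.g. the "CPU part" value read from /proc/cpuinfo

# Narrow the list only when something actually matches the detected part.
if detected and any(c["cpu_part"] == detected for c in candidates):
    candidates = [c for c in candidates if c["cpu_part"] == detected]

assert [c["name"] for c in candidates] == ["neoverse_v2"]
```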
@@ -2,9 +2,7 @@
 # Archspec Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
- """Types and functions to manage information
- on CPU microarchitectures.
- """
+ """Types and functions to manage information on CPU microarchitectures."""
 import functools
 import platform
 import re

@@ -65,21 +63,24 @@ class Microarchitecture:
 passed in as argument above.
 * versions: versions that support this micro-architecture.

- generation (int): generation of the micro-architecture, if
- relevant.
+ generation (int): generation of the micro-architecture, if relevant.
+ cpu_part (str): cpu part of the architecture, if relevant.
 """

- # pylint: disable=too-many-arguments
+ # pylint: disable=too-many-arguments,too-many-instance-attributes
 #: Aliases for micro-architecture's features
 feature_aliases = FEATURE_ALIASES

- def __init__(self, name, parents, vendor, features, compilers, generation=0):
+ def __init__(self, name, parents, vendor, features, compilers, generation=0, cpu_part=""):
 self.name = name
 self.parents = parents
 self.vendor = vendor
 self.features = features
 self.compilers = compilers
 # Only relevant for PowerPC
 self.generation = generation
+ # Only relevant for AArch64
+ self.cpu_part = cpu_part
 # Cache the ancestor computation
 self._ancestors = None

@@ -111,6 +112,7 @@ def __eq__(self, other):
 and self.parents == other.parents  # avoid ancestors here
 and self.compilers == other.compilers
 and self.generation == other.generation
+ and self.cpu_part == other.cpu_part
 )

 @coerce_target_names

@@ -143,7 +145,8 @@ def __repr__(self):
 cls_name = self.__class__.__name__
 fmt = (
 cls_name + "({0.name!r}, {0.parents!r}, {0.vendor!r}, "
- "{0.features!r}, {0.compilers!r}, {0.generation!r})"
+ "{0.features!r}, {0.compilers!r}, generation={0.generation!r}, "
+ "cpu_part={0.cpu_part!r})"
 )
 return fmt.format(self)

@@ -190,6 +193,7 @@ def to_dict(self):
 "generation": self.generation,
 "parents": [str(x) for x in self.parents],
 "compilers": self.compilers,
+ "cpupart": self.cpu_part,
 }

 @staticmethod

@@ -202,6 +206,7 @@ def from_dict(data) -> "Microarchitecture":
 features=set(data["features"]),
 compilers=data.get("compilers", {}),
 generation=data.get("generation", 0),
+ cpu_part=data.get("cpupart", ""),
 )

 def optimization_flags(self, compiler, version):

@@ -360,8 +365,11 @@ def fill_target_from_dict(name, data, targets):
 features = set(values["features"])
 compilers = values.get("compilers", {})
 generation = values.get("generation", 0)
+ cpu_part = values.get("cpupart", "")

- targets[name] = Microarchitecture(name, parents, vendor, features, compilers, generation)
+ targets[name] = Microarchitecture(
+ name, parents, vendor, features, compilers, generation=generation, cpu_part=cpu_part
+ )

 known_targets = {}
 data = archspec.cpu.schema.TARGETS_JSON["microarchitectures"]
@@ -2225,10 +2225,14 @@
 ],
 "nvhpc": [
 {
- "versions": "21.11:",
+ "versions": "21.11:23.8",
 "name": "zen3",
 "flags": "-tp {name}",
- "warnings": "zen4 is not fully supported by nvhpc yet, falling back to zen3"
+ "warnings": "zen4 is not fully supported by nvhpc versions < 23.9, falling back to zen3"
 },
+ {
+ "versions": "23.9:",
+ "flags": "-tp {name}"
+ }
 ]
 }

@@ -2711,7 +2715,8 @@
 "flags": "-mcpu=thunderx2t99"
 }
 ]
- }
+ },
+ "cpupart": "0x0af"
 },
 "a64fx": {
 "from": ["armv8.2a"],

@@ -2779,7 +2784,8 @@
 "flags": "-march=armv8.2-a+crc+crypto+fp16+sve"
 }
 ]
- }
+ },
+ "cpupart": "0x001"
 },
 "cortex_a72": {
 "from": ["aarch64"],

@@ -2816,7 +2822,8 @@
 "flags" : "-mcpu=cortex-a72"
 }
 ]
- }
+ },
+ "cpupart": "0xd08"
 },
 "neoverse_n1": {
 "from": ["cortex_a72", "armv8.2a"],

@@ -2902,7 +2909,8 @@
 "flags": "-tp {name}"
 }
 ]
- }
+ },
+ "cpupart": "0xd0c"
 },
 "neoverse_v1": {
 "from": ["neoverse_n1", "armv8.4a"],

@@ -2926,8 +2934,6 @@
 "lrcpc",
 "dcpop",
 "sha3",
- "sm3",
- "sm4",
 "asimddp",
 "sha512",
 "sve",

@@ -3028,7 +3034,8 @@
 "flags": "-tp {name}"
 }
 ]
- }
+ },
+ "cpupart": "0xd40"
 },
 "neoverse_v2": {
 "from": ["neoverse_n1", "armv9.0a"],

@@ -3052,13 +3059,10 @@
 "lrcpc",
 "dcpop",
 "sha3",
- "sm3",
- "sm4",
 "asimddp",
 "sha512",
 "sve",
 "asimdfhm",
- "dit",
 "uscat",
 "ilrcpc",
 "flagm",

@@ -3066,18 +3070,12 @@
 "sb",
 "dcpodp",
 "sve2",
- "sveaes",
- "svepmull",
- "svebitperm",
- "svesha3",
- "svesm4",
 "flagm2",
 "frint",
 "svei8mm",
 "svebf16",
 "i8mm",
- "bf16",
- "dgh"
+ "bf16"
 ],
 "compilers" : {
 "gcc": [

@@ -3102,15 +3100,19 @@
 "flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
 },
 {
- "versions": "10.0:11.99",
+ "versions": "10.0:11.3.99",
 "flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
 },
+ {
+ "versions": "11.4:11.99",
+ "flags" : "-mcpu=neoverse-v2"
+ },
 {
- "versions": "12.0:12.99",
+ "versions": "12.0:12.2.99",
 "flags" : "-march=armv9-a+i8mm+bf16 -mtune=cortex-a710"
 },
 {
- "versions": "13.0:",
+ "versions": "12.3:",
 "flags" : "-mcpu=neoverse-v2"
 }
 ],

@@ -3145,7 +3147,113 @@
 "flags": "-tp {name}"
 }
 ]
- }
+ },
+ "cpupart": "0xd4f"
 },
+ "neoverse_n2": {
+ "from": ["neoverse_n1", "armv9.0a"],
+ "vendor": "ARM",
+ "features": [
+ "fp",
+ "asimd",
+ "evtstrm",
+ "aes",
+ "pmull",
+ "sha1",
+ "sha2",
+ "crc32",
+ "atomics",
+ "fphp",
+ "asimdhp",
+ "cpuid",
+ "asimdrdm",
+ "jscvt",
+ "fcma",
+ "lrcpc",
+ "dcpop",
+ "sha3",
+ "asimddp",
+ "sha512",
+ "sve",
+ "asimdfhm",
+ "uscat",
+ "ilrcpc",
+ "flagm",
+ "ssbs",
+ "sb",
+ "dcpodp",
+ "sve2",
+ "flagm2",
+ "frint",
+ "svei8mm",
+ "svebf16",
+ "i8mm",
+ "bf16"
+ ],
+ "compilers" : {
+ "gcc": [
+ {
+ "versions": "4.8:5.99",
+ "flags": "-march=armv8-a"
+ },
+ {
+ "versions": "6:6.99",
+ "flags" : "-march=armv8.1-a"
+ },
+ {
+ "versions": "7.0:7.99",
+ "flags" : "-march=armv8.2-a -mtune=cortex-a72"
+ },
+ {
+ "versions": "8.0:8.99",
+ "flags" : "-march=armv8.4-a+sve -mtune=cortex-a72"
+ },
+ {
+ "versions": "9.0:9.99",
+ "flags" : "-march=armv8.5-a+sve -mtune=cortex-a76"
+ },
+ {
+ "versions": "10.0:10.99",
+ "flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16 -mtune=cortex-a77"
+ },
+ {
+ "versions": "11.0:",
+ "flags" : "-mcpu=neoverse-n2"
+ }
+ ],
+ "clang" : [
+ {
+ "versions": "9.0:10.99",
+ "flags" : "-march=armv8.5-a+sve"
+ },
+ {
+ "versions": "11.0:13.99",
+ "flags" : "-march=armv8.5-a+sve+sve2+i8mm+bf16"
+ },
+ {
+ "versions": "14.0:15.99",
+ "flags" : "-march=armv9-a+i8mm+bf16"
+ },
+ {
+ "versions": "16.0:",
+ "flags" : "-mcpu=neoverse-n2"
+ }
+ ],
+ "arm" : [
+ {
+ "versions": "23.04.0:",
+ "flags" : "-mcpu=neoverse-n2"
+ }
+ ],
+ "nvhpc" : [
+ {
+ "versions": "23.3:",
+ "name": "neoverse-n1",
+ "flags": "-tp {name}"
+ }
+ ]
+ },
+ "cpupart": "0xd49"
+ },
 "m1": {
 "from": ["armv8.4a"],

@@ -3211,7 +3319,8 @@
 "flags" : "-mcpu=apple-m1"
 }
 ]
- }
+ },
+ "cpupart": "0x022"
 },
 "m2": {
 "from": ["m1", "armv8.5a"],

@@ -3289,7 +3398,8 @@
 "flags" : "-mcpu=apple-m2"
 }
 ]
- }
+ },
+ "cpupart": "0x032"
 },
 "arm": {
 "from": [],
@@ -52,6 +52,9 @@
 }
 }
 }
 },
+ "cpupart": {
+ "type": "string"
+ }
 },
 "required": [

@@ -107,4 +110,4 @@
 "additionalProperties": false
 }
 }
 }
 }
lib/spack/external/patches/distro.patch (45 changes, vendored, new file)

@@ -0,0 +1,45 @@
diff --git a/lib/spack/external/_vendoring/distro/distro.py b/lib/spack/external/_vendoring/distro/distro.py
index 89e1868047..50c3b18d4d 100644
--- a/lib/spack/external/_vendoring/distro/distro.py
+++ b/lib/spack/external/_vendoring/distro/distro.py
@@ -1265,27 +1265,29 @@ def _distro_release_info(self) -> Dict[str, str]:
 match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
 else:
 try:
- basenames = [
- basename
- for basename in os.listdir(self.etc_dir)
- if basename not in _DISTRO_RELEASE_IGNORE_BASENAMES
- and os.path.isfile(os.path.join(self.etc_dir, basename))
- ]
+ with os.scandir(self.etc_dir) as it:
+ etc_files = [
+ p.path for p in it
+ if p.is_file() and p.name not in _DISTRO_RELEASE_IGNORE_BASENAMES
+ ]
 # We sort for repeatability in cases where there are multiple
 # distro specific files; e.g. CentOS, Oracle, Enterprise all
 # containing `redhat-release` on top of their own.
- basenames.sort()
+ etc_files.sort()
 except OSError:
 # This may occur when /etc is not readable but we can't be
 # sure about the *-release files. Check common entries of
 # /etc for information. If they turn out to not be there the
 # error is handled in `_parse_distro_release_file()`.
- basenames = _DISTRO_RELEASE_BASENAMES
- for basename in basenames:
- match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
+ etc_files = [
+ os.path.join(self.etc_dir, basename)
+ for basename in _DISTRO_RELEASE_BASENAMES
+ ]
+
+ for filepath in etc_files:
+ match = _DISTRO_RELEASE_BASENAME_PATTERN.match(os.path.basename(filepath))
 if match is None:
 continue
- filepath = os.path.join(self.etc_dir, basename)
 distro_info = self._parse_distro_release_file(filepath)
 # The name is always present if the pattern matches.
 if "name" not in distro_info:
lib/spack/external/patches/jsonschema.patch (188 changes, vendored)

@@ -13,3 +13,191 @@ index 6b630cdfbb..1791fe7fbf 100644
-__version__ = metadata.version("jsonschema")
+
+__version__ = "3.2.0"
diff --git a/lib/spack/external/_vendoring/jsonschema/_format.py b/lib/spack/external/_vendoring/jsonschema/_format.py
index 281a7cfcff..29061e3661 100644
--- a/lib/spack/external/_vendoring/jsonschema/_format.py
+++ b/lib/spack/external/_vendoring/jsonschema/_format.py
@@ -231,96 +231,6 @@ def is_host_name(instance):
 return True


-try:
- # The built-in `idna` codec only implements RFC 3890, so we go elsewhere.
- import idna
-except ImportError:
- pass
-else:
- @_checks_drafts(draft7="idn-hostname", raises=idna.IDNAError)
- def is_idn_host_name(instance):
- if not isinstance(instance, str_types):
- return True
- idna.encode(instance)
- return True
-
-
-try:
- import rfc3987
-except ImportError:
- try:
- from rfc3986_validator import validate_rfc3986
- except ImportError:
- pass
- else:
- @_checks_drafts(name="uri")
- def is_uri(instance):
- if not isinstance(instance, str_types):
- return True
- return validate_rfc3986(instance, rule="URI")
-
- @_checks_drafts(
- draft6="uri-reference",
- draft7="uri-reference",
- raises=ValueError,
- )
- def is_uri_reference(instance):
- if not isinstance(instance, str_types):
- return True
- return validate_rfc3986(instance, rule="URI_reference")
-
-else:
- @_checks_drafts(draft7="iri", raises=ValueError)
- def is_iri(instance):
- if not isinstance(instance, str_types):
- return True
- return rfc3987.parse(instance, rule="IRI")
-
- @_checks_drafts(draft7="iri-reference", raises=ValueError)
- def is_iri_reference(instance):
- if not isinstance(instance, str_types):
- return True
- return rfc3987.parse(instance, rule="IRI_reference")
-
- @_checks_drafts(name="uri", raises=ValueError)
- def is_uri(instance):
- if not isinstance(instance, str_types):
- return True
- return rfc3987.parse(instance, rule="URI")
-
- @_checks_drafts(
- draft6="uri-reference",
- draft7="uri-reference",
- raises=ValueError,
- )
- def is_uri_reference(instance):
- if not isinstance(instance, str_types):
- return True
- return rfc3987.parse(instance, rule="URI_reference")
-
-
-try:
- from strict_rfc3339 import validate_rfc3339
-except ImportError:
- try:
- from rfc3339_validator import validate_rfc3339
- except ImportError:
- validate_rfc3339 = None
-
-if validate_rfc3339:
- @_checks_drafts(name="date-time")
- def is_datetime(instance):
- if not isinstance(instance, str_types):
- return True
- return validate_rfc3339(instance)
-
- @_checks_drafts(draft7="time")
- def is_time(instance):
- if not isinstance(instance, str_types):
- return True
- return is_datetime("1970-01-01T" + instance)
-
-
 @_checks_drafts(name="regex", raises=re.error)
 def is_regex(instance):
 if not isinstance(instance, str_types):
@@ -340,86 +250,3 @@ def is_draft3_time(instance):
 if not isinstance(instance, str_types):
 return True
 return datetime.datetime.strptime(instance, "%H:%M:%S")
-
-
-try:
- import webcolors
-except ImportError:
- pass
-else:
- def is_css_color_code(instance):
- return webcolors.normalize_hex(instance)
-
- @_checks_drafts(draft3="color", raises=(ValueError, TypeError))
- def is_css21_color(instance):
- if (
- not isinstance(instance, str_types) or
- instance.lower() in webcolors.css21_names_to_hex
- ):
- return True
- return is_css_color_code(instance)
-
- def is_css3_color(instance):
- if instance.lower() in webcolors.css3_names_to_hex:
- return True
- return is_css_color_code(instance)
-
-
-try:
- import jsonpointer
-except ImportError:
- pass
-else:
- @_checks_drafts(
- draft6="json-pointer",
- draft7="json-pointer",
- raises=jsonpointer.JsonPointerException,
- )
- def is_json_pointer(instance):
- if not isinstance(instance, str_types):
- return True
- return jsonpointer.JsonPointer(instance)
-
- # TODO: I don't want to maintain this, so it
- # needs to go either into jsonpointer (pending
- # https://github.com/stefankoegl/python-json-pointer/issues/34) or
- # into a new external library.
- @_checks_drafts(
- draft7="relative-json-pointer",
- raises=jsonpointer.JsonPointerException,
- )
- def is_relative_json_pointer(instance):
- # Definition taken from:
- # https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
- if not isinstance(instance, str_types):
- return True
- non_negative_integer, rest = [], ""
- for i, character in enumerate(instance):
- if character.isdigit():
- non_negative_integer.append(character)
- continue
-
- if not non_negative_integer:
- return False
-
- rest = instance[i:]
- break
- return (rest == "#") or jsonpointer.JsonPointer(rest)
-
-
-try:
- import uritemplate.exceptions
-except ImportError:
- pass
-else:
- @_checks_drafts(
- draft6="uri-template",
- draft7="uri-template",
- raises=uritemplate.exceptions.InvalidTemplate,
- )
- def is_uri_template(
- instance,
- template_validator=uritemplate.Validator().force_balanced_braces(),
- ):
- template = uritemplate.URITemplate(instance)
- return template_validator.validate(template)
@@ -27,8 +27,6 @@
 from llnl.util.lang import dedupe, memoized
 from llnl.util.symlink import islink, readlink, resolve_link_target_relative_to_the_link, symlink

- from spack.util.executable import Executable, which
-
 from ..path import path_to_os_path, system_path_filter

 if sys.platform != "win32":

@@ -53,7 +51,6 @@
 "find_all_headers",
 "find_libraries",
 "find_system_libraries",
- "fix_darwin_install_name",
 "force_remove",
 "force_symlink",
 "getuid",

@@ -248,42 +245,6 @@ def path_contains_subdirectory(path, root):
 return norm_path.startswith(norm_root)


- @memoized
- def file_command(*args):
- """Creates entry point to `file` system command with provided arguments"""
- file_cmd = which("file", required=True)
- for arg in args:
- file_cmd.add_default_arg(arg)
- return file_cmd
-
-
- @memoized
- def _get_mime_type():
- """Generate method to call `file` system command to aquire mime type
- for a specified path
- """
- if sys.platform == "win32":
- # -h option (no-dereference) does not exist in Windows
- return file_command("-b", "--mime-type")
- else:
- return file_command("-b", "-h", "--mime-type")
-
-
- def mime_type(filename):
- """Returns the mime type and subtype of a file.
-
- Args:
- filename: file to be analyzed
-
- Returns:
- Tuple containing the MIME type and subtype
- """
- output = _get_mime_type()(filename, output=str, error=str).strip()
- tty.debug("==> " + output)
- type, _, subtype = output.partition("/")
- return type, subtype
-
-
 #: This generates the library filenames that may appear on any OS.
 library_extensions = ["a", "la", "so", "tbd", "dylib"]

@@ -1624,6 +1585,12 @@ def remove_linked_tree(path):
 shutil.rmtree(os.path.realpath(path), **kwargs)
 os.unlink(path)
 else:
+ if sys.platform == "win32":
+ # Adding this prefix allows shutil to remove long paths on windows
+ # https://learn.microsoft.com/en-us/windows/win32/fileio/maximum-file-path-limitation?tabs=registry
+ long_path_pfx = "\\\\?\\"
+ if not path.startswith(long_path_pfx):
+ path = long_path_pfx + path
 shutil.rmtree(path, **kwargs)


@@ -1673,41 +1640,6 @@ def safe_remove(*files_or_dirs):
 raise


- @system_path_filter
- def fix_darwin_install_name(path):
- """Fix install name of dynamic libraries on Darwin to have full path.
-
- There are two parts of this task:
-
- 1. Use ``install_name('-id', ...)`` to change install name of a single lib
- 2. Use ``install_name('-change', ...)`` to change the cross linking between
- libs. The function assumes that all libraries are in one folder and
- currently won't follow subfolders.
-
- Parameters:
- path (str): directory in which .dylib files are located
- """
- libs = glob.glob(join_path(path, "*.dylib"))
- for lib in libs:
- # fix install name first:
- install_name_tool = Executable("install_name_tool")
- install_name_tool("-id", lib, lib)
- otool = Executable("otool")
- long_deps = otool("-L", lib, output=str).split("\n")
- deps = [dep.partition(" ")[0][1::] for dep in long_deps[2:-1]]
- # fix all dependencies:
- for dep in deps:
- for loc in libs:
- # We really want to check for either
- # dep == os.path.basename(loc) or
- # dep == join_path(builddir, os.path.basename(loc)),
- # but we don't know builddir (nor how symbolic links look
- # in builddir). We thus only compare the basenames.
- if os.path.basename(dep) == os.path.basename(loc):
- install_name_tool("-change", dep, loc, lib)
- break
-
-
 def find_first(root: str, files: Union[Iterable[str], str], bfs_depth: int = 2) -> Optional[str]:
 """Find the first file matching a pattern.
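The `remove_linked_tree` hunk above works around the legacy 260-character `MAX_PATH` limit on Windows by prepending the `\\?\` extended-path prefix before calling `shutil.rmtree`. A self-contained sketch of that branch (the helper name is hypothetical):

```python
import shutil
import sys


def remove_tree(path: str) -> None:
    # On Windows, prefix the path so shutil.rmtree can delete deeply
    # nested build trees beyond the legacy MAX_PATH limit; elsewhere
    # the path is used as-is.
    if sys.platform == "win32":
        long_path_pfx = "\\\\?\\"  # the literal \\?\ prefix
        if not path.startswith(long_path_pfx):
            path = long_path_pfx + path
    shutil.rmtree(path)
```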
@@ -6,7 +6,6 @@
 import collections.abc
 import contextlib
 import functools
- import inspect
 import itertools
 import os
 import re

@@ -16,7 +15,7 @@
 from typing import Any, Callable, Iterable, List, Tuple

 # Ignore emacs backups when listing modules
- ignore_modules = [r"^\.#", "~$"]
+ ignore_modules = r"^\.#|~$"


 def index_by(objects, *funcs):

@@ -84,20 +83,6 @@ def index_by(objects, *funcs):
 return result


- def caller_locals():
- """This will return the locals of the *parent* of the caller.
- This allows a function to insert variables into its caller's
- scope. Yes, this is some black magic, and yes it's useful
- for implementing things like depends_on and provides.
- """
- # Passing zero here skips line context for speed.
- stack = inspect.stack(0)
- try:
- return stack[2][0].f_locals
- finally:
- del stack
-
-
 def attr_setdefault(obj, name, value):
 """Like dict.setdefault, but for objects."""
 if not hasattr(obj, name):

@@ -105,15 +90,6 @@ def attr_setdefault(obj, name, value):
 return getattr(obj, name)


- def has_method(cls, name):
- for base in inspect.getmro(cls):
- if base is object:
- continue
- if name in base.__dict__:
- return True
- return False
-
-
 def union_dicts(*dicts):
 """Use update() to combine all dicts into one.

@@ -178,19 +154,22 @@ def list_modules(directory, **kwargs):
 order."""
 list_directories = kwargs.setdefault("directories", True)

- for name in os.listdir(directory):
- if name == "__init__.py":
- continue
+ ignore = re.compile(ignore_modules)

- path = os.path.join(directory, name)
- if list_directories and os.path.isdir(path):
- init_py = os.path.join(path, "__init__.py")
- if os.path.isfile(init_py):
- yield name
+ with os.scandir(directory) as it:
+ for entry in it:
+ if entry.name == "__init__.py" or entry.name == "__pycache__":
+ continue

- elif name.endswith(".py"):
- if not any(re.search(pattern, name) for pattern in ignore_modules):
- yield re.sub(".py$", "", name)
+ if (
+ list_directories
+ and entry.is_dir()
+ and os.path.isfile(os.path.join(entry.path, "__init__.py"))
+ ):
+ yield entry.name
+
+ elif entry.name.endswith(".py") and entry.is_file() and not ignore.search(entry.name):
+ yield entry.name[:-3]  # strip .py


 def decorator_with_or_without_args(decorator):

@@ -237,8 +216,8 @@ def setter(name, value):
 value.__name__ = name
 setattr(cls, name, value)

- if not has_method(cls, "_cmp_key"):
- raise TypeError("'%s' doesn't define _cmp_key()." % cls.__name__)
+ if not hasattr(cls, "_cmp_key"):
+ raise TypeError(f"'{cls.__name__}' doesn't define _cmp_key().")

 setter("__eq__", lambda s, o: (s is o) or (o is not None and s._cmp_key() == o._cmp_key()))
 setter("__lt__", lambda s, o: o is not None and s._cmp_key() < o._cmp_key())

@@ -388,8 +367,8 @@ def cd_fun():
 TypeError: If the class does not have a ``_cmp_iter`` method

 """
- if not has_method(cls, "_cmp_iter"):
- raise TypeError("'%s' doesn't define _cmp_iter()." % cls.__name__)
+ if not hasattr(cls, "_cmp_iter"):
+ raise TypeError(f"'{cls.__name__}' doesn't define _cmp_iter().")

 # comparison operators are implemented in terms of lazy_eq and lazy_lt
 def eq(self, other):

@@ -864,20 +843,19 @@ def uniq(sequence):
 return uniq_list


- def elide_list(line_list, max_num=10):
+ def elide_list(line_list: List[str], max_num: int = 10) -> List[str]:
 """Takes a long list and limits it to a smaller number of elements,
 replacing intervening elements with '...'. For example::

- elide_list([1,2,3,4,5,6], 4)
+ elide_list(["1", "2", "3", "4", "5", "6"], 4)

 gives::

- [1, 2, 3, '...', 6]
+ ["1", "2", "3", "...", "6"]
 """
 if len(line_list) > max_num:
- return line_list[: max_num - 1] + ["..."] + line_list[-1:]
- else:
- return line_list
+ return [*line_list[: max_num - 1], "...", line_list[-1]]
+ return line_list


 @contextlib.contextmanager
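The `elide_list` rewrite above is behavior-preserving: lists at or under `max_num` pass through unchanged, while longer ones keep the first `max_num - 1` items plus the last one, with `"..."` marking the elided middle. A quick check of both branches (logic copied from the diff; the asserts are illustrative):

```python
from typing import List


def elide_list(line_list: List[str], max_num: int = 10) -> List[str]:
    # Keep the head and the final element, eliding the middle.
    if len(line_list) > max_num:
        return [*line_list[: max_num - 1], "...", line_list[-1]]
    return line_list


assert elide_list(["1", "2", "3", "4", "5", "6"], 4) == ["1", "2", "3", "...", "6"]
assert elide_list(["1", "2"], 4) == ["1", "2"]
```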
@@ -10,6 +10,7 @@
 import errno
 import io
 import multiprocessing
+ import multiprocessing.connection
 import os
 import re
 import select
@@ -1,131 +0,0 @@
- # Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
- # Spack Project Developers. See the top-level COPYRIGHT file for details.
- #
- # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
- import os
-
- from llnl.util.lang import memoized
-
- import spack.spec
- import spack.version
- from spack.compilers.clang import Clang
- from spack.util.executable import Executable, ProcessError
-
-
- class ABI:
- """This class provides methods to test ABI compatibility between specs.
- The current implementation is rather rough and could be improved."""
-
- def architecture_compatible(
- self, target: spack.spec.Spec, constraint: spack.spec.Spec
- ) -> bool:
- """Return true if architecture of target spec is ABI compatible
- to the architecture of constraint spec. If either the target
- or constraint specs have no architecture, target is also defined
- as architecture ABI compatible to constraint."""
- return (
- not target.architecture
- or not constraint.architecture
- or target.architecture.intersects(constraint.architecture)
- )
-
- @memoized
- def _gcc_get_libstdcxx_version(self, version):
- """Returns gcc ABI compatibility info by getting the library version of
- a compiler's libstdc++ or libgcc_s"""
- from spack.build_environment import dso_suffix
-
- spec = spack.spec.CompilerSpec("gcc", version)
- compilers = spack.compilers.compilers_for_spec(spec)
- if not compilers:
- return None
- compiler = compilers[0]
- rungcc = None
- libname = None
- output = None
- if compiler.cxx:
- rungcc = Executable(compiler.cxx)
- libname = "libstdc++." + dso_suffix
- elif compiler.cc:
- rungcc = Executable(compiler.cc)
- libname = "libgcc_s." + dso_suffix
- else:
- return None
- try:
- # Some gcc's are actually clang and don't respond properly to
- # --print-file-name (they just print the filename, not the
- # full path). Ignore these and expect them to be handled as clang.
- if Clang.default_version(rungcc.exe[0]) != "unknown":
- return None
-
- output = rungcc("--print-file-name=%s" % libname, output=str)
- except ProcessError:
- return None
- if not output:
- return None
- libpath = os.path.realpath(output.strip())
- if not libpath:
- return None
- return os.path.basename(libpath)
-
- @memoized
- def _gcc_compiler_compare(self, pversion, cversion):
- """Returns true iff the gcc version pversion and cversion
- are ABI compatible."""
- plib = self._gcc_get_libstdcxx_version(pversion)
- clib = self._gcc_get_libstdcxx_version(cversion)
- if not plib or not clib:
- return False
- return plib == clib
-
- def _intel_compiler_compare(
- self, pversion: spack.version.ClosedOpenRange, cversion: spack.version.ClosedOpenRange
- ) -> bool:
- """Returns true iff the intel version pversion and cversion
- are ABI compatible"""
-
- # Test major and minor versions. Ignore build version.
- pv = pversion.lo
- cv = cversion.lo
- return pv.up_to(2) == cv.up_to(2)
-
- def compiler_compatible(
- self, parent: spack.spec.Spec, child: spack.spec.Spec, loose: bool = False
- ) -> bool:
- """Return true if compilers for parent and child are ABI compatible."""
- if not parent.compiler or not child.compiler:
- return True
|
||||
|
||||
if parent.compiler.name != child.compiler.name:
|
||||
# Different compiler families are assumed ABI incompatible
|
||||
return False
|
||||
|
||||
if loose:
|
||||
return True
|
||||
|
||||
# TODO: Can we move the specialized ABI matching stuff
|
||||
# TODO: into compiler classes?
|
||||
for pversion in parent.compiler.versions:
|
||||
for cversion in child.compiler.versions:
|
||||
# For a few compilers use specialized comparisons.
|
||||
# Otherwise match on version match.
|
||||
if pversion.intersects(cversion):
|
||||
return True
|
||||
elif parent.compiler.name == "gcc" and self._gcc_compiler_compare(
|
||||
pversion, cversion
|
||||
):
|
||||
return True
|
||||
elif parent.compiler.name == "intel" and self._intel_compiler_compare(
|
||||
pversion, cversion
|
||||
):
|
||||
return True
|
||||
return False
|
||||
|
||||
def compatible(
|
||||
self, target: spack.spec.Spec, constraint: spack.spec.Spec, loose: bool = False
|
||||
) -> bool:
|
||||
"""Returns true if target spec is ABI compatible to constraint spec"""
|
||||
return self.architecture_compatible(target, constraint) and self.compiler_compatible(
|
||||
target, constraint, loose=loose
|
||||
)
|
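The deleted `_intel_compiler_compare` above keyed only on the first two version components. A runnable stand-in (plain strings replace `spack.version` objects, whose `up_to(2)` keeps the major.minor prefix):

    def up_to(version: str, n: int) -> str:
        # stand-in for spack.version.Version.up_to(n)
        return ".".join(version.split(".")[:n])

    assert up_to("2021.2.0", 2) == up_to("2021.2.4", 2)  # same major.minor: compatible
    assert up_to("2021.2.0", 2) != up_to("2021.3.0", 2)  # minor differs: incompatible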
@@ -39,9 +39,9 @@ def _search_duplicate_compilers(error_cls):
import collections
import collections.abc
import glob
-import inspect
import io
import itertools
import os
+import pathlib
import pickle
import re
@@ -210,6 +210,11 @@ def _search_duplicate_compilers(error_cls):
    group="configs", tag="CFG-PACKAGES", description="Sanity checks on packages.yaml", kwargs=()
)

+#: Sanity checks on packages.yaml
+config_repos = AuditClass(
+   group="configs", tag="CFG-REPOS", description="Sanity checks on repositories", kwargs=()
+)
+

@config_packages
def _search_duplicate_specs_in_externals(error_cls):
@@ -252,40 +257,6 @@ def _search_duplicate_specs_in_externals(error_cls):
    return errors


-@config_packages
-def _deprecated_preferences(error_cls):
-   """Search package preferences deprecated in v0.21 (and slated for removal in v0.23)"""
-   # TODO (v0.23): remove this audit as the attributes will not be allowed in config
-   errors = []
-   packages_yaml = spack.config.CONFIG.get_config("packages")
-
-   def make_error(attribute_name, config_data, summary):
-       s = io.StringIO()
-       s.write("Occurring in the following file:\n")
-       dict_view = syaml.syaml_dict((k, v) for k, v in config_data.items() if k == attribute_name)
-       syaml.dump_config(dict_view, stream=s, blame=True)
-       return error_cls(summary=summary, details=[s.getvalue()])
-
-   if "all" in packages_yaml and "version" in packages_yaml["all"]:
-       summary = "Using the deprecated 'version' attribute under 'packages:all'"
-       errors.append(make_error("version", packages_yaml["all"], summary))
-
-   for package_name in packages_yaml:
-       if package_name == "all":
-           continue
-
-       package_conf = packages_yaml[package_name]
-       for attribute in ("compiler", "providers", "target"):
-           if attribute not in package_conf:
-               continue
-           summary = (
-               f"Using the deprecated '{attribute}' attribute " f"under 'packages:{package_name}'"
-           )
-           errors.append(make_error(attribute, package_conf, summary))
-
-   return errors
-
-
@config_packages
def _avoid_mismatched_variants(error_cls):
    """Warns if variant preferences have mismatched types or names."""
@@ -367,6 +338,27 @@ def _ensure_all_virtual_packages_have_default_providers(error_cls):
    ]


+@config_repos
+def _ensure_no_folders_without_package_py(error_cls):
+   """Check that we don't leave any folder without a package.py in repos"""
+   errors = []
+   for repository in spack.repo.PATH.repos:
+       missing = []
+       for entry in os.scandir(repository.packages_path):
+           if not entry.is_dir():
+               continue
+           package_py = pathlib.Path(entry.path) / spack.repo.package_file_name
+           if not package_py.exists():
+               missing.append(entry.path)
+       if missing:
+           summary = (
+               f"The '{repository.namespace}' repository misses a package.py file"
+               f" in the following folders"
+           )
+           errors.append(error_cls(summary=summary, details=[f"{x}" for x in missing]))
+   return errors
+
+
def _make_config_error(config_data, summary, error_cls):
    s = io.StringIO()
    s.write("Occurring in the following file:\n")
@@ -498,7 +490,7 @@ def _search_for_reserved_attributes_names_in_packages(pkgs, error_cls):
    name_definitions = collections.defaultdict(list)
    pkg_cls = spack.repo.PATH.get_pkg_class(pkg_name)

-   for cls_item in inspect.getmro(pkg_cls):
+   for cls_item in pkg_cls.__mro__:
        for name in RESERVED_NAMES:
            current_value = cls_item.__dict__.get(name)
            if current_value is None:
@@ -527,7 +519,7 @@ def _ensure_all_package_names_are_lowercase(pkgs, error_cls):
    badname_regex, errors = re.compile(r"[_A-Z]"), []
    for pkg_name in pkgs:
        if badname_regex.search(pkg_name):
-           error_msg = "Package name '{}' is either lowercase or conatine '_'".format(pkg_name)
+           error_msg = f"Package name '{pkg_name}' should be lowercase and must not contain '_'"
            errors.append(error_cls(error_msg, []))
    return errors
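A quick runnable illustration (not from the diff) of what that audit regex flags — any name with an underscore or an uppercase letter:

    import re

    badname_regex = re.compile(r"[_A-Z]")
    for name in ("py-numpy", "Py_Numpy"):
        print(name, "->", "bad" if badname_regex.search(name) else "ok")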
@@ -6,7 +6,6 @@
|
||||
import codecs
|
||||
import collections
|
||||
import concurrent.futures
|
||||
import contextlib
|
||||
import copy
|
||||
import hashlib
|
||||
import io
|
||||
@@ -25,7 +24,7 @@
|
||||
import urllib.request
|
||||
import warnings
|
||||
from contextlib import closing
|
||||
from typing import Dict, Generator, Iterable, List, NamedTuple, Optional, Set, Tuple, Union
|
||||
from typing import Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Union
|
||||
|
||||
import llnl.util.filesystem as fsys
|
||||
import llnl.util.lang
|
||||
@@ -55,6 +54,7 @@
|
||||
import spack.util.archive
|
||||
import spack.util.crypto
|
||||
import spack.util.file_cache as file_cache
|
||||
import spack.util.filesystem as ssys
|
||||
import spack.util.gpg
|
||||
import spack.util.parallel
|
||||
import spack.util.path
|
||||
@@ -106,7 +106,7 @@ class BuildCacheDatabase(spack_db.Database):
|
||||
record_fields = ("spec", "ref_count", "in_buildcache")
|
||||
|
||||
def __init__(self, root):
|
||||
super().__init__(root, lock_cfg=spack_db.NO_LOCK)
|
||||
super().__init__(root, lock_cfg=spack_db.NO_LOCK, layout=None)
|
||||
self._write_transaction_impl = llnl.util.lang.nullcontext
|
||||
self._read_transaction_impl = llnl.util.lang.nullcontext
|
||||
|
||||
@@ -688,7 +688,7 @@ def get_buildfile_manifest(spec):
|
||||
# Non-symlinks.
|
||||
for rel_path in visitor.files:
|
||||
abs_path = os.path.join(root, rel_path)
|
||||
m_type, m_subtype = fsys.mime_type(abs_path)
|
||||
m_type, m_subtype = ssys.mime_type(abs_path)
|
||||
|
||||
if relocate.needs_binary_relocation(m_type, m_subtype):
|
||||
# Why is this branch not part of needs_binary_relocation? :(
|
||||
@@ -789,7 +789,9 @@ def sign_specfile(key: str, specfile_path: str) -> str:
|
||||
return signed_specfile_path
|
||||
|
||||
|
||||
def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_dir, concurrency):
|
||||
def _read_specs_and_push_index(
|
||||
file_list, read_method, cache_prefix, db: BuildCacheDatabase, temp_dir, concurrency
|
||||
):
|
||||
"""Read all the specs listed in the provided list, using thread given thread parallelism,
|
||||
generate the index, and push it to the mirror.
|
||||
|
||||
@@ -813,7 +815,7 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_di
|
||||
else:
|
||||
continue
|
||||
|
||||
db.add(fetched_spec, None)
|
||||
db.add(fetched_spec)
|
||||
db.mark(fetched_spec, "in_buildcache", True)
|
||||
|
||||
# Now generate the index, compute its hash, and push the two files to
|
||||
@@ -958,7 +960,7 @@ def _spec_files_from_cache(url: str):
|
||||
raise ListMirrorSpecsError("Failed to get list of specs from {0}".format(url))
|
||||
|
||||
|
||||
def generate_package_index(url: str, tmpdir: str, concurrency: int = 32):
|
||||
def _url_generate_package_index(url: str, tmpdir: str, concurrency: int = 32):
|
||||
"""Create or replace the build cache index on the given mirror. The
|
||||
buildcache index contains an entry for each binary package under the
|
||||
cache_prefix.
|
||||
@@ -1119,7 +1121,7 @@ def _exists_in_buildcache(spec: Spec, tmpdir: str, out_url: str) -> ExistsInBuil
|
||||
return ExistsInBuildcache(signed, unsigned, tarball)
|
||||
|
||||
|
||||
def _upload_tarball_and_specfile(
|
||||
def _url_upload_tarball_and_specfile(
|
||||
spec: Spec, tmpdir: str, out_url: str, exists: ExistsInBuildcache, signing_key: Optional[str]
|
||||
):
|
||||
files = BuildcacheFiles(spec, tmpdir, out_url)
|
||||
@@ -1154,49 +1156,146 @@ def _upload_tarball_and_specfile(
|
||||
)
|
||||
|
||||
|
||||
class Uploader:
|
||||
def __init__(self, mirror: spack.mirror.Mirror, force: bool, update_index: bool):
|
||||
self.mirror = mirror
|
||||
self.force = force
|
||||
self.update_index = update_index
|
||||
|
||||
self.tmpdir: str
|
||||
self.executor: concurrent.futures.Executor
|
||||
|
||||
def __enter__(self):
|
||||
self._tmpdir = tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root())
|
||||
self._executor = spack.util.parallel.make_concurrent_executor()
|
||||
|
||||
self.tmpdir = self._tmpdir.__enter__()
|
||||
self.executor = self.executor = self._executor.__enter__()
|
||||
|
||||
return self
|
||||
|
||||
def __exit__(self, *args):
|
||||
self._executor.__exit__(*args)
|
||||
self._tmpdir.__exit__(*args)
|
||||
|
||||
def push_or_raise(self, specs: List[spack.spec.Spec]) -> List[spack.spec.Spec]:
|
||||
skipped, errors = self.push(specs)
|
||||
if errors:
|
||||
raise PushToBuildCacheError(
|
||||
f"Failed to push {len(errors)} specs to {self.mirror.push_url}:\n"
|
||||
+ "\n".join(
|
||||
f"Failed to push {_format_spec(spec)}: {error}" for spec, error in errors
|
||||
)
|
||||
)
|
||||
return skipped
|
||||
|
||||
def push(
|
||||
self, specs: List[spack.spec.Spec]
|
||||
) -> Tuple[List[spack.spec.Spec], List[Tuple[spack.spec.Spec, BaseException]]]:
|
||||
raise NotImplementedError
|
||||
|
||||
def tag(self, tag: str, roots: List[spack.spec.Spec]):
|
||||
"""Make a list of selected specs together available under the given tag"""
|
||||
pass
|
||||
|
||||
|
||||
class OCIUploader(Uploader):
|
||||
def __init__(
|
||||
self,
|
||||
mirror: spack.mirror.Mirror,
|
||||
force: bool,
|
||||
update_index: bool,
|
||||
base_image: Optional[str],
|
||||
) -> None:
|
||||
super().__init__(mirror, force, update_index)
|
||||
self.target_image = spack.oci.oci.image_from_mirror(mirror)
|
||||
self.base_image = ImageReference.from_string(base_image) if base_image else None
|
||||
|
||||
def push(
|
||||
self, specs: List[spack.spec.Spec]
|
||||
) -> Tuple[List[spack.spec.Spec], List[Tuple[spack.spec.Spec, BaseException]]]:
|
||||
skipped, base_images, checksums, upload_errors = _oci_push(
|
||||
target_image=self.target_image,
|
||||
base_image=self.base_image,
|
||||
installed_specs_with_deps=specs,
|
||||
force=self.force,
|
||||
tmpdir=self.tmpdir,
|
||||
executor=self.executor,
|
||||
)
|
||||
|
||||
self._base_images = base_images
|
||||
self._checksums = checksums
|
||||
|
||||
# only update index if any binaries were uploaded
|
||||
if self.update_index and len(skipped) + len(upload_errors) < len(specs):
|
||||
_oci_update_index(self.target_image, self.tmpdir, self.executor)
|
||||
|
||||
return skipped, upload_errors
|
||||
|
||||
def tag(self, tag: str, roots: List[spack.spec.Spec]):
|
||||
tagged_image = self.target_image.with_tag(tag)
|
||||
|
||||
# _push_oci may not populate self._base_images if binaries were already in the registry
|
||||
for spec in roots:
|
||||
_oci_update_base_images(
|
||||
base_image=self.base_image,
|
||||
target_image=self.target_image,
|
||||
spec=spec,
|
||||
base_image_cache=self._base_images,
|
||||
)
|
||||
_oci_put_manifest(
|
||||
self._base_images, self._checksums, tagged_image, self.tmpdir, None, None, *roots
|
||||
)
|
||||
|
||||
|
||||
class URLUploader(Uploader):
|
||||
def __init__(
|
||||
self,
|
||||
mirror: spack.mirror.Mirror,
|
||||
force: bool,
|
||||
update_index: bool,
|
||||
signing_key: Optional[str],
|
||||
) -> None:
|
||||
super().__init__(mirror, force, update_index)
|
||||
self.url = mirror.push_url
|
||||
self.signing_key = signing_key
|
||||
|
||||
def push(
|
||||
self, specs: List[spack.spec.Spec]
|
||||
) -> Tuple[List[spack.spec.Spec], List[Tuple[spack.spec.Spec, BaseException]]]:
|
||||
return _url_push(
|
||||
specs,
|
||||
out_url=self.url,
|
||||
force=self.force,
|
||||
update_index=self.update_index,
|
||||
signing_key=self.signing_key,
|
||||
tmpdir=self.tmpdir,
|
||||
executor=self.executor,
|
||||
)
|
||||
|
||||
|
||||
def make_uploader(
|
||||
mirror: spack.mirror.Mirror,
|
||||
force: bool = False,
|
||||
update_index: bool = False,
|
||||
signing_key: Optional[str] = None,
|
||||
base_image: Optional[str] = None,
|
||||
) -> Uploader:
|
||||
"""Builder for the appropriate uploader based on the mirror type"""
|
||||
if mirror.push_url.startswith("oci://"):
|
||||
return OCIUploader(
|
||||
mirror=mirror, force=force, update_index=update_index, base_image=base_image
|
||||
)
|
||||
else:
|
||||
return URLUploader(
|
||||
mirror=mirror, force=force, update_index=update_index, signing_key=signing_key
|
||||
)
|
||||
|
||||
|
||||
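A hedged usage sketch of the new `make_uploader` factory (the mirror URL is invented and the spec list is an empty placeholder; this assumes a Spack checkout with these changes applied):

    import spack.mirror
    import spack.binary_distribution as bindist

    mirror = spack.mirror.Mirror.from_url("oci://registry.example.com/cache")
    specs = []  # normally a list of concrete, installed Specs
    with bindist.make_uploader(mirror, update_index=True) as uploader:  # OCIUploader here
        skipped = uploader.push_or_raise(specs)  # raises PushToBuildCacheError on failure

The context-manager protocol is what lets the temporary directory and thread pool live exactly as long as the upload session.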
def _format_spec(spec: Spec) -> str:
    return spec.cformat("{name}{@version}{/hash:7}")


-@contextlib.contextmanager
-def default_push_context() -> Generator[Tuple[str, concurrent.futures.Executor], None, None]:
-   with tempfile.TemporaryDirectory(
-       dir=spack.stage.get_stage_root()
-   ) as tmpdir, spack.util.parallel.make_concurrent_executor() as executor:
-       yield tmpdir, executor
-
-
-def push_or_raise(
-   specs: List[Spec],
-   out_url: str,
-   signing_key: Optional[str],
-   force: bool = False,
-   update_index: bool = False,
-) -> List[Spec]:
-   """Same as push, but raises an exception on error. Returns a list of skipped specs already
-   present in the build cache when force=False."""
-   skipped, errors = push(specs, out_url, signing_key, force, update_index)
-   if errors:
-       raise PushToBuildCacheError(
-           f"Failed to push {len(errors)} specs to {out_url}:\n"
-           + "\n".join(f"Failed to push {_format_spec(spec)}: {error}" for spec, error in errors)
-       )
-   return skipped
-
-
-def push(
-   specs: List[Spec],
-   out_url: str,
-   signing_key: Optional[str],
-   force: bool = False,
-   update_index: bool = False,
-) -> Tuple[List[Spec], List[Tuple[Spec, BaseException]]]:
-   """Pushes to the provided build cache, and returns a list of skipped specs that were already
-   present (when force=False). Does not raise on error."""
-   with default_push_context() as (tmpdir, executor):
-       return _push(specs, out_url, signing_key, force, update_index, tmpdir, executor)
-
-
class FancyProgress:
    def __init__(self, total: int):
        self.n = 0
@@ -1234,7 +1333,7 @@ def fail(self) -> None:
        tty.info(f"{self.pre}Failed to push {self.pretty_spec}")


-def _push(
+def _url_push(
    specs: List[Spec],
    out_url: str,
    signing_key: Optional[str],
@@ -1279,7 +1378,7 @@ def _push(

    upload_futures = [
        executor.submit(
-           _upload_tarball_and_specfile,
+           _url_upload_tarball_and_specfile,
            spec,
            tmpdir,
            out_url,
@@ -1309,12 +1408,12 @@ def _push(
    if signing_key:
        keys_tmpdir = os.path.join(tmpdir, "keys")
        os.mkdir(keys_tmpdir)
-       push_keys(out_url, keys=[signing_key], update_index=update_index, tmpdir=keys_tmpdir)
+       _url_push_keys(out_url, keys=[signing_key], update_index=update_index, tmpdir=keys_tmpdir)

    if update_index:
        index_tmpdir = os.path.join(tmpdir, "index")
        os.mkdir(index_tmpdir)
-       generate_package_index(out_url, index_tmpdir)
+       _url_generate_package_index(out_url, index_tmpdir)

    return skipped, errors

@@ -1431,12 +1530,9 @@ def _oci_put_manifest(
    for s in expected_blobs:
-       # If a layer for a dependency has gone missing (due to removed manifest in the registry, a
-       # failed push, or a local forced uninstall), we cannot create a runnable container image.
-       # If an OCI registry is only used for storage, this is not a hard error, but for now we
-       # raise an exception unconditionally, until someone requests a more lenient behavior.
        checksum = checksums.get(s.dag_hash())
-       if not checksum:
-           raise MissingLayerError(f"missing layer for {_format_spec(s)}")
-       config["rootfs"]["diff_ids"].append(str(checksum.uncompressed_digest))
+       if checksum:
+           config["rootfs"]["diff_ids"].append(str(checksum.uncompressed_digest))

    # Set the environment variables
    config["config"]["Env"] = [f"{k}={v}" for k, v in env.items()]
@@ -1481,6 +1577,7 @@ def _oci_put_manifest(
                "size": checksums[s.dag_hash()].size,
            }
            for s in expected_blobs
+           if s.dag_hash() in checksums
        ),
    ],
}
@@ -1519,7 +1616,7 @@ def _oci_update_base_images(
    )


-def _push_oci(
+def _oci_push(
    *,
    target_image: ImageReference,
    base_image: Optional[ImageReference],
@@ -1671,7 +1768,7 @@ def _oci_update_index(

    for spec_dict in spec_dicts:
        spec = Spec.from_dict(spec_dict)
-       db.add(spec, directory_layout=None)
+       db.add(spec)
        db.mark(spec, "in_buildcache", True)

    # Create the index.json file
@@ -2467,9 +2564,8 @@ def install_root_node(spec, unsigned=False, force=False, sha256=None):
    with spack.util.path.filter_padding():
        tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
        extract_tarball(spec, download_result, force)
-       spec.package.windows_establish_runtime_linkage()
        spack.hooks.post_install(spec, False)
-       spack.store.STORE.db.add(spec, spack.store.STORE.layout)
+       spack.store.STORE.db.add(spec)


def install_single_spec(spec, unsigned=False, force=False):
@@ -2645,7 +2741,7 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
    )


-def push_keys(
+def _url_push_keys(
    *mirrors: Union[spack.mirror.Mirror, str],
    keys: List[str],
    tmpdir: str,
@@ -3096,7 +3192,3 @@ class CannotListKeys(GenerateIndexError):

class PushToBuildCacheError(spack.error.SpackError):
    """Raised when unable to push objects to binary mirror"""
-
-
-class MissingLayerError(spack.error.SpackError):
-   """Raised when a required layer for a dependency is missing in an OCI registry."""
@@ -9,6 +9,7 @@
    all_core_root_specs,
    ensure_clingo_importable_or_raise,
    ensure_core_dependencies,
+   ensure_file_in_path_or_raise,
    ensure_gpg_in_path_or_raise,
    ensure_patchelf_in_path_or_raise,
)
@@ -19,6 +20,7 @@
    "is_bootstrapping",
    "ensure_bootstrap_configuration",
    "ensure_core_dependencies",
+   "ensure_file_in_path_or_raise",
    "ensure_gpg_in_path_or_raise",
    "ensure_clingo_importable_or_raise",
    "ensure_patchelf_in_path_or_raise",
@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Common basic functions used through the spack.bootstrap package"""
import fnmatch
+import importlib
import os.path
import re
import sys
@@ -28,7 +29,7 @@

def _python_import(module: str) -> bool:
    try:
-       __import__(module)
+       importlib.import_module(module)
    except ImportError:
        return False
    return True
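A small runnable aside (not from the diff) on why `importlib.import_module` is the less surprising API here: it returns the leaf module, while `__import__` of a dotted name returns the top-level package. For a bare can-it-import test either works, but the documented helper reads better:

    import importlib

    leaf = importlib.import_module("os.path")  # the path module itself
    top = __import__("os.path")                # the "os" package
    print(leaf.__name__, top.__name__)         # e.g. posixpath os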
@@ -143,11 +143,7 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
def _add_compilers_if_missing() -> None:
    arch = spack.spec.ArchSpec.frontend_arch()
    if not spack.compilers.compilers_for_arch(arch):
-       new_compilers = spack.compilers.find_new_compilers(
-           mixed_toolchain=sys.platform == "darwin"
-       )
-       if new_compilers:
-           spack.compilers.add_compilers_to_config(new_compilers)
+       spack.compilers.find_compilers()


@contextlib.contextmanager
@@ -156,7 +152,7 @@ def _ensure_bootstrap_configuration() -> Generator:
    bootstrap_store_path = store_path()
    user_configuration = _read_and_sanitize_configuration()
    with spack.environment.no_active_environment():
-       with spack.platforms.prevent_cray_detection(), spack.platforms.use_platform(
+       with spack.platforms.use_platform(
            spack.platforms.real_host()
        ), spack.repo.use_repositories(spack.paths.packages_path):
            # Default configuration scopes excluding command line
@@ -472,7 +472,8 @@ def ensure_clingo_importable_or_raise() -> None:

def gnupg_root_spec() -> str:
    """Return the root spec used to bootstrap GnuPG"""
-   return _root_spec("gnupg@2.3:")
+   root_spec_name = "win-gpg" if IS_WINDOWS else "gnupg"
+   return _root_spec(f"{root_spec_name}@2.3:")


def ensure_gpg_in_path_or_raise() -> None:
@@ -482,6 +483,19 @@ def ensure_gpg_in_path_or_raise() -> None:
    )


+def file_root_spec() -> str:
+   """Return the root spec used to bootstrap file"""
+   root_spec_name = "win-file" if IS_WINDOWS else "file"
+   return _root_spec(root_spec_name)
+
+
+def ensure_file_in_path_or_raise() -> None:
+   """Ensure file is in the PATH or raise"""
+   return ensure_executables_in_path_or_raise(
+       executables=["file"], abstract_spec=file_root_spec()
+   )
+
+
def patchelf_root_spec() -> str:
    """Return the root spec used to bootstrap patchelf"""
    # 0.13.1 is the last version not to require C++17.
@@ -565,14 +579,15 @@ def ensure_core_dependencies() -> None:
    """Ensure the presence of all the core dependencies."""
    if sys.platform.lower() == "linux":
        ensure_patchelf_in_path_or_raise()
-   if not IS_WINDOWS:
-       ensure_gpg_in_path_or_raise()
+   elif sys.platform == "win32":
+       ensure_file_in_path_or_raise()
+   ensure_gpg_in_path_or_raise()
    ensure_clingo_importable_or_raise()


def all_core_root_specs() -> List[str]:
    """Return a list of all the core root specs that may be used to bootstrap Spack"""
-   return [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec()]
+   return [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec(), file_root_spec()]


def bootstrapping_sources(scope: Optional[str] = None):
@@ -88,7 +88,7 @@ def _core_requirements() -> List[RequiredResponseType]:

def _buildcache_requirements() -> List[RequiredResponseType]:
    _buildcache_exes = {
-       "file": _missing("file", "required to analyze files for buildcaches"),
+       "file": _missing("file", "required to analyze files for buildcaches", system_only=False),
        ("gpg2", "gpg"): _missing("gpg2", "required to sign/verify buildcaches", False),
    }
    if platform.system().lower() == "darwin":
@@ -124,7 +124,7 @@ def _development_requirements() -> List[RequiredResponseType]:
    # Ensure we trigger environment modifications if we have an environment
    if BootstrapEnvironment.spack_yaml().exists():
        with BootstrapEnvironment() as env:
-           env.update_syspath_and_environ()
+           env.load()

    return [
        _required_executable(
@@ -457,9 +457,12 @@ def set_wrapper_variables(pkg, env):
    env.set(SPACK_DEBUG_LOG_ID, pkg.spec.format("{name}-{hash:7}"))
    env.set(SPACK_DEBUG_LOG_DIR, spack.main.spack_working_dir)

-   # Find ccache binary and hand it to build environment
    if spack.config.get("config:ccache"):
+       # Enable ccache in the compiler wrapper
        env.set(SPACK_CCACHE_BINARY, spack.util.executable.which_string("ccache", required=True))
+   else:
+       # Avoid cache pollution if a build system forces `ccache <compiler wrapper invocation>`.
+       env.set("CCACHE_DISABLE", "1")

    # Gather information about various types of dependencies
    link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))
@@ -1000,7 +1003,6 @@ def set_all_package_py_globals(self):
    """Set the globals in modules of package.py files."""
    for dspec, flag in chain(self.external, self.nonexternal):
        pkg = dspec.package

        if self.should_set_package_py_globals & flag:
            if self.context == Context.BUILD and self.needs_build_context & flag:
                set_package_py_globals(pkg, context=Context.BUILD)
@@ -1008,6 +1010,12 @@ def set_all_package_py_globals(self):
            # This includes runtime dependencies, also runtime deps of direct build deps.
            set_package_py_globals(pkg, context=Context.RUN)

+   # Looping over the set of packages a second time
+   # ensures all globals are loaded into the module space prior to
+   # any package setup. This guarantees package setup methods have
+   # access to expected module level definitions such as "spack_cc"
+   for dspec, flag in chain(self.external, self.nonexternal):
+       pkg = dspec.package
        for spec in dspec.dependents():
            # Note: some specs have dependents that are unreachable from the root, so avoid
            # setting globals for those.
@@ -1550,21 +1558,21 @@ class ModuleChangePropagator:

    _PROTECTED_NAMES = ("package", "current_module", "modules_in_mro", "_set_attributes")

-   def __init__(self, package):
+   def __init__(self, package: spack.package_base.PackageBase) -> None:
        self._set_self_attributes("package", package)
        self._set_self_attributes("current_module", package.module)

        #: Modules for the classes in the MRO up to PackageBase
        modules_in_mro = []
-       for cls in inspect.getmro(type(package)):
-           module = cls.module
+       for cls in package.__class__.__mro__:
+           module = getattr(cls, "module", None)

-           if module == self.current_module:
-               continue
-
-           if module == spack.package_base:
+           if module is None or module is spack.package_base:
                break

+           if module is self.current_module:
+               continue
+
            modules_in_mro.append(module)
        self._set_self_attributes("modules_in_mro", modules_in_mro)
        self._set_self_attributes("_set_attributes", {})
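A self-contained sketch (toy classes, not Spack's) of the MRO walk the rewritten `__init__` performs: `getattr(cls, "module", None)` replaces the bare attribute access so classes that never defined the attribute end the walk instead of raising `AttributeError`:

    import sys

    class Base: ...
    class Leaf(Base): ...

    for cls in Leaf.__mro__:
        module = sys.modules.get(cls.__module__)  # stand-in for cls.module
        if module is None:
            break
        print(cls.__name__, "->", module.__name__)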
@@ -2,7 +2,6 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import inspect
|
||||
import os
|
||||
import os.path
|
||||
import stat
|
||||
@@ -549,13 +548,12 @@ def autoreconf(self, pkg, spec, prefix):
|
||||
tty.warn("* a custom AUTORECONF phase in the package *")
|
||||
tty.warn("*********************************************************")
|
||||
with fs.working_dir(self.configure_directory):
|
||||
m = inspect.getmodule(self.pkg)
|
||||
# This line is what is needed most of the time
|
||||
# --install, --verbose, --force
|
||||
autoreconf_args = ["-ivf"]
|
||||
autoreconf_args += self.autoreconf_search_path_args
|
||||
autoreconf_args += self.autoreconf_extra_args
|
||||
m.autoreconf(*autoreconf_args)
|
||||
self.pkg.module.autoreconf(*autoreconf_args)
|
||||
|
||||
@property
|
||||
def autoreconf_search_path_args(self):
|
||||
@@ -579,7 +577,9 @@ def set_configure_or_die(self):
|
||||
raise RuntimeError(msg.format(self.configure_directory))
|
||||
|
||||
# Monkey-patch the configure script in the corresponding module
|
||||
inspect.getmodule(self.pkg).configure = Executable(self.configure_abs_path)
|
||||
globals_for_pkg = spack.build_environment.ModuleChangePropagator(self.pkg)
|
||||
globals_for_pkg.configure = Executable(self.configure_abs_path)
|
||||
globals_for_pkg.propagate_changes_to_mro()
|
||||
|
||||
def configure_args(self):
|
||||
"""Return the list of all the arguments that must be passed to configure,
|
||||
@@ -596,7 +596,7 @@ def configure(self, pkg, spec, prefix):
|
||||
options += self.configure_args()
|
||||
|
||||
with fs.working_dir(self.build_directory, create=True):
|
||||
inspect.getmodule(self.pkg).configure(*options)
|
||||
pkg.module.configure(*options)
|
||||
|
||||
def build(self, pkg, spec, prefix):
|
||||
"""Run "make" on the build targets specified by the builder."""
|
||||
@@ -604,12 +604,12 @@ def build(self, pkg, spec, prefix):
|
||||
params = ["V=1"]
|
||||
params += self.build_targets
|
||||
with fs.working_dir(self.build_directory):
|
||||
inspect.getmodule(self.pkg).make(*params)
|
||||
pkg.module.make(*params)
|
||||
|
||||
def install(self, pkg, spec, prefix):
|
||||
"""Run "make" on the install targets specified by the builder."""
|
||||
with fs.working_dir(self.build_directory):
|
||||
inspect.getmodule(self.pkg).make(*self.install_targets)
|
||||
pkg.module.make(*self.install_targets)
|
||||
|
||||
spack.builder.run_after("build")(execute_build_time_tests)
|
||||
|
||||
|
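The same mechanical substitution repeats through every builder file below (cargo, cmake, go, makefile, meson, msbuild, nmake, octave, perl, python, qmake, r, ruby, scons, sip, waf): `inspect.getmodule(self.pkg)` becomes the cached `pkg.module`. A toy, runnable sketch of why the two are interchangeable for package objects (the `Pkg` class is hypothetical, not Spack's API):

    import importlib
    import inspect

    class Pkg:
        # stand-in for the package.py module that Spack caches on each class
        module = importlib.import_module("json")

    pkg = Pkg()
    assert pkg.module is importlib.import_module("json")   # cached lookup
    assert inspect.getmodule(type(pkg)) is not None        # the generic, slower route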
@@ -3,8 +3,6 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import inspect
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
|
||||
import spack.builder
|
||||
@@ -72,9 +70,7 @@ def check_args(self):
|
||||
def build(self, pkg, spec, prefix):
|
||||
"""Runs ``cargo install`` in the source directory"""
|
||||
with fs.working_dir(self.build_directory):
|
||||
inspect.getmodule(pkg).cargo(
|
||||
"install", "--root", "out", "--path", ".", *self.build_args
|
||||
)
|
||||
pkg.module.cargo("install", "--root", "out", "--path", ".", *self.build_args)
|
||||
|
||||
def install(self, pkg, spec, prefix):
|
||||
"""Copy build files into package prefix."""
|
||||
@@ -86,4 +82,4 @@ def install(self, pkg, spec, prefix):
|
||||
def check(self):
|
||||
"""Run "cargo test"."""
|
||||
with fs.working_dir(self.build_directory):
|
||||
inspect.getmodule(self.pkg).cargo("test", *self.check_args)
|
||||
self.pkg.module.cargo("test", *self.check_args)
|
||||
|
@@ -3,7 +3,6 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import collections.abc
|
||||
import inspect
|
||||
import os
|
||||
import pathlib
|
||||
import platform
|
||||
@@ -108,6 +107,11 @@ def _conditional_cmake_defaults(pkg: spack.package_base.PackageBase, args: List[
|
||||
if _supports_compilation_databases(pkg):
|
||||
args.append(CMakeBuilder.define("CMAKE_EXPORT_COMPILE_COMMANDS", True))
|
||||
|
||||
# Enable MACOSX_RPATH by default when cmake_minimum_required < 3
|
||||
# https://cmake.org/cmake/help/latest/policy/CMP0042.html
|
||||
if pkg.spec.satisfies("platform=darwin") and cmake.satisfies("@3:"):
|
||||
args.append(CMakeBuilder.define("CMAKE_POLICY_DEFAULT_CMP0042", "NEW"))
|
||||
|
||||
|
||||
def generator(*names: str, default: Optional[str] = None):
|
||||
"""The build system generator to use.
|
||||
@@ -539,24 +543,24 @@ def cmake(self, pkg, spec, prefix):
|
||||
options += self.cmake_args()
|
||||
options.append(os.path.abspath(self.root_cmakelists_dir))
|
||||
with fs.working_dir(self.build_directory, create=True):
|
||||
inspect.getmodule(self.pkg).cmake(*options)
|
||||
pkg.module.cmake(*options)
|
||||
|
||||
def build(self, pkg, spec, prefix):
|
||||
"""Make the build targets"""
|
||||
with fs.working_dir(self.build_directory):
|
||||
if self.generator == "Unix Makefiles":
|
||||
inspect.getmodule(self.pkg).make(*self.build_targets)
|
||||
pkg.module.make(*self.build_targets)
|
||||
elif self.generator == "Ninja":
|
||||
self.build_targets.append("-v")
|
||||
inspect.getmodule(self.pkg).ninja(*self.build_targets)
|
||||
pkg.module.ninja(*self.build_targets)
|
||||
|
||||
def install(self, pkg, spec, prefix):
|
||||
"""Make the install targets"""
|
||||
with fs.working_dir(self.build_directory):
|
||||
if self.generator == "Unix Makefiles":
|
||||
inspect.getmodule(self.pkg).make(*self.install_targets)
|
||||
pkg.module.make(*self.install_targets)
|
||||
elif self.generator == "Ninja":
|
||||
inspect.getmodule(self.pkg).ninja(*self.install_targets)
|
||||
pkg.module.ninja(*self.install_targets)
|
||||
|
||||
spack.builder.run_after("build")(execute_build_time_tests)
|
||||
|
||||
|
@@ -138,7 +138,7 @@ def cuda_flags(arch_list):
    conflicts("%gcc@11.2:", when="+cuda ^cuda@:11.5")
    conflicts("%gcc@12:", when="+cuda ^cuda@:11.8")
    conflicts("%gcc@13:", when="+cuda ^cuda@:12.3")
-   conflicts("%gcc@14:", when="+cuda ^cuda@:12.5")
+   conflicts("%gcc@14:", when="+cuda ^cuda@:12.6")
    conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
    conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
    conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
@@ -146,6 +146,7 @@ def cuda_flags(arch_list):
    conflicts("%clang@16:", when="+cuda ^cuda@:12.1")
    conflicts("%clang@17:", when="+cuda ^cuda@:12.3")
    conflicts("%clang@18:", when="+cuda ^cuda@:12.5")
+   conflicts("%clang@19:", when="+cuda ^cuda@:12.6")

    # https://gist.github.com/ax3l/9489132#gistcomment-3860114
    conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")
@@ -3,8 +3,6 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import inspect
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
|
||||
import spack.builder
|
||||
@@ -82,7 +80,7 @@ def check_args(self):
|
||||
def build(self, pkg, spec, prefix):
|
||||
"""Runs ``go build`` in the source directory"""
|
||||
with fs.working_dir(self.build_directory):
|
||||
inspect.getmodule(pkg).go("build", *self.build_args)
|
||||
pkg.module.go("build", *self.build_args)
|
||||
|
||||
def install(self, pkg, spec, prefix):
|
||||
"""Install built binaries into prefix bin."""
|
||||
@@ -95,4 +93,4 @@ def install(self, pkg, spec, prefix):
|
||||
def check(self):
|
||||
"""Run ``go test .`` in the source directory"""
|
||||
with fs.working_dir(self.build_directory):
|
||||
inspect.getmodule(self.pkg).go("test", *self.check_args)
|
||||
self.pkg.module.go("test", *self.check_args)
|
||||
|
@@ -2,7 +2,6 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import inspect
|
||||
from typing import List
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
@@ -103,12 +102,12 @@ def edit(self, pkg, spec, prefix):
|
||||
def build(self, pkg, spec, prefix):
|
||||
"""Run "make" on the build targets specified by the builder."""
|
||||
with fs.working_dir(self.build_directory):
|
||||
inspect.getmodule(self.pkg).make(*self.build_targets)
|
||||
pkg.module.make(*self.build_targets)
|
||||
|
||||
def install(self, pkg, spec, prefix):
|
||||
"""Run "make" on the install targets specified by the builder."""
|
||||
with fs.working_dir(self.build_directory):
|
||||
inspect.getmodule(self.pkg).make(*self.install_targets)
|
||||
pkg.module.make(*self.install_targets)
|
||||
|
||||
spack.builder.run_after("build")(execute_build_time_tests)
|
||||
|
||||
|
@@ -2,7 +2,6 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import inspect
|
||||
import os
|
||||
from typing import List
|
||||
|
||||
@@ -195,19 +194,19 @@ def meson(self, pkg, spec, prefix):
|
||||
options += self.std_meson_args
|
||||
options += self.meson_args()
|
||||
with fs.working_dir(self.build_directory, create=True):
|
||||
inspect.getmodule(self.pkg).meson(*options)
|
||||
pkg.module.meson(*options)
|
||||
|
||||
def build(self, pkg, spec, prefix):
|
||||
"""Make the build targets"""
|
||||
options = ["-v"]
|
||||
options += self.build_targets
|
||||
with fs.working_dir(self.build_directory):
|
||||
inspect.getmodule(self.pkg).ninja(*options)
|
||||
pkg.module.ninja(*options)
|
||||
|
||||
def install(self, pkg, spec, prefix):
|
||||
"""Make the install targets"""
|
||||
with fs.working_dir(self.build_directory):
|
||||
inspect.getmodule(self.pkg).ninja(*self.install_targets)
|
||||
pkg.module.ninja(*self.install_targets)
|
||||
|
||||
spack.builder.run_after("build")(execute_build_time_tests)
|
||||
|
||||
|
@@ -2,7 +2,6 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import inspect
|
||||
from typing import List # novm
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
@@ -104,7 +103,7 @@ def msbuild_install_args(self):
|
||||
def build(self, pkg, spec, prefix):
|
||||
"""Run "msbuild" on the build targets specified by the builder."""
|
||||
with fs.working_dir(self.build_directory):
|
||||
inspect.getmodule(self.pkg).msbuild(
|
||||
pkg.module.msbuild(
|
||||
*self.std_msbuild_args,
|
||||
*self.msbuild_args(),
|
||||
self.define_targets(*self.build_targets),
|
||||
@@ -114,6 +113,6 @@ def install(self, pkg, spec, prefix):
|
||||
"""Run "msbuild" on the install targets specified by the builder.
|
||||
This is INSTALL by default"""
|
||||
with fs.working_dir(self.build_directory):
|
||||
inspect.getmodule(self.pkg).msbuild(
|
||||
pkg.module.msbuild(
|
||||
*self.msbuild_install_args(), self.define_targets(*self.install_targets)
|
||||
)
|
||||
|
@@ -2,7 +2,6 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import inspect
|
||||
from typing import List # novm
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
@@ -132,9 +131,7 @@ def build(self, pkg, spec, prefix):
|
||||
if self.makefile_name:
|
||||
opts.append("/F{}".format(self.makefile_name))
|
||||
with fs.working_dir(self.build_directory):
|
||||
inspect.getmodule(self.pkg).nmake(
|
||||
*opts, *self.build_targets, ignore_quotes=self.ignore_quotes
|
||||
)
|
||||
pkg.module.nmake(*opts, *self.build_targets, ignore_quotes=self.ignore_quotes)
|
||||
|
||||
def install(self, pkg, spec, prefix):
|
||||
"""Run "nmake" on the install targets specified by the builder.
|
||||
@@ -146,6 +143,4 @@ def install(self, pkg, spec, prefix):
|
||||
opts.append("/F{}".format(self.makefile_name))
|
||||
opts.append(self.define("PREFIX", fs.windows_sfn(prefix)))
|
||||
with fs.working_dir(self.build_directory):
|
||||
inspect.getmodule(self.pkg).nmake(
|
||||
*opts, *self.install_targets, ignore_quotes=self.ignore_quotes
|
||||
)
|
||||
pkg.module.nmake(*opts, *self.install_targets, ignore_quotes=self.ignore_quotes)
|
||||
|
@@ -2,8 +2,6 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import inspect
|
||||
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
from spack.directives import build_system, extends
|
||||
@@ -47,7 +45,7 @@ class OctaveBuilder(BaseBuilder):
|
||||
|
||||
def install(self, pkg, spec, prefix):
|
||||
"""Install the package from the archive file"""
|
||||
inspect.getmodule(self.pkg).octave(
|
||||
pkg.module.octave(
|
||||
"--quiet",
|
||||
"--norc",
|
||||
"--built-in-docstrings-file=/dev/null",
|
||||
|
@@ -2,7 +2,6 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import inspect
|
||||
import os
|
||||
from typing import Iterable
|
||||
|
||||
@@ -134,7 +133,7 @@ def build_method(self):
|
||||
def build_executable(self):
|
||||
"""Returns the executable method to build the perl package"""
|
||||
if self.build_method == "Makefile.PL":
|
||||
build_executable = inspect.getmodule(self.pkg).make
|
||||
build_executable = self.pkg.module.make
|
||||
elif self.build_method == "Build.PL":
|
||||
build_executable = Executable(os.path.join(self.pkg.stage.source_path, "Build"))
|
||||
return build_executable
|
||||
@@ -158,7 +157,7 @@ def configure(self, pkg, spec, prefix):
|
||||
options = ["Build.PL", "--install_base", prefix]
|
||||
options += self.configure_args()
|
||||
|
||||
inspect.getmodule(self.pkg).perl(*options)
|
||||
pkg.module.perl(*options)
|
||||
|
||||
# It is possible that the shebang in the Build script that is created from
|
||||
# Build.PL may be too long causing the build to fail. Patching the shebang
|
||||
|
@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import functools
-import inspect
import operator
import os
import re
@@ -17,7 +16,7 @@
import llnl.util.filesystem as fs
import llnl.util.lang as lang
import llnl.util.tty as tty
-from llnl.util.filesystem import HeaderList, LibraryList
+from llnl.util.filesystem import HeaderList, LibraryList, join_path

import spack.builder
import spack.config
@@ -120,6 +119,12 @@ def skip_modules(self) -> Iterable[str]:
        """
        return []

+   @property
+   def bindir(self) -> str:
+       """Path to Python package's bindir, bin on unix like OS's Scripts on Windows"""
+       windows = self.spec.satisfies("platform=windows")
+       return join_path(self.spec.prefix, "Scripts" if windows else "bin")
+
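A hedged, runnable sketch of what the new `bindir` property resolves to (plain `os.path.join` stands in for llnl's `join_path`; the prefix is invented):

    import os

    prefix = "/opt/spack/py-example-1.0"  # hypothetical install prefix
    windows = os.name == "nt"
    bindir = os.path.join(prefix, "Scripts" if windows else "bin")
    print(bindir)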
    def view_file_conflicts(self, view, merge_map):
        """Report all file conflicts, excepting special cases for python.
        Specifically, this does not report errors for duplicate
@@ -222,7 +227,7 @@ def test_imports(self) -> None:

        # Make sure we are importing the installed modules,
        # not the ones in the source directory
-       python = inspect.getmodule(self).python  # type: ignore[union-attr]
+       python = self.module.python
        for module in self.import_modules:
            with test_part(
                self,
@@ -309,9 +314,9 @@ def get_external_python_for_prefix(self):
        )

        python_externals_detected = [
-           d.spec
-           for d in python_externals_detection.get("python", [])
-           if d.prefix == self.spec.external_path
+           spec
+           for spec in python_externals_detection.get("python", [])
+           if spec.external_path == self.spec.external_path
        ]
        if python_externals_detected:
            return python_externals_detected[0]
@@ -2,8 +2,6 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import inspect
|
||||
|
||||
from llnl.util.filesystem import working_dir
|
||||
|
||||
import spack.builder
|
||||
@@ -66,17 +64,17 @@ def qmake_args(self):
|
||||
def qmake(self, pkg, spec, prefix):
|
||||
"""Run ``qmake`` to configure the project and generate a Makefile."""
|
||||
with working_dir(self.build_directory):
|
||||
inspect.getmodule(self.pkg).qmake(*self.qmake_args())
|
||||
pkg.module.qmake(*self.qmake_args())
|
||||
|
||||
def build(self, pkg, spec, prefix):
|
||||
"""Make the build targets"""
|
||||
with working_dir(self.build_directory):
|
||||
inspect.getmodule(self.pkg).make()
|
||||
pkg.module.make()
|
||||
|
||||
def install(self, pkg, spec, prefix):
|
||||
"""Make the install targets"""
|
||||
with working_dir(self.build_directory):
|
||||
inspect.getmodule(self.pkg).make("install")
|
||||
pkg.module.make("install")
|
||||
|
||||
def check(self):
|
||||
"""Search the Makefile for a ``check:`` target and runs it if found."""
|
||||
|
@@ -2,7 +2,6 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import inspect
|
||||
from typing import Optional, Tuple
|
||||
|
||||
import llnl.util.lang as lang
|
||||
@@ -51,7 +50,7 @@ def install(self, pkg, spec, prefix):
|
||||
|
||||
args.extend(["--library={0}".format(self.pkg.module.r_lib_dir), self.stage.source_path])
|
||||
|
||||
inspect.getmodule(self.pkg).R(*args)
|
||||
pkg.module.R(*args)
|
||||
|
||||
|
||||
class RPackage(Package):
|
||||
@@ -85,28 +84,20 @@ def homepage(cls):
|
||||
return "https://bioconductor.org/packages/" + cls.bioc
|
||||
|
||||
@lang.classproperty
|
||||
def urls(cls):
|
||||
def url(cls):
|
||||
if cls.cran:
|
||||
return [
|
||||
return (
|
||||
"https://cloud.r-project.org/src/contrib/"
|
||||
+ f"{cls.cran}_{str(list(cls.versions)[0])}.tar.gz",
|
||||
"https://cloud.r-project.org/src/contrib/Archive/{cls.cran}/"
|
||||
+ f"{cls.cran}_{str(list(cls.versions)[0])}.tar.gz",
|
||||
]
|
||||
elif cls.bioc:
|
||||
return [
|
||||
"https://bioconductor.org/packages/release/bioc/src/contrib/"
|
||||
+ f"{cls.bioc}_{str(list(cls.versions)[0])}.tar.gz",
|
||||
"https://bioconductor.org/packages/release/data/annotation/src/contrib/"
|
||||
+ f"{cls.bioc}_{str(list(cls.versions)[0])}.tar.gz",
|
||||
]
|
||||
else:
|
||||
return [cls.url]
|
||||
+ cls.cran
|
||||
+ "_"
|
||||
+ str(list(cls.versions)[0])
|
||||
+ ".tar.gz"
|
||||
)
|
||||
|
||||
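For concreteness, a runnable sketch of the URL this classproperty produces (package name and version are invented):

    cran, first_version = "ggplot2", "3.5.1"  # hypothetical CRAN name/version
    url = "https://cloud.r-project.org/src/contrib/" + cran + "_" + first_version + ".tar.gz"
    print(url)  # https://cloud.r-project.org/src/contrib/ggplot2_3.5.1.tar.gz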
    @lang.classproperty
    def list_url(cls):
        if cls.cran:
-           return "https://cloud.r-project.org/src/contrib/"
+           return "https://cloud.r-project.org/src/contrib/Archive/" + cls.cran + "/"

    @property
    def git(self):
@@ -3,7 +3,6 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import glob
|
||||
import inspect
|
||||
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
@@ -52,10 +51,10 @@ def build(self, pkg, spec, prefix):
|
||||
gemspecs = glob.glob("*.gemspec")
|
||||
rakefiles = glob.glob("Rakefile")
|
||||
if gemspecs:
|
||||
inspect.getmodule(self.pkg).gem("build", "--norc", gemspecs[0])
|
||||
pkg.module.gem("build", "--norc", gemspecs[0])
|
||||
elif rakefiles:
|
||||
jobs = inspect.getmodule(self.pkg).make_jobs
|
||||
inspect.getmodule(self.pkg).rake("package", "-j{0}".format(jobs))
|
||||
jobs = pkg.module.make_jobs
|
||||
pkg.module.rake("package", "-j{0}".format(jobs))
|
||||
else:
|
||||
# Some Ruby packages only ship `*.gem` files, so nothing to build
|
||||
pass
|
||||
@@ -70,6 +69,6 @@ def install(self, pkg, spec, prefix):
|
||||
# if --install-dir is not used, GEM_PATH is deleted from the
|
||||
# environement, and Gems required to build native extensions will
|
||||
# not be found. Those extensions are built during `gem install`.
|
||||
inspect.getmodule(self.pkg).gem(
|
||||
pkg.module.gem(
|
||||
"install", "--norc", "--ignore-dependencies", "--install-dir", prefix, gems[0]
|
||||
)
|
||||
|
@@ -2,8 +2,6 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import inspect
|
||||
|
||||
import spack.builder
|
||||
import spack.package_base
|
||||
from spack.directives import build_system, depends_on
|
||||
@@ -63,8 +61,7 @@ def build_args(self, spec, prefix):
|
||||
|
||||
def build(self, pkg, spec, prefix):
|
||||
"""Build the package."""
|
||||
args = self.build_args(spec, prefix)
|
||||
inspect.getmodule(self.pkg).scons(*args)
|
||||
pkg.module.scons(*self.build_args(spec, prefix))
|
||||
|
||||
def install_args(self, spec, prefix):
|
||||
"""Arguments to pass to install."""
|
||||
@@ -72,9 +69,7 @@ def install_args(self, spec, prefix):
|
||||
|
||||
def install(self, pkg, spec, prefix):
|
||||
"""Install the package."""
|
||||
args = self.install_args(spec, prefix)
|
||||
|
||||
inspect.getmodule(self.pkg).scons("install", *args)
|
||||
pkg.module.scons("install", *self.install_args(spec, prefix))
|
||||
|
||||
def build_test(self):
|
||||
"""Run unit tests after build.
|
||||
|
@@ -2,7 +2,6 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import inspect
|
||||
import os
|
||||
import re
|
||||
|
||||
@@ -86,14 +85,13 @@ def import_modules(self):
|
||||
|
||||
def python(self, *args, **kwargs):
|
||||
"""The python ``Executable``."""
|
||||
inspect.getmodule(self).python(*args, **kwargs)
|
||||
self.pkg.module.python(*args, **kwargs)
|
||||
|
||||
def test_imports(self):
|
||||
"""Attempts to import modules of the installed package."""
|
||||
|
||||
# Make sure we are importing the installed modules,
|
||||
# not the ones in the source directory
|
||||
python = inspect.getmodule(self).python
|
||||
for module in self.import_modules:
|
||||
with spack.install_test.test_part(
|
||||
self,
|
||||
@@ -101,7 +99,7 @@ def test_imports(self):
|
||||
purpose="checking import of {0}".format(module),
|
||||
work_dir="spack-test",
|
||||
):
|
||||
python("-c", "import {0}".format(module))
|
||||
self.python("-c", "import {0}".format(module))
|
||||
|
||||
|
||||
@spack.builder.builder("sip")
|
||||
@@ -136,7 +134,7 @@ def configure(self, pkg, spec, prefix):
|
||||
"""Configure the package."""
|
||||
|
||||
# https://www.riverbankcomputing.com/static/Docs/sip/command_line_tools.html
|
||||
args = ["--verbose", "--target-dir", inspect.getmodule(self.pkg).python_platlib]
|
||||
args = ["--verbose", "--target-dir", pkg.module.python_platlib]
|
||||
args.extend(self.configure_args())
|
||||
|
||||
# https://github.com/Python-SIP/sip/commit/cb0be6cb6e9b756b8b0db3136efb014f6fb9b766
|
||||
@@ -155,7 +153,7 @@ def build(self, pkg, spec, prefix):
|
||||
args = self.build_args()
|
||||
|
||||
with working_dir(self.build_directory):
|
||||
inspect.getmodule(self.pkg).make(*args)
|
||||
pkg.module.make(*args)
|
||||
|
||||
def build_args(self):
|
||||
"""Arguments to pass to build."""
|
||||
@@ -166,7 +164,7 @@ def install(self, pkg, spec, prefix):
|
||||
args = self.install_args()
|
||||
|
||||
with working_dir(self.build_directory):
|
||||
inspect.getmodule(self.pkg).make("install", *args)
|
||||
pkg.module.make("install", *args)
|
||||
|
||||
def install_args(self):
|
||||
"""Arguments to pass to install."""
|
||||
|
@@ -2,8 +2,6 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import inspect
|
||||
|
||||
from llnl.util.filesystem import working_dir
|
||||
|
||||
import spack.builder
|
||||
@@ -90,11 +88,11 @@ def build_directory(self):
|
||||
|
||||
def python(self, *args, **kwargs):
|
||||
"""The python ``Executable``."""
|
||||
inspect.getmodule(self.pkg).python(*args, **kwargs)
|
||||
self.pkg.module.python(*args, **kwargs)
|
||||
|
||||
def waf(self, *args, **kwargs):
|
||||
"""Runs the waf ``Executable``."""
|
||||
jobs = inspect.getmodule(self.pkg).make_jobs
|
||||
jobs = self.pkg.module.make_jobs
|
||||
|
||||
with working_dir(self.build_directory):
|
||||
self.python("waf", "-j{0}".format(jobs), *args, **kwargs)
|
||||
|
@@ -6,12 +6,12 @@
import collections.abc
import copy
import functools
-import inspect
from typing import List, Optional, Tuple

from llnl.util import lang

import spack.build_environment
+import spack.multimethod

#: Builder classes, as registered by the "builder" decorator
BUILDER_CLS = {}
@@ -96,11 +96,10 @@ class hierarchy (look at AspellDictPackage for an example of that)
    Args:
        pkg (spack.package_base.PackageBase): package object for which we need a builder
    """
-   package_module = inspect.getmodule(pkg)
    package_buildsystem = buildsystem_name(pkg)
    default_builder_cls = BUILDER_CLS[package_buildsystem]
    builder_cls_name = default_builder_cls.__name__
-   builder_cls = getattr(package_module, builder_cls_name, None)
+   builder_cls = getattr(pkg.module, builder_cls_name, None)
    if builder_cls:
        return builder_cls(pkg)

@@ -295,7 +294,11 @@ def _decorator(fn):
    return _decorator


-class BuilderMeta(PhaseCallbacksMeta, type(collections.abc.Sequence)):  # type: ignore
+class BuilderMeta(
+   PhaseCallbacksMeta,
+   spack.multimethod.MultiMethodMeta,
+   type(collections.abc.Sequence),  # type: ignore
+):
    pass
@@ -9,11 +9,11 @@
import llnl.util.lang
from llnl.util.filesystem import mkdirp
from llnl.util.symlink import symlink

import spack.config
import spack.error
import spack.fetch_strategy
import spack.mirror
import spack.paths
import spack.util.file_cache
import spack.util.path
@@ -74,23 +74,6 @@ def store(self, fetcher, relative_dest):
        mkdirp(os.path.dirname(dst))
        fetcher.archive(dst)

-    def symlink(self, mirror_ref):
-        """Symlink a human readible path in our mirror to the actual
-        storage location."""
-
-        cosmetic_path = os.path.join(self.root, mirror_ref.cosmetic_path)
-        storage_path = os.path.join(self.root, mirror_ref.storage_path)
-        relative_dst = os.path.relpath(storage_path, start=os.path.dirname(cosmetic_path))
-
-        if not os.path.exists(cosmetic_path):
-            if os.path.lexists(cosmetic_path):
-                # In this case the link itself exists but it is broken: remove
-                # it and recreate it (in order to fix any symlinks broken prior
-                # to https://github.com/spack/spack/pull/13908)
-                os.unlink(cosmetic_path)
-            mkdirp(os.path.dirname(cosmetic_path))
-            symlink(relative_dst, cosmetic_path)


#: Spack's local cache for downloaded source archives
FETCH_CACHE: Union[spack.fetch_strategy.FsCache, llnl.util.lang.Singleton] = (
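The deleted symlink helper shows a small, reusable pattern: create a relative link and repair a dangling one. A standalone sketch of the same logic (hypothetical relink helper, not a Spack API):

    import os

    def relink(target: str, link_path: str) -> None:
        """Create a relative symlink, replacing a broken one if present."""
        rel_target = os.path.relpath(target, start=os.path.dirname(link_path))
        # os.path.exists() follows symlinks while os.path.lexists() does not,
        # so a dangling link is "lexists but not exists".
        if not os.path.exists(link_path):
            if os.path.lexists(link_path):
                os.unlink(link_path)
            os.makedirs(os.path.dirname(link_path), exist_ok=True)
            os.symlink(rel_target, link_path)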
@@ -1110,7 +1110,8 @@ def main_script_replacements(cmd):
                cdash_handler.populate_buildgroup(all_job_names)
            except (SpackError, HTTPError, URLError, TimeoutError) as err:
                tty.warn(f"Problem populating buildgroup: {err}")
-    else:
+    elif cdash_config:
+        # warn only if there was actually a CDash configuration.
        tty.warn("Unable to populate buildgroup without CDash credentials")

    service_job_retries = {
@@ -1382,8 +1383,10 @@ def push_to_build_cache(spec: spack.spec.Spec, mirror_url: str, sign_binaries: b
    """
    tty.debug(f"Pushing to build cache ({'signed' if sign_binaries else 'unsigned'})")
    signing_key = bindist.select_signing_key() if sign_binaries else None
+    mirror = spack.mirror.Mirror.from_url(mirror_url)
    try:
-        bindist.push_or_raise([spec], out_url=mirror_url, signing_key=signing_key)
+        with bindist.make_uploader(mirror, signing_key=signing_key) as uploader:
+            uploader.push_or_raise([spec])
        return True
    except bindist.PushToBuildCacheError as e:
        tty.error(f"Problem writing to {mirror_url}: {e}")
@@ -1433,10 +1436,6 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) ->
        job_log_dir: path into which build log should be copied
    """
    tty.debug(f"job spec: {job_spec}")
-    if not job_spec:
-        msg = f"Cannot copy stage logs: job spec ({job_spec}) is required"
-        tty.error(msg)
-        return

    try:
        pkg_cls = spack.repo.PATH.get_pkg_class(job_spec.name)
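push_to_build_cache now funnels the upload through a context manager, which keeps setup and cleanup in one place even when the push raises. A reduced sketch of the pattern with hypothetical names (Uploader, make_uploader):

    from contextlib import contextmanager
    from typing import Iterator, List

    class Uploader:
        def __init__(self, url: str) -> None:
            self.url = url
            self.queue: List[str] = []

        def push_or_raise(self, specs: List[str]) -> None:
            self.queue.extend(specs)

    @contextmanager
    def make_uploader(url: str) -> Iterator[Uploader]:
        uploader = Uploader(url)
        try:
            yield uploader
        finally:
            # Runs on success and on error alike.
            print(f"flushing {len(uploader.queue)} item(s) to {url}")

    with make_uploader("s3://example-mirror") as up:
        up.push_or_raise(["zlib@1.3"])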
@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import argparse
+import importlib
import os
import re
import sys
@@ -114,8 +115,8 @@ def get_module(cmd_name):

    try:
        # Try to import the command from the built-in directory
-        module_name = "%s.%s" % (__name__, pname)
-        module = __import__(module_name, fromlist=[pname, SETUP_PARSER, DESCRIPTION], level=0)
+        module_name = f"{__name__}.{pname}"
+        module = importlib.import_module(module_name)
        tty.debug("Imported {0} from built-in commands".format(pname))
    except ImportError:
        module = spack.extensions.get_module(cmd_name)
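importlib.import_module is the supported replacement for raw __import__: it returns the leaf module directly instead of the top-level package, so no fromlist trick is needed. For example:

    import importlib

    # __import__("json.decoder") would return the `json` package;
    # import_module returns json.decoder itself.
    decoder_mod = importlib.import_module("json.decoder")
    print(decoder_mod.JSONDecoder().decode('{"a": 1}'))  # {'a': 1}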
@@ -37,7 +37,6 @@
from spack import traverse
from spack.cmd import display_specs
from spack.cmd.common import arguments
-from spack.oci.image import ImageReference
from spack.spec import Spec, save_dependency_specfiles

description = "create, download and install binary packages"
@@ -392,13 +391,8 @@ def push_fn(args):
    else:
        roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots()

-    mirror: spack.mirror.Mirror = args.mirror
-
-    # Check if this is an OCI image.
-    try:
-        target_image = spack.oci.oci.image_from_mirror(mirror)
-    except ValueError:
-        target_image = None
+    mirror = args.mirror
+    assert isinstance(mirror, spack.mirror.Mirror)

    push_url = mirror.push_url
@@ -409,14 +403,11 @@ def push_fn(args):
    unsigned = not (args.key or args.signed)

    # For OCI images, we require dependencies to be pushed for now.
-    if target_image:
        if "dependencies" not in args.things_to_install:
            tty.die("Dependencies must be pushed for OCI images.")
-        if not unsigned:
-            tty.warn(
-                "Code signing is currently not supported for OCI images. "
-                "Use --unsigned to silence this warning."
-            )
+    if mirror.push_url.startswith("oci://") and not unsigned:
+        tty.warn(
+            "Code signing is currently not supported for OCI images. "
+            "Use --unsigned to silence this warning."
+        )
+        unsigned = True

    # Select a signing key, or None if unsigned.
@@ -447,49 +438,17 @@ def push_fn(args):
        (s, PackageNotInstalledError("package not installed")) for s in not_installed
    )

-    with bindist.default_push_context() as (tmpdir, executor):
-        if target_image:
-            base_image = ImageReference.from_string(args.base_image) if args.base_image else None
-            skipped, base_images, checksums, upload_errors = bindist._push_oci(
-                target_image=target_image,
-                base_image=base_image,
-                installed_specs_with_deps=specs,
-                force=args.force,
-                tmpdir=tmpdir,
-                executor=executor,
-            )
-
-            if upload_errors:
-                failed.extend(upload_errors)
-
-            # Apart from creating manifests for each individual spec, we allow users to create a
-            # separate image tag for all root specs and their runtime dependencies.
-            elif args.tag:
-                tagged_image = target_image.with_tag(args.tag)
-                # _push_oci may not populate base_images if binaries were already in the registry
-                for spec in roots:
-                    bindist._oci_update_base_images(
-                        base_image=base_image,
-                        target_image=target_image,
-                        spec=spec,
-                        base_image_cache=base_images,
-                    )
-                bindist._oci_put_manifest(
-                    base_images, checksums, tagged_image, tmpdir, None, None, *roots
-                )
-                tty.info(f"Tagged {tagged_image}")
-
-        else:
-            skipped, upload_errors = bindist._push(
-                specs,
-                out_url=push_url,
-                force=args.force,
-                update_index=args.update_index,
-                signing_key=signing_key,
-                tmpdir=tmpdir,
-                executor=executor,
-            )
-            failed.extend(upload_errors)
+    with bindist.make_uploader(
+        mirror=mirror,
+        force=args.force,
+        update_index=args.update_index,
+        signing_key=signing_key,
+        base_image=args.base_image,
+    ) as uploader:
+        skipped, upload_errors = uploader.push(specs=specs)
+        failed.extend(upload_errors)
+        if not upload_errors and args.tag:
+            uploader.tag(args.tag, roots)

    if skipped:
        if len(specs) == 1:
@@ -501,7 +460,7 @@ def push_fn(args):
                "The following {} specs were skipped as they already exist in the buildcache:\n"
                "    {}\n"
                "    Use --force to overwrite them.".format(
-                    len(skipped), ", ".join(elide_list(skipped, 5))
+                    len(skipped), ", ".join(elide_list([_format_spec(s) for s in skipped], 5))
                )
            )
@@ -522,13 +481,6 @@ def push_fn(args):
        ),
    )

-    # Update the OCI index if requested
-    if target_image and len(skipped) < len(specs) and args.update_index:
-        with tempfile.TemporaryDirectory(
-            dir=spack.stage.get_stage_root()
-        ) as tmpdir, spack.util.parallel.make_concurrent_executor() as executor:
-            bindist._oci_update_index(target_image, tmpdir, executor)


def install_fn(args):
    """install from a binary package"""
@@ -816,7 +768,7 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):
    url = mirror.push_url

    with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
-        bindist.generate_package_index(url, tmpdir)
+        bindist._url_generate_package_index(url, tmpdir)

    if update_keys:
        keys_url = url_util.join(
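The skipped-specs message relies on elide_list to keep output short. A hypothetical re-implementation that mirrors the llnl.util.lang helper's behavior (first max_num - 1 items, an ellipsis, then the last item):

    from typing import List

    def elide_list(items: List[str], max_num: int = 10) -> List[str]:
        if len(items) > max_num:
            return items[: max_num - 1] + ["..."] + items[-1:]
        return items

    skipped = [f"pkg-{i}@1.0" for i in range(8)]
    print(", ".join(elide_list(skipped, 5)))
    # pkg-0@1.0, pkg-1@1.0, pkg-2@1.0, pkg-3@1.0, ..., pkg-7@1.0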
@@ -7,6 +7,7 @@
import copy
import os
import re
+import shlex
import sys
from argparse import ArgumentParser, Namespace
from typing import IO, Any, Callable, Dict, Iterable, List, Optional, Sequence, Set, Tuple, Union
@@ -18,6 +19,7 @@
import spack.cmd
import spack.main
import spack.paths
+import spack.platforms
from spack.main import section_descriptions

description = "list available spack commands"
@@ -139,7 +141,7 @@ def usage(self, usage: str) -> str:

        cmd = self.parser.prog.replace(" ", "-")
        if cmd in self.documented:
-            string += "\n:ref:`More documentation <cmd-{0}>`\n".format(cmd)
+            string = f"{string}\n:ref:`More documentation <cmd-{cmd}>`\n"

        return string
@@ -249,33 +251,27 @@ def body(
            Function body.
        """
        if positionals:
-            return """
+            return f"""
if $list_options
then
-{0}
+{self.optionals(optionals)}
else
-{1}
+{self.positionals(positionals)}
fi
-""".format(
-                self.optionals(optionals), self.positionals(positionals)
-            )
+"""
        elif subcommands:
-            return """
+            return f"""
if $list_options
then
-{0}
+{self.optionals(optionals)}
else
-{1}
+{self.subcommands(subcommands)}
fi
-""".format(
-                self.optionals(optionals), self.subcommands(subcommands)
-            )
+"""
        else:
-            return """
-{0}
-""".format(
-                self.optionals(optionals)
-            )
+            return f"""
+{self.optionals(optionals)}
+"""

    def positionals(self, positionals: Sequence[str]) -> str:
        """Return the syntax for reporting positional arguments.
@@ -304,7 +300,7 @@ def optionals(self, optionals: Sequence[str]) -> str:
        Returns:
            Syntax for optional flags.
        """
-        return 'SPACK_COMPREPLY="{0}"'.format(" ".join(optionals))
+        return f'SPACK_COMPREPLY="{" ".join(optionals)}"'

    def subcommands(self, subcommands: Sequence[str]) -> str:
        """Return the syntax for reporting subcommands.
@@ -315,7 +311,7 @@ def subcommands(self, subcommands: Sequence[str]) -> str:
        Returns:
            Syntax for subcommand parsers
        """
-        return 'SPACK_COMPREPLY="{0}"'.format(" ".join(subcommands))
+        return f'SPACK_COMPREPLY="{" ".join(subcommands)}"'
# Map argument destination names to their complete commands
@@ -395,7 +391,7 @@ def _fish_dest_get_complete(prog: str, dest: str) -> Optional[str]:
    subcmd = s[1] if len(s) == 2 else ""

    for (prog_key, pos_key), value in _dest_to_fish_complete.items():
-        if subcmd.startswith(prog_key) and re.match("^" + pos_key + "$", dest):
+        if subcmd.startswith(prog_key) and re.match(f"^{pos_key}$", dest):
            return value
    return None
@@ -427,24 +423,6 @@ def format(self, cmd: Command) -> str:
            + self.complete(cmd.prog, positionals, optionals, subcommands)
        )

-    def _quote(self, string: str) -> str:
-        """Quote string and escape special characters if necessary.
-
-        Args:
-            string: Input string.
-
-        Returns:
-            Quoted string.
-        """
-        # Goal here is to match fish_indent behavior
-
-        # Strings without spaces (or other special characters) do not need to be escaped
-        if not any([sub in string for sub in [" ", "'", '"']]):
-            return string
-
-        string = string.replace("'", r"\'")
-        return f"'{string}'"

    def optspecs(
        self,
        prog: str,
@@ -463,7 +441,7 @@ def optspecs(
        optspec_var = "__fish_spack_optspecs_" + prog.replace(" ", "_").replace("-", "_")

        if optionals is None:
-            return "set -g %s\n" % optspec_var
+            return f"set -g {optspec_var}\n"

        # Build optspec by iterating over options
        args = []
@@ -490,11 +468,11 @@ def optspecs(
        long = [f[2:] for f in flags if f.startswith("--")]

        while len(short) > 0 and len(long) > 0:
-            arg = "%s/%s%s" % (short.pop(), long.pop(), required)
+            arg = f"{short.pop()}/{long.pop()}{required}"
        while len(short) > 0:
-            arg = "%s/%s" % (short.pop(), required)
+            arg = f"{short.pop()}/{required}"
        while len(long) > 0:
-            arg = "%s%s" % (long.pop(), required)
+            arg = f"{long.pop()}{required}"

        args.append(arg)
@@ -503,7 +481,7 @@ def optspecs(
        # indicate that such subcommand exists.
        args = " ".join(args)

-        return "set -g %s %s\n" % (optspec_var, args)
+        return f"set -g {optspec_var} {args}\n"

    @staticmethod
    def complete_head(
@@ -524,12 +502,14 @@ def complete_head(
        subcmd = s[1] if len(s) == 2 else ""

        if index is None:
-            return "complete -c %s -n '__fish_spack_using_command %s'" % (s[0], subcmd)
+            return f"complete -c {s[0]} -n '__fish_spack_using_command {subcmd}'"
        elif nargs in [argparse.ZERO_OR_MORE, argparse.ONE_OR_MORE, argparse.REMAINDER]:
-            head = "complete -c %s -n '__fish_spack_using_command_pos_remainder %d %s'"
+            return (
+                f"complete -c {s[0]} -n '__fish_spack_using_command_pos_remainder "
+                f"{index} {subcmd}'"
+            )
        else:
-            head = "complete -c %s -n '__fish_spack_using_command_pos %d %s'"
-        return head % (s[0], index, subcmd)
+            return f"complete -c {s[0]} -n '__fish_spack_using_command_pos {index} {subcmd}'"

    def complete(
        self,
@@ -597,25 +577,18 @@ def positionals(

        if choices is not None:
            # If there are choices, we provide a completion for all possible values.
-            commands.append(head + " -f -a %s" % self._quote(" ".join(choices)))
+            commands.append(f"{head} -f -a {shlex.quote(' '.join(choices))}")
        else:
            # Otherwise, we try to find a predefined completion for it
            value = _fish_dest_get_complete(prog, args)
            if value is not None:
-                commands.append(head + " " + value)
+                commands.append(f"{head} {value}")

        return "\n".join(commands) + "\n"

    def prog_comment(self, prog: str) -> str:
-        """Return a comment line for the command.
-
-        Args:
-            prog: Program name.
-
-        Returns:
-            Comment line.
-        """
-        return "\n# %s\n" % prog
+        """Return a comment line for the command."""
+        return f"\n# {prog}\n"

    def optionals(
        self,
@@ -658,28 +631,28 @@ def optionals(
        for f in flags:
            if f.startswith("--"):
                long = f[2:]
-                prefix += " -l %s" % long
+                prefix = f"{prefix} -l {long}"
            elif f.startswith("-"):
                short = f[1:]
                assert len(short) == 1
-                prefix += " -s %s" % short
+                prefix = f"{prefix} -s {short}"

        # Check if option require argument.
        # Currently multi-argument options are not supported, so we treat it like one argument.
        if nargs != 0:
-            prefix += " -r"
+            prefix = f"{prefix} -r"

        if dest is not None:
            # If there are choices, we provide a completion for all possible values.
-            commands.append(prefix + " -f -a %s" % self._quote(" ".join(dest)))
+            commands.append(f"{prefix} -f -a {shlex.quote(' '.join(dest))}")
        else:
            # Otherwise, we try to find a predefined completion for it
            value = _fish_dest_get_complete(prog, dest)
            if value is not None:
-                commands.append(prefix + " " + value)
+                commands.append(f"{prefix} {value}")

        if help:
-            commands.append(prefix + " -d %s" % self._quote(help))
+            commands.append(f"{prefix} -d {shlex.quote(help)}")

        return "\n".join(commands) + "\n"
@@ -697,11 +670,11 @@ def subcommands(self, prog: str, subcommands: List[Tuple[ArgumentParser, str, st
    head = self.complete_head(prog, 0)

    for _, subcommand, help in subcommands:
-        command = head + " -f -a %s" % self._quote(subcommand)
+        command = f"{head} -f -a {shlex.quote(subcommand)}"

        if help is not None and len(help) > 0:
            help = help.split("\n")[0]
-            command += " -d %s" % self._quote(help)
+            command = f"{command} -d {shlex.quote(help)}"

        commands.append(command)
@@ -747,7 +720,7 @@ def rst_index(out: IO) -> None:

    for i, cmd in enumerate(sorted(commands)):
        description = description.capitalize() if i == 0 else ""
-        ref = ":ref:`%s <spack-%s>`" % (cmd, cmd)
+        ref = f":ref:`{cmd} <spack-{cmd}>`"
        comma = "," if i != len(commands) - 1 else ""
        bar = "| " if i % 8 == 0 else " "
        out.write(line % (description, bar + ref + comma))
@@ -858,10 +831,10 @@ def _commands(parser: ArgumentParser, args: Namespace) -> None:

    # check header first so we don't open out files unnecessarily
    if args.header and not os.path.exists(args.header):
-        tty.die("No such file: '%s'" % args.header)
+        tty.die(f"No such file: '{args.header}'")

    if args.update:
-        tty.msg("Updating file: %s" % args.update)
+        tty.msg(f"Updating file: {args.update}")
        with open(args.update, "w") as f:
            prepend_header(args, f)
            formatter(args, f)
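Many of the hunks above drop the hand-rolled _quote helper in favor of shlex.quote, which escapes anything a POSIX shell might interpret. What it produces:

    import shlex

    for value in ["plain", "has space", "don't", 'say "hi"']:
        print(shlex.quote(value))
    # plain
    # 'has space'
    # 'don'"'"'t'
    # 'say "hi"'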
@@ -50,6 +50,7 @@ def setup_parser(subparser):
        default=lambda: spack.config.default_modify_scope("compilers"),
        help="configuration scope to modify",
    )
+    arguments.add_common_arguments(find_parser, ["jobs"])

    # Remove
    remove_parser = sp.add_parser("remove", aliases=["rm"], help="remove compiler by spec")
@@ -78,25 +79,21 @@ def setup_parser(subparser):
def compiler_find(args):
    """Search either $PATH or a list of paths OR MODULES for compilers and
    add them to Spack's configuration.

    """
    # None signals spack.compiler.find_compilers to use its default logic
    paths = args.add_paths or None

-    # Below scope=None because we want new compilers that don't appear
-    # in any other configuration.
-    new_compilers = spack.compilers.find_new_compilers(
-        paths, scope=None, mixed_toolchain=args.mixed_toolchain
+    new_compilers = spack.compilers.find_compilers(
+        path_hints=paths,
+        scope=args.scope,
+        mixed_toolchain=args.mixed_toolchain,
+        max_workers=args.jobs,
    )
    if new_compilers:
-        spack.compilers.add_compilers_to_config(new_compilers, scope=args.scope)
        n = len(new_compilers)
        s = "s" if n > 1 else ""

-        config = spack.config.CONFIG
-        filename = config.get_config_filename(args.scope, "compilers")
-        tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
-        colify(reversed(sorted(c.spec.display_str for c in new_compilers)), indent=4)
+        filename = spack.config.CONFIG.get_config_filename(args.scope, "compilers")
+        tty.msg(f"Added {n:d} new compiler{s} to {filename}")
+        compiler_strs = sorted(f"{c.spec.name}@{c.spec.version}" for c in new_compilers)
+        colify(reversed(compiler_strs), indent=4)
    else:
        tty.msg("Found no new compilers")
    tty.msg("Compilers are defined in the following files:")
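The new find_compilers signature makes everything after path_hints keyword-only, so call sites stay self-documenting. A hypothetical stub showing the calling convention:

    from typing import List, Optional

    def find_compilers(
        path_hints: Optional[List[str]] = None,
        *,
        scope: Optional[str] = None,
        mixed_toolchain: bool = False,
        max_workers: Optional[int] = None,
    ) -> List[str]:
        print(f"searching {path_hints or ['$PATH']} with {max_workers or 1} worker(s)")
        return []

    # Passing scope positionally would raise TypeError; it must be named:
    find_compilers(["/usr/bin"], scope="site", max_workers=4)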
@@ -6,6 +6,7 @@
import re
import sys
import urllib.parse
+from typing import List

import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp
@@ -14,9 +15,15 @@
import spack.stage
import spack.util.web
from spack.spec import Spec
-from spack.url import UndetectableNameError, UndetectableVersionError, parse_name, parse_version
+from spack.url import (
+    UndetectableNameError,
+    UndetectableVersionError,
+    find_versions_of_archive,
+    parse_name,
+    parse_version,
+)
from spack.util.editor import editor
-from spack.util.executable import ProcessError, which
+from spack.util.executable import which
from spack.util.format import get_version_lines
from spack.util.naming import mod_to_class, simplify_name, valid_fully_qualified_module_name
@@ -89,14 +96,20 @@ class BundlePackageTemplate:
    url_def = "    # There is no URL since there is no code to download."
    body_def = "    # There is no need for install() since there is no code."

-    def __init__(self, name, versions):
+    def __init__(self, name: str, versions, languages: List[str]):
        self.name = name
        self.class_name = mod_to_class(name)
        self.versions = versions
+        self.languages = languages

    def write(self, pkg_path):
        """Writes the new package file."""

+        all_deps = [f'    depends_on("{lang}", type="build")' for lang in self.languages]
+        if all_deps and self.dependencies:
+            all_deps.append("")
+        all_deps.append(self.dependencies)
+
        # Write out a template for the file
        with open(pkg_path, "w") as pkg_file:
            pkg_file.write(
@@ -106,7 +119,7 @@ def write(self, pkg_path):
                base_class_name=self.base_class_name,
                url_def=self.url_def,
                versions=self.versions,
-                dependencies=self.dependencies,
+                dependencies="\n".join(all_deps),
                body_def=self.body_def,
            )
        )
@@ -125,8 +138,8 @@ def install(self, spec, prefix):

    url_line = '    url = "{url}"'

-    def __init__(self, name, url, versions):
-        super().__init__(name, versions)
+    def __init__(self, name, url, versions, languages: List[str]):
+        super().__init__(name, versions, languages)

        self.url_def = self.url_line.format(url=url)
@@ -214,13 +227,13 @@ def luarocks_args(self):
        args = []
        return args"""

-    def __init__(self, name, url, *args, **kwargs):
+    def __init__(self, name, url, versions, languages: List[str]):
        # If the user provided `--name lua-lpeg`, don't rename it lua-lua-lpeg
        if not name.startswith("lua-"):
            # Make it more obvious that we are renaming the package
            tty.msg("Changing package name from {0} to lua-{0}".format(name))
            name = "lua-{0}".format(name)
-        super().__init__(name, url, *args, **kwargs)
+        super().__init__(name, url, versions, languages)


class MesonPackageTemplate(PackageTemplate):
@@ -321,14 +334,14 @@ class RacketPackageTemplate(PackageTemplate):
    #     subdirectory = None
    """

-    def __init__(self, name, url, *args, **kwargs):
+    def __init__(self, name, url, versions, languages: List[str]):
        # If the user provided `--name rkt-scribble`, don't rename it rkt-rkt-scribble
        if not name.startswith("rkt-"):
            # Make it more obvious that we are renaming the package
            tty.msg("Changing package name from {0} to rkt-{0}".format(name))
            name = "rkt-{0}".format(name)
        self.body_def = self.body_def.format(name[4:])
-        super().__init__(name, url, *args, **kwargs)
+        super().__init__(name, url, versions, languages)
class PythonPackageTemplate(PackageTemplate):
@@ -361,7 +374,7 @@ def config_settings(self, spec, prefix):
        settings = {}
        return settings"""

-    def __init__(self, name, url, *args, **kwargs):
+    def __init__(self, name, url, versions, languages: List[str]):
        # If the user provided `--name py-numpy`, don't rename it py-py-numpy
        if not name.startswith("py-"):
            # Make it more obvious that we are renaming the package
@@ -415,7 +428,7 @@ def __init__(self, name, url, *args, **kwargs):
            + self.url_line
        )

-        super().__init__(name, url, *args, **kwargs)
+        super().__init__(name, url, versions, languages)


class RPackageTemplate(PackageTemplate):
@@ -434,7 +447,7 @@ def configure_args(self):
        args = []
        return args"""

-    def __init__(self, name, url, *args, **kwargs):
+    def __init__(self, name, url, versions, languages: List[str]):
        # If the user provided `--name r-rcpp`, don't rename it r-r-rcpp
        if not name.startswith("r-"):
            # Make it more obvious that we are renaming the package
@@ -454,7 +467,7 @@ def __init__(self, name, url, *args, **kwargs):
        if bioc:
            self.url_line = '    url = "{0}"\n' '    bioc = "{1}"'.format(url, r_name)

-        super().__init__(name, url, *args, **kwargs)
+        super().__init__(name, url, versions, languages)


class PerlmakePackageTemplate(PackageTemplate):
@@ -474,14 +487,14 @@ def configure_args(self):
        args = []
        return args"""

-    def __init__(self, name, *args, **kwargs):
+    def __init__(self, name, url, versions, languages: List[str]):
        # If the user provided `--name perl-cpp`, don't rename it perl-perl-cpp
        if not name.startswith("perl-"):
            # Make it more obvious that we are renaming the package
            tty.msg("Changing package name from {0} to perl-{0}".format(name))
            name = "perl-{0}".format(name)

-        super().__init__(name, *args, **kwargs)
+        super().__init__(name, url, versions, languages)


class PerlbuildPackageTemplate(PerlmakePackageTemplate):
@@ -506,7 +519,7 @@ class OctavePackageTemplate(PackageTemplate):
    # FIXME: Add additional dependencies if required.
    # depends_on("octave-foo", type=("build", "run"))"""

-    def __init__(self, name, *args, **kwargs):
+    def __init__(self, name, url, versions, languages: List[str]):
        # If the user provided `--name octave-splines`, don't rename it
        # octave-octave-splines
        if not name.startswith("octave-"):
@@ -514,7 +527,7 @@ def __init__(self, name, *args, **kwargs):
            tty.msg("Changing package name from {0} to octave-{0}".format(name))
            name = "octave-{0}".format(name)

-        super().__init__(name, *args, **kwargs)
+        super().__init__(name, url, versions, languages)


class RubyPackageTemplate(PackageTemplate):
@@ -534,7 +547,7 @@ def build(self, spec, prefix):
        # FIXME: If not needed delete this function
        pass"""

-    def __init__(self, name, *args, **kwargs):
+    def __init__(self, name, url, versions, languages: List[str]):
        # If the user provided `--name ruby-numpy`, don't rename it
        # ruby-ruby-numpy
        if not name.startswith("ruby-"):
@@ -542,7 +555,7 @@ def __init__(self, name, *args, **kwargs):
            tty.msg("Changing package name from {0} to ruby-{0}".format(name))
            name = "ruby-{0}".format(name)

-        super().__init__(name, *args, **kwargs)
+        super().__init__(name, url, versions, languages)


class MakefilePackageTemplate(PackageTemplate):
@@ -580,14 +593,14 @@ def configure_args(self, spec, prefix):
        args = []
        return args"""

-    def __init__(self, name, *args, **kwargs):
+    def __init__(self, name, url, versions, languages: List[str]):
        # If the user provided `--name py-pyqt4`, don't rename it py-py-pyqt4
        if not name.startswith("py-"):
            # Make it more obvious that we are renaming the package
            tty.msg("Changing package name from {0} to py-{0}".format(name))
            name = "py-{0}".format(name)

-        super().__init__(name, *args, **kwargs)
+        super().__init__(name, url, versions, languages)
templates = {
@@ -658,8 +671,48 @@ def setup_parser(subparser):
    )


-class BuildSystemGuesser:
-    """An instance of BuildSystemGuesser provides a callable object to be used
+#: C file extensions
+C_EXT = {".c"}
+
+#: C++ file extensions
+CXX_EXT = {
+    ".C",
+    ".c++",
+    ".cc",
+    ".ccm",
+    ".cpp",
+    ".CPP",
+    ".cxx",
+    ".h++",
+    ".hh",
+    ".hpp",
+    ".hxx",
+    ".inl",
+    ".ipp",
+    ".ixx",
+    ".tcc",
+    ".tpp",
+}
+
+#: Fortran file extensions
+FORTRAN_EXT = {
+    ".f77",
+    ".F77",
+    ".f90",
+    ".F90",
+    ".f95",
+    ".F95",
+    ".f",
+    ".F",
+    ".for",
+    ".FOR",
+    ".ftn",
+    ".FTN",
+}
+
+
+class BuildSystemAndLanguageGuesser:
+    """An instance of BuildSystemAndLanguageGuesser provides a callable object to be used
    during ``spack create``. By passing this object to ``spack checksum``, we
    can take a peek at the fetched tarball and discern the build system it uses
    """
@@ -667,81 +720,119 @@ class BuildSystemGuesser:

    def __init__(self):
        """Sets the default build system."""
        self.build_system = "generic"
+        self._c = False
+        self._cxx = False
+        self._fortran = False

-    def __call__(self, stage, url):
+        # List of files in the archive ordered by their depth in the directory tree.
+        self._file_entries: List[str] = []
+
+    def __call__(self, archive: str, url: str) -> None:
        """Try to guess the type of build system used by a project based on
        the contents of its archive or the URL it was downloaded from."""

-        if url is not None:
-            # Most octave extensions are hosted on Octave-Forge:
-            #     https://octave.sourceforge.net/index.html
-            # They all have the same base URL.
-            if "downloads.sourceforge.net/octave/" in url:
-                self.build_system = "octave"
-                return
-            if url.endswith(".gem"):
-                self.build_system = "ruby"
-                return
-            if url.endswith(".whl") or ".whl#" in url:
-                self.build_system = "python"
-                return
-            if url.endswith(".rock"):
-                self.build_system = "lua"
-                return
-
-        # A list of clues that give us an idea of the build system a package
-        # uses. If the regular expression matches a file contained in the
-        # archive, the corresponding build system is assumed.
-        # NOTE: Order is important here. If a package supports multiple
-        # build systems, we choose the first match in this list.
-        clues = [
-            (r"/CMakeLists\.txt$", "cmake"),
-            (r"/NAMESPACE$", "r"),
-            (r"/Cargo\.toml$", "cargo"),
-            (r"/go\.mod$", "go"),
-            (r"/configure$", "autotools"),
-            (r"/configure\.(in|ac)$", "autoreconf"),
-            (r"/Makefile\.am$", "autoreconf"),
-            (r"/pom\.xml$", "maven"),
-            (r"/SConstruct$", "scons"),
-            (r"/waf$", "waf"),
-            (r"/pyproject.toml", "python"),
-            (r"/setup\.(py|cfg)$", "python"),
-            (r"/WORKSPACE$", "bazel"),
-            (r"/Build\.PL$", "perlbuild"),
-            (r"/Makefile\.PL$", "perlmake"),
-            (r"/.*\.gemspec$", "ruby"),
-            (r"/Rakefile$", "ruby"),
-            (r"/setup\.rb$", "ruby"),
-            (r"/.*\.pro$", "qmake"),
-            (r"/.*\.rockspec$", "lua"),
-            (r"/(GNU)?[Mm]akefile$", "makefile"),
-            (r"/DESCRIPTION$", "octave"),
-            (r"/meson\.build$", "meson"),
-            (r"/configure\.py$", "sip"),
-        ]
-
        # Peek inside the compressed file.
-        if stage.archive_file.endswith(".zip") or ".zip#" in stage.archive_file:
+        if archive.endswith(".zip") or ".zip#" in archive:
            try:
                unzip = which("unzip")
-                output = unzip("-lq", stage.archive_file, output=str)
-            except ProcessError:
+                assert unzip is not None
+                output = unzip("-lq", archive, output=str)
+            except Exception:
                output = ""
        else:
            try:
                tar = which("tar")
-                output = tar("--exclude=*/*/*", "-tf", stage.archive_file, output=str)
-            except ProcessError:
+                assert tar is not None
+                output = tar("tf", archive, output=str)
+            except Exception:
                output = ""
-        lines = output.splitlines()
+        self._file_entries[:] = output.splitlines()

-        # Determine the build system based on the files contained
-        # in the archive.
-        for pattern, bs in clues:
-            if any(re.search(pattern, line) for line in lines):
-                self.build_system = bs
-                break
+        # Files closest to the root should be considered first when determining build system.
+        self._file_entries.sort(key=lambda p: p.count("/"))
+
+        self._determine_build_system(url)
+        self._determine_language()
+
+    def _determine_build_system(self, url: str) -> None:
+        # Most octave extensions are hosted on Octave-Forge:
+        #     https://octave.sourceforge.net/index.html
+        # They all have the same base URL.
+        if "downloads.sourceforge.net/octave/" in url:
+            self.build_system = "octave"
+        elif url.endswith(".gem"):
+            self.build_system = "ruby"
+        elif url.endswith(".whl") or ".whl#" in url:
+            self.build_system = "python"
+        elif url.endswith(".rock"):
+            self.build_system = "lua"
+        elif self._file_entries:
+            # A list of clues that give us an idea of the build system a package
+            # uses. If the regular expression matches a file contained in the
+            # archive, the corresponding build system is assumed.
+            # NOTE: Order is important here. If a package supports multiple
+            # build systems, we choose the first match in this list.
+            clues = [
+                (re.compile(pattern), build_system)
+                for pattern, build_system in (
+                    (r"/CMakeLists\.txt$", "cmake"),
+                    (r"/NAMESPACE$", "r"),
+                    (r"/Cargo\.toml$", "cargo"),
+                    (r"/go\.mod$", "go"),
+                    (r"/configure$", "autotools"),
+                    (r"/configure\.(in|ac)$", "autoreconf"),
+                    (r"/Makefile\.am$", "autoreconf"),
+                    (r"/pom\.xml$", "maven"),
+                    (r"/SConstruct$", "scons"),
+                    (r"/waf$", "waf"),
+                    (r"/pyproject.toml", "python"),
+                    (r"/setup\.(py|cfg)$", "python"),
+                    (r"/WORKSPACE$", "bazel"),
+                    (r"/Build\.PL$", "perlbuild"),
+                    (r"/Makefile\.PL$", "perlmake"),
+                    (r"/.*\.gemspec$", "ruby"),
+                    (r"/Rakefile$", "ruby"),
+                    (r"/setup\.rb$", "ruby"),
+                    (r"/.*\.pro$", "qmake"),
+                    (r"/.*\.rockspec$", "lua"),
+                    (r"/(GNU)?[Mm]akefile$", "makefile"),
+                    (r"/DESCRIPTION$", "octave"),
+                    (r"/meson\.build$", "meson"),
+                    (r"/configure\.py$", "sip"),
+                )
+            ]
+
+            # Determine the build system based on the files contained in the archive.
+            for file in self._file_entries:
+                for pattern, build_system in clues:
+                    if pattern.search(file):
+                        self.build_system = build_system
+                        return
+
+    def _determine_language(self):
+        for entry in self._file_entries:
+            _, ext = os.path.splitext(entry)
+
+            if not self._c and ext in C_EXT:
+                self._c = True
+            elif not self._cxx and ext in CXX_EXT:
+                self._cxx = True
+            elif not self._fortran and ext in FORTRAN_EXT:
+                self._fortran = True
+
+            if self._c and self._cxx and self._fortran:
+                return
+
+    @property
+    def languages(self) -> List[str]:
+        langs: List[str] = []
+        if self._c:
+            langs.append("c")
+        if self._cxx:
+            langs.append("cxx")
+        if self._fortran:
+            langs.append("fortran")
+        return langs
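Language detection above is a single pass over the archive listing, mapping extensions to languages and stopping once all three are seen. A condensed, self-contained sketch (extension sets abbreviated):

    import os
    from typing import List

    C_EXT = {".c"}
    CXX_EXT = {".cc", ".cpp", ".cxx", ".hpp"}      # abbreviated
    FORTRAN_EXT = {".f", ".f90", ".F90", ".for"}   # abbreviated

    def guess_languages(file_entries: List[str]) -> List[str]:
        found = set()
        for entry in file_entries:
            _, ext = os.path.splitext(entry)
            if ext in C_EXT:
                found.add("c")
            elif ext in CXX_EXT:
                found.add("cxx")
            elif ext in FORTRAN_EXT:
                found.add("fortran")
            if len(found) == 3:  # nothing left to learn; stop early
                break
        return sorted(found)

    print(guess_languages(["src/main.cpp", "src/util.c", "docs/index.rst"]))  # ['c', 'cxx']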
def get_name(name, url):
@@ -811,7 +902,7 @@ def get_url(url):
def get_versions(args, name):
    """Returns a list of versions and hashes for a package.

-    Also returns a BuildSystemGuesser object.
+    Also returns a BuildSystemAndLanguageGuesser object.

    Returns default values if no URL is provided.

@@ -820,7 +911,7 @@ def get_versions(args, name):
        name (str): The name of the package

    Returns:
-        tuple: versions and hashes, and a BuildSystemGuesser object
+        tuple: versions and hashes, and a BuildSystemAndLanguageGuesser object
    """

    # Default version with hash
@@ -834,7 +925,7 @@ def get_versions(args, name):
    #     version("1.2.4")"""

    # Default guesser
-    guesser = BuildSystemGuesser()
+    guesser = BuildSystemAndLanguageGuesser()

    valid_url = True
    try:
@@ -847,7 +938,7 @@ def get_versions(args, name):
    if args.url is not None and args.template != "bundle" and valid_url:
        # Find available versions
        try:
-            url_dict = spack.url.find_versions_of_archive(args.url)
+            url_dict = find_versions_of_archive(args.url)
            if len(url_dict) > 1 and not args.batch and sys.stdin.isatty():
                url_dict_filtered = spack.stage.interactive_version_filter(url_dict)
                if url_dict_filtered is None:
@@ -874,7 +965,7 @@ def get_versions(args, name):
    return versions, guesser


-def get_build_system(template, url, guesser):
+def get_build_system(template: str, url: str, guesser: BuildSystemAndLanguageGuesser) -> str:
    """Determine the build system template.

    If a template is specified, always use that. Otherwise, if a URL
@@ -882,11 +973,10 @@ def get_build_system(template, url, guesser):
    build system it uses. Otherwise, use a generic template by default.

    Args:
-        template (str): ``--template`` argument given to ``spack create``
-        url (str): ``url`` argument given to ``spack create``
-        args (argparse.Namespace): The arguments given to ``spack create``
-        guesser (BuildSystemGuesser): The first_stage_function given to
-            ``spack checksum`` which records the build system it detects
+        template: ``--template`` argument given to ``spack create``
+        url: ``url`` argument given to ``spack create``
+        guesser: The first_stage_function given to ``spack checksum`` which records the build
+            system it detects

    Returns:
        str: The name of the build system template to use
@@ -960,7 +1050,7 @@ def create(parser, args):
    build_system = get_build_system(args.template, url, guesser)

    # Create the package template object
-    constr_args = {"name": name, "versions": versions}
+    constr_args = {"name": name, "versions": versions, "languages": guesser.languages}
    package_class = templates[build_system]
    if package_class != BundlePackageTemplate:
        constr_args["url"] = url
@@ -14,7 +14,6 @@
installation and its deprecator.
"""
import argparse
-import os

import llnl.util.tty as tty
from llnl.util.symlink import symlink
@@ -76,12 +75,7 @@ def setup_parser(sp):
    )

    sp.add_argument(
-        "-l",
-        "--link-type",
-        type=str,
-        default="soft",
-        choices=["soft", "hard"],
-        help="type of filesystem link to use for deprecation (default soft)",
+        "-l", "--link-type", type=str, default=None, choices=["soft", "hard"], help="(deprecated)"
    )

    sp.add_argument(
@@ -91,6 +85,9 @@ def setup_parser(sp):

def deprecate(parser, args):
    """Deprecate one spec in favor of another"""
+    if args.link_type is not None:
+        tty.warn("The --link-type option is deprecated and will be removed in a future release.")
+
    env = ev.active_environment()
    specs = spack.cmd.parse_specs(args.specs)
@@ -144,7 +141,5 @@ def deprecate(parser, args):
    if not answer:
        tty.die("Will not deprecate any packages.")

-    link_fn = os.link if args.link_type == "hard" else symlink
-
    for dcate, dcator in zip(all_deprecate, all_deprecators):
-        dcate.package.do_deprecate(dcator, link_fn)
+        dcate.package.do_deprecate(dcator, symlink)
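Keeping --link-type as an accepted but warned-about no-op is a standard CLI-compatibility move: a None default distinguishes "not given" from any explicit value. A minimal argparse sketch:

    import argparse

    parser = argparse.ArgumentParser(prog="deprecate")
    parser.add_argument(
        "-l", "--link-type", default=None, choices=["soft", "hard"], help="(deprecated)"
    )
    args = parser.parse_args(["--link-type", "hard"])
    if args.link_type is not None:
        print("warning: --link-type is deprecated and will be removed in a future release")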
@@ -10,8 +10,10 @@
import spack.cmd
import spack.config
import spack.fetch_strategy
import spack.package_base
import spack.repo
import spack.spec
import spack.stage
import spack.util.path
import spack.version
from spack.cmd.common import arguments
@@ -62,7 +64,7 @@ def change_fn(section):
    spack.config.change_or_add("develop", find_fn, change_fn)


-def _retrieve_develop_source(spec, abspath):
+def _retrieve_develop_source(spec: spack.spec.Spec, abspath: str) -> None:
    # "steal" the source code via staging API. We ask for a stage
    # to be created, then copy it afterwards somewhere else. It would be
    # better if we can create the `source_path` directly into its final
@@ -71,13 +73,13 @@ def _retrieve_develop_source(spec, abspath):
    # We construct a package class ourselves, rather than asking for
    # Spec.package, since Spec only allows this when it is concrete
    package = pkg_cls(spec)
-    source_stage = package.stage[0]
+    source_stage: spack.stage.Stage = package.stage[0]
    if isinstance(source_stage.fetcher, spack.fetch_strategy.GitFetchStrategy):
        source_stage.fetcher.get_full_repo = True
        # If we retrieved this version before and cached it, we may have
        # done so without cloning the full git repo; likewise, any
        # mirror might store an instance with truncated history.
-        source_stage.disable_mirrors()
+        source_stage.default_fetcher_only = True

    source_stage.fetcher.set_package(package)
    package.stage.steal_source(abspath)
@@ -468,32 +468,30 @@ def env_remove(args):
    This removes an environment managed by Spack. Directory environments
    and manifests embedded in repositories should be removed manually.
    """
-    read_envs = []
+    remove_envs = []
    valid_envs = []
    bad_envs = []
-    invalid_envs = []

    for env_name in ev.all_environment_names():
        try:
            env = ev.read(env_name)
-            valid_envs.append(env_name)
+            valid_envs.append(env)

            if env_name in args.rm_env:
-                read_envs.append(env)
+                remove_envs.append(env)
        except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
-            invalid_envs.append(env_name)

            if env_name in args.rm_env:
                bad_envs.append(env_name)

-    # Check if env is linked to another before trying to remove
-    for name in valid_envs:
+    # Check if remove_env is included from another env before trying to remove
+    for env in valid_envs:
+        for remove_env in remove_envs:
            # don't check if environment is included to itself
-            if name == env_name:
+            if env.name == remove_env.name:
                continue
-            environ = ev.Environment(ev.root(name))
-            if ev.root(env_name) in environ.included_concrete_envs:
-                msg = f'Environment "{env_name}" is being used by environment "{name}"'
+
+            if remove_env.path in env.included_concrete_envs:
+                msg = f'Environment "{remove_env.name}" is being used by environment "{env.name}"'
                if args.force:
                    tty.warn(msg)
                else:
@@ -506,7 +504,7 @@ def env_remove(args):
    if not answer:
        tty.die("Will not remove any environments")

-    for env in read_envs:
+    for env in remove_envs:
        name = env.name
        if env.active:
            tty.die(f"Environment {name} can't be removed while activated.")
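The reworked env_remove checks inclusion between whole environment objects instead of re-reading names. The core of that check, reduced to plain data (a hypothetical mapping of environments to the environments they include):

    from typing import Dict, List

    included: Dict[str, List[str]] = {"dev": ["base"], "ci": [], "base": []}
    remove = ["base"]

    for env, deps in included.items():
        for candidate in remove:
            if env == candidate:
                continue  # an environment trivially includes itself
            if candidate in deps:
                print(f'Environment "{candidate}" is being used by environment "{env}"')
    # Environment "base" is being used by environment "dev"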
@@ -224,7 +224,7 @@ def gpg_publish(args):
    mirror = spack.mirror.Mirror(args.mirror_url, args.mirror_url)

    with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
-        spack.binary_distribution.push_keys(
+        spack.binary_distribution._url_push_keys(
            mirror, keys=args.keys, tmpdir=tmpdir, update_index=args.update_index
        )
@@ -502,7 +502,7 @@ def print_licenses(pkg, args):

def info(parser, args):
    spec = spack.spec.Spec(args.package)
-    pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
+    pkg_cls = spack.repo.PATH.get_pkg_class(spec.fullname)
    pkg = pkg_cls(spec)

    # Output core package information
@@ -23,11 +23,6 @@ def setup_parser(subparser):
    output.add_argument(
        "-s", "--safe", action="store_true", help="only list safe versions of the package"
    )
-    output.add_argument(
-        "--safe-only",
-        action="store_true",
-        help="[deprecated] only list safe versions of the package",
-    )
    output.add_argument(
        "-r", "--remote", action="store_true", help="only list remote versions of the package"
    )
@@ -47,17 +42,13 @@ def versions(parser, args):

    safe_versions = pkg.versions

-    if args.safe_only:
-        tty.warn('"--safe-only" is deprecated. Use "--safe" instead.')
-        args.safe = args.safe_only
-
    if not (args.remote or args.new):
        if sys.stdout.isatty():
            tty.msg("Safe versions (already checksummed):")

        if not safe_versions:
            if sys.stdout.isatty():
-                tty.warn("Found no versions for {0}".format(pkg.name))
+                tty.warn(f"Found no versions for {pkg.name}")
                tty.debug("Manually add versions to the package.")
        else:
            colify(sorted(safe_versions, reverse=True), indent=2)
@@ -83,12 +74,12 @@ def versions(parser, args):
    if not remote_versions:
        if sys.stdout.isatty():
            if not fetched_versions:
-                tty.warn("Found no versions for {0}".format(pkg.name))
+                tty.warn(f"Found no versions for {pkg.name}")
                tty.debug(
                    "Check the list_url and list_depth attributes of "
                    "the package to help Spack find versions."
                )
            else:
-                tty.warn("Found no unchecksummed versions for {0}".format(pkg.name))
+                tty.warn(f"Found no unchecksummed versions for {pkg.name}")
    else:
        colify(sorted(remote_versions, reverse=True), indent=2)
@@ -29,6 +29,9 @@

__all__ = ["Compiler"]

+PATH_INSTANCE_VARS = ["cc", "cxx", "f77", "fc"]
+FLAG_INSTANCE_VARS = ["cflags", "cppflags", "cxxflags", "fflags"]
+

@llnl.util.lang.memoized
def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
@@ -700,6 +703,30 @@ def compiler_environment(self):
            os.environ.clear()
            os.environ.update(backup_env)

+    def to_dict(self):
+        flags_dict = {fname: " ".join(fvals) for fname, fvals in self.flags.items()}
+        flags_dict.update(
+            {attr: getattr(self, attr, None) for attr in FLAG_INSTANCE_VARS if hasattr(self, attr)}
+        )
+        result = {
+            "spec": str(self.spec),
+            "paths": {attr: getattr(self, attr, None) for attr in PATH_INSTANCE_VARS},
+            "flags": flags_dict,
+            "operating_system": str(self.operating_system),
+            "target": str(self.target),
+            "modules": self.modules or [],
+            "environment": self.environment or {},
+            "extra_rpaths": self.extra_rpaths or [],
+        }
+
+        if self.enable_implicit_rpaths is not None:
+            result["implicit_rpaths"] = self.enable_implicit_rpaths
+
+        if self.alias:
+            result["alias"] = self.alias
+
+        return result
+

class CompilerAccessError(spack.error.SpackError):
    def __init__(self, compiler, paths):
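Moving serialization onto the class replaces the module-level _to_dict seen in the next file. A reduced stand-in (MiniCompiler is hypothetical) with the same output shape:

    from typing import Any, Dict, List, Optional

    PATH_INSTANCE_VARS = ["cc", "cxx", "f77", "fc"]

    class MiniCompiler:
        def __init__(self, spec: str, paths: Dict[str, str],
                     modules: Optional[List[str]] = None) -> None:
            self.spec = spec
            for name in PATH_INSTANCE_VARS:
                setattr(self, name, paths.get(name))
            self.modules = modules

        def to_dict(self) -> Dict[str, Any]:
            # Missing paths still appear as explicit None entries,
            # matching getattr(self, attr, None) in the real method.
            return {
                "spec": self.spec,
                "paths": {attr: getattr(self, attr, None) for attr in PATH_INSTANCE_VARS},
                "modules": self.modules or [],
            }

    print(MiniCompiler("gcc@12.3.0", {"cc": "/usr/bin/gcc", "cxx": "/usr/bin/g++"}).to_dict())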
@@ -6,12 +6,11 @@
"""This module contains functions related to finding compilers on the
system and configuring Spack to use multiple compilers.
"""
import collections
-import itertools
-import multiprocessing.pool
+import importlib
import os
import sys
import warnings
-from typing import Dict, List, Optional, Tuple
+from typing import Dict, List, Optional

import archspec.cpu

@@ -22,16 +21,15 @@
import spack.compiler
import spack.config
import spack.error
+import spack.operating_systems
import spack.paths
import spack.platforms
import spack.repo
import spack.spec
import spack.version
-from spack.operating_systems import windows_os
from spack.util.environment import get_path
from spack.util.naming import mod_to_class

-_path_instance_vars = ["cc", "cxx", "f77", "fc"]
-_flags_instance_vars = ["cflags", "cppflags", "cxxflags", "fflags"]
_other_instance_vars = [
    "modules",
    "operating_system",
@@ -63,6 +61,10 @@
}


+#: Tag used to identify packages providing a compiler
+COMPILER_TAG = "compiler"
+
+
def pkg_spec_for_compiler(cspec):
    """Return the spec of the package that provides the compiler."""
    for spec, package in _compiler_to_pkg.items():
@@ -85,29 +87,7 @@ def converter(cspec_like, *args, **kwargs):

def _to_dict(compiler):
    """Return a dict version of compiler suitable to insert in YAML."""
-    d = {}
-    d["spec"] = str(compiler.spec)
-    d["paths"] = dict((attr, getattr(compiler, attr, None)) for attr in _path_instance_vars)
-    d["flags"] = dict((fname, " ".join(fvals)) for fname, fvals in compiler.flags.items())
-    d["flags"].update(
-        dict(
-            (attr, getattr(compiler, attr, None))
-            for attr in _flags_instance_vars
-            if hasattr(compiler, attr)
-        )
-    )
-    d["operating_system"] = str(compiler.operating_system)
-    d["target"] = str(compiler.target)
-    d["modules"] = compiler.modules or []
-    d["environment"] = compiler.environment or {}
-    d["extra_rpaths"] = compiler.extra_rpaths or []
-    if compiler.enable_implicit_rpaths is not None:
-        d["implicit_rpaths"] = compiler.enable_implicit_rpaths
-
-    if compiler.alias:
-        d["alias"] = compiler.alias
-
-    return {"compiler": d}
+    return {"compiler": compiler.to_dict()}
def get_compiler_config(
@@ -127,7 +107,7 @@
        # Do not init config because there is a non-empty scope
        return config

-    _init_compiler_config(configuration, scope=scope)
+    find_compilers(scope=scope)
    config = configuration.get("compilers", scope=scope)
    return config
@@ -136,125 +116,8 @@ def get_compiler_config_from_packages(
    configuration: "spack.config.Configuration", *, scope: Optional[str] = None
) -> List[Dict]:
    """Return the compiler configuration from packages.yaml"""
-    config = configuration.get("packages", scope=scope)
-    if not config:
-        return []
-
-    packages = []
-    compiler_package_names = supported_compilers() + list(package_name_to_compiler_name.keys())
-    for name, entry in config.items():
-        if name not in compiler_package_names:
-            continue
-        externals_config = entry.get("externals", None)
-        if not externals_config:
-            continue
-        packages.extend(_compiler_config_from_package_config(externals_config))
-
-    return packages
-
-
-def _compiler_config_from_package_config(config):
-    compilers = []
-    for entry in config:
-        compiler = _compiler_config_from_external(entry)
-        if compiler:
-            compilers.append(compiler)
-
-    return compilers
-
-
-def _compiler_config_from_external(config):
-    extra_attributes_key = "extra_attributes"
-    compilers_key = "compilers"
-    c_key, cxx_key, fortran_key = "c", "cxx", "fortran"
-
-    # Allow `@x.y.z` instead of `@=x.y.z`
-    spec = spack.spec.parse_with_version_concrete(config["spec"])
-
-    compiler_spec = spack.spec.CompilerSpec(
-        package_name_to_compiler_name.get(spec.name, spec.name), spec.version
-    )
-
-    err_header = f"The external spec '{spec}' cannot be used as a compiler"
-
-    # If extra_attributes is not there I might not want to use this entry as a compiler,
-    # therefore just leave a debug message, but don't be loud with a warning.
-    if extra_attributes_key not in config:
-        tty.debug(f"[{__file__}] {err_header}: missing the '{extra_attributes_key}' key")
-        return None
-    extra_attributes = config[extra_attributes_key]
-
-    # If I have 'extra_attributes' warn if 'compilers' is missing, or we don't have a C compiler
-    if compilers_key not in extra_attributes:
-        warnings.warn(
-            f"{err_header}: missing the '{compilers_key}' key under '{extra_attributes_key}'"
-        )
-        return None
-    attribute_compilers = extra_attributes[compilers_key]
-
-    if c_key not in attribute_compilers:
-        warnings.warn(
-            f"{err_header}: missing the C compiler path under "
-            f"'{extra_attributes_key}:{compilers_key}'"
-        )
-        return None
-    c_compiler = attribute_compilers[c_key]
-
-    # C++ and Fortran compilers are not mandatory, so let's just leave a debug trace
-    if cxx_key not in attribute_compilers:
-        tty.debug(f"[{__file__}] The external spec {spec} does not have a C++ compiler")
-
-    if fortran_key not in attribute_compilers:
-        tty.debug(f"[{__file__}] The external spec {spec} does not have a Fortran compiler")
-
-    # compilers format has cc/fc/f77, externals format has "c/fortran"
-    paths = {
-        "cc": c_compiler,
-        "cxx": attribute_compilers.get(cxx_key, None),
-        "fc": attribute_compilers.get(fortran_key, None),
-        "f77": attribute_compilers.get(fortran_key, None),
-    }
-
-    if not spec.architecture:
-        host_platform = spack.platforms.host()
-        operating_system = host_platform.operating_system("default_os")
-        target = host_platform.target("default_target").microarchitecture
-    else:
-        target = spec.architecture.target
-        if not target:
-            target = spack.platforms.host().target("default_target")
-        target = target.microarchitecture
-
-        operating_system = spec.os
-        if not operating_system:
-            host_platform = spack.platforms.host()
-            operating_system = host_platform.operating_system("default_os")
-
-    compiler_entry = {
-        "compiler": {
-            "spec": str(compiler_spec),
-            "paths": paths,
-            "flags": extra_attributes.get("flags", {}),
-            "operating_system": str(operating_system),
-            "target": str(target.family),
-            "modules": config.get("modules", []),
-            "environment": extra_attributes.get("environment", {}),
-            "extra_rpaths": extra_attributes.get("extra_rpaths", []),
-            "implicit_rpaths": extra_attributes.get("implicit_rpaths", None),
-        }
-    }
-    return compiler_entry
-
-
-def _init_compiler_config(
-    configuration: "spack.config.Configuration", *, scope: Optional[str]
-) -> None:
-    """Compiler search used when Spack has no compilers."""
-    compilers = find_compilers()
-    compilers_dict = []
-    for compiler in compilers:
-        compilers_dict.append(_to_dict(compiler))
-    configuration.set("compilers", compilers_dict, scope=scope)
+    packages_yaml = configuration.get("packages", scope=scope)
+    return CompilerConfigFactory.from_packages_yaml(packages_yaml)
def compiler_config_files():
|
||||
@@ -278,9 +141,7 @@ def add_compilers_to_config(compilers, scope=None):
|
||||
compilers: a list of Compiler objects.
|
||||
scope: configuration scope to modify.
|
||||
"""
|
||||
compiler_config = get_compiler_config(
|
||||
configuration=spack.config.CONFIG, scope=scope, init_config=False
|
||||
)
|
||||
compiler_config = get_compiler_config(configuration=spack.config.CONFIG, scope=scope)
|
||||
for compiler in compilers:
|
||||
if not compiler.cc:
|
||||
tty.debug(f"{compiler.spec} does not have a C compiler")
|
||||
@@ -329,9 +190,7 @@ def _remove_compiler_from_scope(compiler_spec, scope):
|
||||
True if one or more compiler entries were actually removed, False otherwise
|
||||
"""
|
||||
assert scope is not None, "a specific scope is needed when calling this function"
|
||||
compiler_config = get_compiler_config(
|
||||
configuration=spack.config.CONFIG, scope=scope, init_config=False
|
||||
)
|
||||
compiler_config = get_compiler_config(configuration=spack.config.CONFIG, scope=scope)
|
||||
filtered_compiler_config = [
|
||||
compiler_entry
|
||||
for compiler_entry in compiler_config
|
||||
@@ -380,79 +239,77 @@ def all_compiler_specs(scope=None, init_config=True):
|
||||
|
||||
|
||||
def find_compilers(
    path_hints: Optional[List[str]] = None, *, mixed_toolchain=False
    path_hints: Optional[List[str]] = None,
    *,
    scope: Optional[str] = None,
    mixed_toolchain: bool = False,
    max_workers: Optional[int] = None,
) -> List["spack.compiler.Compiler"]:
    """Return the list of compilers found in the paths given as arguments.
    """Searches for compilers in the paths given as arguments. If any new compiler is found, the
    configuration is updated, and the list of new compiler objects is returned.

    Args:
        path_hints: list of path hints where to look for. A sensible default based on the ``PATH``
            environment variable will be used if the value is None
        scope: configuration scope to modify
        mixed_toolchain: allow mixing compilers from different toolchains if otherwise missing for
            a certain language
        max_workers: number of processes used to search for compilers
    """
    import spack.detection

    known_compilers = set(all_compilers(init_config=False))

    if path_hints is None:
        path_hints = get_path("PATH")
    default_paths = fs.search_paths_for_executables(*path_hints)
    if sys.platform == "win32":
        default_paths.extend(windows_os.WindowsOs().compiler_search_paths)
    compiler_pkgs = spack.repo.PATH.packages_with_tags(COMPILER_TAG, full=True)

    # To detect the version of the compilers, we dispatch a certain number
    # of function calls to different workers. Here we construct the list
    # of arguments for each call.
    arguments = []
    for o in all_os_classes():
        search_paths = getattr(o, "compiler_search_paths", default_paths)
        arguments.extend(arguments_to_detect_version_fn(o, search_paths))

    # Here we map the function arguments to the corresponding calls
    tp = multiprocessing.pool.ThreadPool()
    try:
        detected_versions = tp.map(detect_version, arguments)
    finally:
        tp.close()

    def valid_version(item: Tuple[Optional[DetectVersionArgs], Optional[str]]) -> bool:
        value, error = item
        if error is None:
            return True
        try:
            # This will fail on Python 2.6 if a non ascii
            # character is in the error
            tty.debug(error)
        except UnicodeEncodeError:
            pass
        return False

    def remove_errors(
        item: Tuple[Optional[DetectVersionArgs], Optional[str]]
    ) -> DetectVersionArgs:
        value, _ = item
        assert value is not None
        return value

    return make_compiler_list(
        [remove_errors(detected) for detected in detected_versions if valid_version(detected)],
        mixed_toolchain=mixed_toolchain,
    detected_packages = spack.detection.by_path(
        compiler_pkgs, path_hints=default_paths, max_workers=max_workers
    )

    valid_compilers = {}
    for name, detected in detected_packages.items():
        compilers = [x for x in detected if CompilerConfigFactory.from_external_spec(x)]
        if not compilers:
            continue
        valid_compilers[name] = compilers

def find_new_compilers(
    path_hints: Optional[List[str]] = None,
    scope: Optional[str] = None,
    *,
    mixed_toolchain: bool = False,
):
    """Same as ``find_compilers`` but return only the compilers that are not
    already in compilers.yaml.
    def _has_fortran_compilers(x):
        if "compilers" not in x.extra_attributes:
            return False

    Args:
        path_hints: list of path hints where to look for. A sensible default based on the ``PATH``
            environment variable will be used if the value is None
        scope: scope to look for a compiler. If None consider the merged configuration.
        mixed_toolchain: allow mixing compilers from different toolchains if otherwise missing for
            a certain language
    """
    compilers = find_compilers(path_hints, mixed_toolchain=mixed_toolchain)
        return "fortran" in x.extra_attributes["compilers"]

    return select_new_compilers(compilers, scope)
    if mixed_toolchain:
        gccs = [x for x in valid_compilers.get("gcc", []) if _has_fortran_compilers(x)]
        if gccs:
            best_gcc = sorted(
                gccs, key=lambda x: spack.spec.parse_with_version_concrete(x).version
            )[-1]
            gfortran = best_gcc.extra_attributes["compilers"]["fortran"]
            for name in ("llvm", "apple-clang"):
                if name not in valid_compilers:
                    continue
                candidates = valid_compilers[name]
                for candidate in candidates:
                    if _has_fortran_compilers(candidate):
                        continue
                    candidate.extra_attributes["compilers"]["fortran"] = gfortran

    new_compilers = []
    for name, detected in valid_compilers.items():
        for config in CompilerConfigFactory.from_specs(detected):
            c = _compiler_from_config_entry(config["compiler"])
            if c in known_compilers:
                continue
            new_compilers.append(c)

    add_compilers_to_config(new_compilers, scope=scope)
    return new_compilers

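A minimal sketch of how the reworked entry point might be driven from Python, assuming it runs against a Spack checkout; the `scope` and `max_workers` keyword arguments are the ones introduced above, and the printed attributes (`spec`, `cc`, `fc`) exist on `spack.compiler.Compiler`:

# Hypothetical driver for the new detection API; not part of the diff itself.
import spack.compilers

# Search PATH (the default), record any newly found compilers in the user
# scope, and fan the per-executable version checks out over four workers.
new = spack.compilers.find_compilers(scope="user", max_workers=4)
for compiler in new:
    print(compiler.spec, compiler.cc, compiler.fc)
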
def select_new_compilers(compilers, scope=None):
@@ -462,7 +319,9 @@ def select_new_compilers(compilers, scope=None):
    compilers_not_in_config = []
    for c in compilers:
        arch_spec = spack.spec.ArchSpec((None, c.operating_system, c.target))
        same_specs = compilers_for_spec(c.spec, arch_spec, scope=scope, init_config=False)
        same_specs = compilers_for_spec(
            c.spec, arch_spec=arch_spec, scope=scope, init_config=False
        )
        if not same_specs:
            compilers_not_in_config.append(c)

@@ -510,8 +369,9 @@ def replace_apple_clang(name):
    return [replace_apple_clang(name) for name in all_compiler_module_names()]


@llnl.util.lang.memoized
def all_compiler_module_names() -> List[str]:
    return [name for name in llnl.util.lang.list_modules(spack.paths.compilers_path)]
    return list(llnl.util.lang.list_modules(spack.paths.compilers_path))


@_auto_compiler_spec
@@ -531,7 +391,12 @@ def find(compiler_spec, scope=None, init_config=True):
def find_specs_by_arch(compiler_spec, arch_spec, scope=None, init_config=True):
    """Return specs of available compilers that match the supplied
    compiler spec. Return an empty list if nothing found."""
    return [c.spec for c in compilers_for_spec(compiler_spec, arch_spec, scope, True, init_config)]
    return [
        c.spec
        for c in compilers_for_spec(
            compiler_spec, arch_spec=arch_spec, scope=scope, init_config=init_config
        )
    ]


def all_compilers(scope=None, init_config=True):
@@ -553,14 +418,11 @@ def all_compilers_from(configuration, scope=None, init_config=True):


@_auto_compiler_spec
def compilers_for_spec(
    compiler_spec, arch_spec=None, scope=None, use_cache=True, init_config=True
):
def compilers_for_spec(compiler_spec, *, arch_spec=None, scope=None, init_config=True):
    """This gets all compilers that satisfy the supplied CompilerSpec.
    Returns an empty list if none are found.
    """
    config = all_compilers_config(spack.config.CONFIG, scope=scope, init_config=init_config)

    matches = set(find(compiler_spec, scope, init_config))
    compilers = []
    for cspec in matches:
@@ -569,7 +431,7 @@ def compilers_for_spec(


def compilers_for_arch(arch_spec, scope=None):
    config = all_compilers_config(spack.config.CONFIG, scope=scope)
    config = all_compilers_config(spack.config.CONFIG, scope=scope, init_config=False)
    return list(get_compilers(config, arch_spec=arch_spec))


@@ -601,13 +463,15 @@ def compiler_from_dict(items):
    os = items.get("operating_system", None)
    target = items.get("target", None)

    if not ("paths" in items and all(n in items["paths"] for n in _path_instance_vars)):
    if not (
        "paths" in items and all(n in items["paths"] for n in spack.compiler.PATH_INSTANCE_VARS)
    ):
        raise InvalidCompilerConfigurationError(cspec)

    cls = class_for_compiler_name(cspec.name)

    compiler_paths = []
    for c in _path_instance_vars:
    for c in spack.compiler.PATH_INSTANCE_VARS:
        compiler_path = items["paths"][c]
        if compiler_path != "None":
            compiler_paths.append(compiler_path)
@@ -735,24 +599,6 @@ def compiler_for_spec(compiler_spec, arch_spec):
    return compilers[0]


@_auto_compiler_spec
def get_compiler_duplicates(compiler_spec, arch_spec):
    config = spack.config.CONFIG

    scope_to_compilers = {}
    for scope in config.scopes:
        compilers = compilers_for_spec(compiler_spec, arch_spec=arch_spec, scope=scope)
        if compilers:
            scope_to_compilers[scope] = compilers

    cfg_file_to_duplicates = {}
    for scope, compilers in scope_to_compilers.items():
        config_file = config.get_config_filename(scope, "compilers")
        cfg_file_to_duplicates[config_file] = compilers

    return cfg_file_to_duplicates


@llnl.util.lang.memoized
def class_for_compiler_name(compiler_name):
    """Given a compiler module name, get the corresponding Compiler class."""
@@ -766,7 +612,7 @@ def class_for_compiler_name(compiler_name):
    submodule_name = compiler_name.replace("-", "_")

    module_name = ".".join(["spack", "compilers", submodule_name])
    module_obj = __import__(module_name, fromlist=[None])
    module_obj = importlib.import_module(module_name)
    cls = getattr(module_obj, mod_to_class(compiler_name))

    # make a note of the name in the module so we can get to it easily.
@@ -775,272 +621,10 @@ def class_for_compiler_name(compiler_name):
    return cls


def all_os_classes():
    """
    Return the list of classes for all operating systems available on
    this platform
    """
    classes = []

    platform = spack.platforms.host()
    for os_class in platform.operating_sys.values():
        classes.append(os_class)

    return classes


def all_compiler_types():
    return [class_for_compiler_name(c) for c in supported_compilers()]


#: Gathers the attribute values by which a detected compiler is considered
#: unique in Spack.
#:
#: - os: the operating system
#: - compiler_name: the name of the compiler (e.g. 'gcc', 'clang', etc.)
#: - version: the version of the compiler
#:
CompilerID = collections.namedtuple("CompilerID", ["os", "compiler_name", "version"])

#: Variations on a matched compiler name
NameVariation = collections.namedtuple("NameVariation", ["prefix", "suffix"])

#: Groups together the arguments needed by `detect_version`. The four entries
#: in the tuple are:
#:
#: - id: An instance of the CompilerID named tuple (version can be set to None
#:   as it will be detected later)
#: - variation: a NameVariation for file being tested
#: - language: compiler language being tested (one of 'cc', 'cxx', 'fc', 'f77')
#: - path: full path to the executable being tested
#:
DetectVersionArgs = collections.namedtuple(
    "DetectVersionArgs", ["id", "variation", "language", "path"]
)


def arguments_to_detect_version_fn(
    operating_system: spack.operating_systems.OperatingSystem, paths: List[str]
) -> List[DetectVersionArgs]:
    """Returns a list of DetectVersionArgs tuples to be used in a
    corresponding function to detect compiler versions.

    The ``operating_system`` instance can customize the behavior of this
    function by providing a method called with the same name.

    Args:
        operating_system: the operating system on which we are looking for compilers
        paths: paths to search for compilers

    Returns:
        List of DetectVersionArgs tuples. Each item in the list will be later
        mapped to the corresponding function call to detect the version of the
        compilers in this OS.
    """

    def _default(search_paths: List[str]) -> List[DetectVersionArgs]:
        command_arguments: List[DetectVersionArgs] = []
        files_to_be_tested = fs.files_in(*search_paths)
        for compiler_name in supported_compilers_for_host_platform():
            compiler_cls = class_for_compiler_name(compiler_name)

            for language in ("cc", "cxx", "f77", "fc"):
                # Select only the files matching a regexp
                for (file, full_path), regexp in itertools.product(
                    files_to_be_tested, compiler_cls.search_regexps(language)
                ):
                    match = regexp.match(file)
                    if match:
                        compiler_id = CompilerID(operating_system, compiler_name, None)
                        detect_version_args = DetectVersionArgs(
                            id=compiler_id,
                            variation=NameVariation(*match.groups()),
                            language=language,
                            path=full_path,
                        )
                        command_arguments.append(detect_version_args)

        return command_arguments

    fn = getattr(operating_system, "arguments_to_detect_version_fn", _default)
    return fn(paths)


def detect_version(
    detect_version_args: DetectVersionArgs,
) -> Tuple[Optional[DetectVersionArgs], Optional[str]]:
    """Computes the version of a compiler and adds it to the information
    passed as input.

    As this function is meant to be executed by worker processes it won't
    raise any exception but instead will return a (value, error) tuple that
    needs to be checked by the code dispatching the calls.

    Args:
        detect_version_args: information on the compiler for which we should detect the version.

    Returns:
        A ``(DetectVersionArgs, error)`` tuple. If ``error`` is ``None`` the
        version of the compiler was computed correctly and the first argument
        of the tuple will contain it. Otherwise ``error`` is a string
        containing an explanation on why the version couldn't be computed.
    """

    def _default(fn_args):
        compiler_id = fn_args.id
        language = fn_args.language
        compiler_cls = class_for_compiler_name(compiler_id.compiler_name)
        path = fn_args.path

        # Get compiler names and the callback to detect their versions
        callback = getattr(compiler_cls, f"{language}_version")

        try:
            version = callback(path)
            if version and str(version).strip() and version != "unknown":
                value = fn_args._replace(id=compiler_id._replace(version=version))
                return value, None

            error = f"Couldn't get version for compiler {path}"
        except spack.util.executable.ProcessError as e:
            error = f"Couldn't get version for compiler {path}\n" + str(e)
        except spack.util.executable.ProcessTimeoutError as e:
            error = f"Couldn't get version for compiler {path}\n" + str(e)
        except Exception as e:
            # Catching "Exception" here is fine because it just
            # means something went wrong running a candidate executable.
            error = "Error while executing candidate compiler {0}" "\n{1}: {2}".format(
                path, e.__class__.__name__, str(e)
            )
        return None, error

    operating_system = detect_version_args.id.os
    fn = getattr(operating_system, "detect_version", _default)
    return fn(detect_version_args)

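Since ``detect_version`` never raises, the dispatching code has to split successes from failures itself. A minimal sketch of that loop, mirroring what ``find_compilers`` does with ``valid_version``/``remove_errors`` above; ``candidates`` is a hypothetical list of ``DetectVersionArgs`` built by ``arguments_to_detect_version_fn``:

# Hypothetical consumer of detect_version's (value, error) protocol.
results = [detect_version(args) for args in candidates]

detected = [value for value, error in results if error is None]
for _, error in results:
    if error is not None:
        print(f"skipped: {error}")  # the real code logs these via tty.debug
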
def make_compiler_list(
    detected_versions: List[DetectVersionArgs], mixed_toolchain: bool = False
) -> List["spack.compiler.Compiler"]:
    """Process a list of detected versions and turn them into a list of
    compiler specs.

    Args:
        detected_versions: list of DetectVersionArgs containing a valid version
        mixed_toolchain: allow mixing compilers from different toolchains if language is missing

    Returns:
        list: list of Compiler objects
    """
    group_fn = lambda x: (x.id, x.variation, x.language)
    sorted_compilers = sorted(detected_versions, key=group_fn)

    # Gather items in a dictionary by the id, name variation and language
    compilers_d: Dict[CompilerID, Dict[NameVariation, dict]] = {}
    for sort_key, group in itertools.groupby(sorted_compilers, key=group_fn):
        compiler_id, name_variation, language = sort_key
        by_compiler_id = compilers_d.setdefault(compiler_id, {})
        by_name_variation = by_compiler_id.setdefault(name_variation, {})
        by_name_variation[language] = next(x.path for x in group)

    def _default_make_compilers(cmp_id, paths):
        operating_system, compiler_name, version = cmp_id
        compiler_cls = class_for_compiler_name(compiler_name)
        spec = spack.spec.CompilerSpec(compiler_cls.name, f"={version}")
        paths = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")]
        # TODO: johnwparent - revisit the following line as per discussion at:
        # https://github.com/spack/spack/pull/33385/files#r1040036318
        target = archspec.cpu.host()
        compiler = compiler_cls(spec, operating_system, str(target.family), paths)
        return [compiler]

    # For compilers with the same compiler id:
    #
    # - Prefer with C compiler to without
    # - Prefer with C++ compiler to without
    # - Prefer no variations to variations (e.g., clang to clang-gpu)
    #
    sort_fn = lambda variation: (
        "cc" not in by_compiler_id[variation],  # None last
        "cxx" not in by_compiler_id[variation],  # None last
        getattr(variation, "prefix", None),
        getattr(variation, "suffix", None),
    )

    # Flatten to a list of compiler id, primary variation and compiler dictionary
    flat_compilers: List[Tuple[CompilerID, NameVariation, dict]] = []
    for compiler_id, by_compiler_id in compilers_d.items():
        ordered = sorted(by_compiler_id, key=sort_fn)
        selected_variation = ordered[0]
        selected = by_compiler_id[selected_variation]

        # Fill any missing parts from subsequent entries (without mixing toolchains)
        for lang in ["cxx", "f77", "fc"]:
            if lang not in selected:
                next_lang = next(
                    (by_compiler_id[v][lang] for v in ordered if lang in by_compiler_id[v]), None
                )
                if next_lang:
                    selected[lang] = next_lang

        flat_compilers.append((compiler_id, selected_variation, selected))

    # Next, fill out the blanks of missing compilers by creating a mixed toolchain (if requested)
    if mixed_toolchain:
        make_mixed_toolchain(flat_compilers)

    # Finally, create the compiler list
    compilers: List["spack.compiler.Compiler"] = []
    for compiler_id, _, compiler in flat_compilers:
        make_compilers = getattr(compiler_id.os, "make_compilers", _default_make_compilers)
        candidates = make_compilers(compiler_id, compiler)
        compilers.extend(x for x in candidates if x.cc is not None)

    return compilers

def make_mixed_toolchain(compilers: List[Tuple[CompilerID, NameVariation, dict]]) -> None:
    """Add missing compilers across toolchains when they are missing for a particular language.
    This currently only adds the most sensible gfortran to (apple)-clang if it doesn't have a
    fortran compiler (no flang)."""

    # First collect the clangs that are missing a fortran compiler
    clangs_without_flang = [
        (id, variation, compiler)
        for id, variation, compiler in compilers
        if id.compiler_name in ("clang", "apple-clang")
        and "f77" not in compiler
        and "fc" not in compiler
    ]
    if not clangs_without_flang:
        return

    # Filter on GCCs with fortran compiler
    gccs_with_fortran = [
        (id, variation, compiler)
        for id, variation, compiler in compilers
        if id.compiler_name == "gcc" and "f77" in compiler and "fc" in compiler
    ]

    # Sort these GCCs by "best variation" (no prefix / suffix first)
    gccs_with_fortran.sort(
        key=lambda x: (getattr(x[1], "prefix", None), getattr(x[1], "suffix", None))
    )

    # Attach the optimal GCC fortran compiler to the clangs that don't have one
    for clang_id, _, clang_compiler in clangs_without_flang:
        gcc_compiler = next(
            (gcc[2] for gcc in gccs_with_fortran if gcc[0].os == clang_id.os), None
        )

        if not gcc_compiler:
            continue

        # Update the fc / f77 entries
        clang_compiler["f77"] = gcc_compiler["f77"]
        clang_compiler["fc"] = gcc_compiler["fc"]

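A toy illustration of the fill-in behaviour described above, run in the context of this module; all paths, versions and OS names are made up for the example:

# Toy data: an apple-clang without fortran and a gcc with it, on the same OS.
clang_id = CompilerID("monterey", "apple-clang", "14.0.0")
gcc_id = CompilerID("monterey", "gcc", "12.2.0")
clang_paths = {"cc": "/usr/bin/clang", "cxx": "/usr/bin/clang++"}
gcc_paths = {"cc": "/opt/gcc/bin/gcc", "cxx": "/opt/gcc/bin/g++",
             "f77": "/opt/gcc/bin/gfortran", "fc": "/opt/gcc/bin/gfortran"}

entries = [(clang_id, NameVariation(None, None), clang_paths),
           (gcc_id, NameVariation(None, None), gcc_paths)]
make_mixed_toolchain(entries)
print(clang_paths["fc"])  # /opt/gcc/bin/gfortran, borrowed from the matching gcc
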
def is_mixed_toolchain(compiler):
    """Returns True if the current compiler is a mixed toolchain,
    False otherwise.
@@ -1087,20 +671,164 @@ def name_matches(name, name_list):
    return False


_EXTRA_ATTRIBUTES_KEY = "extra_attributes"
_COMPILERS_KEY = "compilers"
_C_KEY = "c"
_CXX_KEY, _FORTRAN_KEY = "cxx", "fortran"


class CompilerConfigFactory:
    """Class aggregating all ways of constructing a list of compiler config entries."""

    @staticmethod
    def from_specs(specs: List["spack.spec.Spec"]) -> List[dict]:
        result = []
        compiler_package_names = supported_compilers() + list(package_name_to_compiler_name.keys())
        for s in specs:
            if s.name not in compiler_package_names:
                continue

            candidate = CompilerConfigFactory.from_external_spec(s)
            if candidate is None:
                continue

            result.append(candidate)
        return result

    @staticmethod
    def from_packages_yaml(packages_yaml) -> List[dict]:
        compiler_specs = []
        compiler_package_names = supported_compilers() + list(package_name_to_compiler_name.keys())
        for name, entry in packages_yaml.items():
            if name not in compiler_package_names:
                continue

            externals_config = entry.get("externals", None)
            if not externals_config:
                continue

            current_specs = []
            for current_external in externals_config:
                compiler = CompilerConfigFactory._spec_from_external_config(current_external)
                if compiler:
                    current_specs.append(compiler)
            compiler_specs.extend(current_specs)

        return CompilerConfigFactory.from_specs(compiler_specs)

    @staticmethod
    def _spec_from_external_config(config):
        # Allow `@x.y.z` instead of `@=x.y.z`
        err_header = f"The external spec '{config['spec']}' cannot be used as a compiler"
        # If extra_attributes is not there I might not want to use this entry as a compiler,
        # therefore just leave a debug message, but don't be loud with a warning.
        if _EXTRA_ATTRIBUTES_KEY not in config:
            tty.debug(f"[{__file__}] {err_header}: missing the '{_EXTRA_ATTRIBUTES_KEY}' key")
            return None
        extra_attributes = config[_EXTRA_ATTRIBUTES_KEY]
        result = spack.spec.Spec(
            str(spack.spec.parse_with_version_concrete(config["spec"])),
            external_modules=config.get("modules"),
        )
        result.extra_attributes = extra_attributes
        return result

    @staticmethod
    def from_external_spec(spec: "spack.spec.Spec") -> Optional[dict]:
        spec = spack.spec.parse_with_version_concrete(spec)
        extra_attributes = getattr(spec, _EXTRA_ATTRIBUTES_KEY, None)
        if extra_attributes is None:
            return None

        paths = CompilerConfigFactory._extract_compiler_paths(spec)
        if paths is None:
            return None

        compiler_spec = spack.spec.CompilerSpec(
            package_name_to_compiler_name.get(spec.name, spec.name), spec.version
        )

        operating_system, target = CompilerConfigFactory._extract_os_and_target(spec)

        compiler_entry = {
            "compiler": {
                "spec": str(compiler_spec),
                "paths": paths,
                "flags": extra_attributes.get("flags", {}),
                "operating_system": str(operating_system),
                "target": str(target.family),
                "modules": getattr(spec, "external_modules", []),
                "environment": extra_attributes.get("environment", {}),
                "extra_rpaths": extra_attributes.get("extra_rpaths", []),
                "implicit_rpaths": extra_attributes.get("implicit_rpaths", None),
            }
        }
        return compiler_entry

    @staticmethod
    def _extract_compiler_paths(spec: "spack.spec.Spec") -> Optional[Dict[str, str]]:
        err_header = f"The external spec '{spec}' cannot be used as a compiler"
        extra_attributes = spec.extra_attributes
        # If I have 'extra_attributes' warn if 'compilers' is missing,
        # or we don't have a C compiler
        if _COMPILERS_KEY not in extra_attributes:
            warnings.warn(
                f"{err_header}: missing the '{_COMPILERS_KEY}' key under '{_EXTRA_ATTRIBUTES_KEY}'"
            )
            return None
        attribute_compilers = extra_attributes[_COMPILERS_KEY]

        if _C_KEY not in attribute_compilers:
            warnings.warn(
                f"{err_header}: missing the C compiler path under "
                f"'{_EXTRA_ATTRIBUTES_KEY}:{_COMPILERS_KEY}'"
            )
            return None
        c_compiler = attribute_compilers[_C_KEY]

        # C++ and Fortran compilers are not mandatory, so let's just leave a debug trace
        if _CXX_KEY not in attribute_compilers:
            tty.debug(f"[{__file__}] The external spec {spec} does not have a C++ compiler")

        if _FORTRAN_KEY not in attribute_compilers:
            tty.debug(f"[{__file__}] The external spec {spec} does not have a Fortran compiler")

        # compilers format has cc/fc/f77, externals format has "c/fortran"
        return {
            "cc": c_compiler,
            "cxx": attribute_compilers.get(_CXX_KEY, None),
            "fc": attribute_compilers.get(_FORTRAN_KEY, None),
            "f77": attribute_compilers.get(_FORTRAN_KEY, None),
        }

    @staticmethod
    def _extract_os_and_target(spec: "spack.spec.Spec"):
        if not spec.architecture:
            host_platform = spack.platforms.host()
            operating_system = host_platform.operating_system("default_os")
            target = host_platform.target("default_target").microarchitecture
        else:
            target = spec.architecture.target
            if not target:
                target = spack.platforms.host().target("default_target")
            target = target.microarchitecture

            operating_system = spec.os
            if not operating_system:
                host_platform = spack.platforms.host()
                operating_system = host_platform.operating_system("default_os")
        return operating_system, target

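For illustration, a minimal sketch of the input shape ``from_packages_yaml`` expects, written as the Python dict the YAML parser would hand over; paths and versions are invented for the example, and the call assumes it runs inside this module:

# Hypothetical parsed packages.yaml: an external gcc exposing its compiler
# paths under extra_attributes:compilers, keyed by language ("c"/"cxx"/"fortran").
packages_yaml = {
    "gcc": {
        "externals": [
            {
                "spec": "gcc@12.3.0",
                "prefix": "/usr",
                "extra_attributes": {
                    "compilers": {
                        "c": "/usr/bin/gcc-12",
                        "cxx": "/usr/bin/g++-12",
                        "fortran": "/usr/bin/gfortran-12",
                    }
                },
            }
        ]
    }
}
entries = CompilerConfigFactory.from_packages_yaml(packages_yaml)
# each entry is a {"compiler": {...}} dict ready to be written to compilers.yaml
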
class InvalidCompilerConfigurationError(spack.error.SpackError):
    def __init__(self, compiler_spec):
        super().__init__(
            'Invalid configuration for [compiler "%s"]: ' % compiler_spec,
            "Compiler configuration must contain entries for all compilers: %s"
            % _path_instance_vars,
            f'Invalid configuration for [compiler "{compiler_spec}"]: ',
            f"Compiler configuration must contain entries for "
            f"all compilers: {spack.compiler.PATH_INSTANCE_VARS}",
        )


class NoCompilersError(spack.error.SpackError):
    def __init__(self):
        super().__init__("Spack could not find any compilers!")


class UnknownCompilerError(spack.error.SpackError):
    def __init__(self, compiler_name):
        super().__init__("Spack doesn't support the requested compiler: {0}".format(compiler_name))
@@ -1111,25 +839,3 @@ def __init__(self, compiler_spec, target):
        super().__init__(
            "No compilers for operating system %s satisfy spec %s" % (target, compiler_spec)
        )


class CompilerDuplicateError(spack.error.SpackError):
    def __init__(self, compiler_spec, arch_spec):
        config_file_to_duplicates = get_compiler_duplicates(compiler_spec, arch_spec)
        duplicate_table = list((x, len(y)) for x, y in config_file_to_duplicates.items())
        descriptor = lambda num: "time" if num == 1 else "times"
        duplicate_msg = lambda cfgfile, count: "{0}: {1} {2}".format(
            cfgfile, str(count), descriptor(count)
        )
        msg = (
            "Compiler configuration contains entries with duplicate"
            + " specification ({0}, {1})".format(compiler_spec, arch_spec)
            + " in the following files:\n\t"
            + "\n\t".join(duplicate_msg(x, y) for x, y in duplicate_table)
        )
        super().__init__(msg)


class CompilerSpecInsufficientlySpecificError(spack.error.SpackError):
    def __init__(self, compiler_spec):
        super().__init__("Multiple compilers satisfy spec %s" % compiler_spec)
@@ -223,6 +223,30 @@ def get_oneapi_root(pth: str):
        )
        self.msvc_compiler_environment = CmdCall(*env_cmds)

    @property
    def cxx11_flag(self):
        return "/std:c++11"

    @property
    def cxx14_flag(self):
        return "/std:c++14"

    @property
    def cxx17_flag(self):
        return "/std:c++17"

    @property
    def cxx20_flag(self):
        return "/std:c++20"

    @property
    def c11_flag(self):
        return "/std:c11"

    @property
    def c17_flag(self):
        return "/std:c17"

    @property
    def msvc_version(self):
        """This is the VCToolset version *NOT* the actual version of the cl compiler
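These new properties mirror the standard-flag accessors other Spack compiler classes expose. A toy sketch of how such a property might be consumed when assembling a compile line; the class and file names are invented for the illustration:

# Toy stand-in for the Msvc compiler class above; only the flag property matters.
class ToyMsvc:
    @property
    def cxx17_flag(self):
        return "/std:c++17"

compiler = ToyMsvc()
command = ["cl.exe", compiler.cxx17_flag, "/EHsc", "hello.cpp"]
print(" ".join(command))  # cl.exe /std:c++17 /EHsc hello.cpp
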
@@ -8,7 +8,6 @@
from contextlib import contextmanager
from itertools import chain

import spack.abi
import spack.compilers
import spack.config
import spack.environment
@@ -1090,7 +1090,7 @@ def validate(


def read_config_file(
    filename: str, schema: Optional[YamlConfigDict] = None
    path: str, schema: Optional[YamlConfigDict] = None
) -> Optional[YamlConfigDict]:
    """Read a YAML configuration file.

@@ -1100,21 +1100,9 @@ def read_config_file(
    # to preserve flexibility in calling convention (don't need to provide
    # schema when it's not necessary) while allowing us to validate against a
    # known schema when the top-level key could be incorrect.

    if not os.path.exists(filename):
        # Ignore nonexistent files.
        tty.debug(f"Skipping nonexistent config path {filename}", level=3)
        return None

    elif not os.path.isfile(filename):
        raise ConfigFileError(f"Invalid configuration. {filename} exists but is not a file.")

    elif not os.access(filename, os.R_OK):
        raise ConfigFileError(f"Config file is not readable: {filename}")

    try:
        tty.debug(f"Reading config from file {filename}")
        with open(filename) as f:
        with open(path) as f:
            tty.debug(f"Reading config from file {path}")
            data = syaml.load_config(f)

        if data:
@@ -1125,15 +1113,20 @@ def read_config_file(

        return data

    except StopIteration:
        raise ConfigFileError(f"Config file is empty or is not a valid YAML dict: {filename}")
    except FileNotFoundError:
        # Ignore nonexistent files.
        tty.debug(f"Skipping nonexistent config path {path}", level=3)
        return None

    except OSError as e:
        raise ConfigFileError(f"Path is not a file or is not readable: {path}: {str(e)}") from e

    except StopIteration as e:
        raise ConfigFileError(f"Config file is empty or is not a valid YAML dict: {path}") from e

    except syaml.SpackYAMLError as e:
        raise ConfigFileError(str(e)) from e

    except OSError as e:
        raise ConfigFileError(f"Error reading configuration file {filename}: {str(e)}") from e


def _override(string: str) -> bool:
    """Test if a spack YAML string is an override.

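The rewrite above trades the up-front ``os.path`` checks for an EAFP style: open the file and translate ``FileNotFoundError``/``OSError`` afterwards, which also removes the race window between the existence check and the ``open``. A minimal sketch of calling the reworked function; the file name is invented:

# Hypothetical call site for the reworked read_config_file.
import spack.config

data = spack.config.read_config_file("/tmp/example-config.yaml")
if data is None:
    print("no such file, silently skipped")  # nonexistent paths return None
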
@@ -14,12 +14,14 @@
import llnl.util.tty as tty

import spack.cmd
import spack.compilers
import spack.deptypes as dt
import spack.error
import spack.hash_types as hash_types
import spack.platforms
import spack.repo
import spack.spec
import spack.store
from spack.schema.cray_manifest import schema as manifest_schema

#: Cray systems can store a Spack-compatible description of system
@@ -237,7 +239,7 @@ def read(path, apply_updates):
            tty.debug(f"Include this\n{traceback.format_exc()}")
    if apply_updates:
        for spec in specs.values():
            spack.store.STORE.db.add(spec, directory_layout=None)
            spack.store.STORE.db.add(spec)


class ManifestValidationError(spack.error.SpackError):
@@ -59,7 +59,11 @@
import spack.util.lock as lk
import spack.util.spack_json as sjson
import spack.version as vn
from spack.directory_layout import DirectoryLayoutError, InconsistentInstallDirectoryError
from spack.directory_layout import (
    DirectoryLayout,
    DirectoryLayoutError,
    InconsistentInstallDirectoryError,
)
from spack.error import SpackError
from spack.util.crypto import bit_length

@@ -208,7 +212,7 @@ def __init__(
        ref_count: int = 0,
        explicit: bool = False,
        installation_time: Optional[float] = None,
        deprecated_for: Optional["spack.spec.Spec"] = None,
        deprecated_for: Optional[str] = None,
        in_buildcache: bool = False,
        origin=None,
    ):
@@ -595,9 +599,11 @@ class Database:
    def __init__(
        self,
        root: str,
        *,
        upstream_dbs: Optional[List["Database"]] = None,
        is_upstream: bool = False,
        lock_cfg: LockConfiguration = DEFAULT_LOCK_CFG,
        layout: Optional[DirectoryLayout] = None,
    ) -> None:
        """Database for Spack installations.

@@ -620,6 +626,7 @@ def __init__(
        """
        self.root = root
        self.database_directory = os.path.join(self.root, _DB_DIRNAME)
        self.layout = layout

        # Set up layout of database files within the db dir
        self._index_path = os.path.join(self.database_directory, "index.json")
@@ -664,14 +671,6 @@ def __init__(

        self.upstream_dbs = list(upstream_dbs) if upstream_dbs else []

        # whether there was an error at the start of a read transaction
        self._error = None

        # For testing: if this is true, an exception is thrown when missing
        # dependencies are detected (rather than just printing a warning
        # message)
        self._fail_when_missing_deps = False

        self._write_transaction_impl = lk.WriteTransaction
        self._read_transaction_impl = lk.ReadTransaction

@@ -774,7 +773,13 @@ def query_local_by_spec_hash(self, hash_key):
        with self.read_transaction():
            return self._data.get(hash_key, None)

    def _assign_dependencies(self, spec_reader, hash_key, installs, data):
    def _assign_dependencies(
        self,
        spec_reader: Type["spack.spec.SpecfileReaderBase"],
        hash_key: str,
        installs: dict,
        data: Dict[str, InstallRecord],
    ):
        # Add dependencies from other records in the install DB to
        # form a full spec.
        spec = data[hash_key].spec
@@ -787,26 +792,20 @@ def _assign_dependencies(self, spec_reader, hash_key, installs, data):
        for dname, dhash, dtypes, _, virtuals in spec_reader.read_specfile_dep_specs(
            yaml_deps
        ):
            # It is important that we always check upstream installations
            # in the same order, and that we always check the local
            # installation first: if a downstream Spack installs a package
            # then dependents in that installation could be using it.
            # If a hash is installed locally and upstream, there isn't
            # enough information to determine which one a local package
            # depends on, so the convention ensures that this isn't an
            # issue.
            upstream, record = self.query_by_spec_hash(dhash, data=data)
            # It is important that we always check upstream installations in the same order,
            # and that we always check the local installation first: if a downstream Spack
            # installs a package then dependents in that installation could be using it. If a
            # hash is installed locally and upstream, there isn't enough information to
            # determine which one a local package depends on, so the convention ensures that
            # this isn't an issue.
            _, record = self.query_by_spec_hash(dhash, data=data)
            child = record.spec if record else None

            if not child:
                msg = "Missing dependency not in database: " "%s needs %s-%s" % (
                    spec.cformat("{name}{/hash:7}"),
                    dname,
                    dhash[:7],
                tty.warn(
                    f"Missing dependency not in database: "
                    f"{spec.cformat('{name}{/hash:7}')} needs {dname}-{dhash[:7]}"
                )
                if self._fail_when_missing_deps:
                    raise MissingDependenciesError(msg)
                tty.warn(msg)
                continue

            spec._add_dependency(child, depflag=dt.canonicalize(dtypes), virtuals=virtuals)
@@ -873,8 +872,8 @@ def invalid_record(hash_key, error):
        # (i.e., its specs are a true Merkle DAG, unlike most specs.)

        # Pass 1: Iterate through database and build specs w/o dependencies
        data = {}
        installed_prefixes = set()
        data: Dict[str, InstallRecord] = {}
        installed_prefixes: Set[str] = set()
        for hash_key, rec in installs.items():
            try:
                # This constructs a spec DAG from the list of all installs
@@ -911,7 +910,7 @@ def invalid_record(hash_key, error):
        self._data = data
        self._installed_prefixes = installed_prefixes

    def reindex(self, directory_layout):
    def reindex(self):
        """Build database index from scratch based on a directory layout.

        Locks the DB if it isn't locked already.
@@ -919,6 +918,8 @@ def reindex(self, directory_layout):
        if self.is_upstream:
            raise UpstreamDatabaseLockingError("Cannot reindex an upstream database")

        error: Optional[CorruptDatabaseError] = None

        # Special transaction to avoid recursive reindex calls and to
        # ignore errors if we need to rebuild a corrupt database.
        def _read_suppress_error():
@@ -926,7 +927,8 @@ def _read_suppress_error():
                if os.path.isfile(self._index_path):
                    self._read_from_file(self._index_path)
            except CorruptDatabaseError as e:
                self._error = e
                nonlocal error
                error = e
                self._data = {}
                self._installed_prefixes = set()

@@ -935,14 +937,13 @@ def _read_suppress_error():
        )

        with transaction:
            if self._error:
                tty.warn("Spack database was corrupt. Will rebuild. Error was:", str(self._error))
                self._error = None
            if error is not None:
                tty.warn(f"Spack database was corrupt. Will rebuild. Error was: {error}")

            old_data = self._data
            old_installed_prefixes = self._installed_prefixes
            try:
                self._construct_from_directory_layout(directory_layout, old_data)
                self._construct_from_directory_layout(old_data)
            except BaseException:
                # If anything explodes, restore old data, skip write.
                self._data = old_data
@@ -950,13 +951,16 @@ def _read_suppress_error():
                raise

    def _construct_entry_from_directory_layout(
        self, directory_layout, old_data, spec, deprecator=None
        self,
        old_data: Dict[str, InstallRecord],
        spec: "spack.spec.Spec",
        deprecator: Optional["spack.spec.Spec"] = None,
    ):
        # Try to recover explicit value from old DB, but
        # default it to True if DB was corrupt. This is
        # just to be conservative in case a command like
        # "autoremove" is run by the user after a reindex.
        tty.debug("RECONSTRUCTING FROM SPEC.YAML: {0}".format(spec))
        tty.debug(f"Reconstructing from spec file: {spec}")
        explicit = True
        inst_time = os.stat(spec.prefix).st_ctime
        if old_data is not None:
@@ -965,18 +969,17 @@ def _construct_entry_from_directory_layout(
                explicit = old_info.explicit
                inst_time = old_info.installation_time

        extra_args = {"explicit": explicit, "installation_time": inst_time}
        self._add(spec, directory_layout, **extra_args)
        self._add(spec, explicit=explicit, installation_time=inst_time)
        if deprecator:
            self._deprecate(spec, deprecator)

    def _construct_from_directory_layout(self, directory_layout, old_data):
        # Read first the `spec.yaml` files in the prefixes. They should be
        # considered authoritative with respect to DB reindexing, as
        # entries in the DB may be corrupted in a way that still makes
        # them readable. If we considered DB entries authoritative
        # instead, we would perpetuate errors over a reindex.
        with directory_layout.disable_upstream_check():
    def _construct_from_directory_layout(self, old_data: Dict[str, InstallRecord]):
        # Read first the spec files in the prefixes. They should be considered authoritative with
        # respect to DB reindexing, as entries in the DB may be corrupted in a way that still makes
        # them readable. If we considered DB entries authoritative instead, we would perpetuate
        # errors over a reindex.
        assert self.layout is not None, "Cannot reindex a database without a known layout"
        with self.layout.disable_upstream_check():
            # Initialize data in the reconstructed DB
            self._data = {}
            self._installed_prefixes = set()
@@ -984,44 +987,36 @@ def _construct_from_directory_layout(self, directory_layout, old_data):
            # Start inspecting the installed prefixes
            processed_specs = set()

            for spec in directory_layout.all_specs():
                self._construct_entry_from_directory_layout(directory_layout, old_data, spec)
            for spec in self.layout.all_specs():
                self._construct_entry_from_directory_layout(old_data, spec)
                processed_specs.add(spec)

            for spec, deprecator in directory_layout.all_deprecated_specs():
                self._construct_entry_from_directory_layout(
                    directory_layout, old_data, spec, deprecator
                )
            for spec, deprecator in self.layout.all_deprecated_specs():
                self._construct_entry_from_directory_layout(old_data, spec, deprecator)
                processed_specs.add(spec)

            for key, entry in old_data.items():
                # We already took care of this spec using
                # `spec.yaml` from its prefix.
            for entry in old_data.values():
                # We already took care of this spec using spec file from its prefix.
                if entry.spec in processed_specs:
                    msg = "SKIPPING RECONSTRUCTION FROM OLD DB: {0}"
                    msg += " [already reconstructed from spec.yaml]"
                    tty.debug(msg.format(entry.spec))
                    tty.debug(
                        f"Skipping reconstruction from old db: {entry.spec}"
                        " [already reconstructed from spec file]"
                    )
                    continue

                # If we arrived here it very likely means that
                # we have external specs that are not dependencies
                # of other specs. This may be the case for externally
                # installed compilers or externally installed
                # applications.
                tty.debug("RECONSTRUCTING FROM OLD DB: {0}".format(entry.spec))
                # If we arrived here it very likely means that we have external specs that are not
                # dependencies of other specs. This may be the case for externally installed
                # compilers or externally installed applications.
                tty.debug(f"Reconstructing from old db: {entry.spec}")
                try:
                    layout = None if entry.spec.external else directory_layout
                    kwargs = {
                        "spec": entry.spec,
                        "directory_layout": layout,
                        "explicit": entry.explicit,
                        "installation_time": entry.installation_time,
                    }
                    self._add(**kwargs)
                    self._add(
                        spec=entry.spec,
                        explicit=entry.explicit,
                        installation_time=entry.installation_time,
                    )
                    processed_specs.add(entry.spec)
                except Exception as e:
                    # Something went wrong, so the spec was not restored from old data
                    tty.debug(e)

        self._check_ref_counts()
@@ -1033,7 +1028,7 @@ def _check_ref_counts(self):

        Does no locking.
        """
        counts = {}
        counts: Dict[str, int] = {}
        for key, rec in self._data.items():
            counts.setdefault(key, 0)
            for dep in rec.spec.dependencies(deptype=_TRACKED_DEPENDENCIES):
@@ -1117,29 +1112,23 @@ def _read(self):

def _add(
|
||||
self,
|
||||
spec,
|
||||
directory_layout=None,
|
||||
explicit=False,
|
||||
installation_time=None,
|
||||
allow_missing=False,
|
||||
spec: "spack.spec.Spec",
|
||||
explicit: bool = False,
|
||||
installation_time: Optional[float] = None,
|
||||
allow_missing: bool = False,
|
||||
):
|
||||
"""Add an install record for this spec to the database.
|
||||
|
||||
Assumes spec is installed in ``directory_layout.path_for_spec(spec)``.
|
||||
|
||||
Also ensures dependencies are present and updated in the DB as
|
||||
either installed or missing.
|
||||
Also ensures dependencies are present and updated in the DB as either installed or missing.
|
||||
|
||||
Args:
|
||||
spec (spack.spec.Spec): spec to be added
|
||||
directory_layout: layout of the spec installation
|
||||
spec: spec to be added
|
||||
explicit:
|
||||
Possible values: True, False, any
|
||||
|
||||
A spec that was installed following a specific user
|
||||
request is marked as explicit. If instead it was
|
||||
pulled-in as a dependency of a user requested spec
|
||||
it's considered implicit.
|
||||
A spec that was installed following a specific user request is marked as explicit.
|
||||
If instead it was pulled-in as a dependency of a user requested spec it's
|
||||
considered implicit.
|
||||
|
||||
installation_time:
|
||||
Date and time of installation
|
||||
@@ -1150,48 +1139,42 @@ def _add(
|
||||
raise NonConcreteSpecAddError("Specs added to DB must be concrete.")
|
||||
|
||||
key = spec.dag_hash()
|
||||
spec_pkg_hash = spec._package_hash
|
||||
spec_pkg_hash = spec._package_hash # type: ignore[attr-defined]
|
||||
upstream, record = self.query_by_spec_hash(key)
|
||||
if upstream:
|
||||
return
|
||||
|
||||
# Retrieve optional arguments
|
||||
installation_time = installation_time or _now()
|
||||
|
||||
for edge in spec.edges_to_dependencies(depflag=_TRACKED_DEPENDENCIES):
|
||||
if edge.spec.dag_hash() in self._data:
|
||||
continue
|
||||
# allow missing build-only deps. This prevents excessive
|
||||
# warnings when a spec is installed, and its build dep
|
||||
# is missing a build dep; there's no need to install the
|
||||
# build dep's build dep first, and there's no need to warn
|
||||
# about it missing.
|
||||
dep_allow_missing = allow_missing or edge.depflag == dt.BUILD
|
||||
self._add(
|
||||
edge.spec,
|
||||
directory_layout,
|
||||
explicit=False,
|
||||
installation_time=installation_time,
|
||||
allow_missing=dep_allow_missing,
|
||||
# allow missing build-only deps. This prevents excessive warnings when a spec is
|
||||
# installed, and its build dep is missing a build dep; there's no need to install
|
||||
# the build dep's build dep first, and there's no need to warn about it missing.
|
||||
allow_missing=allow_missing or edge.depflag == dt.BUILD,
|
||||
)
|
||||
|
||||
# Make sure the directory layout agrees whether the spec is installed
|
||||
if not spec.external and directory_layout:
|
||||
path = directory_layout.path_for_spec(spec)
|
||||
if not spec.external and self.layout:
|
||||
path = self.layout.path_for_spec(spec)
|
||||
installed = False
|
||||
try:
|
||||
directory_layout.ensure_installed(spec)
|
||||
self.layout.ensure_installed(spec)
|
||||
installed = True
|
||||
self._installed_prefixes.add(path)
|
||||
except DirectoryLayoutError as e:
|
||||
if not (allow_missing and isinstance(e, InconsistentInstallDirectoryError)):
|
||||
msg = (
|
||||
"{0} is being {1} in the database with prefix {2}, "
|
||||
"but this directory does not contain an installation of "
|
||||
"the spec, due to: {3}"
|
||||
)
|
||||
action = "updated" if key in self._data else "registered"
|
||||
tty.warn(msg.format(spec.short_spec, action, path, str(e)))
|
||||
tty.warn(
|
||||
f"{spec.short_spec} is being {action} in the database with prefix {path}, "
|
||||
"but this directory does not contain an installation of "
|
||||
f"the spec, due to: {e}"
|
||||
)
|
||||
elif spec.external_path:
|
||||
path = spec.external_path
|
||||
installed = True
|
||||
@@ -1202,23 +1185,27 @@ def _add(
|
||||
if key not in self._data:
|
||||
# Create a new install record with no deps initially.
|
||||
new_spec = spec.copy(deps=False)
|
||||
extra_args = {"explicit": explicit, "installation_time": installation_time}
|
||||
# Commands other than 'spack install' may add specs to the DB,
|
||||
# we can record the source of an installed Spec with 'origin'
|
||||
if hasattr(spec, "origin"):
|
||||
extra_args["origin"] = spec.origin
|
||||
self._data[key] = InstallRecord(new_spec, path, installed, ref_count=0, **extra_args)
|
||||
self._data[key] = InstallRecord(
|
||||
new_spec,
|
||||
path=path,
|
||||
installed=installed,
|
||||
ref_count=0,
|
||||
explicit=explicit,
|
||||
installation_time=installation_time,
|
||||
origin=None if not hasattr(spec, "origin") else spec.origin,
|
||||
)
|
||||
|
||||
# Connect dependencies from the DB to the new copy.
|
||||
for dep in spec.edges_to_dependencies(depflag=_TRACKED_DEPENDENCIES):
|
||||
dkey = dep.spec.dag_hash()
|
||||
upstream, record = self.query_by_spec_hash(dkey)
|
||||
assert record, f"Missing dependency {dep.spec} in DB"
|
||||
new_spec._add_dependency(record.spec, depflag=dep.depflag, virtuals=dep.virtuals)
|
||||
if not upstream:
|
||||
record.ref_count += 1
|
||||
|
||||
# Mark concrete once everything is built, and preserve
|
||||
# the original hashes of concrete specs.
|
||||
# Mark concrete once everything is built, and preserve the original hashes of concrete
|
||||
# specs.
|
||||
new_spec._mark_concrete()
|
||||
new_spec._hash = key
|
||||
new_spec._package_hash = spec_pkg_hash
|
||||
@@ -1231,7 +1218,7 @@ def _add(
|
||||
self._data[key].explicit = explicit
|
||||
|
||||
@_autospec
|
||||
def add(self, spec, directory_layout, explicit=False):
|
||||
def add(self, spec: "spack.spec.Spec", *, explicit: bool = False) -> None:
|
||||
"""Add spec at path to database, locking and reading DB to sync.
|
||||
|
||||
``add()`` will lock and read from the DB on disk.
|
||||
@@ -1240,9 +1227,9 @@ def add(self, spec, directory_layout, explicit=False):
|
||||
# TODO: ensure that spec is concrete?
|
||||
# Entire add is transactional.
|
||||
with self.write_transaction():
|
||||
self._add(spec, directory_layout, explicit=explicit)
|
||||
self._add(spec, explicit=explicit)
|
||||
|
||||
def _get_matching_spec_key(self, spec, **kwargs):
|
||||
def _get_matching_spec_key(self, spec: "spack.spec.Spec", **kwargs) -> str:
|
||||
"""Get the exact spec OR get a single spec that matches."""
|
||||
key = spec.dag_hash()
|
||||
upstream, record = self.query_by_spec_hash(key)
|
||||
@@ -1254,12 +1241,12 @@ def _get_matching_spec_key(self, spec, **kwargs):
|
||||
return key
|
||||
|
||||
@_autospec
|
||||
def get_record(self, spec, **kwargs):
|
||||
def get_record(self, spec: "spack.spec.Spec", **kwargs) -> Optional[InstallRecord]:
|
||||
key = self._get_matching_spec_key(spec, **kwargs)
|
||||
upstream, record = self.query_by_spec_hash(key)
|
||||
return record
|
||||
|
||||
def _decrement_ref_count(self, spec):
|
||||
def _decrement_ref_count(self, spec: "spack.spec.Spec") -> None:
|
||||
key = spec.dag_hash()
|
||||
|
||||
if key not in self._data:
|
||||
@@ -1276,7 +1263,7 @@ def _decrement_ref_count(self, spec):
|
||||
for dep in spec.dependencies(deptype=_TRACKED_DEPENDENCIES):
|
||||
self._decrement_ref_count(dep)
|
||||
|
||||
def _increment_ref_count(self, spec):
|
||||
def _increment_ref_count(self, spec: "spack.spec.Spec") -> None:
|
||||
key = spec.dag_hash()
|
||||
|
||||
if key not in self._data:
|
||||
@@ -1285,14 +1272,14 @@ def _increment_ref_count(self, spec):
|
||||
rec = self._data[key]
|
||||
rec.ref_count += 1
|
||||
|
||||
def _remove(self, spec):
|
||||
def _remove(self, spec: "spack.spec.Spec") -> "spack.spec.Spec":
|
||||
"""Non-locking version of remove(); does real work."""
|
||||
key = self._get_matching_spec_key(spec)
|
||||
rec = self._data[key]

# This install prefix is now free for other specs to use, even if the
# spec is only marked uninstalled.
if not rec.spec.external and rec.installed:
if not rec.spec.external and rec.installed and rec.path:
self._installed_prefixes.remove(rec.path)

if rec.ref_count > 0:
@@ -1316,7 +1303,7 @@ def _remove(self, spec):
return rec.spec

@_autospec
def remove(self, spec):
def remove(self, spec: "spack.spec.Spec") -> "spack.spec.Spec":
"""Removes a spec from the database. To be called on uninstall.

Reads the database, then:
@@ -1331,7 +1318,7 @@ def remove(self, spec):
with self.write_transaction():
return self._remove(spec)

def deprecator(self, spec):
def deprecator(self, spec: "spack.spec.Spec") -> Optional["spack.spec.Spec"]:
"""Return the spec that the given spec is deprecated for, or None"""
with self.read_transaction():
spec_key = self._get_matching_spec_key(spec)
@@ -1342,14 +1329,14 @@ def deprecator(self, spec):
else:
return None

def specs_deprecated_by(self, spec):
def specs_deprecated_by(self, spec: "spack.spec.Spec") -> List["spack.spec.Spec"]:
"""Return all specs deprecated in favor of the given spec"""
with self.read_transaction():
return [
rec.spec for rec in self._data.values() if rec.deprecated_for == spec.dag_hash()
]

def _deprecate(self, spec, deprecator):
def _deprecate(self, spec: "spack.spec.Spec", deprecator: "spack.spec.Spec") -> None:
spec_key = self._get_matching_spec_key(spec)
spec_rec = self._data[spec_key]

@@ -1367,17 +1354,17 @@ def _deprecate(self, spec, deprecator):
self._data[spec_key] = spec_rec

@_autospec
def mark(self, spec, key, value):
def mark(self, spec: "spack.spec.Spec", key, value) -> None:
"""Mark an arbitrary record on a spec."""
with self.write_transaction():
return self._mark(spec, key, value)

def _mark(self, spec, key, value):
def _mark(self, spec: "spack.spec.Spec", key, value) -> None:
record = self._data[self._get_matching_spec_key(spec)]
setattr(record, key, value)

@_autospec
def deprecate(self, spec, deprecator):
def deprecate(self, spec: "spack.spec.Spec", deprecator: "spack.spec.Spec") -> None:
"""Marks a spec as deprecated in favor of its deprecator"""
with self.write_transaction():
return self._deprecate(spec, deprecator)
@@ -1385,16 +1372,16 @@ def deprecate(self, spec, deprecator):
@_autospec
def installed_relatives(
self,
spec,
direction="children",
transitive=True,
spec: "spack.spec.Spec",
direction: str = "children",
transitive: bool = True,
deptype: Union[dt.DepFlag, dt.DepTypes] = dt.ALL,
):
) -> Set["spack.spec.Spec"]:
"""Return installed specs related to this one."""
if direction not in ("parents", "children"):
raise ValueError("Invalid direction: %s" % direction)

relatives = set()
relatives: Set[spack.spec.Spec] = set()
for spec in self.query(spec):
if transitive:
to_add = spec.traverse(direction=direction, root=False, deptype=deptype)
@@ -1405,17 +1392,13 @@ def installed_relatives(

for relative in to_add:
hash_key = relative.dag_hash()
upstream, record = self.query_by_spec_hash(hash_key)
_, record = self.query_by_spec_hash(hash_key)
if not record:
reltype = "Dependent" if direction == "parents" else "Dependency"
msg = "Inconsistent state! %s %s of %s not in DB" % (
reltype,
hash_key,
spec.dag_hash(),
tty.warn(
f"Inconsistent state: "
f"{'dependent' if direction == 'parents' else 'dependency'} {hash_key} of "
f"{spec.dag_hash()} not in DB"
)
if self._fail_when_missing_deps:
raise MissingDependenciesError(msg)
tty.warn(msg)
continue

if not record.installed:
@@ -1425,7 +1408,7 @@ def installed_relatives(
return relatives

@_autospec
def installed_extensions_for(self, extendee_spec):
def installed_extensions_for(self, extendee_spec: "spack.spec.Spec"):
"""Returns the specs of all packages that extend the given spec"""
for spec in self.query():
if spec.package.extends(extendee_spec):
@@ -1684,7 +1667,7 @@ def unused_specs(
self,
root_hashes: Optional[Container[str]] = None,
deptype: Union[dt.DepFlag, dt.DepTypes] = dt.LINK | dt.RUN,
) -> "List[spack.spec.Spec]":
) -> List["spack.spec.Spec"]:
"""Return all specs that are currently installed but not needed by root specs.

By default, roots are all explicit specs in the database. If a set of root
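The hunks above give the database's deprecation API explicit types: deprecate() records the change inside a write transaction, while deprecator() and specs_deprecated_by() are read-only queries keyed on DAG hashes. A minimal usage sketch, assuming the usual spack.store.STORE.db handle and two hypothetical installed specs:

import spack.spec
import spack.store

db = spack.store.STORE.db  # assumption: the default store's database

old = spack.spec.Spec("openssl@1.1.1w").concretized()  # hypothetical spec
new = spack.spec.Spec("openssl@3.3.1").concretized()   # hypothetical spec

db.deprecate(old, new)                     # write transaction: mark old -> new
assert db.deprecator(old) == new           # read transaction: who deprecates old?
assert old in db.specs_deprecated_by(new)  # every spec deprecated in favor of new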
@@ -2,17 +2,11 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from .common import (
DetectedPackage,
executable_prefix,
set_virtuals_nonbuildable,
update_configuration,
)
from .common import executable_prefix, set_virtuals_nonbuildable, update_configuration
from .path import by_path, executables_in_path
from .test import detection_tests

__all__ = [
"DetectedPackage",
"by_path",
"executables_in_path",
"executable_prefix",
@@ -6,9 +6,9 @@
function to update packages.yaml given a list of detected packages.

Ideally, each detection method should be placed in a specific subpackage
and implement at least a function that returns a list of DetectedPackage
objects. The update in packages.yaml can then be done using the function
provided here.
and implement at least a function that returns a list of specs.

The update in packages.yaml can then be done using the function provided here.

The module also contains other functions that might be useful across different
detection mechanisms.
@@ -17,9 +17,10 @@
import itertools
import os
import os.path
import pathlib
import re
import sys
from typing import Dict, List, NamedTuple, Optional, Set, Tuple, Union
from typing import Dict, List, Optional, Set, Tuple, Union

import llnl.util.tty

@@ -30,25 +31,6 @@
import spack.util.windows_registry


class DetectedPackage(NamedTuple):
"""Information on a package that has been detected."""

#: Spec that was detected
spec: spack.spec.Spec
#: Prefix of the spec
prefix: str

def __reduce__(self):
return DetectedPackage.restore, (str(self.spec), self.prefix, self.spec.extra_attributes)

@staticmethod
def restore(
spec_str: str, prefix: str, extra_attributes: Optional[Dict[str, str]]
) -> "DetectedPackage":
spec = spack.spec.Spec.from_detection(spec_str=spec_str, extra_attributes=extra_attributes)
return DetectedPackage(spec=spec, prefix=prefix)


def _externals_in_packages_yaml() -> Set[spack.spec.Spec]:
"""Returns all the specs mentioned as externals in packages.yaml"""
packages_yaml = spack.config.get("packages")
@@ -63,7 +45,7 @@ def _externals_in_packages_yaml() -> Set[spack.spec.Spec]:


def _pkg_config_dict(
external_pkg_entries: List[DetectedPackage],
external_pkg_entries: List["spack.spec.Spec"],
) -> Dict[str, Union[bool, List[Dict[str, ExternalEntryType]]]]:
"""Generate a package specific config dict according to the packages.yaml schema.

@@ -83,22 +65,19 @@ def _pkg_config_dict(
pkg_dict = spack.util.spack_yaml.syaml_dict()
pkg_dict["externals"] = []
for e in external_pkg_entries:
if not _spec_is_valid(e.spec):
if not _spec_is_valid(e):
continue

external_items: List[Tuple[str, ExternalEntryType]] = [
("spec", str(e.spec)),
("prefix", e.prefix),
("spec", str(e)),
("prefix", pathlib.Path(e.external_path).as_posix()),
]
if e.spec.external_modules:
external_items.append(("modules", e.spec.external_modules))
if e.external_modules:
external_items.append(("modules", e.external_modules))

if e.spec.extra_attributes:
if e.extra_attributes:
external_items.append(
(
"extra_attributes",
spack.util.spack_yaml.syaml_dict(e.spec.extra_attributes.items()),
)
("extra_attributes", spack.util.spack_yaml.syaml_dict(e.extra_attributes.items()))
)

# external_items.extend(e.spec.extra_attributes.items())
@@ -219,33 +198,32 @@ def library_prefix(library_dir: str) -> str:


def update_configuration(
detected_packages: Dict[str, List[DetectedPackage]],
detected_packages: Dict[str, List["spack.spec.Spec"]],
scope: Optional[str] = None,
buildable: bool = True,
) -> List[spack.spec.Spec]:
"""Add the packages passed as arguments to packages.yaml

Args:
detected_packages: list of DetectedPackage objects to be added
detected_packages: list of specs to be added
scope: configuration scope where to add the detected packages
buildable: whether the detected packages are buildable or not
"""
predefined_external_specs = _externals_in_packages_yaml()
pkg_to_cfg, all_new_specs = {}, []
for package_name, entries in detected_packages.items():
new_entries = [e for e in entries if (e.spec not in predefined_external_specs)]
new_entries = [s for s in entries if s not in predefined_external_specs]

pkg_config = _pkg_config_dict(new_entries)
external_entries = pkg_config.get("externals", [])
assert not isinstance(external_entries, bool), "unexpected value for external entry"

all_new_specs.extend([spack.spec.Spec(x["spec"]) for x in external_entries])
all_new_specs.extend(new_entries)
if buildable is False:
pkg_config["buildable"] = False
pkg_to_cfg[package_name] = pkg_config

pkgs_cfg = spack.config.get("packages", scope=scope)

pkgs_cfg = spack.config.merge_yaml(pkgs_cfg, pkg_to_cfg)
spack.config.set("packages", pkgs_cfg, scope=scope)
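The net effect of this file's changes: detection results are plain spack.spec.Spec objects that carry their own external_path, external_modules, and extra_attributes, so DetectedPackage and its (spec, prefix) pairing disappear. A sketch of the new calling convention, with a made-up package and prefix (from_detection with external_path= is taken from the test hunk further below):

import spack.spec
from spack.detection import update_configuration

# The prefix now rides on the spec itself instead of a wrapper tuple.
s = spack.spec.Spec.from_detection("cmake@3.27.4", external_path="/usr")  # hypothetical

# Keyed by unqualified package name; writes an externals entry into packages.yaml.
update_configuration({"cmake": [s]}, scope=None, buildable=True)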
@@ -24,7 +24,6 @@
import spack.util.ld_so_conf

from .common import (
DetectedPackage,
WindowsCompilerExternalPaths,
WindowsKitExternalPaths,
_convert_to_iterable,
@@ -62,7 +61,7 @@ def common_windows_package_paths(pkg_cls=None) -> List[str]:

def file_identifier(path):
s = os.stat(path)
return (s.st_dev, s.st_ino)
return s.st_dev, s.st_ino


def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
@@ -80,6 +79,8 @@ def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
constructed based on the PATH environment variable.
"""
search_paths = llnl.util.filesystem.search_paths_for_executables(*path_hints)
# Make sure we don't doubly list /usr/lib and /lib etc
search_paths = list(llnl.util.lang.dedupe(search_paths, key=file_identifier))
return path_to_dict(search_paths)


@@ -227,7 +228,7 @@ def prefix_from_path(self, *, path: str) -> str:

def detect_specs(
self, *, pkg: Type["spack.package_base.PackageBase"], paths: List[str]
) -> List[DetectedPackage]:
) -> List["spack.spec.Spec"]:
"""Given a list of files matching the search patterns, returns a list of detected specs.

Args:
@@ -293,16 +294,16 @@ def detect_specs(
warnings.warn(msg)
continue

if spec.external_path:
prefix = spec.external_path
if not spec.external_path:
spec.external_path = prefix

result.append(DetectedPackage(spec=spec, prefix=prefix))
result.append(spec)

return result

def find(
self, *, pkg_name: str, repository, initial_guess: Optional[List[str]] = None
) -> List[DetectedPackage]:
) -> List["spack.spec.Spec"]:
"""For a given package, returns a list of detected specs.

Args:
@@ -386,7 +387,7 @@ def by_path(
*,
path_hints: Optional[List[str]] = None,
max_workers: Optional[int] = None,
) -> Dict[str, List[DetectedPackage]]:
) -> Dict[str, List["spack.spec.Spec"]]:
"""Return the list of packages that have been detected on the system, keyed by
unqualified package name.
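One detail worth noting above: file_identifier() keys each search directory by its (st_dev, st_ino) pair, so dedupe() collapses aliases such as a /lib symlink into /usr/lib that would otherwise be scanned twice. A standalone sketch of the same trick:

import os

def file_identifier(path):
    s = os.stat(path)  # follows symlinks, so aliases share one identity
    return s.st_dev, s.st_ino

def dedupe_paths(paths):
    seen, unique = set(), []
    for p in paths:
        key = file_identifier(p)
        if key not in seen:  # keep the first occurrence, preserve order
            seen.add(key)
            unique.append(p)
    return unique

# On a merged-/usr system, "/lib" usually points at "/usr/lib", so:
# dedupe_paths(["/usr/lib", "/lib"]) -> ["/usr/lib"]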
@@ -68,7 +68,7 @@ def execute(self) -> List[spack.spec.Spec]:
with self._mock_layout() as path_hints:
entries = by_path([self.test.pkg_name], path_hints=path_hints)
_, unqualified_name = spack.repo.partition_package_name(self.test.pkg_name)
specs = set(x.spec for x in entries[unqualified_name])
specs = set(entries[unqualified_name])
return list(specs)

@contextlib.contextmanager
@@ -104,7 +104,9 @@ def _create_executable_scripts(self, mock_executables: MockExecutables) -> List[
@property
def expected_specs(self) -> List[spack.spec.Spec]:
return [
spack.spec.Spec.from_detection(item.spec, extra_attributes=item.extra_attributes)
spack.spec.Spec.from_detection(
item.spec, external_path=self.tmpdir.name, extra_attributes=item.extra_attributes
)
for item in self.test.results
]
@@ -32,10 +32,9 @@ class OpenMpi(Package):
"""
import collections
import collections.abc
import functools
import os.path
import re
from typing import TYPE_CHECKING, Any, Callable, List, Optional, Set, Tuple, Union
from typing import TYPE_CHECKING, Any, Callable, List, Optional, Tuple, Union

import llnl.util.lang
import llnl.util.tty.color
@@ -48,6 +47,7 @@ class OpenMpi(Package):
import spack.util.crypto
import spack.variant
from spack.dependency import Dependency
from spack.directives_meta import DirectiveError, DirectiveMeta
from spack.fetch_strategy import from_kwargs
from spack.resource import Resource
from spack.version import (
@@ -80,22 +80,6 @@ class OpenMpi(Package):
"redistribute",
]

#: These are variant names used by Spack internally; packages can't use them
reserved_names = [
"arch",
"architecture",
"dev_path",
"namespace",
"operating_system",
"os",
"patches",
"platform",
"target",
]

#: Names of possible directives. This list is mostly populated using the @directive decorator.
#: Some directives leverage others and in that case are not automatically added.
directive_names = ["build_system"]

_patch_order_index = 0

@@ -155,219 +139,6 @@ def _make_when_spec(value: WhenType) -> Optional["spack.spec.Spec"]:
return spack.spec.Spec(value)


class DirectiveMeta(type):
"""Flushes the directives that were temporarily stored in the staging
area into the package.
"""

# Set of all known directives
_directive_dict_names: Set[str] = set()
_directives_to_be_executed: List[str] = []
_when_constraints_from_context: List[str] = []
_default_args: List[dict] = []

def __new__(cls, name, bases, attr_dict):
# Initialize the attribute containing the list of directives
# to be executed. Here we go reversed because we want to execute
# commands:
# 1. in the order they were defined
# 2. following the MRO
attr_dict["_directives_to_be_executed"] = []
for base in reversed(bases):
try:
directive_from_base = base._directives_to_be_executed
attr_dict["_directives_to_be_executed"].extend(directive_from_base)
except AttributeError:
# The base class didn't have the required attribute.
# Continue searching
pass

# De-duplicates directives from base classes
attr_dict["_directives_to_be_executed"] = [
x for x in llnl.util.lang.dedupe(attr_dict["_directives_to_be_executed"])
]

# Move things to be executed from module scope (where they
# are collected first) to class scope
if DirectiveMeta._directives_to_be_executed:
attr_dict["_directives_to_be_executed"].extend(
DirectiveMeta._directives_to_be_executed
)
DirectiveMeta._directives_to_be_executed = []

return super(DirectiveMeta, cls).__new__(cls, name, bases, attr_dict)

def __init__(cls, name, bases, attr_dict):
# The instance is being initialized: if it is a package we must ensure
# that the directives are called to set it up.

if "spack.pkg" in cls.__module__:
# Ensure the presence of the dictionaries associated with the directives.
# All dictionaries are defaultdicts that create lists for missing keys.
for d in DirectiveMeta._directive_dict_names:
setattr(cls, d, {})

# Lazily execute directives
for directive in cls._directives_to_be_executed:
directive(cls)

# Ignore any directives executed *within* top-level
# directives by clearing out the queue they're appended to
DirectiveMeta._directives_to_be_executed = []

super(DirectiveMeta, cls).__init__(name, bases, attr_dict)

@staticmethod
def push_to_context(when_spec):
"""Add a spec to the context constraints."""
DirectiveMeta._when_constraints_from_context.append(when_spec)

@staticmethod
def pop_from_context():
"""Pop the last constraint from the context"""
return DirectiveMeta._when_constraints_from_context.pop()

@staticmethod
def push_default_args(default_args):
"""Push default arguments"""
DirectiveMeta._default_args.append(default_args)

@staticmethod
def pop_default_args():
"""Pop default arguments"""
return DirectiveMeta._default_args.pop()

@staticmethod
def directive(dicts=None):
"""Decorator for Spack directives.

Spack directives allow you to modify a package while it is being
defined, e.g. to add version or dependency information. Directives
are one of the key pieces of Spack's package "language", which is
embedded in python.

Here's an example directive:

.. code-block:: python

@directive(dicts='versions')
version(pkg, ...):
...
This directive allows you to write:

.. code-block:: python

class Foo(Package):
version(...)

The ``@directive`` decorator handles a couple things for you:

1. Adds the class scope (pkg) as an initial parameter when
called, like a class method would. This allows you to modify
a package from within a directive, while the package is still
being defined.

2. It automatically adds a dictionary called "versions" to the
package so that you can refer to pkg.versions.

The ``(dicts='versions')`` part ensures that ALL packages in Spack
will have a ``versions`` attribute after they're constructed, and
that if no directive actually modified it, it will just be an
empty dict.

This is just a modular way to add storage attributes to the
Package class, and it's how Spack gets information from the
packages to the core.
"""
global directive_names

if isinstance(dicts, str):
dicts = (dicts,)

if not isinstance(dicts, collections.abc.Sequence):
message = "dicts arg must be list, tuple, or string. Found {0}"
raise TypeError(message.format(type(dicts)))

# Add the dictionary names if not already there
DirectiveMeta._directive_dict_names |= set(dicts)

# This decorator just returns the directive functions
def _decorator(decorated_function):
directive_names.append(decorated_function.__name__)

@functools.wraps(decorated_function)
def _wrapper(*args, **_kwargs):
# First merge default args with kwargs
kwargs = dict()
for default_args in DirectiveMeta._default_args:
kwargs.update(default_args)
kwargs.update(_kwargs)

# Inject when arguments from the context
if DirectiveMeta._when_constraints_from_context:
# Check that directives not yet supporting the when= argument
# are not used inside the context manager
if decorated_function.__name__ == "version":
msg = (
'directive "{0}" cannot be used within a "when"'
' context since it does not support a "when=" '
"argument"
)
msg = msg.format(decorated_function.__name__)
raise DirectiveError(msg)

when_constraints = [
spack.spec.Spec(x) for x in DirectiveMeta._when_constraints_from_context
]
if kwargs.get("when"):
when_constraints.append(spack.spec.Spec(kwargs["when"]))
when_spec = spack.spec.merge_abstract_anonymous_specs(*when_constraints)

kwargs["when"] = when_spec

# If any of the arguments are executors returned by a
# directive passed as an argument, don't execute them
# lazily. Instead, let the called directive handle them.
# This allows nested directive calls in packages. The
# caller can return the directive if it should be queued.
def remove_directives(arg):
directives = DirectiveMeta._directives_to_be_executed
if isinstance(arg, (list, tuple)):
# Descend into args that are lists or tuples
for a in arg:
remove_directives(a)
else:
# Remove directives args from the exec queue
remove = next((d for d in directives if d is arg), None)
if remove is not None:
directives.remove(remove)

# Nasty, but it's the best way I can think of to avoid
# side effects if directive results are passed as args
remove_directives(args)
remove_directives(list(kwargs.values()))

# A directive returns either something that is callable on a
# package or a sequence of them
result = decorated_function(*args, **kwargs)

# ...so if it is not a sequence make it so
values = result
if not isinstance(values, collections.abc.Sequence):
values = (values,)

DirectiveMeta._directives_to_be_executed.extend(values)

# wrapped function returns same result as original so
# that we can nest directives
return result

return _wrapper

return _decorator


SubmoduleCallback = Callable[["spack.package_base.PackageBase"], Union[str, List[str], bool]]
directive = DirectiveMeta.directive
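The docstring above states the directive contract: the decorated function runs while a package class body executes, returns a callable that will later receive the package class, and dicts= guarantees a storage attribute on every package. A toy sketch of that shape (note: `note` and its `notes` dict are invented for illustration, not a real Spack directive):

from spack.directives_meta import DirectiveMeta

directive = DirectiveMeta.directive

@directive(dicts="notes")
def note(text):
    # The returned executor is queued and invoked later with the package class.
    def _execute_note(pkg):
        pkg.notes[text] = True
    return _execute_note

# Inside a package.py class body, `note("built from a release tarball")` would
# queue the executor; DirectiveMeta drains the queue when the class is created,
# leaving the entry in pkg.notes.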
@@ -846,7 +617,7 @@ def format_error(msg, pkg):
msg += " @*r{{[{0}, variant '{1}']}}"
return llnl.util.tty.color.colorize(msg.format(pkg.name, name))

if name in reserved_names:
if name in spack.variant.reserved_names:

def _raise_reserved_name(pkg):
msg = "The name '%s' is reserved by Spack" % name
@@ -1110,10 +881,6 @@ def _execute_languages(pkg: "spack.package_base.PackageBase"):
return _execute_languages


class DirectiveError(spack.error.SpackError):
"""This is raised when something is wrong with a package directive."""


class DependencyError(DirectiveError):
"""This is raised when a dependency specification is invalid."""
lib/spack/spack/directives_meta.py (new file, 234 lines)
@@ -0,0 +1,234 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import collections.abc
import functools
from typing import List, Set

import llnl.util.lang

import spack.error
import spack.spec

#: Names of possible directives. This list is mostly populated using the @directive decorator.
#: Some directives leverage others and in that case are not automatically added.
directive_names = ["build_system"]


class DirectiveMeta(type):
"""Flushes the directives that were temporarily stored in the staging
area into the package.
"""

# Set of all known directives
_directive_dict_names: Set[str] = set()
_directives_to_be_executed: List[str] = []
_when_constraints_from_context: List[str] = []
_default_args: List[dict] = []

def __new__(cls, name, bases, attr_dict):
# Initialize the attribute containing the list of directives
# to be executed. Here we go reversed because we want to execute
# commands:
# 1. in the order they were defined
# 2. following the MRO
attr_dict["_directives_to_be_executed"] = []
for base in reversed(bases):
try:
directive_from_base = base._directives_to_be_executed
attr_dict["_directives_to_be_executed"].extend(directive_from_base)
except AttributeError:
# The base class didn't have the required attribute.
# Continue searching
pass

# De-duplicates directives from base classes
attr_dict["_directives_to_be_executed"] = [
x for x in llnl.util.lang.dedupe(attr_dict["_directives_to_be_executed"])
]

# Move things to be executed from module scope (where they
# are collected first) to class scope
if DirectiveMeta._directives_to_be_executed:
attr_dict["_directives_to_be_executed"].extend(
DirectiveMeta._directives_to_be_executed
)
DirectiveMeta._directives_to_be_executed = []

return super(DirectiveMeta, cls).__new__(cls, name, bases, attr_dict)

def __init__(cls, name, bases, attr_dict):
# The instance is being initialized: if it is a package we must ensure
# that the directives are called to set it up.

if "spack.pkg" in cls.__module__:
# Ensure the presence of the dictionaries associated with the directives.
# All dictionaries are defaultdicts that create lists for missing keys.
for d in DirectiveMeta._directive_dict_names:
setattr(cls, d, {})

# Lazily execute directives
for directive in cls._directives_to_be_executed:
directive(cls)

# Ignore any directives executed *within* top-level
# directives by clearing out the queue they're appended to
DirectiveMeta._directives_to_be_executed = []

super(DirectiveMeta, cls).__init__(name, bases, attr_dict)

@staticmethod
def push_to_context(when_spec):
"""Add a spec to the context constraints."""
DirectiveMeta._when_constraints_from_context.append(when_spec)

@staticmethod
def pop_from_context():
"""Pop the last constraint from the context"""
return DirectiveMeta._when_constraints_from_context.pop()

@staticmethod
def push_default_args(default_args):
"""Push default arguments"""
DirectiveMeta._default_args.append(default_args)

@staticmethod
def pop_default_args():
"""Pop default arguments"""
return DirectiveMeta._default_args.pop()

@staticmethod
def directive(dicts=None):
"""Decorator for Spack directives.

Spack directives allow you to modify a package while it is being
defined, e.g. to add version or dependency information. Directives
are one of the key pieces of Spack's package "language", which is
embedded in python.

Here's an example directive:

.. code-block:: python

@directive(dicts='versions')
version(pkg, ...):
...
This directive allows you to write:

.. code-block:: python

class Foo(Package):
version(...)

The ``@directive`` decorator handles a couple things for you:

1. Adds the class scope (pkg) as an initial parameter when
called, like a class method would. This allows you to modify
a package from within a directive, while the package is still
being defined.

2. It automatically adds a dictionary called "versions" to the
package so that you can refer to pkg.versions.

The ``(dicts='versions')`` part ensures that ALL packages in Spack
will have a ``versions`` attribute after they're constructed, and
that if no directive actually modified it, it will just be an
empty dict.

This is just a modular way to add storage attributes to the
Package class, and it's how Spack gets information from the
packages to the core.
"""
global directive_names

if isinstance(dicts, str):
dicts = (dicts,)

if not isinstance(dicts, collections.abc.Sequence):
message = "dicts arg must be list, tuple, or string. Found {0}"
raise TypeError(message.format(type(dicts)))

# Add the dictionary names if not already there
DirectiveMeta._directive_dict_names |= set(dicts)

# This decorator just returns the directive functions
def _decorator(decorated_function):
directive_names.append(decorated_function.__name__)

@functools.wraps(decorated_function)
def _wrapper(*args, **_kwargs):
# First merge default args with kwargs
kwargs = dict()
for default_args in DirectiveMeta._default_args:
kwargs.update(default_args)
kwargs.update(_kwargs)

# Inject when arguments from the context
if DirectiveMeta._when_constraints_from_context:
# Check that directives not yet supporting the when= argument
# are not used inside the context manager
if decorated_function.__name__ == "version":
msg = (
'directive "{0}" cannot be used within a "when"'
' context since it does not support a "when=" '
"argument"
)
msg = msg.format(decorated_function.__name__)
raise DirectiveError(msg)

when_constraints = [
spack.spec.Spec(x) for x in DirectiveMeta._when_constraints_from_context
]
if kwargs.get("when"):
when_constraints.append(spack.spec.Spec(kwargs["when"]))
when_spec = spack.spec.merge_abstract_anonymous_specs(*when_constraints)

kwargs["when"] = when_spec

# If any of the arguments are executors returned by a
# directive passed as an argument, don't execute them
# lazily. Instead, let the called directive handle them.
# This allows nested directive calls in packages. The
# caller can return the directive if it should be queued.
def remove_directives(arg):
directives = DirectiveMeta._directives_to_be_executed
if isinstance(arg, (list, tuple)):
# Descend into args that are lists or tuples
for a in arg:
remove_directives(a)
else:
# Remove directives args from the exec queue
remove = next((d for d in directives if d is arg), None)
if remove is not None:
directives.remove(remove)

# Nasty, but it's the best way I can think of to avoid
# side effects if directive results are passed as args
remove_directives(args)
remove_directives(list(kwargs.values()))

# A directive returns either something that is callable on a
# package or a sequence of them
result = decorated_function(*args, **kwargs)

# ...so if it is not a sequence make it so
values = result
if not isinstance(values, collections.abc.Sequence):
values = (values,)

DirectiveMeta._directives_to_be_executed.extend(values)

# wrapped function returns same result as original so
# that we can nest directives
return result

return _wrapper

return _decorator


class DirectiveError(spack.error.SpackError):
"""This is raised when something is wrong with a package directive."""
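That closes the new module: the staging queue and its drain logic move verbatim out of directives.py. The mechanism itself is a generic metaclass pattern; a minimal Spack-free sketch, for orientation:

class StagingMeta(type):
    _staged = []  # module-level staging area, filled while a class body runs

    def __new__(mcs, name, bases, attrs):
        cls = super().__new__(mcs, name, bases, attrs)
        # Drain whatever was staged during the class body into this class.
        actions, StagingMeta._staged = StagingMeta._staged, []
        for action in actions:
            action(cls)  # each staged callable receives the finished class
        return cls

def stage(action):
    StagingMeta._staged.append(action)

class Example(metaclass=StagingMeta):
    stage(lambda cls: setattr(cls, "configured", True))

assert Example.configured  # the staged callable ran at class-creation time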
@@ -14,7 +14,6 @@
from pathlib import Path

import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.symlink import readlink

import spack.config
@@ -23,13 +22,8 @@
import spack.util.spack_json as sjson
from spack.error import SpackError

# Note: Posixpath is used here as opposed to
# os.path.join due to spack.spec.Spec.format
# requiring forward slash path separators at this stage
default_projections = {
"all": posixpath.join(
"{architecture}", "{compiler.name}-{compiler.version}", "{name}-{version}-{hash}"
)
"all": "{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}"
}


@@ -152,20 +146,9 @@ def read_spec(self, path):
def spec_file_path(self, spec):
"""Gets full path to spec file"""
_check_concrete(spec)
# Attempts to convert to JSON if possible.
# Otherwise just returns the YAML.
yaml_path = os.path.join(self.metadata_path(spec), self._spec_file_name_yaml)
json_path = os.path.join(self.metadata_path(spec), self.spec_file_name)
if os.path.exists(yaml_path) and fs.can_write_to_dir(yaml_path):
self.write_spec(spec, json_path)
try:
os.remove(yaml_path)
except OSError as err:
tty.debug("Could not remove deprecated {0}".format(yaml_path))
tty.debug(err)
elif os.path.exists(yaml_path):
return yaml_path
return json_path
return yaml_path if os.path.exists(yaml_path) else json_path

def deprecated_file_path(self, deprecated_spec, deprecator_spec=None):
"""Gets full path to spec file for deprecated spec
@@ -199,17 +182,7 @@ def deprecated_file_path(self, deprecated_spec, deprecator_spec=None):
deprecated_spec.dag_hash() + "_" + self.spec_file_name,
)

if os.path.exists(yaml_path) and fs.can_write_to_dir(yaml_path):
self.write_spec(deprecated_spec, json_path)
try:
os.remove(yaml_path)
except (IOError, OSError) as err:
tty.debug("Could not remove deprecated {0}".format(yaml_path))
tty.debug(err)
elif os.path.exists(yaml_path):
return yaml_path

return json_path
return yaml_path if os.path.exists(yaml_path) else json_path

@contextmanager
def disable_upstream_check(self):
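The projection rewrite above is cosmetic: posixpath.join of three components and the literal string produce the same template, and the deleted comment explained why forward slashes were required either way. For a feel of what the template expands to, here is plain str.format standing in for Spec.format, with hypothetical values:

from types import SimpleNamespace

template = "{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}"
compiler = SimpleNamespace(name="gcc", version="12.3.0")  # hypothetical compiler

path = template.format(
    architecture="linux-ubuntu22.04-zen2",  # hypothetical arch triplet
    compiler=compiler,
    name="zlib-ng",
    version="2.1.6",
    hash="abc1234",
)
# -> "linux-ubuntu22.04-zen2/gcc-12.3.0/zlib-ng-2.1.6-abc1234"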
@@ -58,9 +58,8 @@
from spack.installer import PackageInstaller
from spack.schema.env import TOP_LEVEL_KEY
from spack.spec import Spec
from spack.spec_list import InvalidSpecConstraintError, SpecList
from spack.spec_list import SpecList
from spack.util.path import substitute_path_variables
from spack.variant import UnknownVariantError

#: environment variable used to indicate the active environment
spack_env_var = "SPACK_ENV"
@@ -1214,7 +1213,6 @@ def scope_name(self):
def include_concrete_envs(self):
"""Copy and save the included envs' specs internally"""

lockfile_meta = None
root_hash_seen = set()
concrete_hash_seen = set()
self.included_concrete_spec_data = {}
@@ -1225,37 +1223,26 @@ def include_concrete_envs(self):
raise SpackEnvironmentError(f"Unable to find env at {env_path}")

env = Environment(env_path)

with open(env.lock_path) as f:
lockfile_as_dict = env._read_lockfile(f)

# Lockfile_meta must match each env and use at least format version 5
if lockfile_meta is None:
lockfile_meta = lockfile_as_dict["_meta"]
elif lockfile_meta != lockfile_as_dict["_meta"]:
raise SpackEnvironmentError("All lockfile _meta values must match")
elif lockfile_meta["lockfile-version"] < 5:
raise SpackEnvironmentError("The lockfile format must be at version 5 or higher")
self.included_concrete_spec_data[env_path] = {"roots": [], "concrete_specs": {}}

# Copy unique root specs from env
self.included_concrete_spec_data[env_path] = {"roots": []}
for root_dict in lockfile_as_dict["roots"]:
for root_dict in env._concrete_roots_dict():
if root_dict["hash"] not in root_hash_seen:
self.included_concrete_spec_data[env_path]["roots"].append(root_dict)
root_hash_seen.add(root_dict["hash"])

# Copy unique concrete specs from env
for concrete_spec in lockfile_as_dict["concrete_specs"]:
if concrete_spec not in concrete_hash_seen:
self.included_concrete_spec_data[env_path].update(
{"concrete_specs": lockfile_as_dict["concrete_specs"]}
for dag_hash, spec_details in env._concrete_specs_dict().items():
if dag_hash not in concrete_hash_seen:
self.included_concrete_spec_data[env_path]["concrete_specs"].update(
{dag_hash: spec_details}
)
concrete_hash_seen.add(concrete_spec)
concrete_hash_seen.add(dag_hash)

if "include_concrete" in lockfile_as_dict.keys():
self.included_concrete_spec_data[env_path]["include_concrete"] = lockfile_as_dict[
"include_concrete"
]
# Copy transitive include data
transitive = env.included_concrete_spec_data
if transitive:
self.included_concrete_spec_data[env_path]["include_concrete"] = transitive

self._read_lockfile_dict(self._to_lockfile_dict())
self.write()
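include_concrete_envs() now pulls roots and concrete specs through the included environment's own helpers (added further down as _concrete_roots_dict and _concrete_specs_dict) instead of re-reading its raw lockfile, while keeping per-hash deduplication and requiring lockfile-version 5 or newer. For orientation, the slice of a lockfile this code consumes has roughly this shape (keys taken from the hunks above; hashes abbreviated and hypothetical):

lockfile = {
    "_meta": {"lockfile-version": 5},  # must match across all included envs
    "roots": [
        {"hash": "abc1234", "spec": "zlib-ng@2.1.6"}  # one entry per root spec
    ],
    "concrete_specs": {
        "abc1234": {"name": "zlib-ng"}  # abridged; a full node dict per DAG hash
    },
    "include_concrete": {},  # transitive includes, keyed by environment path
}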
@@ -1637,10 +1624,10 @@ def _concretize_separately(self, tests=False):

# Concretize any new user specs that we haven't concretized yet
args, root_specs, i = [], [], 0
for uspec, uspec_constraints in zip(self.user_specs, self.user_specs.specs_as_constraints):
for uspec in self.user_specs:
if uspec not in old_concretized_user_specs:
root_specs.append(uspec)
args.append((i, [str(x) for x in uspec_constraints], tests))
args.append((i, str(uspec), tests))
i += 1

# Ensure we don't try to bootstrap clingo in parallel
@@ -1656,7 +1643,7 @@ def _concretize_separately(self, tests=False):

# Ensure we have compilers in compilers.yaml to avoid that
# processes try to write the config file in parallel
_ = spack.compilers.get_compiler_config(spack.config.CONFIG, init_config=True)
_ = spack.compilers.all_compilers_config(spack.config.CONFIG)

# Early return if there is nothing to do
if len(args) == 0:
@@ -2173,16 +2160,23 @@ def _get_environment_specs(self, recurse_dependencies=True):

return specs

def _to_lockfile_dict(self):
"""Create a dictionary to store a lockfile for this environment."""
def _concrete_specs_dict(self):
concrete_specs = {}
for s in traverse.traverse_nodes(self.specs_by_hash.values(), key=traverse.by_dag_hash):
spec_dict = s.node_dict_with_hashes(hash=ht.dag_hash)
# Assumes no legacy formats, since this was just created.
spec_dict[ht.dag_hash.name] = s.dag_hash()
concrete_specs[s.dag_hash()] = spec_dict
return concrete_specs

def _concrete_roots_dict(self):
hash_spec_list = zip(self.concretized_order, self.concretized_user_specs)
return [{"hash": h, "spec": str(s)} for h, s in hash_spec_list]

def _to_lockfile_dict(self):
"""Create a dictionary to store a lockfile for this environment."""
concrete_specs = self._concrete_specs_dict()
root_specs = self._concrete_roots_dict()

spack_dict = {"version": spack.spack_version}
spack_commit = spack.main.get_spack_commit()
@@ -2203,7 +2197,7 @@ def _to_lockfile_dict(self):
# spack version information
"spack": spack_dict,
# users specs + hashes are the 'roots' of the environment
"roots": [{"hash": h, "spec": str(s)} for h, s in hash_spec_list],
"roots": root_specs,
# Concrete specs by hash, including dependencies
"concrete_specs": concrete_specs,
}
@@ -2513,52 +2507,11 @@ def display_specs(specs):
print(tree_string)


def _concretize_from_constraints(spec_constraints, tests=False):
# Accept only valid constraints from list and concretize spec
# Get the named spec even if out of order
root_spec = [s for s in spec_constraints if s.name]
if len(root_spec) != 1:
m = "The constraints %s are not a valid spec " % spec_constraints
m += "concretization target. all specs must have a single name "
m += "constraint for concretization."
raise InvalidSpecConstraintError(m)
spec_constraints.remove(root_spec[0])

invalid_constraints = []
while True:
# Attach all anonymous constraints to one named spec
s = root_spec[0].copy()
for c in spec_constraints:
if c not in invalid_constraints:
s.constrain(c)
try:
return s.concretized(tests=tests)
except spack.spec.InvalidDependencyError as e:
invalid_deps_string = ["^" + d for d in e.invalid_deps]
invalid_deps = [
c
for c in spec_constraints
if any(c.satisfies(invd) for invd in invalid_deps_string)
]
if len(invalid_deps) != len(invalid_deps_string):
raise e
invalid_constraints.extend(invalid_deps)
except UnknownVariantError as e:
invalid_variants = e.unknown_variants
inv_variant_constraints = [
c for c in spec_constraints if any(name in c.variants for name in invalid_variants)
]
if len(inv_variant_constraints) != len(invalid_variants):
raise e
invalid_constraints.extend(inv_variant_constraints)


def _concretize_task(packed_arguments) -> Tuple[int, Spec, float]:
index, spec_constraints, tests = packed_arguments
spec_constraints = [Spec(x) for x in spec_constraints]
index, spec_str, tests = packed_arguments
with tty.SuppressOutput(msg_enabled=False):
start = time.time()
spec = _concretize_from_constraints(spec_constraints, tests)
spec = Spec(spec_str).concretized(tests=tests)
return index, spec, time.time() - start
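The parallel-concretization plumbing is much simpler now: each task gets one spec string instead of a constraint list, so _concretize_from_constraints and its error-pruning retry loop are deleted outright. The core of the new round trip, as a sketch:

from spack.spec import Spec

def concretize_one(spec_str: str, tests: bool = False) -> Spec:
    # Equivalent to the heart of the new _concretize_task, minus the
    # timing and output suppression that wrap this call.
    return Spec(spec_str).concretized(tests=tests)

# e.g. concretize_one("hdf5+mpi") returns a fully concrete spec DAG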
@@ -54,7 +54,7 @@
import spack.version
import spack.version.git_ref_lookup
from spack.util.compression import decompressor_for
from spack.util.executable import CommandNotFoundError, which
from spack.util.executable import CommandNotFoundError, Executable, which

#: List of all fetch strategies, created by FetchStrategy metaclass.
all_strategies = []
@@ -246,33 +246,28 @@ class URLFetchStrategy(FetchStrategy):

# these are checksum types. The generic 'checksum' is deprecated for
# specific hash names, but we need it for backward compatibility
optional_attrs = list(crypto.hashes.keys()) + ["checksum"]
optional_attrs = [*crypto.hashes.keys(), "checksum"]

def __init__(self, url=None, checksum=None, **kwargs):
def __init__(self, *, url: str, checksum: Optional[str] = None, **kwargs) -> None:
super().__init__(**kwargs)

# Prefer values in kwargs to the positionals.
self.url = kwargs.get("url", url)
self.url = url
self.mirrors = kwargs.get("mirrors", [])

# digest can be set as the first argument, or from an explicit
# kwarg by the hash name.
self.digest = kwargs.get("checksum", checksum)
self.digest: Optional[str] = checksum
for h in self.optional_attrs:
if h in kwargs:
self.digest = kwargs[h]

self.expand_archive = kwargs.get("expand", True)
self.extra_options = kwargs.get("fetch_options", {})
self._curl = None

self.extension = kwargs.get("extension", None)

if not self.url:
raise ValueError("URLFetchStrategy requires a url for fetching.")
self.expand_archive: bool = kwargs.get("expand", True)
self.extra_options: dict = kwargs.get("fetch_options", {})
self._curl: Optional[Executable] = None
self.extension: Optional[str] = kwargs.get("extension", None)

@property
def curl(self):
def curl(self) -> Executable:
if not self._curl:
self._curl = web_util.require_curl()
return self._curl
@@ -348,8 +343,8 @@ def _fetch_urllib(self, url):
if os.path.lexists(save_file):
os.remove(save_file)

with open(save_file, "wb") as _open_file:
shutil.copyfileobj(response, _open_file)
with open(save_file, "wb") as f:
shutil.copyfileobj(response, f)

self._check_headers(str(response.headers))

@@ -468,7 +463,7 @@ def check(self):
"""Check the downloaded archive against a checksum digest.
No-op if this stage checks code out of a repository."""
if not self.digest:
raise NoDigestError("Attempt to check URLFetchStrategy with no digest.")
raise NoDigestError(f"Attempt to check {self.__class__.__name__} with no digest.")

verify_checksum(self.archive_file, self.digest)

@@ -479,8 +474,8 @@ def reset(self):
"""
if not self.archive_file:
raise NoArchiveFileError(
"Tried to reset URLFetchStrategy before fetching",
"Failed on reset() for URL %s" % self.url,
f"Tried to reset {self.__class__.__name__} before fetching",
f"Failed on reset() for URL{self.url}",
)

# Remove everything but the archive from the stage
@@ -493,14 +488,10 @@ def reset(self):
self.expand()

def __repr__(self):
url = self.url if self.url else "no url"
return "%s<%s>" % (self.__class__.__name__, url)
return f"{self.__class__.__name__}<{self.url}>"

def __str__(self):
if self.url:
return self.url
else:
return "[no url]"
return self.url


@fetcher
@@ -513,7 +504,7 @@ def fetch(self):

# check whether the cache file exists.
if not os.path.isfile(path):
raise NoCacheError("No cache of %s" % path)
raise NoCacheError(f"No cache of {path}")

# remove old symlink if one is there.
filename = self.stage.save_filename
@@ -523,8 +514,8 @@ def fetch(self):
# Symlink to local cached archive.
symlink(path, filename)

# Remove link if checksum fails, or subsequent fetchers
# will assume they don't need to download.
# Remove link if checksum fails, or subsequent fetchers will assume they don't need to
# download.
if self.digest:
try:
self.check()
@@ -533,12 +524,12 @@ def fetch(self):
raise

# Notify the user how we fetched.
tty.msg("Using cached archive: {0}".format(path))
tty.msg(f"Using cached archive: {path}")


class OCIRegistryFetchStrategy(URLFetchStrategy):
def __init__(self, url=None, checksum=None, **kwargs):
super().__init__(url, checksum, **kwargs)
def __init__(self, *, url: str, checksum: Optional[str] = None, **kwargs):
super().__init__(url=url, checksum=checksum, **kwargs)

self._urlopen = kwargs.get("_urlopen", spack.oci.opener.urlopen)

@@ -583,18 +574,18 @@ def __init__(self, **kwargs):
# Set a URL based on the type of fetch strategy.
self.url = kwargs.get(self.url_attr, None)
if not self.url:
raise ValueError("%s requires %s argument." % (self.__class__, self.url_attr))
raise ValueError(f"{self.__class__} requires {self.url_attr} argument.")

for attr in self.optional_attrs:
setattr(self, attr, kwargs.get(attr, None))

@_needs_stage
def check(self):
tty.debug("No checksum needed when fetching with {0}".format(self.url_attr))
tty.debug(f"No checksum needed when fetching with {self.url_attr}")

@_needs_stage
def expand(self):
tty.debug("Source fetched with %s is already expanded." % self.url_attr)
tty.debug(f"Source fetched with {self.url_attr} is already expanded.")

@_needs_stage
def archive(self, destination, *, exclude: Optional[str] = None):
@@ -614,10 +605,10 @@ def archive(self, destination, *, exclude: Optional[str] = None):
)

def __str__(self):
return "VCS: %s" % self.url
return f"VCS: {self.url}"

def __repr__(self):
return "%s<%s>" % (self.__class__, self.url)
return f"{self.__class__}<{self.url}>"


@fetcher
@@ -720,11 +711,17 @@ class GitFetchStrategy(VCSFetchStrategy):
"submodules",
"get_full_repo",
"submodules_delete",
"git_sparse_paths",
]

git_version_re = r"git version (\S+)"

def __init__(self, **kwargs):

self.commit: Optional[str] = None
self.tag: Optional[str] = None
self.branch: Optional[str] = None

# Discards the keywords in kwargs that may conflict with the next call
# to __init__
forwarded_args = copy.copy(kwargs)
@@ -735,6 +732,7 @@ def __init__(self, **kwargs):
self.submodules = kwargs.get("submodules", False)
self.submodules_delete = kwargs.get("submodules_delete", False)
self.get_full_repo = kwargs.get("get_full_repo", False)
self.git_sparse_paths = kwargs.get("git_sparse_paths", None)

@property
def git_version(self):
@@ -772,68 +770,71 @@ def git(self):

@property
def cachable(self):
return self.cache_enabled and bool(self.commit or self.tag)
return self.cache_enabled and bool(self.commit)

def source_id(self):
return self.commit or self.tag
# TODO: tree-hash would secure download cache and mirrors, commit only secures checkouts.
return self.commit

def mirror_id(self):
repo_ref = self.commit or self.tag or self.branch
if repo_ref:
if self.commit:
repo_path = urllib.parse.urlparse(self.url).path
result = os.path.sep.join(["git", repo_path, repo_ref])
result = os.path.sep.join(["git", repo_path, self.commit])
return result

def _repo_info(self):
args = ""

if self.commit:
args = " at commit {0}".format(self.commit)
args = f" at commit {self.commit}"
elif self.tag:
args = " at tag {0}".format(self.tag)
args = f" at tag {self.tag}"
elif self.branch:
args = " on branch {0}".format(self.branch)
args = f" on branch {self.branch}"

return "{0}{1}".format(self.url, args)
return f"{self.url}{args}"

@_needs_stage
def fetch(self):
if self.stage.expanded:
tty.debug("Already fetched {0}".format(self.stage.source_path))
tty.debug(f"Already fetched {self.stage.source_path}")
return

self.clone(commit=self.commit, branch=self.branch, tag=self.tag)
if self.git_sparse_paths:
self._sparse_clone_src()
else:
self._clone_src()
self.submodule_operations()

def clone(self, dest=None, commit=None, branch=None, tag=None, bare=False):
def bare_clone(self, dest: str) -> None:
"""
Clone a repository to a path.
Execute a bare clone for metadata only

This method handles cloning from git, but does not require a stage.

Arguments:
dest (str or None): The path into which the code is cloned. If None,
requires a stage and uses the stage's source path.
commit (str or None): A commit to fetch from the remote. Only one of
commit, branch, and tag may be non-None.
branch (str or None): A branch to fetch from the remote.
tag (str or None): A tag to fetch from the remote.
bare (bool): Execute a "bare" git clone (--bare option to git)
Requires a destination since bare cloning does not provide source
and shouldn't be used for staging.
"""
# Default to spack source path
dest = dest or self.stage.source_path
tty.debug("Cloning git repository: {0}".format(self._repo_info()))
tty.debug(f"Cloning git repository: {self._repo_info()}")

git = self.git
debug = spack.config.get("config:debug")

if bare:
# We don't need to worry about which commit/branch/tag is checked out
clone_args = ["clone", "--bare"]
if not debug:
clone_args.append("--quiet")
clone_args.extend([self.url, dest])
git(*clone_args)
elif commit:
# We don't need to worry about which commit/branch/tag is checked out
clone_args = ["clone", "--bare"]
if not debug:
clone_args.append("--quiet")
clone_args.extend([self.url, dest])
git(*clone_args)

def _clone_src(self) -> None:
"""Clone a repository to a path using git."""
# Default to spack source path
dest = self.stage.source_path
tty.debug(f"Cloning git repository: {self._repo_info()}")

git = self.git
debug = spack.config.get("config:debug")

if self.commit:
# Need to do a regular clone and check out everything if
# they asked for a particular commit.
clone_args = ["clone", self.url]
@@ -852,7 +853,7 @@ def clone(self, dest=None, commit=None, branch=None, tag=None, bare=False):
)

with working_dir(dest):
checkout_args = ["checkout", commit]
checkout_args = ["checkout", self.commit]
if not debug:
checkout_args.insert(1, "--quiet")
git(*checkout_args)
@@ -864,10 +865,10 @@ def clone(self, dest=None, commit=None, branch=None, tag=None, bare=False):
args.append("--quiet")

# If we want a particular branch ask for it.
if branch:
args.extend(["--branch", branch])
elif tag and self.git_version >= spack.version.Version("1.8.5.2"):
args.extend(["--branch", tag])
if self.branch:
args.extend(["--branch", self.branch])
elif self.tag and self.git_version >= spack.version.Version("1.8.5.2"):
args.extend(["--branch", self.tag])

# Try to be efficient if we're using a new enough git.
# This checks out only one branch's history
@@ -899,7 +900,7 @@ def clone(self, dest=None, commit=None, branch=None, tag=None, bare=False):
# For tags, be conservative and check them out AFTER
# cloning. Later git versions can do this with clone
# --branch, but older ones fail.
if tag and self.git_version < spack.version.Version("1.8.5.2"):
if self.tag and self.git_version < spack.version.Version("1.8.5.2"):
# pull --tags returns a "special" error code of 1 in
# older versions that we have to ignore.
# see: https://github.com/git/git/commit/19d122b
@@ -912,6 +913,79 @@ def clone(self, dest=None, commit=None, branch=None, tag=None, bare=False):
git(*pull_args, ignore_errors=1)
git(*co_args)

def _sparse_clone_src(self, **kwargs):
"""Use git's sparse checkout feature to clone portions of a git repository"""
dest = self.stage.source_path
git = self.git

if self.git_version < spack.version.Version("2.26.0"):
# technically this should be supported for 2.25, but bumping for OS issues
# see https://github.com/spack/spack/issues/45771
# code paths exist where the package is not set. Ensure some identifier for the
# package that was configured for sparse checkout exists in the error message
identifier = str(self.url)
if self.package:
identifier += f" ({self.package.name})"
tty.warn(
(
f"{identifier} is configured for git sparse-checkout "
"but the git version is too old to support sparse cloning. "
"Cloning the full repository instead."
)
)
self._clone_src()
else:
# default to depth=2 to allow for retention of some git properties
depth = kwargs.get("depth", 2)
needs_fetch = self.branch or self.tag
git_ref = self.branch or self.tag or self.commit

assert git_ref

clone_args = ["clone"]

if needs_fetch:
clone_args.extend(["--branch", git_ref])

if self.get_full_repo:
clone_args.append("--no-single-branch")
else:
clone_args.append("--single-branch")

clone_args.extend(
[f"--depth={depth}", "--no-checkout", "--filter=blob:none", self.url]
)

sparse_args = ["sparse-checkout", "set"]

if callable(self.git_sparse_paths):
sparse_args.extend(self.git_sparse_paths())
else:
sparse_args.extend([p for p in self.git_sparse_paths])

sparse_args.append("--cone")

checkout_args = ["checkout", git_ref]

if not spack.config.get("config:debug"):
clone_args.insert(1, "--quiet")
checkout_args.insert(1, "--quiet")

with temp_cwd():
git(*clone_args)
repo_name = get_single_file(".")
if self.stage:
self.stage.srcdir = repo_name
shutil.move(repo_name, dest)

with working_dir(dest):
git(*sparse_args)
git(*checkout_args)
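Spelled out, the sparse path issues three git commands: a blob-less, no-checkout clone of the requested ref, a cone-mode sparse-checkout set, and finally the real checkout. With a hypothetical repository, branch, and path list, the vectors built above come out roughly as:

clone_args = ["clone", "--quiet", "--branch", "develop", "--single-branch",
              "--depth=2", "--no-checkout", "--filter=blob:none",
              "https://example.com/repo.git"]        # URL and branch are placeholders
sparse_args = ["sparse-checkout", "set", "lib", "docs", "--cone"]  # placeholder paths
checkout_args = ["checkout", "--quiet", "develop"]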
|
||||
|
||||
def submodule_operations(self):
|
||||
dest = self.stage.source_path
|
||||
git = self.git
|
||||
|
||||
if self.submodules_delete:
|
||||
with working_dir(dest):
|
||||
for submodule_to_delete in self.submodules_delete:
|
||||
@@ -964,7 +1038,7 @@ def protocol_supports_shallow_clone(self):
|
||||
return not (self.url.startswith("http://") or self.url.startswith("/"))
|
||||
|
||||
def __str__(self):
|
||||
return "[git] {0}".format(self._repo_info())
|
||||
return f"[git] {self._repo_info()}"
|
||||
|
||||
|
||||
@fetcher
|
||||
@@ -1288,7 +1362,7 @@ def reset(self):
|
||||
shutil.move(scrubbed, source_path)
|
||||
|
||||
def __str__(self):
|
||||
return "[hg] %s" % self.url
|
||||
return f"[hg] {self.url}"
|
||||
|
||||
|
||||
@fetcher
|
||||
@@ -1297,47 +1371,16 @@ class S3FetchStrategy(URLFetchStrategy):

    url_attr = "s3"

-    def __init__(self, *args, **kwargs):
-        try:
-            super().__init__(*args, **kwargs)
-        except ValueError:
-            if not kwargs.get("url"):
-                raise ValueError("S3FetchStrategy requires a url for fetching.")
-
    @_needs_stage
    def fetch(self):
+        if not self.url.startswith("s3://"):
+            raise spack.error.FetchError(
+                f"{self.__class__.__name__} can only fetch from s3:// urls."
+            )
        if self.archive_file:
            tty.debug(f"Already downloaded {self.archive_file}")
            return

-        parsed_url = urllib.parse.urlparse(self.url)
-        if parsed_url.scheme != "s3":
-            raise spack.error.FetchError("S3FetchStrategy can only fetch from s3:// urls.")
-
-        basename = os.path.basename(parsed_url.path)
-        request = urllib.request.Request(
-            self.url, headers={"User-Agent": web_util.SPACK_USER_AGENT}
-        )
-
-        with working_dir(self.stage.path):
-            try:
-                response = web_util.urlopen(request)
-            except (TimeoutError, urllib.error.URLError) as e:
-                raise FailedDownloadError(e) from e
-
-            tty.debug(f"Fetching {self.url}")
-
-            with open(basename, "wb") as f:
-                shutil.copyfileobj(response, f)
-
-            content_type = web_util.get_header(response.headers, "Content-type")
-
-        if content_type == "text/html":
-            warn_content_type_mismatch(self.archive_file or "the archive")
-
-        if self.stage.save_filename:
-            fs.rename(os.path.join(self.stage.path, basename), self.stage.save_filename)
-
+        self._fetch_urllib(self.url)
        if not self.archive_file:
            raise FailedDownloadError(
                RuntimeError(f"Missing archive {self.archive_file} after fetching")
@@ -1350,46 +1393,17 @@ class GCSFetchStrategy(URLFetchStrategy):

    url_attr = "gs"

-    def __init__(self, *args, **kwargs):
-        try:
-            super().__init__(*args, **kwargs)
-        except ValueError:
-            if not kwargs.get("url"):
-                raise ValueError("GCSFetchStrategy requires a url for fetching.")
-
    @_needs_stage
    def fetch(self):
+        if not self.url.startswith("gs"):
+            raise spack.error.FetchError(
+                f"{self.__class__.__name__} can only fetch from gs:// urls."
+            )
        if self.archive_file:
-            tty.debug("Already downloaded {0}".format(self.archive_file))
+            tty.debug(f"Already downloaded {self.archive_file}")
            return

-        parsed_url = urllib.parse.urlparse(self.url)
-        if parsed_url.scheme != "gs":
-            raise spack.error.FetchError("GCSFetchStrategy can only fetch from gs:// urls.")
-
-        basename = os.path.basename(parsed_url.path)
-        request = urllib.request.Request(
-            self.url, headers={"User-Agent": web_util.SPACK_USER_AGENT}
-        )
-
-        with working_dir(self.stage.path):
-            try:
-                response = web_util.urlopen(request)
-            except (TimeoutError, urllib.error.URLError) as e:
-                raise FailedDownloadError(e) from e
-
-            tty.debug(f"Fetching {self.url}")
-
-            with open(basename, "wb") as f:
-                shutil.copyfileobj(response, f)
-
-            content_type = web_util.get_header(response.headers, "Content-type")
-
-        if content_type == "text/html":
-            warn_content_type_mismatch(self.archive_file or "the archive")
-
-        if self.stage.save_filename:
-            os.rename(os.path.join(self.stage.path, basename), self.stage.save_filename)
+        self._fetch_urllib(self.url)

        if not self.archive_file:
            raise FailedDownloadError(
@@ -1403,7 +1417,7 @@ class FetchAndVerifyExpandedFile(URLFetchStrategy):
    as well as after expanding it."""

    def __init__(self, url, archive_sha256: str, expanded_sha256: str):
-        super().__init__(url, archive_sha256)
+        super().__init__(url=url, checksum=archive_sha256)
        self.expanded_sha256 = expanded_sha256

    def expand(self):
@@ -1445,14 +1459,14 @@ def stable_target(fetcher):
    return False


-def from_url(url):
+def from_url(url: str) -> URLFetchStrategy:
    """Given a URL, find an appropriate fetch strategy for it.
    Currently just gives you a URLFetchStrategy that uses curl.

    TODO: make this return appropriate fetch strategies for other
    types of URLs.
    """
-    return URLFetchStrategy(url)
+    return URLFetchStrategy(url=url)


def from_kwargs(**kwargs):
@@ -1521,10 +1535,12 @@ def _check_version_attributes(fetcher, pkg, version):
def _extrapolate(pkg, version):
    """Create a fetcher from an extrapolated URL for this version."""
    try:
-        return URLFetchStrategy(pkg.url_for_version(version), fetch_options=pkg.fetch_options)
+        return URLFetchStrategy(url=pkg.url_for_version(version), fetch_options=pkg.fetch_options)
    except spack.package_base.NoURLError:
-        msg = "Can't extrapolate a URL for version %s " "because package %s defines no URLs"
-        raise ExtrapolationError(msg % (version, pkg.name))
+        raise ExtrapolationError(
+            f"Can't extrapolate a URL for version {version} because "
+            f"package {pkg.name} defines no URLs"
+        )
@@ -1541,8 +1557,11 @@ def _from_merged_attrs(fetcher, pkg, version):
    attrs["fetch_options"] = pkg.fetch_options
    attrs.update(pkg.versions[version])

-    if fetcher.url_attr == "git" and hasattr(pkg, "submodules"):
-        attrs.setdefault("submodules", pkg.submodules)
+    if fetcher.url_attr == "git":
+        pkg_attr_list = ["submodules", "git_sparse_paths"]
+        for pkg_attr in pkg_attr_list:
+            if hasattr(pkg, pkg_attr):
+                attrs.setdefault(pkg_attr, getattr(pkg, pkg_attr))

    return fetcher(**attrs)

@@ -1637,11 +1656,9 @@ def for_package_version(pkg, version=None):
    raise InvalidArgsError(pkg, version, **args)


-def from_url_scheme(url, *args, **kwargs):
+def from_url_scheme(url: str, **kwargs) -> FetchStrategy:
    """Finds a suitable FetchStrategy by matching its url_attr with the scheme
    in the given url."""
-    url = kwargs.get("url", url)
    parsed_url = urllib.parse.urlparse(url, scheme="file")

    scheme_mapping = kwargs.get("scheme_mapping") or {
@@ -1658,11 +1675,9 @@ def from_url_scheme(url, *args, **kwargs):
    for fetcher in all_strategies:
        url_attr = getattr(fetcher, "url_attr", None)
        if url_attr and url_attr == scheme:
-            return fetcher(url, *args, **kwargs)
+            return fetcher(url=url, **kwargs)

-    raise ValueError(
-        'No FetchStrategy found for url with scheme: "{SCHEME}"'.format(SCHEME=parsed_url.scheme)
-    )
+    raise ValueError(f'No FetchStrategy found for url with scheme: "{parsed_url.scheme}"')


def from_list_url(pkg):
@@ -1687,7 +1702,9 @@ def from_list_url(pkg):
            )

            # construct a fetcher
-            return URLFetchStrategy(url_from_list, checksum, fetch_options=pkg.fetch_options)
+            return URLFetchStrategy(
+                url=url_from_list, checksum=checksum, fetch_options=pkg.fetch_options
+            )
        except KeyError as e:
            tty.debug(e)
            tty.msg("Cannot find version %s in url_list" % pkg.version)
@@ -1715,10 +1732,10 @@ def store(self, fetcher, relative_dest):
        mkdirp(os.path.dirname(dst))
        fetcher.archive(dst)

-    def fetcher(self, target_path, digest, **kwargs):
+    def fetcher(self, target_path: str, digest: Optional[str], **kwargs) -> CacheURLFetchStrategy:
        path = os.path.join(self.root, target_path)
        url = url_util.path_to_file_url(path)
-        return CacheURLFetchStrategy(url, digest, **kwargs)
+        return CacheURLFetchStrategy(url=url, checksum=digest, **kwargs)

    def destroy(self):
        shutil.rmtree(self.root, ignore_errors=True)
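Across the hunks above, fetch strategies are now constructed with keyword arguments (`url=`, `checksum=`) instead of positionals. A hedged sketch of the call-site change, using an illustrative URL and digest:

    import spack.fetch_strategy as fs

    # Previously: fs.URLFetchStrategy("https://example.com/foo-1.0.tar.gz", "abc123...")
    fetcher = fs.URLFetchStrategy(
        url="https://example.com/foo-1.0.tar.gz",  # illustrative URL
        checksum="abc123...",                      # illustrative sha256 digest
    )

This also lets `from_url_scheme` drop its `*args` passthrough and forward `**kwargs` only.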
@@ -20,6 +20,7 @@
    systems (e.g. modules, lmod, etc.) or to add other custom
    features.
"""
+import importlib

from llnl.util.lang import ensure_last, list_modules

@@ -46,11 +47,7 @@ def _populate_hooks(cls):

        for name in relative_names:
            module_name = __name__ + "." + name
-            # When importing a module from a package, __import__('A.B', ...)
-            # returns package A when 'fromlist' is empty. If fromlist is not
-            # empty it returns the submodule B instead
-            # See: https://stackoverflow.com/a/2725668/771663
-            module_obj = __import__(module_name, fromlist=[None])
+            module_obj = importlib.import_module(module_name)
            cls._hooks.append((module_name, module_obj))

    @property

@@ -24,5 +24,6 @@ def post_install(spec, explicit):
    # Push the package to all autopush mirrors
    for mirror in spack.mirror.MirrorCollection(binary=True, autopush=True).values():
        signing_key = bindist.select_signing_key() if mirror.signed else None
-        bindist.push_or_raise([spec], out_url=mirror.push_url, signing_key=signing_key, force=True)
+        with bindist.make_uploader(mirror=mirror, force=True, signing_key=signing_key) as uploader:
+            uploader.push_or_raise([spec])
        tty.msg(f"{spec.name}: Pushed to build cache: '{mirror.name}'")
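The hook-loader hunk replaces the `__import__(name, fromlist=[None])` trick with `importlib.import_module`, which always returns the named submodule. A quick standard-library illustration of the difference:

    import importlib

    top = __import__("json.decoder")               # returns the top-level package
    print(top.__name__)                            # -> "json"

    mod = importlib.import_module("json.decoder")  # returns the submodule itself
    print(mod.__name__)                            # -> "json.decoder"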
@@ -451,7 +451,7 @@ def _process_external_package(pkg: "spack.package_base.PackageBase", explicit: b

    # Add to the DB
    tty.debug(f"{pre} registering into DB")
-    spack.store.STORE.db.add(spec, None, explicit=explicit)
+    spack.store.STORE.db.add(spec, explicit=explicit)


def _process_binary_cache_tarball(
@@ -488,13 +488,12 @@ def _process_binary_cache_tarball(

    with timer.measure("install"), spack.util.path.filter_padding():
        binary_distribution.extract_tarball(pkg.spec, download_result, force=False, timer=timer)

        pkg.windows_establish_runtime_linkage()

        if hasattr(pkg, "_post_buildcache_install_hook"):
            pkg._post_buildcache_install_hook()

        pkg.installed_from_binary_cache = True
-        spack.store.STORE.db.add(pkg.spec, spack.store.STORE.layout, explicit=explicit)
+        spack.store.STORE.db.add(pkg.spec, explicit=explicit)
        return True


@@ -1611,9 +1610,7 @@ def _add_tasks(self, request: BuildRequest, all_deps):

    def _add_compiler_package_to_config(self, pkg: "spack.package_base.PackageBase") -> None:
        compiler_search_prefix = getattr(pkg, "compiler_search_prefix", pkg.spec.prefix)
-        spack.compilers.add_compilers_to_config(
-            spack.compilers.find_compilers([compiler_search_prefix])
-        )
+        spack.compilers.find_compilers([compiler_search_prefix])

    def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
        """
@@ -1671,7 +1668,7 @@ def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
        )
        # Note: PARENT of the build process adds the new package to
        # the database, so that we don't need to re-read from file.
-        spack.store.STORE.db.add(pkg.spec, spack.store.STORE.layout, explicit=explicit)
+        spack.store.STORE.db.add(pkg.spec, explicit=explicit)

        # If a compiler, ensure it is added to the configuration
        if task.compiler:
@@ -21,6 +21,7 @@
from typing import List, Optional, Union

import llnl.url
+import llnl.util.symlink
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp

@@ -30,6 +31,7 @@
import spack.fetch_strategy
import spack.mirror
+import spack.oci.image
import spack.repo
import spack.spec
import spack.util.path
import spack.util.spack_json as sjson
@@ -426,51 +428,74 @@ def _determine_extension(fetcher):
    return ext


-class MirrorReference:
-    """A ``MirrorReference`` stores the relative paths where you can store a
-    package/resource in a mirror directory.
-
-    The appropriate storage location is given by ``storage_path``. The
-    ``cosmetic_path`` property provides a reference that a human could generate
-    themselves based on reading the details of the package.
-
-    A user can iterate over a ``MirrorReference`` object to get all the
-    possible names that might be used to refer to the resource in a mirror;
-    this includes names generated by previous naming schemes that are no-longer
-    reported by ``storage_path`` or ``cosmetic_path``.
-    """
-
-    def __init__(self, cosmetic_path, global_path=None):
-        self.global_path = global_path
-        self.cosmetic_path = cosmetic_path
-
-    @property
-    def storage_path(self):
-        if self.global_path:
-            return self.global_path
-        else:
-            return self.cosmetic_path
+class MirrorLayout:
+    """A ``MirrorLayout`` object describes the relative path of a mirror entry."""
+
+    def __init__(self, path: str) -> None:
+        self.path = path

    def __iter__(self):
-        if self.global_path:
-            yield self.global_path
-        yield self.cosmetic_path
+        """Yield all paths including aliases where the resource can be found."""
+        yield self.path
+
+    def make_alias(self, root: str) -> None:
+        """Make the entry ``root / self.path`` available under a human readable alias"""
+        pass


-class OCIImageLayout:
-    """Follow the OCI Image Layout Specification to archive blobs
-
-    Paths are of the form `blobs/<algorithm>/<digest>`
-    """
+class DefaultLayout(MirrorLayout):
+    def __init__(self, alias_path: str, digest_path: Optional[str] = None) -> None:
+        # When we have a digest, it is used as the primary storage location. If not, then we use
+        # the human-readable alias. In case of mirrors of a VCS checkout, we currently do not have
+        # a digest, that's why an alias is required and a digest optional.
+        super().__init__(path=digest_path or alias_path)
+        self.alias = alias_path
+        self.digest_path = digest_path
+
+    def make_alias(self, root: str) -> None:
+        """Symlink a human readable path in our mirror to the actual storage location."""
+        # We already use the human-readable path as the main storage location.
+        if not self.digest_path:
+            return
+
+        alias, digest = os.path.join(root, self.alias), os.path.join(root, self.digest_path)
+
+        alias_dir = os.path.dirname(alias)
+        relative_dst = os.path.relpath(digest, start=alias_dir)
+
+        mkdirp(alias_dir)
+        tmp = f"{alias}.tmp"
+        llnl.util.symlink.symlink(relative_dst, tmp)
+
+        try:
+            os.rename(tmp, alias)
+        except OSError:
+            # Clean up the temporary if possible
+            try:
+                os.unlink(tmp)
+            except OSError:
+                pass
+            raise
+
+    def __iter__(self):
+        if self.digest_path:
+            yield self.digest_path
+        yield self.alias
+
+
+class OCILayout(MirrorLayout):
+    """Follow the OCI Image Layout Specification to archive blobs where paths are of the form
+    ``blobs/<algorithm>/<digest>``"""

    def __init__(self, digest: spack.oci.image.Digest) -> None:
-        self.storage_path = os.path.join("blobs", digest.algorithm, digest.digest)
-
-    def __iter__(self):
-        yield self.storage_path
+        super().__init__(os.path.join("blobs", digest.algorithm, digest.digest))


-def mirror_archive_paths(fetcher, per_package_ref, spec=None):
+def default_mirror_layout(
+    fetcher: "spack.fetch_strategy.FetchStrategy",
+    per_package_ref: str,
+    spec: Optional["spack.spec.Spec"] = None,
+) -> MirrorLayout:
    """Returns a ``MirrorReference`` object which keeps track of the relative
    storage path of the resource associated with the specified ``fetcher``."""
    ext = None
@@ -494,7 +519,7 @@ def mirror_archive_paths(fetcher, per_package_ref, spec=None):
    if global_ref and ext:
        global_ref += ".%s" % ext

-    return MirrorReference(per_package_ref, global_ref)
+    return DefaultLayout(per_package_ref, global_ref)


def get_all_versions(specs):
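Summarizing the refactor above: `DefaultLayout` stores content under the digest path when one is known and keeps the human-readable name as a symlinked alias, while `OCILayout` derives its single path from the blob digest. A sketch of the resulting behavior, with illustrative paths:

    layout = DefaultLayout(
        alias_path="zlib/zlib-1.3.tar.gz",                    # human-readable alias
        digest_path="_source-cache/archive/ab/abc123.tar.gz", # primary storage (illustrative)
    )
    assert layout.path == layout.digest_path
    assert list(layout) == [layout.digest_path, layout.alias]

    # Creates mirror_root/zlib/zlib-1.3.tar.gz as a relative symlink to the
    # digest path, via a temporary file and an atomic os.rename().
    layout.make_alias("/path/to/mirror-root")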
@@ -25,14 +25,11 @@
so package authors should use their judgement.
"""
import functools
-import inspect
from contextlib import contextmanager

-from llnl.util.lang import caller_locals
-
-import spack.directives
+import spack.directives_meta
import spack.error
-from spack.spec import Spec
+import spack.spec


class MultiMethodMeta(type):
@@ -135,7 +132,7 @@ def __call__(self, package_or_builder_self, *args, **kwargs):
        # its superclasses for successive calls. We don't have that
        # information within `SpecMultiMethod`, because it is not
        # associated with the package class.
-        for cls in inspect.getmro(package_or_builder_self.__class__)[1:]:
+        for cls in package_or_builder_self.__class__.__mro__[1:]:
            superself = cls.__dict__.get(self.__name__, None)

            if isinstance(superself, SpecMultiMethod):
@@ -165,9 +162,9 @@ def __init__(self, condition):
            condition (str): condition to be met
        """
        if isinstance(condition, bool):
-            self.spec = Spec() if condition else None
+            self.spec = spack.spec.Spec() if condition else None
        else:
-            self.spec = Spec(condition)
+            self.spec = spack.spec.Spec(condition)

    def __call__(self, method):
        """This annotation lets packages declare multiple versions of
@@ -229,11 +226,9 @@ def install(self, prefix):
        platform-specific versions. There's not much we can do to get
        around this because of the way decorators work.
        """
-        # In Python 2, Get the first definition of the method in the
-        # calling scope by looking at the caller's locals. In Python 3,
-        # we handle this using MultiMethodMeta.__prepare__.
-        if MultiMethodMeta._locals is None:
-            MultiMethodMeta._locals = caller_locals()
+        assert (
+            MultiMethodMeta._locals is not None
+        ), "cannot use multimethod, missing MultiMethodMeta metaclass?"

        # Create a multimethod with this name if there is not one already
        original_method = MultiMethodMeta._locals.get(method.__name__)
@@ -266,17 +261,17 @@ def __enter__(self):
        and add their constraint to whatever may be already present in the directive
        `when=` argument.
        """
-        spack.directives.DirectiveMeta.push_to_context(str(self.spec))
+        spack.directives_meta.DirectiveMeta.push_to_context(str(self.spec))

    def __exit__(self, exc_type, exc_val, exc_tb):
-        spack.directives.DirectiveMeta.pop_from_context()
+        spack.directives_meta.DirectiveMeta.pop_from_context()


@contextmanager
def default_args(**kwargs):
-    spack.directives.DirectiveMeta.push_default_args(kwargs)
+    spack.directives_meta.DirectiveMeta.push_default_args(kwargs)
    yield
-    spack.directives.DirectiveMeta.pop_default_args()
+    spack.directives_meta.DirectiveMeta.pop_default_args()


class MultiMethodError(spack.error.SpackError):
|
||||
) -> spack.stage.Stage:
|
||||
_urlopen = _urlopen or spack.oci.opener.urlopen
|
||||
fetch_strategy = spack.fetch_strategy.OCIRegistryFetchStrategy(
|
||||
url, checksum=digest.digest, _urlopen=_urlopen
|
||||
url=url, checksum=digest.digest, _urlopen=_urlopen
|
||||
)
|
||||
# Use blobs/<alg>/<encoded> as the cache path, which follows
|
||||
# the OCI Image Layout Specification. What's missing though,
|
||||
# is the `oci-layout` and `index.json` files, which are
|
||||
# required by the spec.
|
||||
return spack.stage.Stage(
|
||||
fetch_strategy,
|
||||
mirror_paths=spack.mirror.OCIImageLayout(digest),
|
||||
name=digest.digest,
|
||||
keep=keep,
|
||||
fetch_strategy, mirror_paths=spack.mirror.OCILayout(digest), name=digest.digest, keep=keep
|
||||
)
|
||||
|
@@ -104,6 +104,7 @@
|
||||
from spack.spec import InvalidSpecDetected, Spec
|
||||
from spack.util.cpus import determine_number_of_jobs
|
||||
from spack.util.executable import *
|
||||
from spack.util.filesystem import file_command, fix_darwin_install_name, mime_type
|
||||
from spack.variant import (
|
||||
any_combination_of,
|
||||
auto_or_any_combination_of,
|
||||
|
@@ -15,7 +15,7 @@
import functools
import glob
import hashlib
-import inspect
+import importlib
import io
import os
import re
@@ -116,11 +116,10 @@ def preferred_version(pkg: "PackageBase"):
    Arguments:
        pkg: The package whose versions are to be assessed.
    """
-    # Here we sort first on the fact that a version is marked
-    # as preferred in the package, then on the fact that the
-    # version is not develop, then lexicographically
-    key_fn = lambda v: (pkg.versions[v].get("preferred", False), not v.isdevelop(), v)
-    return max(pkg.versions, key=key_fn)
+    from spack.solver.asp import concretization_version_order
+
+    version, _ = max(pkg.versions.items(), key=concretization_version_order)
+    return version


class WindowsRPath:
@@ -245,10 +244,7 @@ def determine_spec_details(cls, prefix, objs_in_prefix):
                if version_str:
                    objs_by_version[version_str].append(obj)
            except Exception as e:
-                msg = (
-                    "An error occurred when trying to detect " 'the version of "{0}" [{1}]'
-                )
-                tty.debug(msg.format(obj, str(e)))
+                tty.debug(f"Cannot detect the version of '{obj}' [{str(e)}]")

        specs = []
        for version_str, objs in objs_by_version.items():
@@ -261,27 +257,23 @@ def determine_spec_details(cls, prefix, objs_in_prefix):
                if isinstance(variant, str):
                    variant = (variant, {})
                variant_str, extra_attributes = variant
-                spec_str = "{0}@{1} {2}".format(cls.name, version_str, variant_str)
+                spec_str = f"{cls.name}@{version_str} {variant_str}"

                # Pop a few reserved keys from extra attributes, since
                # they have a different semantics
                external_path = extra_attributes.pop("prefix", None)
                external_modules = extra_attributes.pop("modules", None)
                try:
-                    spec = spack.spec.Spec(
+                    spec = spack.spec.Spec.from_detection(
                        spec_str,
                        external_path=external_path,
                        external_modules=external_modules,
+                        extra_attributes=extra_attributes,
                    )
                except Exception as e:
-                    msg = 'Parsing failed [spec_str="{0}", error={1}]'
-                    tty.debug(msg.format(spec_str, str(e)))
+                    tty.debug(f'Parsing failed [spec_str="{spec_str}", error={str(e)}]')
                else:
-                    specs.append(
-                        spack.spec.Spec.from_detection(
-                            spec, extra_attributes=extra_attributes
-                        )
-                    )
+                    specs.append(spec)

        return sorted(specs)

@@ -740,7 +732,7 @@ def __init__(self, spec):
            raise ValueError(msg.format(self))

        # init internal variables
-        self._stage = None
+        self._stage: Optional[StageComposite] = None
        self._fetcher = None
        self._tester: Optional["PackageTest"] = None

@@ -868,7 +860,7 @@ def module(cls):
        We use this to add variables to package modules. This makes
        install() methods easier to write (e.g., can call configure())
        """
-        return __import__(cls.__module__, fromlist=[cls.__name__])
+        return importlib.import_module(cls.__module__)

    @classproperty
    def namespace(cls):
@@ -884,7 +876,7 @@ def fullname(cls):
    def fullnames(cls):
        """Fullnames for this package and any packages from which it inherits."""
        fullnames = []
-        for cls in inspect.getmro(cls):
+        for cls in cls.__mro__:
            namespace = getattr(cls, "namespace", None)
            if namespace:
                fullnames.append("%s.%s" % (namespace, cls.name))
@@ -1098,9 +1090,10 @@ def _make_resource_stage(self, root_stage, resource):
            root=root_stage,
            resource=resource,
            name=self._resource_stage(resource),
-            mirror_paths=spack.mirror.mirror_archive_paths(
+            mirror_paths=spack.mirror.default_mirror_layout(
                resource.fetcher, os.path.join(self.name, pretty_resource_name)
            ),
+            mirrors=spack.mirror.MirrorCollection(source=True).values(),
            path=self.path,
        )

@@ -1112,7 +1105,7 @@ def _make_root_stage(self, fetcher):
        # Construct a mirror path (TODO: get this out of package.py)
        format_string = "{name}-{version}"
        pretty_name = self.spec.format_path(format_string)
-        mirror_paths = spack.mirror.mirror_archive_paths(
+        mirror_paths = spack.mirror.default_mirror_layout(
            fetcher, os.path.join(self.name, pretty_name), self.spec
        )
        # Construct a path where the stage should build..
@@ -1121,6 +1114,7 @@ def _make_root_stage(self, fetcher):
        stage = Stage(
            fetcher,
            mirror_paths=mirror_paths,
+            mirrors=spack.mirror.MirrorCollection(source=True).values(),
            name=stage_name,
            path=self.path,
            search_fn=self._download_search,
@@ -1177,7 +1171,7 @@ def stage(self):
        return self._stage

    @stage.setter
-    def stage(self, stage):
+    def stage(self, stage: StageComposite):
        """Allow a stage object to be set to override the default."""
        self._stage = stage

@@ -1471,6 +1465,7 @@ def do_fetch(self, mirror_only=False):
            checksum
            and (self.version not in self.versions)
+            and (not isinstance(self.version, GitVersion))
            and ("dev_path" not in self.spec.variants)
        ):
            tty.warn(
                "There is no checksum on file to fetch %s safely."
@@ -1747,7 +1742,7 @@ def _has_make_target(self, target):
            bool: True if 'target' is found, else False
        """
        # Prevent altering LC_ALL for 'make' outside this function
-        make = copy.deepcopy(inspect.getmodule(self).make)
+        make = copy.deepcopy(self.module.make)

        # Use English locale for missing target message comparison
        make.add_default_env("LC_ALL", "C")
@@ -1797,7 +1792,7 @@ def _if_make_target_execute(self, target, *args, **kwargs):
        """
        if self._has_make_target(target):
            # Execute target
-            inspect.getmodule(self).make(target, *args, **kwargs)
+            self.module.make(target, *args, **kwargs)

    def _has_ninja_target(self, target):
        """Checks to see if 'target' is a valid target in a Ninja build script.
@@ -1808,7 +1803,7 @@ def _has_ninja_target(self, target):
        Returns:
            bool: True if 'target' is found, else False
        """
-        ninja = inspect.getmodule(self).ninja
+        ninja = self.module.ninja

        # Check if we have a Ninja build script
        if not os.path.exists("build.ninja"):
@@ -1837,7 +1832,7 @@ def _if_ninja_target_execute(self, target, *args, **kwargs):
        """
        if self._has_ninja_target(target):
            # Execute target
-            inspect.getmodule(self).ninja(target, *args, **kwargs)
+            self.module.ninja(target, *args, **kwargs)

    def _get_needed_resources(self):
        # We use intersects here cause it would also work if self.spec is abstract
|
||||
import stat
|
||||
import warnings
|
||||
|
||||
import spack.config
|
||||
import spack.error
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
from spack.config import ConfigError
|
||||
from spack.util.path import canonicalize_path
|
||||
from spack.version import Version
|
||||
|
||||
_lesser_spec_types = {"compiler": spack.spec.CompilerSpec, "version": Version}
|
||||
@@ -154,44 +155,6 @@ def preferred_variants(cls, pkg_name):
|
||||
)
|
||||
|
||||
|
||||
def spec_externals(spec):
|
||||
"""Return a list of external specs (w/external directory path filled in),
|
||||
one for each known external installation.
|
||||
"""
|
||||
# break circular import.
|
||||
from spack.util.module_cmd import path_from_modules # noqa: F401
|
||||
|
||||
def _package(maybe_abstract_spec):
|
||||
pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
|
||||
return pkg_cls(maybe_abstract_spec)
|
||||
|
||||
allpkgs = spack.config.get("packages")
|
||||
names = set([spec.name])
|
||||
names |= set(vspec.name for vspec in _package(spec).virtuals_provided)
|
||||
|
||||
external_specs = []
|
||||
for name in names:
|
||||
pkg_config = allpkgs.get(name, {})
|
||||
pkg_externals = pkg_config.get("externals", [])
|
||||
for entry in pkg_externals:
|
||||
spec_str = entry["spec"]
|
||||
external_path = entry.get("prefix", None)
|
||||
if external_path:
|
||||
external_path = canonicalize_path(external_path)
|
||||
external_modules = entry.get("modules", None)
|
||||
external_spec = spack.spec.Spec.from_detection(
|
||||
spack.spec.Spec(
|
||||
spec_str, external_path=external_path, external_modules=external_modules
|
||||
),
|
||||
extra_attributes=entry.get("extra_attributes", {}),
|
||||
)
|
||||
if external_spec.intersects(spec):
|
||||
external_specs.append(external_spec)
|
||||
|
||||
# Defensively copy returned specs
|
||||
return [s.copy() for s in external_specs]
|
||||
|
||||
|
||||
def is_spec_buildable(spec):
|
||||
"""Return true if the spec is configured as buildable"""
|
||||
allpkgs = spack.config.get("packages")
|
||||
|
@@ -4,7 +4,6 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import hashlib
|
||||
import inspect
|
||||
import os
|
||||
import os.path
|
||||
import pathlib
|
||||
@@ -185,8 +184,8 @@ def __init__(
|
||||
# search mro to look for the file
|
||||
abs_path: Optional[str] = None
|
||||
# At different times we call FilePatch on instances and classes
|
||||
pkg_cls = pkg if inspect.isclass(pkg) else pkg.__class__
|
||||
for cls in inspect.getmro(pkg_cls): # type: ignore
|
||||
pkg_cls = pkg if isinstance(pkg, type) else pkg.__class__
|
||||
for cls in pkg_cls.__mro__: # type: ignore
|
||||
if not hasattr(cls, "module"):
|
||||
# We've gone too far up the MRO
|
||||
break
|
||||
@@ -319,18 +318,19 @@ def stage(self) -> "spack.stage.Stage":
|
||||
self.url, archive_sha256=self.archive_sha256, expanded_sha256=self.sha256
|
||||
)
|
||||
else:
|
||||
fetcher = fs.URLFetchStrategy(self.url, sha256=self.sha256, expand=False)
|
||||
fetcher = fs.URLFetchStrategy(url=self.url, sha256=self.sha256, expand=False)
|
||||
|
||||
# The same package can have multiple patches with the same name but
|
||||
# with different contents, therefore apply a subset of the hash.
|
||||
name = "{0}-{1}".format(os.path.basename(self.url), fetch_digest[:7])
|
||||
|
||||
per_package_ref = os.path.join(self.owner.split(".")[-1], name)
|
||||
mirror_ref = spack.mirror.mirror_archive_paths(fetcher, per_package_ref)
|
||||
mirror_ref = spack.mirror.default_mirror_layout(fetcher, per_package_ref)
|
||||
self._stage = spack.stage.Stage(
|
||||
fetcher,
|
||||
name=f"{spack.stage.stage_prefix}patch-{fetch_digest}",
|
||||
mirror_paths=mirror_ref,
|
||||
mirrors=spack.mirror.MirrorCollection(source=True).values(),
|
||||
)
|
||||
return self._stage
|
||||
|
||||
|
@@ -4,7 +4,7 @@
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import contextlib
|
||||
|
||||
from ._functions import _host, by_name, platforms, prevent_cray_detection, reset
|
||||
from ._functions import _host, by_name, platforms, reset
|
||||
from ._platform import Platform
|
||||
from .darwin import Darwin
|
||||
from .freebsd import FreeBSD
|
||||
@@ -23,7 +23,6 @@
|
||||
"host",
|
||||
"by_name",
|
||||
"reset",
|
||||
"prevent_cray_detection",
|
||||
]
|
||||
|
||||
#: The "real" platform of the host running Spack. This should not be changed
|
||||
|
@@ -2,12 +2,8 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import contextlib
|
||||
|
||||
import llnl.util.lang
|
||||
|
||||
import spack.util.environment
|
||||
|
||||
from .darwin import Darwin
|
||||
from .freebsd import FreeBSD
|
||||
from .linux import Linux
|
||||
@@ -57,14 +53,3 @@ def by_name(name):
|
||||
"""
|
||||
platform_cls = cls_by_name(name)
|
||||
return platform_cls() if platform_cls else None
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def prevent_cray_detection():
|
||||
"""Context manager that prevents the detection of the Cray platform"""
|
||||
reset()
|
||||
try:
|
||||
with spack.util.environment.set_env(MODULEPATH=""):
|
||||
yield
|
||||
finally:
|
||||
reset()
|
||||
|
@@ -12,7 +12,6 @@
import macholib.mach_o
import macholib.MachO

-import llnl.util.filesystem as fs
import llnl.util.lang
import llnl.util.tty as tty
from llnl.util.lang import memoized
@@ -25,6 +24,7 @@
import spack.store
import spack.util.elf as elf
import spack.util.executable as executable
+import spack.util.filesystem as ssys
import spack.util.path

from .relocate_text import BinaryFilePrefixReplacer, TextFilePrefixReplacer
@@ -664,7 +664,7 @@ def is_binary(filename):
    Returns:
        True or False
    """
-    m_type, _ = fs.mime_type(filename)
+    m_type, _ = ssys.mime_type(filename)

    msg = "[{0}] -> ".format(filename)
    if m_type == "application":
@@ -692,7 +692,7 @@ def fixup_macos_rpath(root, filename):
        True if fixups were applied, else False
    """
    abspath = os.path.join(root, filename)
-    if fs.mime_type(abspath) != ("application", "x-mach-binary"):
+    if ssys.mime_type(abspath) != ("application", "x-mach-binary"):
        return False

    # Get Mach-O header commands
|
||||
|
||||
def __getattr__(self, name):
|
||||
"""Getattr lazily loads modules if they're not already loaded."""
|
||||
submodule = self.__package__ + "." + name
|
||||
submodule = f"{self.__package__}.{name}"
|
||||
try:
|
||||
setattr(self, name, __import__(submodule))
|
||||
setattr(self, name, importlib.import_module(submodule))
|
||||
except ImportError:
|
||||
msg = "'{0}' object has no attribute {1}"
|
||||
raise AttributeError(msg.format(type(self), name))
|
||||
@@ -1281,7 +1281,7 @@ def get_pkg_class(self, pkg_name: str) -> Type["spack.package_base.PackageBase"]
|
||||
raise RepoError(msg) from e
|
||||
|
||||
cls = getattr(module, class_name)
|
||||
if not inspect.isclass(cls):
|
||||
if not isinstance(cls, type):
|
||||
tty.die(f"{pkg_name}.{class_name} is not a class")
|
||||
|
||||
# Clear any prior changes to class attributes in case the class was loaded from the
|
||||
|
@@ -116,7 +116,7 @@ def rewire_node(spec, explicit):
|
||||
# spec being added to look for mismatches)
|
||||
spack.store.STORE.layout.write_spec(spec, spack.store.STORE.layout.spec_file_path(spec))
|
||||
# add to database, not sure about explicit
|
||||
spack.store.STORE.db.add(spec, spack.store.STORE.layout, explicit=explicit)
|
||||
spack.store.STORE.db.add(spec, explicit=explicit)
|
||||
|
||||
# run post install hooks
|
||||
spack.hooks.post_install(spec, explicit)
|
||||
|
@@ -3,22 +3,28 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""This module contains jsonschema files for all of Spack's YAML formats."""
|
||||
import typing
|
||||
import warnings
|
||||
|
||||
import llnl.util.lang
|
||||
|
||||
|
||||
class DeprecationMessage(typing.NamedTuple):
|
||||
message: str
|
||||
error: bool
|
||||
|
||||
|
||||
# jsonschema is imported lazily as it is heavy to import
|
||||
# and increases the start-up time
|
||||
def _make_validator():
|
||||
import jsonschema
|
||||
|
||||
import spack.parser
|
||||
|
||||
def _validate_spec(validator, is_spec, instance, schema):
|
||||
"""Check if the attributes on instance are valid specs."""
|
||||
import jsonschema
|
||||
|
||||
import spack.parser
|
||||
|
||||
if not validator.is_type(instance, "object"):
|
||||
return
|
||||
|
||||
@@ -32,27 +38,31 @@ def _deprecated_properties(validator, deprecated, instance, schema):
|
||||
if not (validator.is_type(instance, "object") or validator.is_type(instance, "array")):
|
||||
return
|
||||
|
||||
# Get a list of the deprecated properties, return if there is none
|
||||
deprecated_properties = [x for x in instance if x in deprecated["properties"]]
|
||||
if not deprecated_properties:
|
||||
if not deprecated:
|
||||
return
|
||||
|
||||
# Retrieve the template message
|
||||
msg_str_or_func = deprecated["message"]
|
||||
if isinstance(msg_str_or_func, str):
|
||||
msg = msg_str_or_func.format(properties=deprecated_properties)
|
||||
else:
|
||||
msg = msg_str_or_func(instance, deprecated_properties)
|
||||
if msg is None:
|
||||
return
|
||||
deprecations = {
|
||||
name: DeprecationMessage(message=x["message"], error=x["error"])
|
||||
for x in deprecated
|
||||
for name in x["names"]
|
||||
}
|
||||
|
||||
is_error = deprecated["error"]
|
||||
if not is_error:
|
||||
warnings.warn(msg)
|
||||
else:
|
||||
import jsonschema
|
||||
# Get a list of the deprecated properties, return if there is none
|
||||
issues = [entry for entry in instance if entry in deprecations]
|
||||
if not issues:
|
||||
return
|
||||
|
||||
yield jsonschema.ValidationError(msg)
|
||||
# Process issues
|
||||
errors = []
|
||||
for name in issues:
|
||||
msg = deprecations[name].message.format(name=name)
|
||||
if deprecations[name].error:
|
||||
errors.append(msg)
|
||||
else:
|
||||
warnings.warn(msg)
|
||||
|
||||
if errors:
|
||||
yield jsonschema.ValidationError("\n".join(errors))
|
||||
|
||||
return jsonschema.validators.extend(
|
||||
jsonschema.Draft4Validator,
|
||||
|
@@ -96,12 +96,14 @@
|
||||
"binary_index_ttl": {"type": "integer", "minimum": 0},
|
||||
"aliases": {"type": "object", "patternProperties": {r"\w[\w-]*": {"type": "string"}}},
|
||||
},
|
||||
"deprecatedProperties": {
|
||||
"properties": ["concretizer"],
|
||||
"message": "Spack supports only clingo as a concretizer from v0.23. "
|
||||
"The config:concretizer config option is ignored.",
|
||||
"error": False,
|
||||
},
|
||||
"deprecatedProperties": [
|
||||
{
|
||||
"names": ["concretizer"],
|
||||
"message": "Spack supports only clingo as a concretizer from v0.23. "
|
||||
"The config:concretizer config option is ignored.",
|
||||
"error": False,
|
||||
}
|
||||
],
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -109,7 +109,6 @@
|
||||
"require": requirements,
|
||||
"prefer": prefer_and_conflict,
|
||||
"conflict": prefer_and_conflict,
|
||||
"version": {}, # Here only to warn users on ignored properties
|
||||
"target": {
|
||||
"type": "array",
|
||||
"default": [],
|
||||
@@ -140,14 +139,6 @@
|
||||
},
|
||||
"variants": variants,
|
||||
},
|
||||
"deprecatedProperties": {
|
||||
"properties": ["version"],
|
||||
"message": "setting version preferences in the 'all' section of packages.yaml "
|
||||
"is deprecated and will be removed in v0.23\n\n\tThese preferences "
|
||||
"will be ignored by Spack. You can set them only in package-specific sections "
|
||||
"of the same file.\n",
|
||||
"error": False,
|
||||
},
|
||||
}
|
||||
},
|
||||
"patternProperties": {
|
||||
@@ -165,14 +156,11 @@
|
||||
# version strings
|
||||
"items": {"anyOf": [{"type": "string"}, {"type": "number"}]},
|
||||
},
|
||||
"target": {}, # Here only to warn users on ignored properties
|
||||
"compiler": {}, # Here only to warn users on ignored properties
|
||||
"buildable": {"type": "boolean", "default": True},
|
||||
"permissions": permissions,
|
||||
# If 'get_full_repo' is promoted to a Package-level
|
||||
# attribute, it could be useful to set it here
|
||||
"package_attributes": package_attributes,
|
||||
"providers": {}, # Here only to warn users on ignored properties
|
||||
"variants": variants,
|
||||
"externals": {
|
||||
"type": "array",
|
||||
@@ -204,18 +192,6 @@
|
||||
},
|
||||
},
|
||||
},
|
||||
"deprecatedProperties": {
|
||||
"properties": ["target", "compiler", "providers"],
|
||||
"message": "setting 'compiler:', 'target:' or 'provider:' preferences in "
|
||||
"a package-specific section of packages.yaml is deprecated, and will be "
|
||||
"removed in v0.23.\n\n\tThese preferences will be ignored by Spack, and "
|
||||
"can be set only in the 'all' section of the same file. "
|
||||
"You can run:\n\n\t\t$ spack audit configs\n\n\tto get better diagnostics, "
|
||||
"including files:lines where the deprecated attributes are used.\n\n"
|
||||
"\tUse requirements to enforce conditions on specific packages: "
|
||||
f"{REQUIREMENT_URL}\n",
|
||||
"error": False,
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
|
@@ -32,7 +32,6 @@
|
||||
import spack.config
|
||||
import spack.config as sc
|
||||
import spack.deptypes as dt
|
||||
import spack.directives
|
||||
import spack.environment as ev
|
||||
import spack.error
|
||||
import spack.package_base
|
||||
@@ -285,16 +284,14 @@ def _create_counter(specs: List[spack.spec.Spec], tests: bool):
|
||||
return NoDuplicatesCounter(specs, tests=tests)
|
||||
|
||||
|
||||
def all_compilers_in_config(configuration):
|
||||
return spack.compilers.all_compilers_from(configuration)
|
||||
|
||||
|
||||
def all_libcs() -> Set[spack.spec.Spec]:
|
||||
"""Return a set of all libc specs targeted by any configured compiler. If none, fall back to
|
||||
libc determined from the current Python process if dynamically linked."""
|
||||
|
||||
libcs = {
|
||||
c.default_libc for c in all_compilers_in_config(spack.config.CONFIG) if c.default_libc
|
||||
c.default_libc
|
||||
for c in spack.compilers.all_compilers_from(spack.config.CONFIG)
|
||||
if c.default_libc
|
||||
}
|
||||
|
||||
if libcs:
|
||||
@@ -582,7 +579,7 @@ def _is_checksummed_version(version_info: Tuple[GitOrStandardVersion, dict]):
|
||||
return _is_checksummed_git_version(version)
|
||||
|
||||
|
||||
def _concretization_version_order(version_info: Tuple[GitOrStandardVersion, dict]):
|
||||
def concretization_version_order(version_info: Tuple[GitOrStandardVersion, dict]):
|
||||
"""Version order key for concretization, where preferred > not preferred,
|
||||
not deprecated > deprecated, finite > any infinite component; only if all are
|
||||
the same, do we use default version ordering."""
|
||||
@@ -613,7 +610,7 @@ def _external_config_with_implicit_externals(configuration):
|
||||
if not using_libc_compatibility():
|
||||
return packages_yaml
|
||||
|
||||
for compiler in all_compilers_in_config(configuration):
|
||||
for compiler in spack.compilers.all_compilers_from(configuration):
|
||||
libc = compiler.default_libc
|
||||
if libc:
|
||||
entry = {"spec": f"{libc} %{compiler.spec}", "prefix": libc.external_path}
|
||||
@@ -1025,6 +1022,102 @@ def __iter__(self):
|
||||
ConditionSpecCache = Dict[str, Dict[ConditionSpecKey, ConditionIdFunctionPair]]
|
||||
|
||||
|
||||
class ConstraintOrigin(enum.Enum):
|
||||
"""Generates identifiers that can be pased into the solver attached
|
||||
to constraints, and then later retrieved to determine the origin of
|
||||
those constraints when ``SpecBuilder`` creates Specs from the solve
|
||||
result.
|
||||
"""
|
||||
|
||||
DEPENDS_ON = 1
|
||||
REQUIRE = 2
|
||||
|
||||
@staticmethod
|
||||
def _SUFFIXES() -> Dict["ConstraintOrigin", str]:
|
||||
return {ConstraintOrigin.DEPENDS_ON: "_dep", ConstraintOrigin.REQUIRE: "_req"}
|
||||
|
||||
@staticmethod
|
||||
def append_type_suffix(pkg_id: str, kind: "ConstraintOrigin") -> str:
|
||||
"""Given a package identifier and a constraint kind, generate a string ID."""
|
||||
suffix = ConstraintOrigin._SUFFIXES()[kind]
|
||||
return f"{pkg_id}{suffix}"
|
||||
|
||||
@staticmethod
|
||||
def strip_type_suffix(source: str) -> Tuple[int, Optional[str]]:
|
||||
"""Take a combined package/type ID generated by
|
||||
``append_type_suffix``, and extract the package ID and
|
||||
an associated weight.
|
||||
"""
|
||||
if not source:
|
||||
return -1, None
|
||||
for kind, suffix in ConstraintOrigin._SUFFIXES().items():
|
||||
if source.endswith(suffix):
|
||||
return kind.value, source[: -len(suffix)]
|
||||
return -1, source
|
||||
|
||||
|
||||
class SourceContext:
|
||||
"""Tracks context in which a Spec's clause-set is generated (i.e.
|
||||
with ``SpackSolverSetup.spec_clauses``).
|
||||
|
||||
Facts generated for the spec may include this context.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
# This can be "literal" for constraints that come from a user
|
||||
# spec (e.g. from the command line); it can be the output of
|
||||
# `ConstraintOrigin.append_type_suffix`; the default is "none"
|
||||
# (which means it isn't important to keep track of the source
|
||||
# in that case).
|
||||
self.source = "none"
|
||||
|
||||
|
||||
class ConditionIdContext(SourceContext):
|
||||
"""Derived from a ``ConditionContext``: for clause-sets generated by
|
||||
imposed/required specs, stores an associated transform.
|
||||
|
||||
This is primarily used for tracking whether we are generating clauses
|
||||
in the context of a required spec, or for an imposed spec.
|
||||
|
||||
Is not a subclass of ``ConditionContext`` because it exists in a
|
||||
lower-level context with less information.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self.transform = None
|
||||
|
||||
|
||||
class ConditionContext(SourceContext):
|
||||
"""Tracks context in which a condition (i.e. ``SpackSolverSetup.condition``)
|
||||
is generated (e.g. for a `depends_on`).
|
||||
|
||||
This may modify the required/imposed specs generated as relevant
|
||||
for the context.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
# transformation applied to facts from the required spec. Defaults
|
||||
# to leave facts as they are.
|
||||
self.transform_required = None
|
||||
# transformation applied to facts from the imposed spec. Defaults
|
||||
# to removing "node" and "virtual_node" facts.
|
||||
self.transform_imposed = None
|
||||
|
||||
def requirement_context(self) -> ConditionIdContext:
|
||||
ctxt = ConditionIdContext()
|
||||
ctxt.source = self.source
|
||||
ctxt.transform = self.transform_required
|
||||
return ctxt
|
||||
|
||||
def impose_context(self) -> ConditionIdContext:
|
||||
ctxt = ConditionIdContext()
|
||||
ctxt.source = self.source
|
||||
ctxt.transform = self.transform_imposed
|
||||
return ctxt
|
||||
|
||||
|
||||
class SpackSolverSetup:
|
||||
"""Class to set up and run a Spack concretization solve."""
|
||||
|
||||
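A short round trip through the new suffix helpers (the package name is an example):

    source = ConstraintOrigin.append_type_suffix("openmpi", ConstraintOrigin.DEPENDS_ON)
    assert source == "openmpi_dep"

    kind, pkg = ConstraintOrigin.strip_type_suffix(source)
    assert kind == ConstraintOrigin.DEPENDS_ON.value and pkg == "openmpi"

    # Unsuffixed or empty sources degrade gracefully:
    assert ConstraintOrigin.strip_type_suffix("literal") == (-1, "literal")
    assert ConstraintOrigin.strip_type_suffix("") == (-1, None)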
@@ -1200,8 +1293,9 @@ def compiler_facts(self):
            if compiler.compiler_obj is not None:
                c = compiler.compiler_obj
                for flag_type, flags in c.flags.items():
+                    flag_group = " ".join(flags)
                    for flag in flags:
-                        self.gen.fact(fn.compiler_flag(compiler_id, flag_type, flag))
+                        self.gen.fact(fn.compiler_flag(compiler_id, flag_type, flag, flag_group))

            if compiler.available:
                self.gen.fact(fn.compiler_available(compiler_id))
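The `compiler_facts` hunk widens the `compiler_flag` fact from three to four arguments: each flag now carries the whole space-joined group it came from, so the solver can restore the original ordering later. Illustrated with plain tuples for cflags `-O2 -g` (values are examples, not the solver's actual fact objects):

    compiler_id, flag_type = 7, "cflags"
    flags = ["-O2", "-g"]
    flag_group = " ".join(flags)  # "-O2 -g" travels with every member flag
    facts = [("compiler_flag", compiler_id, flag_type, flag, flag_group) for flag in flags]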
@@ -1378,7 +1472,7 @@ def _get_condition_id(
        named_cond: spack.spec.Spec,
        cache: ConditionSpecCache,
        body: bool,
-        transform: Optional[TransformFunction] = None,
+        context: ConditionIdContext,
    ) -> int:
        """Get the id for one half of a condition (either a trigger or an imposed constraint).

@@ -1392,15 +1486,15 @@ def _get_condition_id(
        """
        pkg_cache = cache[named_cond.name]

-        named_cond_key = (str(named_cond), transform)
+        named_cond_key = (str(named_cond), context.transform)
        result = pkg_cache.get(named_cond_key)
        if result:
            return result[0]

        cond_id = next(self._id_counter)
-        requirements = self.spec_clauses(named_cond, body=body)
-        if transform:
-            requirements = transform(named_cond, requirements)
+        requirements = self.spec_clauses(named_cond, body=body, context=context)
+        if context.transform:
+            requirements = context.transform(named_cond, requirements)
        pkg_cache[named_cond_key] = (cond_id, requirements)

        return cond_id
@@ -1411,8 +1505,7 @@ def condition(
        imposed_spec: Optional[spack.spec.Spec] = None,
        name: Optional[str] = None,
        msg: Optional[str] = None,
-        transform_required: Optional[TransformFunction] = None,
-        transform_imposed: Optional[TransformFunction] = remove_node,
+        context: Optional[ConditionContext] = None,
    ):
        """Generate facts for a dependency or virtual provider condition.

@@ -1421,10 +1514,8 @@ def condition(
            imposed_spec: the constraints that are imposed when this condition is triggered
            name: name for `required_spec` (required if required_spec is anonymous, ignored if not)
            msg: description of the condition
-            transform_required: transformation applied to facts from the required spec. Defaults
-                to leave facts as they are.
-            transform_imposed: transformation applied to facts from the imposed spec. Defaults
-                to removing "node" and "virtual_node" facts.
+            context: if provided, indicates how to modify the clause-sets for the required/imposed
+                specs based on the type of constraint they are generated for (e.g. `depends_on`)
        Returns:
            int: id of the condition created by this function
        """
@@ -1432,14 +1523,19 @@ def condition(
        if not name:
            raise ValueError(f"Must provide a name for anonymous condition: '{required_spec}'")

+        if not context:
+            context = ConditionContext()
+            context.transform_imposed = remove_node
+
        with spec_with_name(required_spec, name):
            # Check if we can emit the requirements before updating the condition ID counter.
            # In this way, if a condition can't be emitted but the exception is handled in the
            # caller, we won't emit partial facts.

            condition_id = next(self._id_counter)
+            requirement_context = context.requirement_context()
            trigger_id = self._get_condition_id(
-                required_spec, cache=self._trigger_cache, body=True, transform=transform_required
+                required_spec, cache=self._trigger_cache, body=True, context=requirement_context
            )
            self.gen.fact(fn.pkg_fact(required_spec.name, fn.condition(condition_id)))
            self.gen.fact(fn.condition_reason(condition_id, msg))
@@ -1449,8 +1545,9 @@ def condition(
            if not imposed_spec:
                return condition_id

+            impose_context = context.impose_context()
            effect_id = self._get_condition_id(
-                imposed_spec, cache=self._effect_cache, body=False, transform=transform_imposed
+                imposed_spec, cache=self._effect_cache, body=False, context=impose_context
            )
            self.gen.fact(
                fn.pkg_fact(required_spec.name, fn.condition_effect(condition_id, effect_id))
@@ -1458,8 +1555,8 @@ def condition(

        return condition_id

-    def impose(self, condition_id, imposed_spec, node=True, name=None, body=False):
-        imposed_constraints = self.spec_clauses(imposed_spec, body=body, required_from=name)
+    def impose(self, condition_id, imposed_spec, node=True, body=False):
+        imposed_constraints = self.spec_clauses(imposed_spec, body=body)
        for pred in imposed_constraints:
            # imposed "node"-like conditions are no-ops
            if not node and pred.args[0] in ("node", "virtual_node"):
@@ -1531,14 +1628,14 @@ def dependency_holds(input_spec, requirements):
                        if t & depflag
                    ]

-                self.condition(
-                    cond,
-                    dep.spec,
-                    name=pkg.name,
-                    msg=msg,
-                    transform_required=track_dependencies,
-                    transform_imposed=dependency_holds,
-                )
+                context = ConditionContext()
+                context.source = ConstraintOrigin.append_type_suffix(
+                    pkg.name, ConstraintOrigin.DEPENDS_ON
+                )
+                context.transform_required = track_dependencies
+                context.transform_imposed = dependency_holds
+
+                self.condition(cond, dep.spec, name=pkg.name, msg=msg, context=context)

                self.gen.newline()

@@ -1616,17 +1713,21 @@ def emit_facts_from_requirement_rules(self, rules: List[RequirementRule]):
            when_spec = spack.spec.Spec(pkg_name)

        try:
-            # With virtual we want to emit "node" and "virtual_node" in imposed specs
-            transform: Optional[TransformFunction] = remove_node
-            if virtual:
-                transform = None
+            context = ConditionContext()
+            context.source = ConstraintOrigin.append_type_suffix(
+                pkg_name, ConstraintOrigin.REQUIRE
+            )
+            if not virtual:
+                context.transform_imposed = remove_node
+            # else: for virtuals we want to emit "node" and
+            # "virtual_node" in imposed specs

            member_id = self.condition(
                required_spec=when_spec,
                imposed_spec=spec,
                name=pkg_name,
-                transform_imposed=transform,
                msg=f"{input_spec} is a requirement for package {pkg_name}",
+                context=context,
            )
        except Exception as e:
            # Do not raise if the rule comes from the 'all' subsection, since usability
@@ -1681,6 +1782,10 @@ def external_packages(self):
            if pkg_name not in spack.repo.PATH:
                continue

+            # This package is not among possible dependencies
+            if pkg_name not in self.pkgs:
+                continue
+
            # Check if the external package is buildable. If it is
            # not then "external(<pkg>)" is a fact, unless we can
            # reuse an already installed spec.
@@ -1721,7 +1826,9 @@ def external_imposition(input_spec, requirements):
                ]

            try:
-                self.condition(spec, spec, msg=msg, transform_imposed=external_imposition)
+                context = ConditionContext()
+                context.transform_imposed = external_imposition
+                self.condition(spec, spec, msg=msg, context=context)
            except (spack.error.SpecError, RuntimeError) as e:
                warnings.warn(f"while setting up external spec {spec}: {e}")
                continue
@@ -1796,6 +1903,7 @@ def spec_clauses(
        expand_hashes: bool = False,
        concrete_build_deps=False,
        required_from: Optional[str] = None,
+        context: Optional[SourceContext] = None,
    ) -> List[AspFunction]:
        """Wrap a call to `_spec_clauses()` into a try/except block with better error handling.

@@ -1811,6 +1919,7 @@ def spec_clauses(
                transitive=transitive,
                expand_hashes=expand_hashes,
                concrete_build_deps=concrete_build_deps,
+                context=context,
            )
        except RuntimeError as exc:
            msg = str(exc)
@@ -1827,6 +1936,7 @@ def _spec_clauses(
        transitive: bool = True,
        expand_hashes: bool = False,
        concrete_build_deps: bool = False,
+        context: Optional[SourceContext] = None,
    ) -> List[AspFunction]:
        """Return a list of clauses that a spec mandates to be true.

@@ -1838,6 +1948,8 @@ def _spec_clauses(
            expand_hashes: if True, descend into hashes of concrete specs (default False)
            concrete_build_deps: if False, do not include pure build deps of concrete specs
                (as they have no effect on runtime constraints)
+            context: tracks what constraint this clause set is generated for (e.g. a
+                `depends_on` constraint in a package.py file)

        Normally, if called with ``transitive=True``, ``spec_clauses()`` just generates
        hashes for the dependency requirements of concrete specs. If ``expand_hashes``
@@ -1880,8 +1992,7 @@ def _spec_clauses(

        # validate variant value only if spec not concrete
        if not spec.concrete:
-            reserved_names = spack.directives.reserved_names
-            if not spec.virtual and vname not in reserved_names:
+            if not spec.virtual and vname not in spack.variant.reserved_names:
                pkg_cls = self.pkg_class(spec.name)
                try:
                    variant_def, _ = pkg_cls.variants[vname]
@@ -1925,13 +2036,19 @@ def _spec_clauses(
|
||||
self.compiler_version_constraints.add(spec.compiler)
|
||||
|
||||
# compiler flags
|
||||
source = context.source if context else "none"
|
||||
for flag_type, flags in spec.compiler_flags.items():
|
||||
flag_group = " ".join(flags)
|
||||
for flag in flags:
|
||||
clauses.append(f.node_flag(spec.name, flag_type, flag))
|
||||
clauses.append(
|
||||
f.node_flag(spec.name, fn.node_flag(flag_type, flag, flag_group, source))
|
||||
)
|
||||
if not spec.concrete and flag.propagate is True:
|
||||
clauses.append(
|
||||
f.propagate(
|
||||
spec.name, fn.node_flag(flag_type, flag), fn.edge_types("link", "run")
|
||||
spec.name,
|
||||
fn.node_flag(flag_type, flag, flag_group, source),
|
||||
fn.edge_types("link", "run"),
|
||||
)
|
||||
)
|
||||
|
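With this hunk, every per-flag fact also records the whole group it came from and the source tag, so a group like cflags="-O2 -g" can later be reassembled without interleaving. A runnable illustration (hypothetical package name and tag, not Spack's API):

    def expand(pkg, flag_type, flags, source):
        # one fact per flag, each remembering the full, space-joined group
        flag_group = " ".join(flags)
        return [(pkg, flag_type, flag, flag_group, source) for flag in flags]

    assert expand("zlib", "cflags", ["-O2", "-g"], "literal") == [
        ("zlib", "cflags", "-O2", "-O2 -g", "literal"),
        ("zlib", "cflags", "-g", "-O2 -g", "literal"),
    ]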
@@ -2013,6 +2130,7 @@ def _spec_clauses(
                         body=body,
                         expand_hashes=expand_hashes,
                         concrete_build_deps=concrete_build_deps,
+                        context=context,
                     )
                 )

@@ -2030,7 +2148,7 @@ def define_package_versions_and_validate_preferences(
         # like being a "develop" version or being preferred exist only at a
         # package.py level, sort them in this partial list here
         package_py_versions = sorted(
-            pkg_cls.versions.items(), key=_concretization_version_order, reverse=True
+            pkg_cls.versions.items(), key=concretization_version_order, reverse=True
         )

         if require_checksum and pkg_cls.has_code:
@@ -2648,7 +2766,9 @@ def literal_specs(self, specs):
                 effect_id, requirements = cache[imposed_spec_key]
             else:
                 effect_id = next(self._id_counter)
-                requirements = self.spec_clauses(spec)
+                context = SourceContext()
+                context.source = "literal"
+                requirements = self.spec_clauses(spec, context=context)
             root_name = spec.name
             for clause in requirements:
                 clause_name = clause.args[0]
@@ -3002,7 +3122,7 @@ class CompilerParser:

     def __init__(self, configuration) -> None:
         self.compilers: Set[KnownCompiler] = set()
-        for c in all_compilers_in_config(configuration):
+        for c in spack.compilers.all_compilers_from(configuration):
             if using_libc_compatibility() and not c_compiler_runs(c):
                 tty.debug(
                     f"the C compiler {c.cc} does not exist, or does not run correctly."
@@ -3348,7 +3468,6 @@ def __init__(self, specs, hash_lookup=None):
        self._result = None
        self._command_line_specs = specs
        self._flag_sources = collections.defaultdict(lambda: set())
-       self._flag_compiler_defaults = set()

        # Pass in as arguments reusable specs and plug them in
        # from this dictionary during reconstruction
@@ -3406,14 +3525,10 @@ def node_compiler_version(self, node, compiler, version):
         self._specs[node].compiler = spack.spec.CompilerSpec(compiler)
         self._specs[node].compiler.versions = vn.VersionList([vn.Version(version)])

-    def node_flag_compiler_default(self, node):
-        self._flag_compiler_defaults.add(node)
-
-    def node_flag(self, node, flag_type, flag):
-        self._specs[node].compiler_flags.add_flag(flag_type, flag, False)
-
-    def node_flag_source(self, node, flag_type, source):
-        self._flag_sources[(node, flag_type)].add(source)
+    def node_flag(self, node, node_flag):
+        self._specs[node].compiler_flags.add_flag(
+            node_flag.flag_type, node_flag.flag, False, node_flag.flag_group, node_flag.source
+        )

     def external_spec_selected(self, node, idx):
         """This means that the external spec and index idx has been selected for this package."""
@@ -3454,19 +3569,27 @@ def virtual_on_edge(self, parent_node, provider_node, virtual):
         dependencies[0].update_virtuals((virtual,))

     def reorder_flags(self):
-        """Order compiler flags on specs in predefined order.
-
-        We order flags so that any node's flags will take priority over
-        those of its dependents. That is, the deepest node in the DAG's
-        flags will appear last on the compile line, in the order they
-        were specified.
+        """For each spec, determine the order of compiler flags applied to it.

         The solver determines which flags are on nodes; this routine
-        imposes order afterwards.
+        imposes order afterwards. The order is:
+
+        1. Flags applied in compiler definitions should come first
+        2. Flags applied by dependents are ordered topologically (with a
+           dependency on `traverse` to resolve the partial order into a
+           stable total order)
+        3. Flags from requirements are then applied (requirements always
+           come from the package and never a parent)
+        4. Command-line flags should come last
+
+        Additionally, for each source (requirements, compiler, command line, and
+        dependents), flags from that source should retain their order and grouping:
+        e.g. for `y cflags="-z -a"` "-z" and "-a" should never have any intervening
+        flags inserted, and should always appear in that order.
         """
         # reverse compilers so we get highest priority compilers that share a spec
         compilers = dict(
-            (c.spec, c) for c in reversed(all_compilers_in_config(spack.config.CONFIG))
+            (c.spec, c) for c in reversed(spack.compilers.all_compilers_from(spack.config.CONFIG))
         )
         cmd_specs = dict((s.name, s) for spec in self._command_line_specs for s in spec.traverse())

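A worked example of the documented order (hypothetical flags, not taken from a real package): for a node whose compiler definition, a dependent, a requirement, and the command line all contribute cflags, the final list is assembled as:

    ordered = []
    ordered += ["-march=native"]  # 1. compiler definition first
    ordered += ["-fPIC"]          # 2. dependents, in topological order
    ordered += ["-O2", "-g"]      # 3. requirements, group kept together
    ordered += ["-O3"]            # 4. command line last
    assert ordered == ["-march=native", "-fPIC", "-O2", "-g", "-O3"]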
@@ -3477,40 +3600,78 @@ def reorder_flags(self):
             flagmap_from_compiler = compilers[spec.compiler].flags

             for flag_type in spec.compiler_flags.valid_compiler_flags():
-                from_compiler = flagmap_from_compiler.get(flag_type, [])
-                from_sources = []
-
-                # order is determined by the DAG. A spec's flags come after any of its ancestors
-                # on the compile line
                 node = SpecBuilder.make_node(pkg=spec.name)
-                source_key = (node, flag_type)
-                if source_key in self._flag_sources:
-                    order = [
-                        SpecBuilder.make_node(pkg=s.name)
-                        for s in spec.traverse(order="post", direction="parents")
-                    ]
-                    sorted_sources = sorted(
-                        self._flag_sources[source_key], key=lambda s: order.index(s)
-                    )
-
-                    # add flags from each source, lowest to highest precedence
-                    for node in sorted_sources:
-                        all_src_flags = list()
-                        per_pkg_sources = [self._specs[node]]
-                        if node.pkg in cmd_specs:
-                            per_pkg_sources.append(cmd_specs[node.pkg])
-                        for source in per_pkg_sources:
-                            all_src_flags.extend(source.compiler_flags.get(flag_type, []))
-                        extend_flag_list(from_sources, all_src_flags)
+
+                ordered_flags = []
+
+                # 1. Put compiler flags first
+                from_compiler = tuple(flagmap_from_compiler.get(flag_type, []))
+                extend_flag_list(ordered_flags, from_compiler)
+
+                # 2. Add all sources (the compiler is one of them, so skip any
+                # flag group that matches it exactly)
+                flag_groups = set()
+                for flag in self._specs[node].compiler_flags.get(flag_type, []):
+                    flag_groups.add(
+                        spack.spec.CompilerFlag(
+                            flag.flag_group,
+                            propagate=flag.propagate,
+                            flag_group=flag.flag_group,
+                            source=flag.source,
+                        )
+                    )
+
+                # For flags that are applied by dependents, put flags from parents
+                # before children; we depend on the stability of traverse() to
+                # achieve a stable flag order for flags introduced in this manner.
+                topo_order = list(s.name for s in spec.traverse(order="post", direction="parents"))
+                lex_order = list(sorted(flag_groups))
+
+                def _order_index(flag_group):
+                    source = flag_group.source
+                    # Note: if 'require: ^dependency cflags=...' is ever possible,
+                    # this will topologically sort for require as well
+                    type_index, pkg_source = ConstraintOrigin.strip_type_suffix(source)
+                    if pkg_source in topo_order:
+                        major_index = topo_order.index(pkg_source)
+                        # If for x->y, x has multiple depends_on declarations that
+                        # are activated, and each adds cflags to y, we fall back on
+                        # alphabetical ordering to maintain a total order
+                        minor_index = lex_order.index(flag_group)
+                    else:
+                        major_index = len(topo_order) + lex_order.index(flag_group)
+                        minor_index = 0
+                    return (type_index, major_index, minor_index)
+
+                prioritized_groups = sorted(flag_groups, key=lambda x: _order_index(x))
+
+                for grp in prioritized_groups:
+                    grp_flags = tuple(
+                        x for (x, y) in spack.compiler.tokenize_flags(grp.flag_group)
+                    )
+                    if grp_flags == from_compiler:
+                        continue
+                    as_compiler_flags = list(
+                        spack.spec.CompilerFlag(
+                            x,
+                            propagate=grp.propagate,
+                            flag_group=grp.flag_group,
+                            source=grp.source,
+                        )
+                        for x in grp_flags
+                    )
+                    extend_flag_list(ordered_flags, as_compiler_flags)
+
+                # 3. Now put cmd-line flags last
+                if node.pkg in cmd_specs:
+                    cmd_flags = cmd_specs[node.pkg].compiler_flags.get(flag_type, [])
+                    extend_flag_list(ordered_flags, cmd_flags)

-                # compiler flags from compilers config are lowest precedence
-                ordered_compiler_flags = list(llnl.util.lang.dedupe(from_compiler + from_sources))
                 compiler_flags = spec.compiler_flags.get(flag_type, [])
+                msg = "%s does not equal %s" % (set(compiler_flags), set(ordered_flags))
+                assert set(compiler_flags) == set(ordered_flags), msg

-                msg = f"{set(compiler_flags)} does not equal {set(ordered_compiler_flags)}"
-                assert set(compiler_flags) == set(ordered_compiler_flags), msg
-
-                spec.compiler_flags.update({flag_type: ordered_compiler_flags})
+                spec.compiler_flags.update({flag_type: ordered_flags})

     def deprecated(self, node: NodeArgument, version: str) -> None:
         tty.warn(f'using "{node.pkg}@{version}" which is a deprecated version')
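The heart of the new ordering is `_order_index`: groups whose source is a package in the DAG sort by topological position, with alphabetical order as the tie-breaker, and all other groups sort after them. A simplified, self-contained re-creation (it deliberately omits the `type_index` component that `ConstraintOrigin.strip_type_suffix` derives, and the package names are made up):

    topo_order = ["x", "y"]              # parents before children
    lex_order = sorted(["-a -b", "-c"])  # stable fallback total order

    def order_index(pkg_source, flag_group):
        if pkg_source in topo_order:
            return (topo_order.index(pkg_source), lex_order.index(flag_group))
        return (len(topo_order) + lex_order.index(flag_group), 0)

    # a group set by dependent "x" sorts before one set by "y" itself, and both
    # sort before a group whose source is not a package in the DAG
    assert order_index("x", "-c") < order_index("y", "-a -b") < order_index("other", "-c")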
@@ -3574,10 +3735,9 @@ def build_specs(self, function_tuples):
                 continue

             # if we've already gotten a concrete spec for this pkg,
-            # do not bother calling actions on it except for node_flag_source,
-            # since node_flag_source is tracking information not in the spec itself
+            # do not bother calling actions on it
             spec = self._specs.get(args[0])
-            if spec and spec.concrete and name != "node_flag_source":
+            if spec and spec.concrete:
                 continue

             action(*args)
@@ -3637,7 +3797,8 @@ def _develop_specs_from_env(spec, env):
         assert spec.variants["dev_path"].value == path, error_msg
     else:
         spec.variants.setdefault("dev_path", spack.variant.SingleValuedVariant("dev_path", path))
-    spec.constrain(dev_info["spec"])
+
+    assert spec.satisfies(dev_info["spec"])


 def _is_reusable(spec: spack.spec.Spec, packages, local: bool) -> bool:

@@ -43,9 +43,7 @@
    internal_error("Only nodes can have node_compiler_version").
 :- attr("variant_value", PackageNode, _, _), not attr("node", PackageNode),
    internal_error("variant_value true for a non-node").
-:- attr("node_flag_compiler_default", PackageNode), not attr("node", PackageNode),
-   internal_error("node_flag_compiler_default true for non-node").
-:- attr("node_flag", PackageNode, _, _), not attr("node", PackageNode),
+:- attr("node_flag", PackageNode, _), not attr("node", PackageNode),
    internal_error("node_flag assigned for non-node").
 :- attr("external_spec_selected", PackageNode, _), not attr("node", PackageNode),
    internal_error("external_spec_selected for non-node").
@@ -53,10 +51,6 @@
    internal_error("non-node depends on something").
 :- attr("depends_on", _, ChildNode, _), not attr("node", ChildNode),
    internal_error("something depends_on a non-node").
-:- attr("node_flag_source", Node, _, _), not attr("node", Node),
-   internal_error("node_flag_source assigned for a non-node").
-:- attr("node_flag_source", _, _, SourceNode), not attr("node", SourceNode),
-   internal_error("node_flag_source assigned with a non-node source").
 :- attr("virtual_node", VirtualNode), not provider(_, VirtualNode),
    internal_error("virtual node with no provider").
 :- provider(_, VirtualNode), not attr("virtual_node", VirtualNode),
@@ -154,7 +148,6 @@ unification_set(SetID, VirtualNode)
 % TODO: literals, at the moment, can only influence the "root" unification set. This needs to be extended later.

 % Node attributes that have multiple node arguments (usually, only the first argument is a node)
-multiple_nodes_attribute("node_flag_source").
 multiple_nodes_attribute("depends_on").
 multiple_nodes_attribute("virtual_on_edge").
 multiple_nodes_attribute("provider_set").
@@ -392,7 +385,6 @@ trigger_condition_holds(ID, RequestorNode) :-
   attr(Name, node(X, A1), A2, A3) : condition_requirement(ID, Name, A1, A2, A3), condition_nodes(ID, PackageNode, node(X, A1)), not multiple_nodes_attribute(Name);
   attr(Name, node(X, A1), A2, A3, A4) : condition_requirement(ID, Name, A1, A2, A3, A4), condition_nodes(ID, PackageNode, node(X, A1));
   % Special cases
-  attr("node_flag_source", node(X, A1), A2, node(Y, A3)) : condition_requirement(ID, "node_flag_source", A1, A2, A3), condition_nodes(ID, PackageNode, node(X, A1)), condition_nodes(ID, PackageNode, node(Y, A3));
   not cannot_hold(ID, PackageNode).

 condition_holds(ConditionID, node(X, Package))
@@ -440,13 +432,6 @@ attr(Name, node(X, A1), A2) :- impose(ID, PackageNode), imposed_constrai
 attr(Name, node(X, A1), A2, A3) :- impose(ID, PackageNode), imposed_constraint(ID, Name, A1, A2, A3), imposed_nodes(ID, PackageNode, node(X, A1)), not multiple_nodes_attribute(Name).
 attr(Name, node(X, A1), A2, A3, A4) :- impose(ID, PackageNode), imposed_constraint(ID, Name, A1, A2, A3, A4), imposed_nodes(ID, PackageNode, node(X, A1)).

-% For node flag sources we need to look at the condition_set of the source, since it is the dependent
-% of the package on which I want to impose the constraint
-attr("node_flag_source", node(X, A1), A2, node(Y, A3))
-  :- impose(ID, node(X, A1)),
-     imposed_constraint(ID, "node_flag_source", A1, A2, A3),
-     condition_set(node(Y, A3), node(X, A1)).
-
 % Provider set is relevant only for literals, since it's the only place where `^[virtuals=foo] bar`
 % might appear in the HEAD of a rule
 attr("provider_set", node(min_dupe_id, Provider), node(min_dupe_id, Virtual))
@@ -487,8 +472,8 @@ virtual_condition_holds(node(Y, A2), Virtual)
 % we cannot have additional flag values when we are working with concrete specs
 :- attr("node", node(ID, Package)),
    attr("hash", node(ID, Package), Hash),
-   attr("node_flag", node(ID, Package), FlagType, Flag),
-   not imposed_constraint(Hash, "node_flag", Package, FlagType, Flag),
+   attr("node_flag", node(ID, Package), node_flag(FlagType, Flag, _, _)),
+   not imposed_constraint(Hash, "node_flag", Package, node_flag(FlagType, Flag, _, _)),
    internal_error("imposed hash without imposing all flag values").

 #defined condition/2.
@@ -789,22 +774,15 @@ required_provider(Provider, Virtual)

 :- provider(node(Y, Package), node(X, Virtual)), required_provider(Provider, Virtual), Package != Provider.

-% TODO: the following two choice rules allow the solver to add compiler
+% TODO: the following choice rule allows the solver to add compiler
 % flags if their only source is from a requirement. This is overly-specific
 % and should use a more-generic approach like in https://github.com/spack/spack/pull/37180

-{ attr("node_flag", node(ID, Package), FlagType, FlagValue) } :-
+{ attr("node_flag", node(ID, Package), NodeFlag) } :-
   requirement_group_member(ConditionID, Package, RequirementID),
   activate_requirement(node(ID, Package), RequirementID),
   pkg_fact(Package, condition_effect(ConditionID, EffectID)),
-  imposed_constraint(EffectID, "node_flag_set", Package, FlagType, FlagValue).
-
-{ attr("node_flag_source", node(NodeID1, Package1), FlagType, node(NodeID2, Package2)) } :-
-  requirement_group_member(ConditionID, Package1, RequirementID),
-  activate_requirement(node(NodeID1, Package1), RequirementID),
-  pkg_fact(Package1, condition_effect(ConditionID, EffectID)),
-  imposed_constraint(EffectID, "node_flag_source", Package1, FlagType, Package2),
-  imposed_nodes(EffectID, node(NodeID2, Package2), node(NodeID1, Package1)).
+  imposed_constraint(EffectID, "node_flag_set", Package, NodeFlag).

 requirement_weight(node(ID, Package), Group, W) :-
   W = #min {
@@ -1050,23 +1028,22 @@ variant_is_propagated(PackageNode, Variant) :-
 % 1. The same flag type is not set on this node
 % 2. This node has the same compiler as the propagation source

-propagated_flag(node(PackageID, Package), node_flag(FlagType, Flag), SourceNode) :-
-  propagate(node(PackageID, Package), node_flag(FlagType, Flag), _),
-  not attr("node_flag_set", node(PackageID, Package), FlagType, _),
+propagated_flag(node(PackageID, Package), node_flag(FlagType, Flag, FlagGroup, Source), SourceNode) :-
+  propagate(node(PackageID, Package), node_flag(FlagType, Flag, FlagGroup, Source), _),
+  not attr("node_flag_set", node(PackageID, Package), node_flag(FlagType, _, _, "literal")),
   % Same compiler as propagation source
   node_compiler(node(PackageID, Package), CompilerID),
   node_compiler(SourceNode, CompilerID),
-  attr("propagate", SourceNode, node_flag(FlagType, Flag), _),
+  attr("propagate", SourceNode, node_flag(FlagType, Flag, FlagGroup, Source), _),
   node(PackageID, Package) != SourceNode,
   not runtime(Package).

-attr("node_flag", PackageNode, FlagType, Flag) :- propagated_flag(PackageNode, node_flag(FlagType, Flag), _).
-attr("node_flag_source", PackageNode, FlagType, SourceNode) :- propagated_flag(PackageNode, node_flag(FlagType, _), SourceNode).
+attr("node_flag", PackageNode, NodeFlag) :- propagated_flag(PackageNode, NodeFlag, _).

 % Cannot propagate the same flag from two distinct sources
 error(100, "{0} and {1} cannot both propagate compiler flags '{2}' to {3}", Source1, Source2, Package, FlagType) :-
-  propagated_flag(node(ID, Package), node_flag(FlagType, _), node(_, Source1)),
-  propagated_flag(node(ID, Package), node_flag(FlagType, _), node(_, Source2)),
+  propagated_flag(node(ID, Package), node_flag(FlagType, _, _, _), node(_, Source1)),
+  propagated_flag(node(ID, Package), node_flag(FlagType, _, _, _), node(_, Source2)),
   Source1 < Source2.

 %----
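In prose, the rewritten propagation rule says a flag group lands on a node only if the node uses the same compiler as the propagation source, is not the source itself, is not a runtime package, and has no literally set flag of that type. A rough Python paraphrase of that guard (names are illustrative, not solver API):

    def receives_propagated_flag(node, source, same_compiler, has_literal_flag, is_runtime):
        # mirrors the body of the propagated_flag rule above
        return same_compiler and not has_literal_flag and node != source and not is_runtime

    assert receives_propagated_flag("y", "x", True, False, False)
    assert not receives_propagated_flag("x", "x", True, False, False)  # never from itself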
@@ -1353,32 +1330,18 @@ error(100, "Compiler {1}@{2} requested for {0} cannot be found. Set install_miss
 % Compiler flags
 %-----------------------------------------------------------------------------

-% remember where flags came from
-attr("node_flag_source", PackageNode, FlagType, PackageNode) :- attr("node_flag_set", PackageNode, FlagType, _).
-attr("node_flag_source", PackageNode, FlagType, PackageNode) :- attr("node_flag", PackageNode, FlagType, _), attr("hash", PackageNode, _).
-
 % compiler flags from compilers.yaml are put on nodes if compiler matches
-attr("node_flag", PackageNode, FlagType, Flag)
-  :- compiler_flag(CompilerID, FlagType, Flag),
+attr("node_flag", PackageNode, node_flag(FlagType, Flag, FlagGroup, CompilerID))
+  :- compiler_flag(CompilerID, FlagType, Flag, FlagGroup),
      node_compiler(PackageNode, CompilerID),
      flag_type(FlagType),
      compiler_id(CompilerID),
      compiler_name(CompilerID, CompilerName),
      compiler_version(CompilerID, Version).

-attr("node_flag_compiler_default", PackageNode)
-  :- not attr("node_flag_set", PackageNode, FlagType, _),
-     compiler_flag(CompilerID, FlagType, Flag),
-     node_compiler(PackageNode, CompilerID),
-     flag_type(FlagType),
-     compiler_id(CompilerID),
-     compiler_name(CompilerID, CompilerName),
-     compiler_version(CompilerID, Version).
+attr("node_flag", PackageNode, NodeFlag) :- attr("node_flag_set", PackageNode, NodeFlag).

-% Flag set to something
-attr("node_flag", PackageNode, FlagType, Flag) :- attr("node_flag_set", PackageNode, FlagType, Flag).
-
-#defined compiler_flag/3.
+#defined compiler_flag/4.


 %-----------------------------------------------------------------------------
@@ -230,6 +230,13 @@ class NodeArgument(NamedTuple):
     pkg: str


+class NodeFlag(NamedTuple):
+    flag_type: str
+    flag: str
+    flag_group: str
+    source: str
+
+
 def intermediate_repr(sym):
     """Returns an intermediate representation of clingo models for Spack's spec builder.

@@ -248,6 +255,13 @@ def intermediate_repr(sym):
             return NodeArgument(
                 id=intermediate_repr(sym.arguments[0]), pkg=intermediate_repr(sym.arguments[1])
             )
+        elif sym.name == "node_flag":
+            return NodeFlag(
+                flag_type=intermediate_repr(sym.arguments[0]),
+                flag=intermediate_repr(sym.arguments[1]),
+                flag_group=intermediate_repr(sym.arguments[2]),
+                source=intermediate_repr(sym.arguments[3]),
+            )
     except RuntimeError:
         # This happens when using clingo w/ CFFI and trying to access ".name" for symbols
         # that are not functions

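On the Python side, a `node_flag(...)` symbol in a model is now unpacked into the `NodeFlag` named tuple above. A self-contained example of the resulting value (the literal field values are illustrative):

    from typing import NamedTuple

    class NodeFlag(NamedTuple):
        flag_type: str
        flag: str
        flag_group: str
        source: str

    # what intermediate_repr() would yield for node_flag("cflags","-O2","-O2 -g","literal")
    nf = NodeFlag(flag_type="cflags", flag="-O2", flag_group="-O2 -g", source="literal")
    assert nf.flag_group == "-O2 -g"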
@@ -13,6 +13,7 @@
 #show attr/2.
 #show attr/3.
 #show attr/4.
+#show attr/5.

 % names of optimization criteria
 #show opt_criterion/2.
Some files were not shown because too many files have changed in this diff.