Compare commits
456 commits: my_branch...bugfix/bin
(Commit table: 456 commits, d82b537339 through 72d83a6f94; the Author and Date columns were not captured.)
.github/workflows/bootstrap.yml (vendored, 23 changes)
@@ -12,6 +12,7 @@ on:
       # built-in repository or documentation
       - 'var/spack/repos/builtin/**'
       - '!var/spack/repos/builtin/packages/clingo-bootstrap/**'
       - '!var/spack/repos/builtin/packages/clingo/**'
       - '!var/spack/repos/builtin/packages/python/**'
       - '!var/spack/repos/builtin/packages/re2c/**'
       - 'lib/spack/docs/**'
@@ -19,11 +20,16 @@ on:
     # nightly at 2:16 AM
     - cron: '16 2 * * *'
 
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
+  cancel-in-progress: true
+
 jobs:
 
   fedora-clingo-sources:
     runs-on: ubuntu-latest
     container: "fedora:latest"
+    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         run: |
@@ -57,6 +63,7 @@ jobs:
   ubuntu-clingo-sources:
     runs-on: ubuntu-latest
     container: "ubuntu:latest"
+    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         env:
@@ -93,6 +100,7 @@ jobs:
   ubuntu-clingo-binaries-and-patchelf:
     runs-on: ubuntu-latest
     container: "ubuntu:latest"
+    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         env:
@@ -126,6 +134,7 @@ jobs:
   opensuse-clingo-sources:
     runs-on: ubuntu-latest
     container: "opensuse/leap:latest"
+    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         run: |
@@ -154,6 +163,7 @@ jobs:
 
   macos-clingo-sources:
     runs-on: macos-latest
+    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         run: |
@@ -170,17 +180,19 @@ jobs:
           tree ~/.spack/bootstrap/store/
 
   macos-clingo-binaries:
-    runs-on: macos-latest
+    runs-on: ${{ matrix.macos-version }}
     strategy:
       matrix:
         python-version: ['3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
+        macos-version: ['macos-10.15', 'macos-11', 'macos-12']
+    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         run: |
           brew install tree
       - name: Checkout
         uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
-      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
+      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
        with:
          python-version: ${{ matrix.python-version }}
      - name: Bootstrap clingo
@@ -195,10 +207,11 @@ jobs:
     strategy:
       matrix:
         python-version: ['2.7', '3.5', '3.6', '3.7', '3.8', '3.9', '3.10']
+    if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
         uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
-      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
+      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
        with:
          python-version: ${{ matrix.python-version }}
      - name: Setup repo
@@ -216,6 +229,7 @@ jobs:
   ubuntu-gnupg-binaries:
     runs-on: ubuntu-latest
     container: "ubuntu:latest"
+    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         env:
@@ -250,6 +264,7 @@ jobs:
   ubuntu-gnupg-sources:
     runs-on: ubuntu-latest
     container: "ubuntu:latest"
+    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         env:
@@ -285,6 +300,7 @@ jobs:
 
   macos-gnupg-binaries:
     runs-on: macos-latest
+    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         run: |
@@ -302,6 +318,7 @@ jobs:
 
   macos-gnupg-sources:
     runs-on: macos-latest
+    if: github.repository == 'spack/spack'
     steps:
       - name: Install dependencies
         run: |
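Every workflow file in this diff picks up the same two changes: a `concurrency` block and a bump of the pinned `actions/setup-python` commit. A minimal standalone sketch of the concurrency pattern, assuming standard GitHub Actions semantics (the workflow itself is hypothetical, not part of this changeset):

```yaml
name: example
on: [pull_request, push]

# Cancel a still-running instance of this workflow when a newer
# commit to the same pull request triggers it again.
concurrency:
  # Runs for the same PR share one group; runs triggered by plain
  # pushes fall back to the unique run number and are never cancelled.
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
  cancel-in-progress: true

jobs:
  noop:
    runs-on: ubuntu-latest
    steps:
      - run: echo "concurrency demo"
```

Pinning actions to a full commit SHA (as in the `actions/setup-python` change above) rather than a mutable tag such as `@v2` guards against a tag being repointed to different code.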
.github/workflows/build-containers.yml (vendored, 9 changes)
@@ -19,6 +19,10 @@ on:
   release:
     types: [published]
 
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
+  cancel-in-progress: true
+
 jobs:
   deploy-images:
     runs-on: ubuntu-latest
@@ -43,6 +47,7 @@ jobs:
           [ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
           [ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04']]
     name: Build ${{ matrix.dockerfile[0] }}
+    if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
         uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
@@ -75,7 +80,7 @@ jobs:
           fi
 
       - name: Upload Dockerfile
-        uses: actions/upload-artifact@6673cd052c4cd6fcf4b4e6e60ea986c889389535
+        uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
        with:
          name: dockerfiles
          path: dockerfiles
@@ -94,7 +99,7 @@ jobs:
           password: ${{ secrets.GITHUB_TOKEN }}
 
       - name: Log in to DockerHub
-        if: ${{ github.event_name != 'pull_request' }}
+        if: github.event_name != 'pull_request'
        uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b # @v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
.github/workflows/macos_python.yml (vendored, 13 changes)
@@ -16,16 +16,21 @@ on:
     - '.github/workflows/macos_python.yml'
   # TODO: run if we touch any of the recipes involved in this
 
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
+  cancel-in-progress: true
+
 # GitHub Action Limits
 # https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions
 
 jobs:
   install_gcc:
     name: gcc with clang
+    if: github.repository == 'spack/spack'
     runs-on: macos-latest
     steps:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
-      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
+      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
        with:
          python-version: 3.9
      - name: spack install
@@ -36,11 +41,12 @@ jobs:
 
   install_jupyter_clang:
     name: jupyter
+    if: github.repository == 'spack/spack'
     runs-on: macos-latest
     timeout-minutes: 700
     steps:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
-      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
+      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
        with:
          python-version: 3.9
      - name: spack install
@@ -50,10 +56,11 @@ jobs:
 
   install_scipy_clang:
     name: scipy, mpl, pd
+    if: github.repository == 'spack/spack'
     runs-on: macos-latest
     steps:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
-      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
+      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
        with:
          python-version: 3.9
      - name: spack install
.github/workflows/setup_git.ps1 (vendored, 1 change)
@@ -4,6 +4,7 @@ Set-Location spack
 
 git config --global user.email "spack@example.com"
 git config --global user.name "Test User"
+git config --global core.longpaths true
 
 if ($(git branch --show-current) -ne "develop")
 {
.github/workflows/unit_tests.yaml (vendored, 21 changes)
@@ -9,6 +9,11 @@ on:
     branches:
       - develop
       - releases/**
 
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
+  cancel-in-progress: true
+
 jobs:
   # Validate that the code can be run on all the Python versions
   # supported by Spack
@@ -16,7 +21,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
-      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
+      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
        with:
          python-version: '3.10'
      - name: Install Python Packages
@@ -34,7 +39,7 @@ jobs:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
+      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
        with:
          python-version: '3.10'
      - name: Install Python packages
@@ -109,7 +114,7 @@ jobs:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
+      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install System packages
@@ -174,7 +179,7 @@ jobs:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
+      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
        with:
          python-version: '3.10'
      - name: Install System packages
@@ -240,7 +245,7 @@ jobs:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
+      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
        with:
          python-version: '3.10'
      - name: Install System packages
@@ -289,7 +294,7 @@ jobs:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
+      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install Python packages
@@ -332,7 +337,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b # @v2
-      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6 # @v2
+      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb # @v2
        with:
          python-version: '3.10'
      - name: Install Python packages
@@ -345,7 +350,7 @@ jobs:
           coverage run $(which spack) audit packages
           coverage combine
           coverage xml
-      - name: Package audits (wwithout coverage)
+      - name: Package audits (without coverage)
        if: ${{ needs.changes.outputs.with_coverage == 'false' }}
        run: |
          . share/spack/setup-env.sh
.github/workflows/windows_python.yml (vendored, 23 changes)
@@ -9,6 +9,11 @@ on:
     branches:
       - develop
       - releases/**
 
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_number }}
+  cancel-in-progress: true
+
 defaults:
   run:
     shell:
@@ -18,7 +23,7 @@ jobs:
     runs-on: windows-latest
     steps:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
-      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
+      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
        with:
          python-version: 3.9
      - name: Install Python Packages
@@ -36,7 +41,7 @@ jobs:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
+      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
        with:
          python-version: 3.9
      - name: Install Python packages
@@ -58,7 +63,7 @@ jobs:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
+      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
        with:
          python-version: 3.9
      - name: Install Python packages
@@ -78,7 +83,7 @@ jobs:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
+      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
        with:
          python-version: 3.9
      - name: Install Python packages
@@ -98,7 +103,7 @@ jobs:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
+      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
        with:
          python-version: 3.9
      - name: Install Python packages
@@ -123,7 +128,7 @@ jobs:
       - uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
         with:
           fetch-depth: 0
-      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
+      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
        with:
          python-version: 3.9
      - name: Install Python packages
@@ -139,11 +144,11 @@ jobs:
           echo "installer_root=$((pwd).Path)" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
        env:
          ProgressPreference: SilentlyContinue
-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
        with:
          name: Windows Spack Installer Bundle
          path: ${{ env.installer_root }}\pkg\Spack.exe
-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8
        with:
          name: Windows Spack Installer
          path: ${{ env.installer_root}}\pkg\Spack.msi
@@ -154,7 +159,7 @@ jobs:
     run:
       shell: pwsh
     steps:
-      - uses: actions/setup-python@98f2ad02fd48d057ee3b4d4f66525b231c3e52b6
+      - uses: actions/setup-python@d09bd5e6005b175076f227b13d9730d56e9dcfcb
        with:
          python-version: 3.9
      - name: Install Python packages
CHANGELOG.md (204 changes)
@@ -1,3 +1,205 @@
+# v0.18.0 (2022-05-28)
+
+`v0.18.0` is a major feature release.
+
+## Major features in this release
+
+1. **Concretizer now reuses by default**
+
+   `spack install --reuse` was introduced in `v0.17.0`, and `--reuse`
+   is now the default concretization mode. Spack will try hard to
+   resolve dependencies using installed packages or binaries (#30396).
+
+   To avoid reuse and to use the latest package configurations, (the
+   old default), you can use `spack install --fresh`, or add
+   configuration like this to your environment or `concretizer.yaml`:
+
+   ```yaml
+   concretizer:
+     reuse: false
+   ```
+
+2. **Finer-grained hashes**
+
+   Spack hashes now include `link`, `run`, *and* `build` dependencies,
+   as well as a canonical hash of package recipes. Previously, hashes
+   only included `link` and `run` dependencies (though `build`
+   dependencies were stored by environments). We coarsened the hash to
+   reduce churn in user installations, but the new default concretizer
+   behavior mitigates this concern and gets us reuse *and* provenance.
+   You will be able to see the build dependencies of new installations
+   with `spack find`. Old installations will not change and their
+   hashes will not be affected. (#28156, #28504, #30717, #30861)
+
+3. **Improved error messages**
+
+   Error handling with the new concretizer is now done with
+   optimization criteria rather than with unsatisfiable cores, and
+   Spack reports many more details about conflicting constraints.
+   (#30669)
+
+4. **Unify environments when possible**
+
+   Environments have thus far supported `concretization: together` or
+   `concretization: separately`. These have been replaced by a new
+   preference in `concretizer.yaml`:
+
+   ```yaml
+   concretizer:
+     unify: [true|false|when_possible]
+   ```
+
+   `concretizer:unify:when_possible` will *try* to resolve a fully
+   unified environment, but if it cannot, it will create multiple
+   configurations of some packages where it has to. For large
+   environments that previously had to be concretized separately, this
+   can result in a huge speedup (40-50x). (#28941)
+
+5. **Automatically find externals on Cray machines**
+
+   Spack can now automatically discover installed packages in the Cray
+   Programming Environment by running `spack external find` (or `spack
+   external read-cray-manifest` to *only* query the PE). Packages from
+   the PE (e.g., `cray-mpich` are added to the database with full
+   dependency information, and compilers from the PE are added to
+   `compilers.yaml`. Available with the June 2022 release of the Cray
+   Programming Environment. (#24894, #30428)
+
+6. **New binary format and hardened signing**
+
+   Spack now has an updated binary format, with improvements for
+   security. The new format has a detached signature file, and Spack
+   verifies the signature before untarring or decompressing the binary
+   package. The previous format embedded the signature in a `tar`
+   file, which required the client to run `tar` *before* verifying
+   (#30750). Spack can still install from build caches using the old
+   format, but we encourage users to switch to the new format going
+   forward.
+
+   Production GitLab pipelines have been hardened to securely sign
+   binaries. There is now a separate signing stage so that signing
+   keys are never exposed to build system code, and signing keys are
+   ephemeral and only live as long as the signing pipeline stage.
+   (#30753)
+
+7. **Bootstrap mirror generation**
+
+   The `spack bootstrap mirror` command can automatically create a
+   mirror for bootstrapping the concretizer and other needed
+   dependencies in an air-gapped environment. (#28556)
+
+8. **Nascent Windows support**
+
+   Spack now has initial support for Windows. Spack core has been
+   refactored to run in the Windows environment, and a small number of
+   packages can now build for Windows. More details are
+   [in the documentation](https://spack.rtfd.io/en/latest/getting_started.html#spack-on-windows)
+   (#27021, #28385, many more)
+
+9. **Makefile generation**
+
+   `spack env depfile` can be used to generate a `Makefile` from an
+   environment, which can be used to build packages the environment
+   in parallel on a single node. e.g.:
+
+   ```console
+   spack -e myenv env depfile > Makefile
+   make
+   ```
+
+   Spack propagates `gmake` jobserver information to builds so that
+   their jobs can share cores. (#30039, #30254, #30302, #30526)
+
+10. **New variant features**
+
+    In addition to being conditional themselves, variants can now have
+    [conditional *values*](https://spack.readthedocs.io/en/latest/packaging_guide.html#conditional-possible-values)
+    that are only possible for certain configurations of a package. (#29530)
+
+    Variants can be
+    [declared "sticky"](https://spack.readthedocs.io/en/latest/packaging_guide.html#sticky-variants),
+    which prevents them from being enabled or disabled by the
+    concretizer. Sticky variants must be set explicitly by users
+    on the command line or in `packages.yaml`. (#28630)
+
+* Allow conditional possible values in variants
+* Add a "sticky" property to variants
+
+## Other new features of note
+
+* Environment views can optionally link only `run` dependencies
+  with `link:run` (#29336)
+* `spack external find --all` finds library-only packages in
+  addition to build dependencies (#28005)
+* Customizable `config:license_dir` option (#30135)
+* `spack external find --path PATH` takes a custom search path (#30479)
+* `spack spec` has a new `--format` argument like `spack find` (#27908)
+* `spack concretize --quiet` skips printing concretized specs (#30272)
+* `spack info` now has cleaner output and displays test info (#22097)
+* Package-level submodule option for git commit versions (#30085, #30037)
+* Using `/hash` syntax to refer to concrete specs in an environment
+  now works even if `/hash` is not installed. (#30276)
+
+## Major internal refactors
+
+* full hash (see above)
+* new develop versioning scheme `0.19.0-dev0`
+* Allow for multiple dependencies/dependents from the same package (#28673)
+* Splice differing virtual packages (#27919)
+
+## Performance Improvements
+
+* Concretization of large environments with `unify: when_possible` is
+  much faster than concretizing separately (#28941, see above)
+* Single-pass view generation algorithm is 2.6x faster (#29443)
+
+## Archspec improvements
+
+* `oneapi` and `dpcpp` flag support (#30783)
+* better support for `M1` and `a64fx` (#30683)
+
+## Removals and Deprecations
+
+* Spack no longer supports Python `2.6` (#27256)
+* Removed deprecated `--run-tests` option of `spack install`;
+  use `spack test` (#30461)
+* Removed deprecated `spack flake8`; use `spack style` (#27290)
+
+* Deprecate `spack:concretization` config option; use
+  `concretizer:unify` (#30038)
+* Deprecate top-level module configuration; use module sets (#28659)
+* `spack activate` and `spack deactivate` are deprecated in favor of
+  environments; will be removed in `0.19.0` (#29430; see also `link:run`
+  in #29336 above)
+
+## Notable Bugfixes
+
+* Fix bug that broke locks with many parallel builds (#27846)
+* Many bugfixes and consistency improvements for the new concretizer
+  and `--reuse` (#30357, #30092, #29835, #29933, #28605, #29694, #28848)
+
+## Packages
+
+* `CMakePackage` uses `CMAKE_INSTALL_RPATH_USE_LINK_PATH` (#29703)
+* Refactored `lua` support: `lua-lang` virtual supports both
+  `lua` and `luajit` via new `LuaPackage` build system(#28854)
+* PythonPackage: now installs packages with `pip` (#27798)
+* Python: improve site_packages_dir handling (#28346)
+* Extends: support spec, not just package name (#27754)
+* `find_libraries`: search for both .so and .dylib on macOS (#28924)
+* Use stable URLs and `?full_index=1` for all github patches (#29239)
+
+## Spack community stats
+
+* 6,416 total packages, 458 new since `v0.17.0`
+  * 219 new Python packages
+  * 60 new R packages
+* 377 people contributed to this release
+  * 337 committers to packages
+  * 85 committers to core
+
+
 # v0.17.2 (2022-04-13)
 
 ### Spack bugfixes
@@ -11,7 +213,7 @@
 * Fixed a few bugs affecting the spack ci command (#29518, #29419)
 * Fix handling of Intel compiler environment (#29439)
 * Fix a few edge cases when reindexing the DB (#28764)
-* Remove "Known issues" from documentation (#29664)
+* Remove "Known issues" from documentation (#29664)
 * Other miscellaneous bugfixes (0b72e070583fc5bcd016f5adc8a84c99f2b7805f, #28403, #29261)
 
 # v0.17.1 (2021-12-23)
@@ -6,34 +6,15 @@ bootstrap:
   # by Spack is installed in a "store" subfolder of this root directory
   root: $user_cache_path/bootstrap
   # Methods that can be used to bootstrap software. Each method may or
-  # may not be able to bootstrap all of the software that Spack needs,
+  # may not be able to bootstrap all the software that Spack needs,
   # depending on its type.
   sources:
   - name: 'github-actions-v0.2'
-    type: buildcache
-    description: |
-      Buildcache generated from a public workflow using Github Actions.
-      The sha256 checksum of binaries is checked before installation.
-    info:
-      url: https://mirror.spack.io/bootstrap/github-actions/v0.2
-      homepage: https://github.com/spack/spack-bootstrap-mirrors
-      releases: https://github.com/spack/spack-bootstrap-mirrors/releases
+    metadata: $spack/share/spack/bootstrap/github-actions-v0.2
   - name: 'github-actions-v0.1'
-    type: buildcache
-    description: |
-      Buildcache generated from a public workflow using Github Actions.
-      The sha256 checksum of binaries is checked before installation.
-    info:
-      url: https://mirror.spack.io/bootstrap/github-actions/v0.1
-      homepage: https://github.com/spack/spack-bootstrap-mirrors
-      releases: https://github.com/spack/spack-bootstrap-mirrors/releases
-  # This method is just Spack bootstrapping the software it needs from sources.
-  # It has been added here so that users can selectively disable bootstrapping
-  # from sources by "untrusting" it.
-  - name: spack-install
-    type: install
-    description: |
-      Specs built from sources by Spack. May take a long time.
+    metadata: $spack/share/spack/bootstrap/github-actions-v0.1
+  - name: 'spack-install'
+    metadata: $spack/share/spack/bootstrap/spack-install
   trusted:
     # By default we trust bootstrapping from sources and from binaries
     # produced on Github via the workflow
@@ -28,3 +28,9 @@ concretizer:
   # instance concretize with target "icelake" while running on "haswell").
   # If "true" only allow targets that are compatible with the host.
   host_compatible: true
+  # When "true" concretize root specs of environments together, so that each unique
+  # package in an environment corresponds to one concrete spec. This ensures
+  # environments can always be activated. When "false" perform concretization separately
+  # on each root spec, allowing different versions and variants of the same package in
+  # an environment.
+  unify: false
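For reference, the three values the new `unify` option accepts (matching the CHANGELOG entry earlier in this diff); a minimal `concretizer.yaml` sketch, not itself part of this changeset:

```yaml
concretizer:
  # Prefer already-installed packages when concretizing (v0.18 default).
  reuse: true
  # One of: true (all root specs share one configuration per package),
  # false (each root spec concretized separately), or when_possible.
  unify: when_possible
```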
@@ -33,6 +33,9 @@ config:
   template_dirs:
     - $spack/share/spack/templates
 
+  # Directory where licenses should be located
+  license_dir: $spack/etc/spack/licenses
+
   # Temporary locations Spack can try to use for builds.
   #
   # Recommended options are given below.
@@ -50,6 +50,13 @@ build cache files for the "ninja" spec:
 Note that the targeted spec must already be installed. Once you have a build cache,
 you can add it as a mirror, discussed next.
 
+.. warning::
+
+   Spack improved the format used for binary caches in v0.18. The entire v0.18 series
+   will be able to verify and install binary caches both in the new and in the old format.
+   Support for using the old format is expected to end in v0.19, so we advise users to
+   recreate relevant buildcaches using Spack v0.18 or higher.
+
 ---------------------------------------
 Finding or installing build cache files
 ---------------------------------------
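The hunk above notes that a build cache "can be added as a mirror, discussed next." As a sketch of what that configuration looks like, a `mirrors.yaml` entry (the mirror name and path here are hypothetical):

```yaml
mirrors:
  # Any name works; file://, s3:// and https:// URLs are accepted.
  local-cache: file:///opt/spack-cache
```

The same entry can be created from the command line with `spack mirror add local-cache file:///opt/spack-cache`.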
lib/spack/docs/bootstrapping.rst (new file, 160 lines; all lines added)
@@ -0,0 +1,160 @@
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _bootstrapping:

=============
Bootstrapping
=============

In the :ref:`Getting started <getting_started>` Section we already mentioned that
Spack can bootstrap some of its dependencies, including ``clingo``. In fact, there
is an entire command dedicated to the management of every aspect of bootstrapping:

.. command-output:: spack bootstrap --help

The first thing to know to understand bootstrapping in Spack is that each of
Spack's dependencies is bootstrapped lazily; i.e. the first time it is needed and
can't be found. You can readily check if any prerequisite for using Spack
is missing by running:

.. code-block:: console

   % spack bootstrap status
   Spack v0.17.1 - python@3.8

   [FAIL] Core Functionalities
     [B] MISSING "clingo": required to concretize specs

   [FAIL] Binary packages
     [B] MISSING "gpg2": required to sign/verify buildcaches


   Spack will take care of bootstrapping any missing dependency marked as [B]. Dependencies marked as [-] are instead required to be found on the system.

In the case of the output shown above Spack detected that both ``clingo`` and ``gnupg``
are missing and it's giving detailed information on why they are needed and whether
they can be bootstrapped. Running a command that concretize a spec, like:

.. code-block:: console

   % spack solve zlib
   ==> Bootstrapping clingo from pre-built binaries
   ==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.1/build_cache/darwin-catalina-x86_64/apple-clang-12.0.0/clingo-bootstrap-spack/darwin-catalina-x86_64-apple-clang-12.0.0-clingo-bootstrap-spack-p5on7i4hejl775ezndzfdkhvwra3hatn.spack
   ==> Installing "clingo-bootstrap@spack%apple-clang@12.0.0~docs~ipo+python build_type=Release arch=darwin-catalina-x86_64" from a buildcache
   [ ... ]

triggers the bootstrapping of clingo from pre-built binaries as expected.

-----------------------
The Bootstrapping store
-----------------------

The software installed for bootstrapping purposes is deployed in a separate store.
Its location can be checked with the following command:

.. code-block:: console

   % spack bootstrap root

It can also be changed with the same command by just specifying the newly desired path:

.. code-block:: console

   % spack bootstrap root /opt/spack/bootstrap

You can check what is installed in the bootstrapping store at any time using:

.. code-block:: console

   % spack find -b
   ==> Showing internal bootstrap store at "/Users/spack/.spack/bootstrap/store"
   ==> 11 installed packages
   -- darwin-catalina-x86_64 / apple-clang@12.0.0 ------------------
   clingo-bootstrap@spack  libassuan@2.5.5  libgpg-error@1.42  libksba@1.5.1  pinentry@1.1.1  zlib@1.2.11
   gnupg@2.3.1             libgcrypt@1.9.3  libiconv@1.16      npth@1.6       python@3.8

In case it is needed you can remove all the software in the current bootstrapping store with:

.. code-block:: console

   % spack clean -b
   ==> Removing bootstrapped software and configuration in "/Users/spack/.spack/bootstrap"

   % spack find -b
   ==> Showing internal bootstrap store at "/Users/spack/.spack/bootstrap/store"
   ==> 0 installed packages

--------------------------------------------
Enabling and disabling bootstrapping methods
--------------------------------------------

Bootstrapping is always performed by trying the methods listed by:

.. command-output:: spack bootstrap list

in the order they appear, from top to bottom. By default Spack is
configured to try first bootstrapping from pre-built binaries and to
fall-back to bootstrapping from sources if that failed.

If need be, you can disable bootstrapping altogether by running:

.. code-block:: console

   % spack bootstrap disable

in which case it's your responsibility to ensure Spack runs in an
environment where all its prerequisites are installed. You can
also configure Spack to skip certain bootstrapping methods by *untrusting*
them. For instance:

.. code-block:: console

   % spack bootstrap untrust github-actions
   ==> "github-actions" is now untrusted and will not be used for bootstrapping

tells Spack to skip trying to bootstrap from binaries. To add the "github-actions" method back you can:

.. code-block:: console

   % spack bootstrap trust github-actions

There is also an option to reset the bootstrapping configuration to Spack's defaults:

.. code-block:: console

   % spack bootstrap reset
   ==> Bootstrapping configuration is being reset to Spack's defaults. Current configuration will be lost.
   Do you want to continue? [Y/n]
   %

----------------------------------------
Creating a mirror for air-gapped systems
----------------------------------------

Spack's default configuration for bootstrapping relies on the user having
access to the internet, either to fetch pre-compiled binaries or source tarballs.
Sometimes though Spack is deployed on air-gapped systems where such access is denied.

To help with similar situations Spack has a command that recreates, in a local folder
of choice, a mirror containing the source tarballs and/or binary packages needed for
bootstrapping.

.. code-block:: console

   % spack bootstrap mirror --binary-packages /opt/bootstrap
   ==> Adding "clingo-bootstrap@spack+python %apple-clang target=x86_64" and dependencies to the mirror at /opt/bootstrap/local-mirror
   ==> Adding "gnupg@2.3: %apple-clang target=x86_64" and dependencies to the mirror at /opt/bootstrap/local-mirror
   ==> Adding "patchelf@0.13.1:0.13.99 %apple-clang target=x86_64" and dependencies to the mirror at /opt/bootstrap/local-mirror
   ==> Adding binary packages from "https://github.com/alalazo/spack-bootstrap-mirrors/releases/download/v0.1-rc.2/bootstrap-buildcache.tar.gz" to the mirror at /opt/bootstrap/local-mirror

   To register the mirror on the platform where it's supposed to be used run the following command(s):
     % spack bootstrap add --trust local-sources /opt/bootstrap/metadata/sources
     % spack bootstrap add --trust local-binaries /opt/bootstrap/metadata/binaries

This command needs to be run on a machine with internet access and the resulting folder
has to be moved over to the air-gapped system. Once the local sources are added using the
commands suggested at the prompt, they can be used to bootstrap Spack.
@@ -39,6 +39,7 @@ on these ideas for each distinct build system that Spack supports:
 
    build_systems/autotoolspackage
    build_systems/cmakepackage
+   build_systems/cachedcmakepackage
    build_systems/mesonpackage
    build_systems/qmakepackage
    build_systems/sippackage
lib/spack/docs/build_systems/cachedcmakepackage.rst (new file, 123 lines; all lines added)
@@ -0,0 +1,123 @@
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _cachedcmakepackage:

------------------
CachedCMakePackage
------------------

The CachedCMakePackage base class is used for CMake-based workflows
that create a CMake cache file prior to running ``cmake``. This is
useful for packages with arguments longer than the system limit, and
for reproducibility.

The documentation for this class assumes that the user is familiar with
the ``CMakePackage`` class from which it inherits. See the documentation
for :ref:`CMakePackage <cmakepackage>`.

^^^^^^
Phases
^^^^^^

The ``CachedCMakePackage`` base class comes with the following phases:

#. ``initconfig`` - generate the CMake cache file
#. ``cmake`` - generate the Makefile
#. ``build`` - build the package
#. ``install`` - install the package

By default, these phases run:

.. code-block:: console

   $ mkdir spack-build
   $ cd spack-build
   $ cat << EOF > name-arch-compiler@version.cmake
   # Write information on compilers and dependencies
   # includes information on mpi and cuda if applicable
   $ cmake .. -DCMAKE_INSTALL_PREFIX=/path/to/installation/prefix -C name-arch-compiler@version.cmake
   $ make
   $ make test # optional
   $ make install

The ``CachedCMakePackage`` class inherits from the ``CMakePackage``
class, and accepts all of the same options and adds all of the same
flags to the ``cmake`` command. Similar to the ``CMakePackage`` class,
you may need to add a few arguments yourself, and the
``CachedCMakePackage`` provides the same interface to add those
flags.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Adding entries to the CMake cache
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

In addition to adding flags to the ``cmake`` command, you may need to
add entries to the CMake cache in the ``initconfig`` phase. This can
be done by overriding one of four methods:

#. ``CachedCMakePackage.initconfig_compiler_entries``
#. ``CachedCMakePackage.initconfig_mpi_entries``
#. ``CachedCMakePackage.initconfig_hardware_entries``
#. ``CachedCMakePackage.initconfig_package_entries``

Each of these methods returns a list of CMake cache strings. The
distinction between these methods is merely to provide a
well-structured and legible cmake cache file -- otherwise, entries
from each of these methods are handled identically.

Spack also provides convenience methods for generating CMake cache
entries. These methods are available at module scope in every Spack
package. Because CMake parses boolean options, strings, and paths
differently, there are three such methods:

#. ``cmake_cache_option``
#. ``cmake_cache_string``
#. ``cmake_cache_path``

These methods each accept three parameters -- the name of the CMake
variable associated with the entry, the value of the entry, and an
optional comment -- and return strings in the appropriate format to be
returned from any of the ``initconfig*`` methods. Additionally, these
methods may return comments beginning with the ``#`` character.

A typical usage of these methods may look something like this:

.. code-block:: python

   def initconfig_mpi_entries(self):
       # Get existing MPI configurations
       entries = super(Foo, self).initconfig_mpi_entries()

       # The existing MPI configurations key on whether ``mpi`` is in the spec
       # This spec has an MPI variant, and we need to enable MPI when it is on.
       # This hypothetical package controls MPI with the ``FOO_MPI`` option to
       # cmake.
       if '+mpi' in self.spec:
           entries.append(cmake_cache_option('FOO_MPI', True, "enable mpi"))
       else:
           entries.append(cmake_cache_option('FOO_MPI', False, "disable mpi"))

   def initconfig_package_entries(self):
       # Package specific options
       entries = []

       entries.append('#Entries for build options')

       bar_on = '+bar' in self.spec
       entries.append(cmake_cache_option('FOO_BAR', bar_on, 'toggle bar'))

       entries.append('#Entries for dependencies')

       if self.spec['blas'].name == 'baz':  # baz is our blas provider
           entries.append(cmake_cache_string('FOO_BLAS', 'baz', 'Use baz'))
           entries.append(cmake_cache_path('BAZ_PREFIX', self.spec['baz'].prefix))

^^^^^^^^^^^^^^^^^^^^^^
External documentation
^^^^^^^^^^^^^^^^^^^^^^

For more information on CMake cache files, see:
https://cmake.org/cmake/help/latest/manual/cmake.1.html
@@ -84,8 +84,8 @@ build ``hdf5`` with Intel oneAPI MPI do::
 
    spack install hdf5 +mpi ^intel-oneapi-mpi
 
-Using an Externally Installed oneAPI
-====================================
+Using Externally Installed oneAPI Tools
+=======================================
 
 Spack can also use oneAPI tools that are manually installed with
 `Intel Installers`_. The procedures for configuring Spack to use
@@ -110,7 +110,7 @@ Another option is to manually add the configuration to
 Libraries
 ---------
 
-If you want Spack to use MKL that you have installed without Spack in
+If you want Spack to use oneMKL that you have installed without Spack in
 the default location, then add the following to
 ``~/.spack/packages.yaml``, adjusting the version as appropriate::
 
@@ -139,7 +139,7 @@ You can also use Spack-installed libraries. For example::
 
    spack load intel-oneapi-mkl
 
 Will update your environment CPATH, LIBRARY_PATH, and other
-environment variables for building an application with MKL.
+environment variables for building an application with oneMKL.
 
 More information
 ================
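The hunk above references a ``~/.spack/packages.yaml`` entry for an externally installed oneMKL but does not show it. A hedged sketch of the usual external-package form (version and prefix are illustrative; adjust to the actual installation):

```yaml
packages:
  intel-oneapi-mkl:
    externals:
    - spec: intel-oneapi-mkl@2022.1.0
      prefix: /opt/intel/oneapi
    # Optional: never build from source, always use the external.
    buildable: false
```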
@@ -15,6 +15,9 @@ IntelPackage
 Intel packages in Spack
 ^^^^^^^^^^^^^^^^^^^^^^^^
 
+This is an earlier version of Intel software development tools and has
+now been replaced by Intel oneAPI Toolkits.
+
 Spack can install and use several software development products offered by Intel.
 Some of these are available under no-cost terms, others require a paid license.
 All share the same basic steps for configuration, installation, and, where
@@ -59,7 +59,8 @@ other techniques to minimize the size of the final image:
    && echo " specs:" \
    && echo " - gromacs+mpi" \
    && echo " - mpich" \
-   && echo " concretization: together" \
+   && echo " concretizer: together" \
+   && echo " unify: true" \
    && echo " config:" \
    && echo " install_tree: /opt/software" \
    && echo " view: /opt/view") > /opt/spack-environment/spack.yaml
@@ -108,9 +109,10 @@ Spack Images on Docker Hub
 --------------------------
 
 Docker images with Spack preinstalled and ready to be used are
-built on `Docker Hub <https://hub.docker.com/u/spack>`_
-at every push to ``develop`` or to a release branch. The OS that
-are currently supported are summarized in the table below:
+built when a release is tagged, or nightly on ``develop``. The images
+are then pushed both to `Docker Hub <https://hub.docker.com/u/spack>`_
+and to `GitHub Container Registry <https://github.com/orgs/spack/packages?repo_name=spack>`_.
+The OS that are currently supported are summarized in the table below:
 
 .. _containers-supported-os:
 
@@ -120,22 +122,31 @@ are currently supported are summarized in the table below:
    * - Operating System
      - Base Image
      - Spack Image
    * - Ubuntu 16.04
      - ``ubuntu:16.04``
      - ``spack/ubuntu-xenial``
    * - Ubuntu 18.04
      - ``ubuntu:18.04``
      - ``spack/ubuntu-bionic``
    * - Ubuntu 20.04
      - ``ubuntu:20.04``
      - ``spack/ubuntu-focal``
+   * - Ubuntu 22.04
+     - ``ubuntu:22.04``
+     - ``spack/ubuntu-jammy``
    * - CentOS 7
      - ``centos:7``
      - ``spack/centos7``
+   * - CentOS Stream
+     - ``quay.io/centos/centos:stream``
+     - ``spack/centos-stream``
    * - openSUSE Leap
      - ``opensuse/leap``
      - ``spack/leap15``
+   * - Amazon Linux 2
+     - ``amazonlinux:2``
+     - ``spack/amazon-linux``
 
 All the images are tagged with the corresponding release of Spack:
 
-.. image:: dockerhub_spack.png
+.. image:: images/ghcr_spack.png
 
 with the exception of the ``latest`` tag that points to the HEAD
 of the ``develop`` branch. These images are available for anyone
@@ -245,7 +256,8 @@ software is respectively built and installed:
    && echo " specs:" \
    && echo " - gromacs+mpi" \
    && echo " - mpich" \
-   && echo " concretization: together" \
+   && echo " concretizer:" \
+   && echo " unify: true" \
    && echo " config:" \
    && echo " install_tree: /opt/software" \
    && echo " view: /opt/view") > /opt/spack-environment/spack.yaml
@@ -366,7 +378,8 @@ produces, for instance, the following ``Dockerfile``:
    && echo " externals:" \
    && echo " - spec: cuda%gcc" \
    && echo " prefix: /usr/local/cuda" \
-   && echo " concretization: together" \
+   && echo " concretizer:" \
+   && echo " unify: true" \
    && echo " config:" \
    && echo " install_tree: /opt/software" \
    && echo " view: /opt/view") > /opt/spack-environment/spack.yaml
@@ -151,7 +151,7 @@ Package-related modules
 ^^^^^^^^^^^^^^^^^^^^^^^
 
 :mod:`spack.package`
-  Contains the :class:`~spack.package.Package` class, which
+  Contains the :class:`~spack.package_base.Package` class, which
   is the superclass for all packages in Spack. Methods on ``Package``
   implement all phases of the :ref:`package lifecycle
   <package-lifecycle>` and manage the build process.
Binary file not shown. (Deleted image; before: 88 KiB)
@@ -273,19 +273,9 @@ or
Concretizing
^^^^^^^^^^^^

Once some user specs have been added to an environment, they can be
concretized. *By default specs are concretized separately*, one after
the other. This mode of operation permits deploying a full
software stack where multiple configurations of the same package
need to be installed alongside each other. Central installations done
at HPC centers by system administrators or user support groups
are a common case that fits this behavior.
Environments *can also be configured to concretize all
the root specs in a self-consistent way* to ensure that
each package in the environment comes with a single configuration. This
mode of operation is usually what is required by software developers who
want to deploy their development environment.

Once some user specs have been added to an environment, they can be concretized.
There are at the moment three different modes of operation to concretize an environment,
which are explained in detail in :ref:`environments_concretization_config`.
Regardless of which mode of operation has been chosen, the following
command will ensure all the root specs are concretized according to the
constraints that are prescribed in the configuration:

@@ -493,32 +483,76 @@ Appending to this list in the yaml is identical to using the ``spack
add`` command from the command line. However, there is more power
available from the yaml file.
.. _environments_concretization_config:

^^^^^^^^^^^^^^^^^^^
Spec concretization
^^^^^^^^^^^^^^^^^^^

Specs can be concretized separately or together, as already
explained in :ref:`environments_concretization`. The behavior active
under any environment is determined by the ``concretization`` property:
An environment can be concretized in three different modes, and the behavior active under any environment
is determined by the ``concretizer:unify`` property. By default specs are concretized *separately*, one after the other:

.. code-block:: yaml

   spack:
     specs:
     - ncview
     - netcdf
     - nco
     - py-sphinx
     concretization: together
     - hdf5~mpi
     - hdf5+mpi
     - zlib@1.2.8
     concretizer:
       unify: false

which can currently take either one of the two allowed values ``together`` or ``separately``
(the default).
This mode of operation permits deploying a full software stack where multiple configurations of the same package
need to be installed alongside each other using the best possible selection of transitive dependencies. The downside
is that redundancy of installations is disregarded completely, and thus environments might be more bloated than
strictly needed. In the example above, for instance, if a version of ``zlib`` newer than ``1.2.8`` is known to Spack,
then it will be used for both ``hdf5`` installations.

If redundancy of the environment is a concern, Spack provides a way to install it *together where possible*,
i.e. trying to maximize reuse of dependencies across different specs:

.. code-block:: yaml

   spack:
     specs:
     - hdf5~mpi
     - hdf5+mpi
     - zlib@1.2.8
     concretizer:
       unify: when_possible

Also in this case Spack allows having multiple configurations of the same package, but privileges the reuse of
specs over other factors. Going back to our example, this means that both ``hdf5`` installations will use
``zlib@1.2.8`` as a dependency even if newer versions of that library are available.
Central installations done at HPC centers by system administrators or user support groups are a common case
that fits either of these two modes.

Environments can also be configured to concretize all the root specs *together*, in a self-consistent way, to
ensure that each package in the environment comes with a single configuration:

.. code-block:: yaml

   spack:
     specs:
     - hdf5+mpi
     - zlib@1.2.8
     concretizer:
       unify: true

This mode of operation is usually what is required by software developers who want to deploy their development
environment and have a single view of it in the filesystem.

.. note::

   The ``concretizer:unify`` config option was introduced in Spack 0.18 to
   replace the ``concretization`` property. For reference,
   ``concretization: together`` is replaced by ``concretizer:unify:true``,
   and ``concretization: separately`` is replaced by ``concretizer:unify:false``.

.. admonition:: Re-concretization of user specs

   When concretizing specs together the entire set of specs will be
   When concretizing specs *together* or *together where possible* the entire set of specs will be
   re-concretized after any addition of new user specs, to ensure that
   the environment remains consistent. When instead the specs are concretized
   the environment remains consistent / minimal. When instead the specs are concretized
   separately only the new specs will be re-concretized after any addition.

^^^^^^^^^^^^^
@@ -765,7 +799,7 @@ directories.
      select: [^mpi]
      exclude: ['%pgi@18.5']
      projections:
        all: {name}/{version}-{compiler.name}
        all: '{name}/{version}-{compiler.name}'
      link: all
      link_type: symlink

@@ -1017,4 +1051,4 @@ the include is conditional.
the ``--make-target-prefix`` flag and use the non-phony targets
``<target-prefix>/env`` and ``<target-prefix>/fetch`` as
prerequisites, instead of the phony targets ``<target-prefix>/all``
and ``<target-prefix>/fetch-all`` respectively.
and ``<target-prefix>/fetch-all`` respectively.

BIN
lib/spack/docs/images/ghcr_spack.png
Normal file
Binary file not shown.

@@ -63,6 +63,7 @@ or refer to the full manual below.

   configuration
   config_yaml
   bootstrapping
   build_settings
   environments
   containers
@@ -1070,13 +1070,32 @@ Commits

Submodules
   You can supply ``submodules=True`` to cause Spack to fetch submodules
   recursively along with the repository at fetch time. For more information
   about git submodules see the manpage of git: ``man git-submodule``.
   recursively along with the repository at fetch time.

   .. code-block:: python

      version('1.0.1', tag='v1.0.1', submodules=True)

   If a package needs more fine-grained control over submodules, define
   ``submodules`` to be a callable function that takes the package instance as
   its only argument. The function should return a list of submodules to be fetched.

   .. code-block:: python

      def submodules(package):
          submodules = []
          if "+variant-1" in package.spec:
              submodules.append("submodule_for_variant_1")
          if "+variant-2" in package.spec:
              submodules.append("submodule_for_variant_2")
          return submodules


      class MyPackage(Package):
          version("0.1.0", submodules=submodules)

   For more information about git submodules see the manpage of git: ``man
   git-submodule``.

.. _github-fetch:
@@ -2393,9 +2412,9 @@ Influence how dependents are built or run

Spack provides a mechanism for dependencies to influence the
environment of their dependents by overriding the
:meth:`setup_dependent_run_environment <spack.package.PackageBase.setup_dependent_run_environment>`
:meth:`setup_dependent_run_environment <spack.package_base.PackageBase.setup_dependent_run_environment>`
or the
:meth:`setup_dependent_build_environment <spack.package.PackageBase.setup_dependent_build_environment>`
:meth:`setup_dependent_build_environment <spack.package_base.PackageBase.setup_dependent_build_environment>`
methods.
The Qt package, for instance, uses this call:

@@ -2417,7 +2436,7 @@ will have the ``PYTHONPATH``, ``PYTHONHOME`` and ``PATH`` environment
variables set appropriately before starting the installation. To make things
even simpler the ``python setup.py`` command is also inserted into the module
scope of dependents by overriding a third method called
:meth:`setup_dependent_package <spack.package.PackageBase.setup_dependent_package>`
:meth:`setup_dependent_package <spack.package_base.PackageBase.setup_dependent_package>`
:

.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/python/package.py

@@ -2775,6 +2794,256 @@ Suppose a user invokes ``spack install`` like this:

Spack will fail with a constraint violation, because the version of
MPICH requested is too low for the ``mpi`` requirement in ``foo``.
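For illustration, a minimal sketch of the kind of package-side constraint
that triggers this failure (the package name and version bound here are
assumptions for the example, not taken from the surrounding text):

.. code-block:: python

   class Foo(Package):
       # foo insists on an MPI implementation at version 3 or newer, so
       # a command line like ``spack install foo ^mpich@1.0`` cannot be
       # satisfied and concretization aborts with a constraint violation.
       depends_on('mpi@3:')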

.. _custom-attributes:

------------------
Custom attributes
------------------

Often a package will need to provide attributes for dependents to query
various details about what it provides. While any number of custom defined
attributes can be implemented by a package, the four specific attributes
described below are always available on every package with default
implementations and the ability to customize with alternate implementations
in the case of virtual packages provided:

=========== =========================================== =====================
Attribute   Purpose                                     Default
=========== =========================================== =====================
``home``    The installation path for the package      ``spec.prefix``
``command`` An executable command for the package      | ``spec.name`` found
                                                         in
                                                       | ``.home.bin``
``headers`` A list of headers provided by the package  | All headers
                                                         searched
                                                       | recursively in
                                                         ``.home.include``
``libs``    A list of libraries provided by the package | ``lib{spec.name}``
                                                          searched
                                                        | recursively in
                                                          ``.home`` starting
                                                        | with ``lib``,
                                                          ``lib64``, then the
                                                        | rest of ``.home``
=========== =========================================== =====================

Each of these can be customized by implementing the relevant attribute
as a ``@property`` in the package's class:
.. code-block:: python
   :linenos:

   class Foo(Package):
       ...
       @property
       def libs(self):
           # The library provided by Foo is libMyFoo.so
           return find_libraries('libMyFoo', root=self.home, recursive=True)

A package may also provide a custom implementation of each attribute
for the virtual packages it provides by implementing the
``virtualpackagename_attributename`` property in the package's class.
The implementation used is the first one found from:

#. Specialized virtual: ``Package.virtualpackagename_attributename``
#. Generic package: ``Package.attributename``
#. Default

The use of customized attributes is demonstrated in the next example.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Example: Customized attributes for virtual packages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Consider a package ``foo`` that can optionally provide two virtual
packages ``bar`` and ``baz``. When both are enabled the installation tree
appears as follows:
.. code-block:: console

   include/foo.h
   include/bar/bar.h
   lib64/libFoo.so
   lib64/libFooBar.so
   baz/include/baz/baz.h
   baz/lib/libFooBaz.so

The install tree shows that ``foo`` is providing the header ``include/foo.h``
and library ``lib64/libFoo.so`` in its install prefix. The virtual
package ``bar`` is providing ``include/bar/bar.h`` and library
``lib64/libFooBar.so``, also in ``foo``'s install prefix. The ``baz``
package, however, is provided in the ``baz`` subdirectory of ``foo``'s
prefix with the ``include/baz/baz.h`` header and ``lib/libFooBaz.so``
library. Such a package could implement the optional attributes as
follows:
.. code-block:: python
   :linenos:

   class Foo(Package):
       ...
       variant('bar', default=False, description='Enable the Foo implementation of bar')
       variant('baz', default=False, description='Enable the Foo implementation of baz')
       ...
       provides('bar', when='+bar')
       provides('baz', when='+baz')
       ...

       # Just the foo headers
       @property
       def headers(self):
           return find_headers('foo', root=self.home.include, recursive=False)

       # Just the foo libraries
       @property
       def libs(self):
           return find_libraries('libFoo', root=self.home, recursive=True)

       # The header provided by the bar virtual package
       @property
       def bar_headers(self):
           return find_headers('bar/bar.h', root=self.home.include, recursive=False)

       # The library provided by the bar virtual package
       @property
       def bar_libs(self):
           return find_libraries('libFooBar', root=self.home, recursive=True)

       # The baz virtual package home
       @property
       def baz_home(self):
           return self.prefix.baz

       # The header provided by the baz virtual package
       @property
       def baz_headers(self):
           return find_headers('baz/baz', root=self.baz_home.include, recursive=False)

       # The library provided by the baz virtual package
       @property
       def baz_libs(self):
           return find_libraries('libFooBaz', root=self.baz_home, recursive=True)
Now consider another package, ``foo-app``, depending on all three:

.. code-block:: python
   :linenos:

   class FooApp(CMakePackage):
       ...
       depends_on('foo')
       depends_on('bar')
       depends_on('baz')

The resulting spec objects for its dependencies show the result of
the above attribute implementations:
.. code-block:: python

   # The core headers and libraries of the foo package

   >>> spec['foo']
   foo@1.0%gcc@11.3.1+bar+baz arch=linux-fedora35-haswell
   >>> spec['foo'].prefix
   '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6'

   # home defaults to the package install prefix without an explicit implementation
   >>> spec['foo'].home
   '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6'

   # foo headers from the foo prefix
   >>> spec['foo'].headers
   HeaderList([
       '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/include/foo.h',
   ])

   # foo include directories from the foo prefix
   >>> spec['foo'].headers.directories
   ['/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/include']

   # foo libraries from the foo prefix
   >>> spec['foo'].libs
   LibraryList([
       '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/lib64/libFoo.so',
   ])

   # foo library directories from the foo prefix
   >>> spec['foo'].libs.directories
   ['/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/lib64']

.. code-block:: python

   # The virtual bar package in the same prefix as foo

   # bar resolves to the foo package
   >>> spec['bar']
   foo@1.0%gcc@11.3.1+bar+baz arch=linux-fedora35-haswell
   >>> spec['bar'].prefix
   '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6'

   # home defaults to the foo prefix without either a Foo.bar_home
   # or Foo.home implementation
   >>> spec['bar'].home
   '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6'

   # bar header in the foo prefix
   >>> spec['bar'].headers
   HeaderList([
       '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/include/bar/bar.h'
   ])

   # bar include dirs from the foo prefix
   >>> spec['bar'].headers.directories
   ['/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/include']

   # bar library from the foo prefix
   >>> spec['bar'].libs
   LibraryList([
       '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/lib64/libFooBar.so'
   ])

   # bar library directories from the foo prefix
   >>> spec['bar'].libs.directories
   ['/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/lib64']

.. code-block:: python

   # The virtual baz package in a subdirectory of foo's prefix

   # baz resolves to the foo package
   >>> spec['baz']
   foo@1.0%gcc@11.3.1+bar+baz arch=linux-fedora35-haswell
   >>> spec['baz'].prefix
   '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6'

   # baz_home implementation provides the subdirectory inside the foo prefix
   >>> spec['baz'].home
   '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz'

   # baz headers in the baz subdirectory of the foo prefix
   >>> spec['baz'].headers
   HeaderList([
       '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz/include/baz/baz.h'
   ])

   # baz include directories in the baz subdirectory of the foo prefix
   >>> spec['baz'].headers.directories
   [
       '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz/include'
   ]

   # baz libraries in the baz subdirectory of the foo prefix
   >>> spec['baz'].libs
   LibraryList([
       '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz/lib/libFooBaz.so'
   ])

   # baz library directories in the baz subdirectory of the foo prefix
   >>> spec['baz'].libs.directories
   [
       '/opt/spack/linux-fedora35-haswell/gcc-11.3.1/foo-1.0-ca3rczp5omy7dfzoqw4p7oc2yh3u7lt6/baz/lib'
   ]
.. _abstract-and-concrete:

-------------------------

@@ -3022,7 +3291,7 @@ The classes that are currently provided by Spack are:
+----------------------------------------------------------+----------------------------------+
|                       **Base Class**                     |           **Purpose**            |
+==========================================================+==================================+
| :class:`~spack.package.Package`                          | General base class not           |
| :class:`~spack.package_base.Package`                     | General base class not           |
|                                                          | specialized for any build system |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.makefile.MakefilePackage`   | Specialized class for packages   |

@@ -3153,7 +3422,7 @@ for the install phase is:
For those not used to Python instance methods, this is the
package itself. In this case it's an instance of ``Foo``, which
extends ``Package``. For API docs on Package objects, see
:py:class:`Package <spack.package.Package>`.
:py:class:`Package <spack.package_base.Package>`.

``spec``
   This is the concrete spec object created by Spack from an
@@ -5476,6 +5745,24 @@ Version Lists

Spack packages should list supported versions with the newest first.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Using ``home`` vs ``prefix``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^

``home`` and ``prefix`` are both attributes that can be queried on a
package's dependencies, often when passing configure arguments pointing to the
location of a dependency. The difference is that while ``prefix`` is the
location on disk where a concrete package resides, ``home`` is the `logical`
location where a package resides, which may be different than ``prefix`` in
the case of virtual packages or other special circumstances. For most use
cases inside a package, its dependency locations can be accessed via either
``self.spec['foo'].home`` or ``self.spec['foo'].prefix``. Specific packages
that should be consumed by dependents via ``.home`` instead of ``.prefix``
should be noted in their respective documentation.

See :ref:`custom-attributes` for more details and an example implementing
a custom ``home`` attribute. A short sketch of the typical use is shown below.
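A minimal sketch of the distinction in practice (the configure flags and
dependency names are illustrative assumptions):

.. code-block:: python

   def configure_args(self):
       # ``home`` is the logical location and the safer default for
       # dependencies that may be virtual; ``prefix`` is always the
       # literal on-disk installation directory.
       return [
           '--with-mpi={0}'.format(self.spec['mpi'].home),
           '--with-zlib={0}'.format(self.spec['zlib'].prefix),
       ]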

---------------------------
Packaging workflow commands
---------------------------
@@ -115,7 +115,8 @@ And here's the spack environment built by the pipeline represented as a

   spack:
     view: false
     concretization: separately
     concretizer:
       unify: false

     definitions:
     - pkgs:

@@ -61,7 +61,7 @@ You can see the packages we added earlier in the ``specs:`` section. If you
ever want to add more packages, you can either use ``spack add`` or manually
edit this file.

We also need to change the ``concretization:`` option. By default, Spack
We also need to change the ``concretizer:unify`` option. By default, Spack
concretizes each spec *separately*, allowing multiple versions of the same
package to coexist. Since we want a single consistent environment, we want to
concretize all of the specs *together*.

@@ -78,7 +78,8 @@ Here is what your ``spack.yaml`` looks like with this new setting:

   # add package specs to the `specs` list
   specs: [bash@5, python, py-numpy, py-scipy, py-matplotlib]
   view: true
   concretization: together
   concretizer:
     unify: true

^^^^^^^^^^^^^^^^
Symlink location
@@ -25,4 +25,5 @@ spack:
   - subversion
   # Plotting
   - graphviz
   concretization: together
   concretizer:
     unify: true

@@ -7,7 +7,7 @@ bash, , , Compiler wrappers
tar, , , Extract/create archives
gzip, , , Compress/Decompress archives
unzip, , , Compress/Decompress archives
bzip, , , Compress/Decompress archives
bzip2, , , Compress/Decompress archives
xz, , , Compress/Decompress archives
zstd, , Optional, Compress/Decompress archives
file, , , Create/Use Buildcaches
@@ -15,4 +15,4 @@ gnupg2, , , Sign/Verify Buildcaches
git, , , Manage Software Repositories
svn, , Optional, Manage Software Repositories
hg, , Optional, Manage Software Repositories
Python header files, , Optional (e.g. ``python3-dev`` on Debian), Bootstrapping from sources
Python header files, , Optional (e.g. ``python3-dev`` on Debian), Bootstrapping from sources
lib/spack/env/cc (vendored, 6 changed lines)
@@ -1,4 +1,4 @@
#!/bin/sh
#!/bin/sh -f
# shellcheck disable=SC2034  # evals in this script fool shellcheck
#
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
@@ -768,7 +768,9 @@ if [ "$SPACK_DEBUG" = TRUE ]; then
    input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.in.log"
    output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_DEBUG_LOG_ID.out.log"
    echo "[$mode] $command $input_command" >> "$input_log"
    echo "[$mode] ${full_command_list}" >> "$output_log"
    IFS="$lsep"
    echo "[$mode] "$full_command_list >> "$output_log"
    unset IFS
fi

# Execute the full command, preserving spaces with IFS set
lib/spack/external/__init__.py (vendored, 2 changed lines)
@@ -18,7 +18,7 @@

* Homepage: https://pypi.python.org/pypi/archspec
* Usage: Labeling, comparison and detection of microarchitectures
* Version: 0.1.2 (commit 85757b6666422fca86aa882a769bf78b0f992f54)
* Version: 0.1.4 (commit b8eea9df2b4204ff27d204452cd46f5199a0b423)

argparse
--------
lib/spack/external/archspec/cpu/detect.py (vendored, 75 changed lines)
@@ -61,7 +61,7 @@ def proc_cpuinfo():
    ``/proc/cpuinfo``
    """
    info = {}
    with open("/proc/cpuinfo") as file:
    with open("/proc/cpuinfo") as file:  # pylint: disable=unspecified-encoding
        for line in file:
            key, separator, value = line.partition(":")
@@ -80,26 +80,46 @@ def proc_cpuinfo():


def _check_output(args, env):
    output = subprocess.Popen(args, stdout=subprocess.PIPE, env=env).communicate()[0]
    output = subprocess.Popen(  # pylint: disable=consider-using-with
        args, stdout=subprocess.PIPE, env=env
    ).communicate()[0]
    return six.text_type(output.decode("utf-8"))


def _machine():
    """Return the machine architecture we are on"""
    operating_system = platform.system()

    # If we are not on Darwin, trust what Python tells us
    if operating_system != "Darwin":
        return platform.machine()

    # On Darwin it might happen that we are on M1, but using an interpreter
    # built for x86_64. In that case "platform.machine() == 'x86_64'", so we
    # need to fix that.
    #
    # See: https://bugs.python.org/issue42704
    output = _check_output(
        ["sysctl", "-n", "machdep.cpu.brand_string"], env=_ensure_bin_usrbin_in_path()
    ).strip()

    if "Apple" in output:
        # Note that a native Python interpreter on Apple M1 would return
        # "arm64" instead of "aarch64". Here we normalize to the latter.
        return "aarch64"

    return "x86_64"


@info_dict(operating_system="Darwin")
def sysctl_info_dict():
    """Returns a raw info dictionary parsing the output of sysctl."""
    # Make sure that /sbin and /usr/sbin are in PATH as sysctl is
    # usually found there
    child_environment = dict(os.environ.items())
    search_paths = child_environment.get("PATH", "").split(os.pathsep)
    for additional_path in ("/sbin", "/usr/sbin"):
        if additional_path not in search_paths:
            search_paths.append(additional_path)
    child_environment["PATH"] = os.pathsep.join(search_paths)
    child_environment = _ensure_bin_usrbin_in_path()

    def sysctl(*args):
        return _check_output(["sysctl"] + list(args), env=child_environment).strip()

    if platform.machine() == "x86_64":
    if _machine() == "x86_64":
        flags = (
            sysctl("-n", "machdep.cpu.features").lower()
            + " "
@@ -125,6 +145,18 @@ def sysctl(*args):
    return info


def _ensure_bin_usrbin_in_path():
    # Make sure that /sbin and /usr/sbin are in PATH as sysctl is
    # usually found there
    child_environment = dict(os.environ.items())
    search_paths = child_environment.get("PATH", "").split(os.pathsep)
    for additional_path in ("/sbin", "/usr/sbin"):
        if additional_path not in search_paths:
            search_paths.append(additional_path)
    child_environment["PATH"] = os.pathsep.join(search_paths)
    return child_environment
def adjust_raw_flags(info):
    """Adjust the flags detected on the system to homogenize
    slightly different representations.

@@ -184,12 +216,7 @@ def compatible_microarchitectures(info):
    Args:
        info (dict): dictionary containing information on the host cpu
    """
    architecture_family = platform.machine()
    # On Apple M1 platform.machine() returns "arm64" instead of "aarch64"
    # so we should normalize the name here
    if architecture_family == "arm64":
        architecture_family = "aarch64"

    architecture_family = _machine()
    # If a tester is not registered, be conservative and assume no known
    # target is compatible with the host
    tester = COMPATIBILITY_CHECKS.get(architecture_family, lambda x, y: False)

@@ -244,12 +271,7 @@ def compatibility_check(architecture_family):
        architecture_family = (architecture_family,)

    def decorator(func):
        # pylint: disable=fixme
        # TODO: on removal of Python 2.6 support this can be re-written as
        # TODO: an update + a dict comprehension
        for arch_family in architecture_family:
            COMPATIBILITY_CHECKS[arch_family] = func

        COMPATIBILITY_CHECKS.update({family: func for family in architecture_family})
        return func

    return decorator
@@ -288,7 +310,7 @@ def compatibility_check_for_x86_64(info, target):
    arch_root = TARGETS[basename]
    return (
        (target == arch_root or arch_root in target.ancestors)
        and (target.vendor == vendor or target.vendor == "generic")
        and target.vendor in (vendor, "generic")
        and target.features.issubset(features)
    )

@@ -303,8 +325,9 @@ def compatibility_check_for_aarch64(info, target):
    arch_root = TARGETS[basename]
    return (
        (target == arch_root or arch_root in target.ancestors)
        and (target.vendor == vendor or target.vendor == "generic")
        and target.features.issubset(features)
        and target.vendor in (vendor, "generic")
        # On macOS it seems impossible to get all the CPU features with sysctl info
        and (target.features.issubset(features) or platform.system() == "Darwin")
    )
lib/spack/external/archspec/cpu/schema.py (vendored, 4 changed lines)
@@ -11,7 +11,7 @@
try:
    from collections.abc import MutableMapping  # novm
except ImportError:
    from collections import MutableMapping
    from collections import MutableMapping  # pylint: disable=deprecated-class


class LazyDictionary(MutableMapping):

@@ -56,7 +56,7 @@ def _load_json_file(json_file):

    def _factory():
        filename = os.path.join(json_dir, json_file)
        with open(filename, "r") as file:
        with open(filename, "r") as file:  # pylint: disable=unspecified-encoding
            return json.load(file)

    return _factory
@@ -85,7 +85,21 @@
|
||||
"intel": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "pentium4",
|
||||
"name": "x86-64",
|
||||
"flags": "-march={name} -mtune=generic"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "x86-64",
|
||||
"flags": "-march={name} -mtune=generic"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "x86-64",
|
||||
"flags": "-march={name} -mtune=generic"
|
||||
}
|
||||
]
|
||||
@@ -129,6 +143,20 @@
|
||||
"name": "x86-64",
|
||||
"flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": "2021.2.0:",
|
||||
"name": "x86-64-v2",
|
||||
"flags": "-march={name} -mtune=generic"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": "2021.2.0:",
|
||||
"name": "x86-64-v2",
|
||||
"flags": "-march={name} -mtune=generic"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -186,6 +214,20 @@
|
||||
"name": "x86-64",
|
||||
"flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mmovbe -mxsave"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": "2021.2.0:",
|
||||
"name": "x86-64-v3",
|
||||
"flags": "-march={name} -mtune=generic"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": "2021.2.0:",
|
||||
"name": "x86-64-v3",
|
||||
"flags": "-march={name} -mtune=generic"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -248,6 +290,20 @@
|
||||
"name": "x86-64",
|
||||
"flags": "-march={name} -mtune=generic -mcx16 -msahf -mpopcnt -msse3 -msse4.1 -msse4.2 -mssse3 -mavx -mavx2 -mbmi -mbmi2 -mf16c -mfma -mlzcnt -mmovbe -mxsave -mavx512f -mavx512bw -mavx512cd -mavx512dq -mavx512vl"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": "2021.2.0:",
|
||||
"name": "x86-64-v4",
|
||||
"flags": "-march={name} -mtune=generic"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": "2021.2.0:",
|
||||
"name": "x86-64-v4",
|
||||
"flags": "-march={name} -mtune=generic"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -288,8 +344,19 @@
|
||||
"intel": [
|
||||
{
|
||||
"versions": "16.0:",
|
||||
"name": "pentium4",
|
||||
"flags": "-march={name} -mtune=generic"
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -333,6 +400,18 @@
|
||||
"versions": "16.0:",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -384,6 +463,20 @@
|
||||
"name": "corei7",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "corei7",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "corei7",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -432,6 +525,20 @@
|
||||
"name": "corei7",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "corei7",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "corei7",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -490,6 +597,18 @@
|
||||
"versions": "18.0:",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -550,6 +669,18 @@
|
||||
"versions": "18.0:",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -615,6 +746,18 @@
|
||||
"versions": "18.0:",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -672,6 +815,18 @@
|
||||
"versions": "18.0:",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -732,6 +887,18 @@
|
||||
"versions": "18.0:",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -798,6 +965,20 @@
|
||||
"name": "knl",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "knl",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "knl",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -868,6 +1049,20 @@
|
||||
"name": "skylake-avx512",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "skylake-avx512",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "skylake-avx512",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -937,6 +1132,18 @@
|
||||
"versions": "18.0:",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1004,6 +1211,18 @@
|
||||
"versions": "19.0.1:",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1098,6 +1317,20 @@
|
||||
"name": "icelake-client",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "icelake-client",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"name": "icelake-client",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1142,6 +1375,20 @@
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse2"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse2"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse2"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1192,6 +1439,20 @@
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse3"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse3"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse3"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1246,6 +1507,20 @@
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse3"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse3"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse3"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1301,6 +1576,20 @@
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse4.2"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse4.2"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"flags": "-msse4.2"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1360,6 +1649,22 @@
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1422,6 +1727,22 @@
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1485,6 +1806,22 @@
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1543,6 +1880,30 @@
|
||||
"name": "znver3",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"intel": [
|
||||
{
|
||||
"versions": "16.0:",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"oneapi": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
],
|
||||
"dpcpp": [
|
||||
{
|
||||
"versions": ":",
|
||||
"warnings": "Intel's compilers may or may not optimize to the same degree for non-Intel microprocessors for optimizations that are not unique to Intel microprocessors",
|
||||
"name": "core-avx2",
|
||||
"flags": "-march={name} -mtune={name}"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
@@ -1788,7 +2149,6 @@
        "fp",
        "asimd",
        "evtstrm",
        "aes",
        "pmull",
        "sha1",
        "sha2",
@@ -1821,18 +2181,26 @@
          "flags": "-march=armv8.2-a+crc+crypto+fp16"
        },
        {
          "versions": "8:",
          "flags": "-march=armv8.2-a+crc+aes+sha2+fp16+sve -msve-vector-bits=512"
          "versions": "8:10.2",
          "flags": "-march=armv8.2-a+crc+sha2+fp16+sve -msve-vector-bits=512"
        },
        {
          "versions": "10.3:",
          "flags": "-mcpu=a64fx -msve-vector-bits=512"
        }
      ],
      "clang": [
        {
          "versions": "3.9:4.9",
          "flags": "-march=armv8.2-a+crc+crypto+fp16"
          "flags": "-march=armv8.2-a+crc+sha2+fp16"
        },
        {
          "versions": "5:",
          "flags": "-march=armv8.2-a+crc+crypto+fp16+sve"
          "versions": "5:10",
          "flags": "-march=armv8.2-a+crc+sha2+fp16+sve"
        },
        {
          "versions": "11:",
          "flags": "-mcpu=a64fx"
        }
      ],
      "arm": [
@@ -1954,7 +2322,40 @@
    "m1": {
      "from": ["aarch64"],
      "vendor": "Apple",
      "features": [],
      "features": [
        "fp",
        "asimd",
        "evtstrm",
        "aes",
        "pmull",
        "sha1",
        "sha2",
        "crc32",
        "atomics",
        "fphp",
        "asimdhp",
        "cpuid",
        "asimdrdm",
        "jscvt",
        "fcma",
        "lrcpc",
        "dcpop",
        "sha3",
        "asimddp",
        "sha512",
        "asimdfhm",
        "dit",
        "uscat",
        "ilrcpc",
        "flagm",
        "ssbs",
        "sb",
        "paca",
        "pacg",
        "dcpodp",
        "flagm2",
        "frint"
      ],
      "compilers": {
        "gcc": [
          {

@@ -1964,14 +2365,22 @@
        ],
        "clang" : [
          {
            "versions": "9.0:",
            "versions": "9.0:12.0",
            "flags" : "-march=armv8.4-a"
          },
          {
            "versions": "13.0:",
            "flags" : "-mcpu=apple-m1"
          }
        ],
        "apple-clang": [
          {
            "versions": "11.0:",
            "versions": "11.0:12.5",
            "flags" : "-march=armv8.4-a"
          },
          {
            "versions": "13.0:",
            "flags" : "-mcpu=apple-m1"
          }
        ]
      }
@@ -308,6 +308,68 @@ def change_sed_delimiter(old_delim, new_delim, *filenames):
        filter_file(double_quoted, '"%s"' % repl, f)


@contextmanager
def exploding_archive_catch(stage):
    # Check for an exploding tarball, i.e. one that doesn't expand to
    # a single directory.  If the tarball *didn't* explode, move its
    # contents to the staging source directory & remove the container
    # directory.  If the tarball did explode, just rename the tarball
    # directory to the staging source directory.
    #
    # NOTE: The tar program on Mac OS X will encode HFS metadata in
    # hidden files, which can end up *alongside* a single top-level
    # directory.  We initially ignore presence of hidden files to
    # accommodate these "semi-exploding" tarballs but ensure the files
    # are copied to the source directory.

    # Expand all tarballs in their own directory to contain
    # exploding tarballs.
    tarball_container = os.path.join(stage.path,
                                     "spack-expanded-archive")
    mkdirp(tarball_container)
    orig_dir = os.getcwd()
    os.chdir(tarball_container)
    try:
        yield
        # catch an exploding archive on successful extraction
        os.chdir(orig_dir)
        exploding_archive_handler(tarball_container, stage)
    except Exception as e:
        # return current directory context to previous on failure
        os.chdir(orig_dir)
        raise e


@system_path_filter
def exploding_archive_handler(tarball_container, stage):
    """
    Args:
        tarball_container: where the archive was expanded to
        stage: Stage object referencing filesystem location
            where archive is being expanded
    """
    files = os.listdir(tarball_container)
    non_hidden = [f for f in files if not f.startswith('.')]
    if len(non_hidden) == 1:
        src = os.path.join(tarball_container, non_hidden[0])
        if os.path.isdir(src):
            stage.srcdir = non_hidden[0]
            shutil.move(src, stage.source_path)
            if len(files) > 1:
                files.remove(non_hidden[0])
                for f in files:
                    src = os.path.join(tarball_container, f)
                    dest = os.path.join(stage.path, f)
                    shutil.move(src, dest)
            os.rmdir(tarball_container)
        else:
            # This is a non-directory entry (e.g., a patch file) so simply
            # rename the tarball container to be the source path.
            shutil.move(tarball_container, stage.source_path)
    else:
        shutil.move(tarball_container, stage.source_path)
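A short sketch of how this context manager is meant to be used during
staging (the ``stage`` object and the use of ``which`` here are
illustrative assumptions):

.. code-block:: python

   # Extraction runs inside the catch; on success the handler decides
   # whether the archive "exploded" and populates stage.source_path.
   with exploding_archive_catch(stage):
       tar = which('tar', required=True)
       tar('-xf', stage.archive_file)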
@system_path_filter(arg_slice=slice(1))
def get_owner_uid(path, err_msg=None):
    if not os.path.exists(path):

@@ -367,7 +429,7 @@ def group_ids(uid=None):


@system_path_filter(arg_slice=slice(1))
def chgrp(path, group):
def chgrp(path, group, follow_symlinks=True):
    """Implement the bash chgrp function on a single path"""
    if is_windows:
        raise OSError("Function 'chgrp' is not supported on Windows")

@@ -376,7 +438,10 @@ def chgrp(path, group):
        gid = grp.getgrnam(group).gr_gid
    else:
        gid = group
    os.chown(path, -1, gid)
    if follow_symlinks:
        os.chown(path, -1, gid)
    else:
        os.lchown(path, -1, gid)
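A one-line usage sketch of the new keyword (the path and group name are
illustrative assumptions):

.. code-block:: python

   # Change the group of the symlink itself rather than its target
   chgrp('/opt/view/bin/python', 'spack', follow_symlinks=False)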

@system_path_filter(arg_slice=slice(1))
@@ -11,7 +11,9 @@
import os
import re
import sys
import traceback
from datetime import datetime, timedelta
from typing import List, Tuple

import six
from six import string_types

@@ -1009,3 +1011,64 @@ def __repr__(self):

    def __str__(self):
        return str(self.data)


class GroupedExceptionHandler(object):
    """A generic mechanism to coalesce multiple exceptions and preserve tracebacks."""

    def __init__(self):
        self.exceptions = []  # type: List[Tuple[str, Exception, List[str]]]

    def __bool__(self):
        """Whether any exceptions were handled."""
        return bool(self.exceptions)

    def forward(self, context):
        # type: (str) -> GroupedExceptionForwarder
        """Return a contextmanager which extracts tracebacks and prefixes a message."""
        return GroupedExceptionForwarder(context, self)

    def _receive_forwarded(self, context, exc, tb):
        # type: (str, Exception, List[str]) -> None
        self.exceptions.append((context, exc, tb))

    def grouped_message(self, with_tracebacks=True):
        # type: (bool) -> str
        """Print out an error message coalescing all the forwarded errors."""
        each_exception_message = [
            '{0} raised {1}: {2}{3}'.format(
                context,
                exc.__class__.__name__,
                exc,
                '\n{0}'.format(''.join(tb)) if with_tracebacks else '',
            )
            for context, exc, tb in self.exceptions
        ]
        return 'due to the following failures:\n{0}'.format(
            '\n'.join(each_exception_message)
        )


class GroupedExceptionForwarder(object):
    """A contextmanager to capture exceptions and forward them to a
    GroupedExceptionHandler."""

    def __init__(self, context, handler):
        # type: (str, GroupedExceptionHandler) -> None
        self._context = context
        self._handler = handler

    def __enter__(self):
        return None

    def __exit__(self, exc_type, exc_value, tb):
        if exc_value is not None:
            self._handler._receive_forwarded(
                self._context,
                exc_value,
                traceback.format_tb(tb),
            )

        # Suppress any exception from being re-raised:
        # https://docs.python.org/3/reference/datamodel.html#object.__exit__.
        return True
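A sketch of the intended usage pattern for the two classes above (the
task names and callables are illustrative assumptions):

.. code-block:: python

   handler = GroupedExceptionHandler()
   succeeded = False
   for name, attempt in [('method A', try_a), ('method B', try_b)]:
       with handler.forward(name):
           attempt()          # any exception is captured, not re-raised
           succeeded = True
           break
   if not succeeded:
       raise RuntimeError('all methods failed ' + handler.grouped_message())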
@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

#: (major, minor, micro, dev release) tuple
spack_version_info = (0, 18, 0, 'dev0')
spack_version_info = (0, 19, 0, 'dev0')

#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
spack_version = '.'.join(str(s) for s in spack_version_info)
@@ -12,7 +12,7 @@
import spack.error
import spack.hooks
import spack.monitor
import spack.package
import spack.package_base
import spack.repo
import spack.util.executable


@@ -298,7 +298,8 @@ def _check_build_test_callbacks(pkgs, error_cls):
def _check_patch_urls(pkgs, error_cls):
    """Ensure that patches fetched from GitHub have stable sha256 hashes."""
    github_patch_url_re = (
        r"^https?://github\.com/.+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)"
        r"^https?://(?:patch-diff\.)?github(?:usercontent)?\.com/"
        ".+/.+/(?:commit|pull)/[a-fA-F0-9]*.(?:patch|diff)"
    )

    errors = []
@@ -210,7 +210,7 @@ def get_all_built_specs(self):

        return spec_list

    def find_built_spec(self, spec):
    def find_built_spec(self, spec, mirrors_to_check=None):
        """Look in our cache for the built spec corresponding to ``spec``.

        If the spec can be found among the configured binary mirrors, a

@@ -225,6 +225,8 @@ def find_built_spec(self, spec):

        Args:
            spec (spack.spec.Spec): Concrete spec to find
            mirrors_to_check: Optional mapping containing mirrors to check. If
                None, just assumes all configured mirrors.

        Returns:
            A list of objects containing the found specs and mirror url where

@@ -240,17 +242,23 @@ def find_built_spec(self, spec):
            ]
        """
        self.regenerate_spec_cache()
        return self.find_by_hash(spec.dag_hash())
        return self.find_by_hash(spec.dag_hash(), mirrors_to_check=mirrors_to_check)

    def find_by_hash(self, find_hash):
    def find_by_hash(self, find_hash, mirrors_to_check=None):
        """Same as find_built_spec but uses the hash of a spec.

        Args:
            find_hash (str): hash of the spec to search
            mirrors_to_check: Optional mapping containing mirrors to check. If
                None, just assumes all configured mirrors.
        """
        if find_hash not in self._mirrors_for_spec:
            return None
        return self._mirrors_for_spec[find_hash]
        results = self._mirrors_for_spec[find_hash]
        if not mirrors_to_check:
            return results
        mirror_urls = mirrors_to_check.values()
        return [r for r in results if r['mirror_url'] in mirror_urls]
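A sketch of how the new parameter narrows a lookup to a subset of mirrors
(the index object, mirror name, and URL are illustrative assumptions):

.. code-block:: python

   # Only accept cached results that came from this one mirror
   results = binary_index.find_built_spec(
       spec, mirrors_to_check={'local': 'file:///tmp/binary-mirror'})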

    def update_spec(self, spec, found_list):
        """

@@ -563,6 +571,13 @@ def __init__(self, msg):
        super(NewLayoutException, self).__init__(msg)


class UnsignedPackageException(spack.error.SpackError):
    """
    Raised if installation of unsigned package is attempted without
    the use of ``--no-check-signature``.
    """


def compute_hash(data):
    return hashlib.sha256(data.encode('utf-8')).hexdigest()
@@ -751,15 +766,16 @@ def select_signing_key(key=None):
|
||||
return key
|
||||
|
||||
|
||||
def sign_tarball(key, force, specfile_path):
|
||||
if os.path.exists('%s.asc' % specfile_path):
|
||||
def sign_specfile(key, force, specfile_path):
|
||||
signed_specfile_path = '%s.sig' % specfile_path
|
||||
if os.path.exists(signed_specfile_path):
|
||||
if force:
|
||||
os.remove('%s.asc' % specfile_path)
|
||||
os.remove(signed_specfile_path)
|
||||
else:
|
||||
raise NoOverwriteException('%s.asc' % specfile_path)
|
||||
raise NoOverwriteException(signed_specfile_path)
|
||||
|
||||
key = select_signing_key(key)
|
||||
spack.util.gpg.sign(key, specfile_path, '%s.asc' % specfile_path)
|
||||
spack.util.gpg.sign(key, specfile_path, signed_specfile_path, clearsign=True)
|
||||
|
||||
|
||||
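Note: the move from sign_tarball to sign_specfile swaps a detached .asc signature for a clearsigned .sig that wraps the JSON itself. Roughly, the two styles correspond to these gpg invocations (illustrative shell-outs with a hypothetical path, not Spack's gpg wrapper):

import subprocess

specfile_path = 'zlib.spec.json'  # hypothetical
# old style: detached ASCII-armored signature next to the file
subprocess.run(['gpg', '--armor', '--detach-sign',
                '--output', specfile_path + '.asc', specfile_path])
# new style: clearsign, so the .sig file carries payload and signature together
subprocess.run(['gpg', '--clearsign',
                '--output', specfile_path + '.sig', specfile_path])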
 def _fetch_spec_from_mirror(spec_url):
@@ -768,7 +784,10 @@ def _fetch_spec_from_mirror(spec_url):
         _, _, spec_file = web_util.read_from_url(spec_url)
         spec_file_contents = codecs.getreader('utf-8')(spec_file).read()
         # Need full spec.json name or this gets confused with index.json.
-        if spec_url.endswith('.json'):
+        if spec_url.endswith('.json.sig'):
+            specfile_json = Spec.extract_json_from_clearsig(spec_file_contents)
+            s = Spec.from_dict(specfile_json)
+        elif spec_url.endswith('.json'):
             s = Spec.from_json(spec_file_contents)
         elif spec_url.endswith('.yaml'):
             s = Spec.from_yaml(spec_file_contents)
@@ -829,7 +848,9 @@ def generate_package_index(cache_prefix):
         file_list = (
             entry
             for entry in web_util.list_url(cache_prefix)
-            if entry.endswith('.yaml') or entry.endswith('spec.json'))
+            if entry.endswith('.yaml') or
+            entry.endswith('spec.json') or
+            entry.endswith('spec.json.sig'))
     except KeyError as inst:
         msg = 'No packages at {0}: {1}'.format(cache_prefix, inst)
         tty.warn(msg)
@@ -944,7 +965,7 @@ def _build_tarball(
     tmpdir = tempfile.mkdtemp()
     cache_prefix = build_cache_prefix(tmpdir)

-    tarfile_name = tarball_name(spec, '.tar.gz')
+    tarfile_name = tarball_name(spec, '.spack')
     tarfile_dir = os.path.join(cache_prefix, tarball_directory_name(spec))
     tarfile_path = os.path.join(tarfile_dir, tarfile_name)
     spackfile_path = os.path.join(
@@ -967,10 +988,12 @@ def _build_tarball(
     spec_file = spack.store.layout.spec_file_path(spec)
     specfile_name = tarball_name(spec, '.spec.json')
     specfile_path = os.path.realpath(os.path.join(cache_prefix, specfile_name))
+    signed_specfile_path = '{0}.sig'.format(specfile_path)
     deprecated_specfile_path = specfile_path.replace('.spec.json', '.spec.yaml')

     remote_specfile_path = url_util.join(
         outdir, os.path.relpath(specfile_path, os.path.realpath(tmpdir)))
+    remote_signed_specfile_path = '{0}.sig'.format(remote_specfile_path)
     remote_specfile_path_deprecated = url_util.join(
         outdir, os.path.relpath(deprecated_specfile_path,
                                 os.path.realpath(tmpdir)))
@@ -979,9 +1002,12 @@ def _build_tarball(
     if force:
         if web_util.url_exists(remote_specfile_path):
             web_util.remove_url(remote_specfile_path)
+        if web_util.url_exists(remote_signed_specfile_path):
+            web_util.remove_url(remote_signed_specfile_path)
         if web_util.url_exists(remote_specfile_path_deprecated):
             web_util.remove_url(remote_specfile_path_deprecated)
     elif (web_util.url_exists(remote_specfile_path) or
+          web_util.url_exists(remote_signed_specfile_path) or
           web_util.url_exists(remote_specfile_path_deprecated)):
         raise NoOverwriteException(url_util.format(remote_specfile_path))

@@ -1043,6 +1069,7 @@ def _build_tarball(
         raise ValueError(
             '{0} not a valid spec file type (json or yaml)'.format(
                 spec_file))
+    spec_dict['buildcache_layout_version'] = 1
     bchecksum = {}
     bchecksum['hash_algorithm'] = 'sha256'
     bchecksum['hash'] = checksum
@@ -1061,25 +1088,15 @@ def _build_tarball(
     # sign the tarball and spec file with gpg
     if not unsigned:
         key = select_signing_key(key)
-        sign_tarball(key, force, specfile_path)
-
-    # put tarball, spec and signature files in .spack archive
-    with closing(tarfile.open(spackfile_path, 'w')) as tar:
-        tar.add(name=tarfile_path, arcname='%s' % tarfile_name)
-        tar.add(name=specfile_path, arcname='%s' % specfile_name)
-        if not unsigned:
-            tar.add(name='%s.asc' % specfile_path,
-                    arcname='%s.asc' % specfile_name)
-
-    # cleanup file moved to archive
-    os.remove(tarfile_path)
-    if not unsigned:
-        os.remove('%s.asc' % specfile_path)
+        sign_specfile(key, force, specfile_path)

     # push tarball and signed spec json to remote mirror
     web_util.push_to_url(
         spackfile_path, remote_spackfile_path, keep_original=False)
     web_util.push_to_url(
-        specfile_path, remote_specfile_path, keep_original=False)
+        signed_specfile_path if not unsigned else specfile_path,
+        remote_signed_specfile_path if not unsigned else remote_specfile_path,
+        keep_original=False)

     tty.debug('Buildcache for "{0}" written to \n {1}'
               .format(spec, remote_spackfile_path))
@@ -1162,48 +1179,174 @@ def push(specs, push_url, specs_kwargs=None, **kwargs):
             warnings.warn(str(e))


-def download_tarball(spec, preferred_mirrors=None):
+def try_verify(specfile_path):
+    """Utility function to attempt to verify a local file.  Assumes the
+    file is a clearsigned signature file.
+
+    Args:
+        specfile_path (str): Path to file to be verified.
+
+    Returns:
+        ``True`` if the signature could be verified, ``False`` otherwise.
+    """
+    suppress = config.get('config:suppress_gpg_warnings', False)
+
+    try:
+        spack.util.gpg.verify(specfile_path, suppress_warnings=suppress)
+    except Exception:
+        return False
+
+    return True
+
+
+def try_fetch(url_to_fetch):
+    """Utility function to try and fetch a file from a url, stage it
+    locally, and return the path to the staged file.
+
+    Args:
+        url_to_fetch (str): Url pointing to remote resource to fetch
+
+    Returns:
+        Path to locally staged resource or ``None`` if it could not be fetched.
+    """
+    stage = Stage(url_to_fetch, keep=True)
+    stage.create()
+
+    try:
+        stage.fetch()
+    except fs.FetchError:
+        stage.destroy()
+        return None
+
+    return stage
+
+
+def _delete_staged_downloads(download_result):
+    """Clean up stages used to download tarball and specfile"""
+    download_result['tarball_stage'].destroy()
+    download_result['specfile_stage'].destroy()
+
+
+def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
     """
     Download binary tarball for given package into stage area, returning
     path to downloaded tarball if successful, None otherwise.

     Args:
         spec (spack.spec.Spec): Concrete spec
-        preferred_mirrors (list): If provided, this is a list of preferred
-            mirror urls.  Other configured mirrors will only be used if the
-            tarball can't be retrieved from one of these.
+        unsigned (bool): Whether or not to require signed binaries
+        mirrors_for_spec (list): Optional list of concrete specs and mirrors
+            obtained by calling binary_distribution.get_mirrors_for_spec().
+            These will be checked in order first before looking in other
+            configured mirrors.

     Returns:
-        Path to the downloaded tarball, or ``None`` if the tarball could not
-            be downloaded from any configured mirrors.
+        ``None`` if the tarball could not be downloaded (maybe also verified,
+        depending on whether new-style signed binary packages were found).
+        Otherwise, return an object indicating the path to the downloaded
+        tarball, the path to the downloaded specfile (in the case of new-style
+        buildcache), and whether or not the tarball is already verified.
+
+        .. code-block:: JSON
+
+           {
+               "tarball_path": "path-to-locally-saved-tarfile",
+               "specfile_path": "none-or-path-to-locally-saved-specfile",
+               "signature_verified": "true-if-binary-pkg-was-already-verified"
+           }
     """
     if not spack.mirror.MirrorCollection():
         tty.die("Please add a spack mirror to allow " +
                 "download of pre-compiled packages.")

     tarball = tarball_path_name(spec, '.spack')
+    specfile_prefix = tarball_name(spec, '.spec')

-    urls_to_try = []
+    mirrors_to_try = []

-    if preferred_mirrors:
-        for preferred_url in preferred_mirrors:
-            urls_to_try.append(url_util.join(
-                preferred_url, _build_cache_relative_path, tarball))
+    # Note on try_first and try_next:
+    # mirrors_for_spec most likely came from spack caching remote
+    # mirror indices locally and adding their specs to a local data
+    # structure supporting quick lookup of concrete specs.  Those
+    # mirrors are likely a subset of all configured mirrors, and
+    # we'll probably find what we need in one of them.  But we'll
+    # look in all configured mirrors if needed, as maybe the spec
+    # we need was in an un-indexed mirror.  No need to check any
+    # mirror for the spec twice though.
+    try_first = [i['mirror_url'] for i in mirrors_for_spec] if mirrors_for_spec else []
+    try_next = [
+        i.fetch_url for i in spack.mirror.MirrorCollection().values()
+        if i.fetch_url not in try_first
+    ]

-    for mirror in spack.mirror.MirrorCollection().values():
-        if not preferred_mirrors or mirror.fetch_url not in preferred_mirrors:
-            urls_to_try.append(url_util.join(
-                mirror.fetch_url, _build_cache_relative_path, tarball))
+    for url in try_first + try_next:
+        mirrors_to_try.append({
+            'specfile': url_util.join(url,
+                _build_cache_relative_path, specfile_prefix),
+            'spackfile': url_util.join(url,
+                _build_cache_relative_path, tarball)
+        })

-    for try_url in urls_to_try:
-        # stage the tarball into standard place
-        stage = Stage(try_url, name="build_cache", keep=True)
-        stage.create()
-        try:
-            stage.fetch()
-            return stage.save_filename
-        except fs.FetchError:
-            continue
+    tried_to_verify_sigs = []
+
+    # Assumes we care more about finding a spec file by preferred ext
+    # than by mirror priority.  This can be made less complicated as
+    # we remove support for deprecated spec formats and buildcache layouts.
+    for ext in ['json.sig', 'json', 'yaml']:
+        for mirror_to_try in mirrors_to_try:
+            specfile_url = '{0}.{1}'.format(mirror_to_try['specfile'], ext)
+            spackfile_url = mirror_to_try['spackfile']
+            local_specfile_stage = try_fetch(specfile_url)
+            if local_specfile_stage:
+                local_specfile_path = local_specfile_stage.save_filename
+                signature_verified = False
+
+                if ext.endswith('.sig') and not unsigned:
+                    # If we found a signed specfile at the root, try to verify
+                    # the signature immediately.  We will not download the
+                    # tarball if we could not verify the signature.
+                    tried_to_verify_sigs.append(specfile_url)
+                    signature_verified = try_verify(local_specfile_path)
+                    if not signature_verified:
+                        tty.warn("Failed to verify: {0}".format(specfile_url))
+
+                if unsigned or signature_verified or not ext.endswith('.sig'):
+                    # We will download the tarball in one of three cases:
+                    #     1. user asked for --no-check-signature
+                    #     2. user didn't ask for --no-check-signature, but we
+                    #     found a spec.json.sig and verified the signature already
+                    #     3. neither of the first two cases are true, but this file
+                    #     is *not* a signed json (not a spec.json.sig file).  That
+                    #     means we already looked at all the mirrors and either didn't
+                    #     find any .sig files or couldn't verify any of them.  But it
+                    #     is still possible to find an old style binary package where
+                    #     the signature is a detached .asc file in the outer archive
+                    #     of the tarball, and in that case, the only way to know is to
+                    #     download the tarball.  This is a deprecated use case, so if
+                    #     something goes wrong during the extraction process (can't
+                    #     verify signature, checksum doesn't match) we will fail at
+                    #     that point instead of trying to download more tarballs from
+                    #     the remaining mirrors, looking for one we can use.
+                    tarball_stage = try_fetch(spackfile_url)
+                    if tarball_stage:
+                        return {
+                            'tarball_stage': tarball_stage,
+                            'specfile_stage': local_specfile_stage,
+                            'signature_verified': signature_verified,
+                        }
+
+                local_specfile_stage.destroy()
+
+    # Falling through the nested loops means we exhaustively searched
+    # for all known kinds of spec files on all mirrors and did not find
+    # an acceptable one for which we could download a tarball.
+
+    if tried_to_verify_sigs:
+        raise NoVerifyException(("Spack found new style signed binary packages, "
+                                 "but was unable to verify any of them.  Please "
+                                 "obtain and trust the correct public key.  If "
+                                 "these are public spack binaries, please see the "
+                                 "spack docs for locations where keys can be found."))
+
+    tty.warn("download_tarball() was unable to download " +
+             "{0} from any configured mirrors".format(spec))
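Note: callers now receive a small result object instead of a bare path. A sketch of the consuming pattern (assumes the functions defined in this diff are in scope; mirrors the JSON shape documented in the docstring above):

def install_from_cache(spec, unsigned=False):
    download_result = download_tarball(spec, unsigned)
    if not download_result:
        raise RuntimeError('binary cache download failed for {0}'.format(spec))
    try:
        tarball_path = download_result['tarball_stage'].save_filename
        verified = download_result['signature_verified']
        print('fetched {0} (signature_verified={1})'.format(tarball_path, verified))
    finally:
        # stages are kept on disk until explicitly destroyed
        _delete_staged_downloads(download_result)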
@@ -1377,7 +1520,55 @@ def is_backup_file(file):
     relocate.relocate_text(text_names, prefix_to_prefix_text)


-def extract_tarball(spec, filename, allow_root=False, unsigned=False,
+def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum):
+    stagepath = os.path.dirname(filename)
+    spackfile_name = tarball_name(spec, '.spack')
+    spackfile_path = os.path.join(stagepath, spackfile_name)
+    tarfile_name = tarball_name(spec, '.tar.gz')
+    tarfile_path = os.path.join(extract_to, tarfile_name)
+    deprecated_yaml_name = tarball_name(spec, '.spec.yaml')
+    deprecated_yaml_path = os.path.join(extract_to, deprecated_yaml_name)
+    json_name = tarball_name(spec, '.spec.json')
+    json_path = os.path.join(extract_to, json_name)
+    with closing(tarfile.open(spackfile_path, 'r')) as tar:
+        tar.extractall(extract_to)
+    # some buildcache tarfiles use bzip2 compression
+    if not os.path.exists(tarfile_path):
+        tarfile_name = tarball_name(spec, '.tar.bz2')
+        tarfile_path = os.path.join(extract_to, tarfile_name)
+
+    if os.path.exists(json_path):
+        specfile_path = json_path
+    elif os.path.exists(deprecated_yaml_path):
+        specfile_path = deprecated_yaml_path
+    else:
+        raise ValueError('Cannot find spec file for {0}.'.format(extract_to))
+
+    if not unsigned:
+        if os.path.exists('%s.asc' % specfile_path):
+            suppress = config.get('config:suppress_gpg_warnings', False)
+            try:
+                spack.util.gpg.verify('%s.asc' % specfile_path, specfile_path, suppress)
+            except Exception:
+                raise NoVerifyException("Spack was unable to verify package "
+                                        "signature, please obtain and trust the "
+                                        "correct public key.")
+        else:
+            raise UnsignedPackageException(
+                "To install unsigned packages, use the --no-check-signature option.")
+    # get the sha256 checksum of the tarball
+    local_checksum = checksum_tarball(tarfile_path)
+
+    # if the checksums don't match don't install
+    if local_checksum != remote_checksum['hash']:
+        raise NoChecksumException(
+            "Package tarball failed checksum verification.\n"
+            "It cannot be installed.")
+
+    return tarfile_path
+
+
+def extract_tarball(spec, download_result, allow_root=False, unsigned=False,
                     force=False):
     """
     extract binary tarball for given package into install area
@@ -1388,66 +1579,56 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
     else:
         raise NoOverwriteException(str(spec.prefix))

-    tmpdir = tempfile.mkdtemp()
-    stagepath = os.path.dirname(filename)
-    spackfile_name = tarball_name(spec, '.spack')
-    spackfile_path = os.path.join(stagepath, spackfile_name)
-    tarfile_name = tarball_name(spec, '.tar.gz')
-    tarfile_path = os.path.join(tmpdir, tarfile_name)
-    specfile_is_json = True
-    deprecated_yaml_name = tarball_name(spec, '.spec.yaml')
-    deprecated_yaml_path = os.path.join(tmpdir, deprecated_yaml_name)
-    json_name = tarball_name(spec, '.spec.json')
-    json_path = os.path.join(tmpdir, json_name)
-    with closing(tarfile.open(spackfile_path, 'r')) as tar:
-        tar.extractall(tmpdir)
-    # some buildcache tarfiles use bzip2 compression
-    if not os.path.exists(tarfile_path):
-        tarfile_name = tarball_name(spec, '.tar.bz2')
-        tarfile_path = os.path.join(tmpdir, tarfile_name)
+    specfile_path = download_result['specfile_stage'].save_filename

-    if os.path.exists(json_path):
-        specfile_path = json_path
-    elif os.path.exists(deprecated_yaml_path):
-        specfile_is_json = False
-        specfile_path = deprecated_yaml_path
-    else:
-        raise ValueError('Cannot find spec file for {0}.'.format(tmpdir))
-
-    if not unsigned:
-        if os.path.exists('%s.asc' % specfile_path):
-            try:
-                suppress = config.get('config:suppress_gpg_warnings', False)
-                spack.util.gpg.verify(
-                    '%s.asc' % specfile_path, specfile_path, suppress)
-            except Exception as e:
-                shutil.rmtree(tmpdir)
-                raise e
-        else:
-            shutil.rmtree(tmpdir)
-            raise NoVerifyException(
-                "Package spec file failed signature verification.\n"
-                "Use spack buildcache keys to download "
-                "and install a key for verification from the mirror.")
-    # get the sha256 checksum of the tarball
-    checksum = checksum_tarball(tarfile_path)
-
     # get the sha256 checksum recorded at creation
     spec_dict = {}
     with open(specfile_path, 'r') as inputfile:
         content = inputfile.read()
-        if specfile_is_json:
+        if specfile_path.endswith('.json.sig'):
+            spec_dict = Spec.extract_json_from_clearsig(content)
+        elif specfile_path.endswith('.json'):
             spec_dict = sjson.load(content)
         else:
             spec_dict = syaml.load(content)
-    bchecksum = spec_dict['binary_cache_checksum']

-    # if the checksums don't match don't install
-    if bchecksum['hash'] != checksum:
-        shutil.rmtree(tmpdir)
-        raise NoChecksumException(
-            "Package tarball failed checksum verification.\n"
-            "It cannot be installed.")
+    bchecksum = spec_dict['binary_cache_checksum']
+    filename = download_result['tarball_stage'].save_filename
+    signature_verified = download_result['signature_verified']
+    tmpdir = None
+
+    if ('buildcache_layout_version' not in spec_dict or
+            int(spec_dict['buildcache_layout_version']) < 1):
+        # Handle the older buildcache layout where the .spack file
+        # contains a spec json/yaml, maybe an .asc file (signature),
+        # and another tarball containing the actual install tree.
+        tmpdir = tempfile.mkdtemp()
+        try:
+            tarfile_path = _extract_inner_tarball(
+                spec, filename, tmpdir, unsigned, bchecksum)
+        except Exception as e:
+            _delete_staged_downloads(download_result)
+            shutil.rmtree(tmpdir)
+            raise e
+    else:
+        # Newer buildcache layout: the .spack file contains just
+        # the install tree, the signature, if it exists, is
+        # wrapped around the spec.json at the root.  If sig verify
+        # was required, it was already done before downloading
+        # the tarball.
+        tarfile_path = filename
+
+        if not unsigned and not signature_verified:
+            raise UnsignedPackageException(
+                "To install unsigned packages, use the --no-check-signature option.")
+
+        # compute the sha256 checksum of the tarball
+        local_checksum = checksum_tarball(tarfile_path)
+
+        # if the checksums don't match don't install
+        if local_checksum != bchecksum['hash']:
+            _delete_staged_downloads(download_result)
+            raise NoChecksumException(
+                "Package tarball failed checksum verification.\n"
+                "It cannot be installed.")

     new_relative_prefix = str(os.path.relpath(spec.prefix,
                               spack.store.layout.root))
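Note: the branch on buildcache_layout_version reduces to a small predicate; a standalone sketch:

def is_legacy_layout(spec_dict):
    """Missing or zero means the old nested-archive .spack layout."""
    return ('buildcache_layout_version' not in spec_dict or
            int(spec_dict['buildcache_layout_version']) < 1)

print(is_legacy_layout({}))                                # True: old layout
print(is_legacy_layout({'buildcache_layout_version': 1}))  # False: new layout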
@@ -1472,11 +1653,13 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
     try:
         tar.extractall(path=extract_tmp)
     except Exception as e:
+        _delete_staged_downloads(download_result)
         shutil.rmtree(extracted_dir)
         raise e
     try:
         shutil.move(extracted_dir, spec.prefix)
     except Exception as e:
+        _delete_staged_downloads(download_result)
         shutil.rmtree(extracted_dir)
         raise e
     os.remove(tarfile_path)
@@ -1495,9 +1678,11 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
             spec_id = spec.format('{name}/{hash:7}')
             tty.warn('No manifest file in tarball for spec %s' % spec_id)
     finally:
-        shutil.rmtree(tmpdir)
+        if tmpdir:
+            shutil.rmtree(tmpdir)
+        if os.path.exists(filename):
+            os.remove(filename)
+        _delete_staged_downloads(download_result)


 def install_root_node(spec, allow_root, unsigned=False, force=False, sha256=None):
@@ -1525,21 +1710,23 @@ def install_root_node(spec, allow_root, unsigned=False, force=False, sha256=None
         warnings.warn("Package for spec {0} already installed.".format(spec.format()))
         return

-    tarball = download_tarball(spec)
-    if not tarball:
+    download_result = download_tarball(spec, unsigned)
+    if not download_result:
         msg = 'download of binary cache file for spec "{0}" failed'
         raise RuntimeError(msg.format(spec.format()))

     if sha256:
         checker = spack.util.crypto.Checker(sha256)
         msg = 'cannot verify checksum for "{0}" [expected={1}]'
-        msg = msg.format(tarball, sha256)
-        if not checker.check(tarball):
+        tarball_path = download_result['tarball_stage'].save_filename
+        msg = msg.format(tarball_path, sha256)
+        if not checker.check(tarball_path):
+            _delete_staged_downloads(download_result)
             raise spack.binary_distribution.NoChecksumException(msg)
         tty.debug('Verified SHA256 checksum of the build cache')

     tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
-    extract_tarball(spec, tarball, allow_root, unsigned, force)
+    extract_tarball(spec, download_result, allow_root, unsigned, force)
     spack.hooks.post_install(spec)
     spack.store.db.add(spec, spack.store.layout)

@@ -1565,6 +1752,8 @@ def try_direct_fetch(spec, mirrors=None):
     """
     deprecated_specfile_name = tarball_name(spec, '.spec.yaml')
     specfile_name = tarball_name(spec, '.spec.json')
+    signed_specfile_name = tarball_name(spec, '.spec.json.sig')
+    specfile_is_signed = False
     specfile_is_json = True
     found_specs = []

@@ -1573,24 +1762,35 @@ def try_direct_fetch(spec, mirrors=None):
             mirror.fetch_url, _build_cache_relative_path, deprecated_specfile_name)
         buildcache_fetch_url_json = url_util.join(
             mirror.fetch_url, _build_cache_relative_path, specfile_name)
+        buildcache_fetch_url_signed_json = url_util.join(
+            mirror.fetch_url, _build_cache_relative_path, signed_specfile_name)
         try:
-            _, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
+            _, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json)
+            specfile_is_signed = True
         except (URLError, web_util.SpackWebError, HTTPError) as url_err:
             try:
-                _, _, fs = web_util.read_from_url(buildcache_fetch_url_yaml)
-                specfile_is_json = False
-            except (URLError, web_util.SpackWebError, HTTPError) as url_err_y:
-                tty.debug('Did not find {0} on {1}'.format(
-                    specfile_name, buildcache_fetch_url_json), url_err)
-                tty.debug('Did not find {0} on {1}'.format(
-                    specfile_name, buildcache_fetch_url_yaml), url_err_y)
-                continue
+                _, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
+            except (URLError, web_util.SpackWebError, HTTPError) as url_err_x:
+                try:
+                    _, _, fs = web_util.read_from_url(buildcache_fetch_url_yaml)
+                    specfile_is_json = False
+                except (URLError, web_util.SpackWebError, HTTPError) as url_err_y:
+                    tty.debug('Did not find {0} on {1}'.format(
+                        specfile_name, buildcache_fetch_url_signed_json), url_err)
+                    tty.debug('Did not find {0} on {1}'.format(
+                        specfile_name, buildcache_fetch_url_json), url_err_x)
+                    tty.debug('Did not find {0} on {1}'.format(
+                        specfile_name, buildcache_fetch_url_yaml), url_err_y)
+                    continue
         specfile_contents = codecs.getreader('utf-8')(fs).read()

         # read the spec from the build cache file. All specs in build caches
         # are concrete (as they are built) so we need to mark this spec
         # concrete on read-in.
-        if specfile_is_json:
+        if specfile_is_signed:
+            specfile_json = Spec.extract_json_from_clearsig(specfile_contents)
+            fetched_spec = Spec.from_dict(specfile_json)
+        elif specfile_is_json:
             fetched_spec = Spec.from_json(specfile_contents)
         else:
             fetched_spec = Spec.from_yaml(specfile_contents)
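Note: the nested try/except chain above is a priority search over file kinds. A standalone sketch of the same control flow (toy reader; any exception counts as "not found"):

def first_readable(urls, read):
    for url in urls:
        try:
            return url, read(url)
        except Exception:
            continue
    return None, None

# priority mirrors the diff: signed json, then json, then deprecated yaml
store = {'base.spec.json': '{"spec": {}}'}  # hypothetical mirror contents
print(first_readable(
    ['base.spec.json.sig', 'base.spec.json', 'base.spec.yaml'],
    lambda url: store[url]))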
@@ -1627,7 +1827,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
         tty.debug("No Spack mirrors are currently configured")
         return {}

-    results = binary_index.find_built_spec(spec)
+    results = binary_index.find_built_spec(spec, mirrors_to_check=mirrors_to_check)

     # Maybe we just didn't have the latest information from the mirror, so
     # try to fetch directly, unless we are only considering the indices.
@@ -1917,7 +2117,8 @@ def download_single_spec(
             'path': local_tarball_path,
             'required': True,
         }, {
-            'url': [tarball_name(concrete_spec, '.spec.json'),
+            'url': [tarball_name(concrete_spec, '.spec.json.sig'),
+                    tarball_name(concrete_spec, '.spec.json'),
                     tarball_name(concrete_spec, '.spec.yaml')],
             'path': destination,
             'required': True,

@@ -5,6 +5,7 @@
 from __future__ import print_function

 import contextlib
+import copy
 import fnmatch
 import functools
 import json
@@ -21,6 +22,7 @@

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
+from llnl.util.lang import GroupedExceptionHandler

 import spack.binary_distribution
 import spack.config
@@ -36,6 +38,11 @@
 import spack.util.environment
 import spack.util.executable
 import spack.util.path
+import spack.util.spack_yaml
+import spack.util.url

+#: Name of the file containing metadata about the bootstrapping source
+METADATA_YAML_FILENAME = 'metadata.yaml'

 #: Map a bootstrapper type to the corresponding class
 _bootstrap_methods = {}
@@ -73,32 +80,41 @@ def _try_import_from_store(module, query_spec, query_info=None):

     for candidate_spec in installed_specs:
         pkg = candidate_spec['python'].package
-        module_paths = {
+        module_paths = [
             os.path.join(candidate_spec.prefix, pkg.purelib),
             os.path.join(candidate_spec.prefix, pkg.platlib),
-        }
-        sys.path.extend(module_paths)
+        ]  # type: list[str]
+        path_before = list(sys.path)
+        # NOTE: try module_paths first and last, last allows an existing version in path
+        # to be picked up and used, possibly depending on something in the store, first
+        # allows the bootstrap version to work when an incompatible version is in
+        # sys.path
+        orders = [
+            module_paths + sys.path,
+            sys.path + module_paths,
+        ]
+        for path in orders:
+            sys.path = path
+            try:
+                _fix_ext_suffix(candidate_spec)
+                if _python_import(module):
+                    msg = ('[BOOTSTRAP MODULE {0}] The installed spec "{1}/{2}" '
+                           'provides the "{0}" Python module').format(
+                        module, query_spec, candidate_spec.dag_hash()
+                    )
+                    tty.debug(msg)
+                    if query_info is not None:
+                        query_info['spec'] = candidate_spec
+                    return True
+            except Exception as e:
+                msg = ('unexpected error while trying to import module '
+                       '"{0}" from spec "{1}" [error="{2}"]')
+                tty.warn(msg.format(module, candidate_spec, str(e)))
+            else:
+                msg = "Spec {0} did not provide module {1}"
+                tty.warn(msg.format(candidate_spec, module))

-        try:
-            _fix_ext_suffix(candidate_spec)
-            if _python_import(module):
-                msg = ('[BOOTSTRAP MODULE {0}] The installed spec "{1}/{2}" '
-                       'provides the "{0}" Python module').format(
-                    module, query_spec, candidate_spec.dag_hash()
-                )
-                tty.debug(msg)
-                if query_info is not None:
-                    query_info['spec'] = candidate_spec
-                return True
-        except Exception as e:
-            msg = ('unexpected error while trying to import module '
-                   '"{0}" from spec "{1}" [error="{2}"]')
-            tty.warn(msg.format(module, candidate_spec, str(e)))
-        else:
-            msg = "Spec {0} did not provide module {1}"
-            tty.warn(msg.format(candidate_spec, module))
-
-        sys.path = sys.path[:-3]
+        sys.path = path_before

     return False
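Note: the two sys.path orders above let a store-installed module win when the ambient one is incompatible, and vice versa. A standalone sketch of the retry-and-restore pattern:

import sys

def import_with_path_orders(module, module_paths):
    path_before = list(sys.path)
    try:
        # store paths with highest precedence first, then lowest
        for path in (module_paths + sys.path, sys.path + module_paths):
            sys.path = path
            try:
                __import__(module)
                return True
            except ImportError:
                continue
        return False
    finally:
        sys.path = path_before  # always restore, as the hunk now does

print(import_with_path_orders('json', []))  # stdlib module: True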
@@ -203,12 +219,43 @@ def _executables_in_store(executables, query_spec, query_info=None):
     return False


-@_bootstrapper(type='buildcache')
-class _BuildcacheBootstrapper(object):
-    """Install the software needed during bootstrapping from a buildcache."""
+class _BootstrapperBase(object):
+    """Base class to derive types that can bootstrap software for Spack"""
+    config_scope_name = ''

     def __init__(self, conf):
         self.name = conf['name']
         self.url = conf['info']['url']

+    @property
+    def mirror_url(self):
+        # Absolute paths
+        if os.path.isabs(self.url):
+            return spack.util.url.format(self.url)
+
+        # Check for :// and assume it's an url if we find it
+        if '://' in self.url:
+            return self.url
+
+        # Otherwise, it's a relative path
+        return spack.util.url.format(os.path.join(self.metadata_dir, self.url))
+
+    @property
+    def mirror_scope(self):
+        return spack.config.InternalConfigScope(
+            self.config_scope_name, {'mirrors:': {self.name: self.mirror_url}}
+        )
+
+
+@_bootstrapper(type='buildcache')
+class _BuildcacheBootstrapper(_BootstrapperBase):
+    """Install the software needed during bootstrapping from a buildcache."""
+
+    config_scope_name = 'bootstrap_buildcache'
+
     def __init__(self, conf):
+        super(_BuildcacheBootstrapper, self).__init__(conf)
+        self.metadata_dir = spack.util.path.canonicalize_path(conf['metadata'])
         self.last_search = None

     @staticmethod
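Note: mirror_url accepts three spellings of a mirror location. A standalone sketch of the resolution order (spack.util.url.format replaced with a plain file:// rendering for illustration):

import os

def resolve_mirror_url(url, metadata_dir):
    if os.path.isabs(url):
        return 'file://' + url            # absolute path on disk
    if '://' in url:
        return url                        # already a real URL
    return 'file://' + os.path.abspath(
        os.path.join(metadata_dir, url))  # relative to the metadata directory

print(resolve_mirror_url('https://mirror.spack.io', '/meta'))
print(resolve_mirror_url('../buildcache', '/opt/spack/share/bootstrap/gha'))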
@@ -231,9 +278,8 @@ def _spec_and_platform(abstract_spec_str):
     def _read_metadata(self, package_name):
         """Return metadata about the given package."""
         json_filename = '{0}.json'.format(package_name)
-        json_path = os.path.join(
-            spack.paths.share_path, 'bootstrap', self.name, json_filename
-        )
+        json_dir = self.metadata_dir
+        json_path = os.path.join(json_dir, json_filename)
         with open(json_path) as f:
             data = json.load(f)
         return data
@@ -307,12 +353,6 @@ def _install_and_test(
                 return True
         return False

-    @property
-    def mirror_scope(self):
-        return spack.config.InternalConfigScope(
-            'bootstrap_buildcache', {'mirrors:': {self.name: self.url}}
-        )
-
     def try_import(self, module, abstract_spec_str):
         test_fn, info = functools.partial(_try_import_from_store, module), {}
         if test_fn(query_spec=abstract_spec_str, query_info=info):
@@ -342,9 +382,13 @@ def try_search_path(self, executables, abstract_spec_str):


 @_bootstrapper(type='install')
-class _SourceBootstrapper(object):
+class _SourceBootstrapper(_BootstrapperBase):
     """Install the software needed during bootstrapping from sources."""
+    config_scope_name = 'bootstrap_source'

     def __init__(self, conf):
+        super(_SourceBootstrapper, self).__init__(conf)
+        self.metadata_dir = spack.util.path.canonicalize_path(conf['metadata'])
         self.conf = conf
         self.last_search = None

@@ -377,7 +421,8 @@ def try_import(self, module, abstract_spec_str):
         tty.debug(msg.format(module, abstract_spec_str))

         # Install the spec that should make the module importable
-        concrete_spec.package.do_install(fail_fast=True)
+        with spack.config.override(self.mirror_scope):
+            concrete_spec.package.do_install(fail_fast=True)

         if _try_import_from_store(module, query_spec=concrete_spec, query_info=info):
             self.last_search = info
@@ -390,6 +435,8 @@ def try_search_path(self, executables, abstract_spec_str):
             self.last_search = info
             return True

+        tty.info("Bootstrapping {0} from sources".format(abstract_spec_str))
+
         # If we compile code from sources detecting a few build tools
         # might reduce compilation time by a fair amount
         _add_externals_if_missing()
@@ -402,7 +449,8 @@ def try_search_path(self, executables, abstract_spec_str):

         msg = "[BOOTSTRAP] Try installing '{0}' from sources"
         tty.debug(msg.format(abstract_spec_str))
-        concrete_spec.package.do_install()
+        with spack.config.override(self.mirror_scope):
+            concrete_spec.package.do_install()
         if _executables_in_store(executables, concrete_spec, query_info=info):
             self.last_search = info
             return True
@@ -417,11 +465,11 @@ def _make_bootstrapper(conf):
     return _bootstrap_methods[btype](conf)


-def _source_is_trusted(conf):
+def source_is_enabled_or_raise(conf):
+    """Raise ValueError if the source is not enabled for bootstrapping"""
     trusted, name = spack.config.get('bootstrap:trusted'), conf['name']
-    if name not in trusted:
-        return False
-    return trusted[name]
+    if not trusted.get(name, False):
+        raise ValueError('source is not trusted')


 def spec_for_current_python():
@@ -486,36 +534,26 @@ def ensure_module_importable_or_raise(module, abstract_spec=None):
         return

     abstract_spec = abstract_spec or module
-    source_configs = spack.config.get('bootstrap:sources', [])

-    errors = {}
+    h = GroupedExceptionHandler()

-    for current_config in source_configs:
-        if not _source_is_trusted(current_config):
-            msg = ('[BOOTSTRAP MODULE {0}] Skipping source "{1}" since it is '
-                   'not trusted').format(module, current_config['name'])
-            tty.debug(msg)
-            continue
+    for current_config in bootstrapping_sources():
+        with h.forward(current_config['name']):
+            source_is_enabled_or_raise(current_config)

-        b = _make_bootstrapper(current_config)
-        try:
+            b = _make_bootstrapper(current_config)
             if b.try_import(module, abstract_spec):
                 return
-        except Exception as e:
-            msg = '[BOOTSTRAP MODULE {0}] Unexpected error "{1}"'
-            tty.debug(msg.format(module, str(e)))
-            errors[current_config['name']] = e

     # We couldn't import in any way, so raise an import error
-    msg = 'cannot bootstrap the "{0}" Python module'.format(module)
+    assert h, 'expected at least one exception to have been raised at this point: while bootstrapping {0}'.format(module)  # noqa: E501
+    msg = 'cannot bootstrap the "{0}" Python module '.format(module)
     if abstract_spec:
-        msg += ' from spec "{0}"'.format(abstract_spec)
-        msg += ' due to the following failures:\n'
-        for method in errors:
-            err = errors[method]
-            msg += "    '{0}' raised {1}: {2}\n".format(
-                method, err.__class__.__name__, str(err))
-        msg += ' Please run `spack -d spec zlib` for more verbose error messages'
+        msg += 'from spec "{0}" '.format(abstract_spec)
+    if tty.is_debug():
+        msg += h.grouped_message(with_tracebacks=True)
+    else:
+        msg += h.grouped_message(with_tracebacks=False)
+    msg += '\nRun `spack --debug ...` for more detailed errors'
     raise ImportError(msg)

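Note: GroupedExceptionHandler comes from llnl.util.lang (see the import hunk earlier); the minimal stand-in below is inferred from its usage here and only illustrates the control flow, in which each source's failure is recorded and reported together at the end instead of aborting the loop:

class GroupedExceptionHandlerSketch(object):
    def __init__(self):
        self.exceptions = []

    def __bool__(self):
        return bool(self.exceptions)
    __nonzero__ = __bool__  # python 2 spelling, matching this code base's era

    def forward(self, context):
        handler = self

        class _Forward(object):
            def __enter__(self):
                return None

            def __exit__(self, exc_type, exc, tb):
                if exc is not None:
                    handler.exceptions.append((context, exc))
                return True  # swallow and record, then try the next source

        return _Forward()

    def grouped_message(self, with_tracebacks=False):
        return '\n'.join('{0} raised {1!r}'.format(c, e)
                         for c, e in self.exceptions)

h = GroupedExceptionHandlerSketch()
for name in ('source-a', 'source-b'):
    with h.forward(name):
        raise ValueError('{0} failed'.format(name))
assert h
print(h.grouped_message())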
@@ -538,16 +576,14 @@ def ensure_executables_in_path_or_raise(executables, abstract_spec):
         return cmd

     executables_str = ', '.join(executables)
-    source_configs = spack.config.get('bootstrap:sources', [])
-    for current_config in source_configs:
-        if not _source_is_trusted(current_config):
-            msg = ('[BOOTSTRAP EXECUTABLES {0}] Skipping source "{1}" since it is '
-                   'not trusted').format(executables_str, current_config['name'])
-            tty.debug(msg)
-            continue

-        b = _make_bootstrapper(current_config)
-        try:
+    h = GroupedExceptionHandler()
+
+    for current_config in bootstrapping_sources():
+        with h.forward(current_config['name']):
+            source_is_enabled_or_raise(current_config)
+
+            b = _make_bootstrapper(current_config)
             if b.try_search_path(executables, abstract_spec):
                 # Additional environment variables needed
                 concrete_spec, cmd = b.last_search['spec'], b.last_search['command']
@@ -562,14 +598,16 @@ def ensure_executables_in_path_or_raise(executables, abstract_spec):
                 )
                 cmd.add_default_envmod(env_mods)
                 return cmd
-        except Exception as e:
-            msg = '[BOOTSTRAP EXECUTABLES {0}] Unexpected error "{1}"'
-            tty.debug(msg.format(executables_str, str(e)))

     # We couldn't import in any way, so raise an import error
-    msg = 'cannot bootstrap any of the {0} executables'.format(executables_str)
+    assert h, 'expected at least one exception to have been raised at this point: while bootstrapping {0}'.format(executables_str)  # noqa: E501
+    msg = 'cannot bootstrap any of the {0} executables '.format(executables_str)
     if abstract_spec:
-        msg += ' from spec "{0}"'.format(abstract_spec)
+        msg += 'from spec "{0}" '.format(abstract_spec)
+    if tty.is_debug():
+        msg += h.grouped_message(with_tracebacks=True)
+    else:
+        msg += h.grouped_message(with_tracebacks=False)
+    msg += '\nRun `spack --debug ...` for more detailed errors'
     raise RuntimeError(msg)

@@ -826,6 +864,19 @@ def ensure_flake8_in_path_or_raise():
     return ensure_executables_in_path_or_raise([executable], abstract_spec=root_spec)


+def all_root_specs(development=False):
+    """Return a list of all the root specs that may be used to bootstrap Spack.
+
+    Args:
+        development (bool): if True include dev dependencies
+    """
+    specs = [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec()]
+    if development:
+        specs += [isort_root_spec(), mypy_root_spec(),
+                  black_root_spec(), flake8_root_spec()]
+    return specs
+
+
 def _missing(name, purpose, system_only=True):
     """Message to be printed if an executable is not found"""
     msg = '[{2}] MISSING "{0}": {1}'
@@ -963,3 +1014,23 @@ def status_message(section):
         msg += '\n'
     msg = msg.format(pass_token if not missing_software else fail_token)
     return msg, missing_software
+
+
+def bootstrapping_sources(scope=None):
+    """Return the list of configured sources of software for bootstrapping Spack
+
+    Args:
+        scope (str or None): if a valid configuration scope is given, return the
+            list only from that scope
+    """
+    source_configs = spack.config.get('bootstrap:sources', default=None, scope=scope)
+    source_configs = source_configs or []
+    list_of_sources = []
+    for entry in source_configs:
+        current = copy.copy(entry)
+        metadata_dir = spack.util.path.canonicalize_path(entry['metadata'])
+        metadata_yaml = os.path.join(metadata_dir, METADATA_YAML_FILENAME)
+        with open(metadata_yaml) as f:
+            current.update(spack.util.spack_yaml.load(f))
+        list_of_sources.append(current)
+    return list_of_sources
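Note: each configured source is merged with its on-disk metadata.yaml before use. A sketch with hypothetical shapes (the real files live under the directory named by the entry's metadata key):

import copy

entry = {'name': 'github-actions',
         'metadata': '$spack/share/spack/bootstrap/github-actions'}
metadata_yaml = {'type': 'buildcache', 'info': {'url': '../buildcache'}}

current = copy.copy(entry)
current.update(metadata_yaml)
print(current['info']['url'])  # a relative url, later resolved by mirror_url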
@@ -55,7 +55,7 @@
 import spack.config
 import spack.install_test
 import spack.main
-import spack.package
+import spack.package_base
 import spack.paths
 import spack.platforms
 import spack.repo
@@ -722,7 +722,7 @@ def get_std_cmake_args(pkg):
     package were a CMakePackage instance.

     Args:
-        pkg (spack.package.PackageBase): package under consideration
+        pkg (spack.package_base.PackageBase): package under consideration

     Returns:
         list: arguments for cmake
@@ -738,7 +738,7 @@ def get_std_meson_args(pkg):
     package were a MesonPackage instance.

     Args:
-        pkg (spack.package.PackageBase): package under consideration
+        pkg (spack.package_base.PackageBase): package under consideration

     Returns:
         list: arguments for meson
@@ -748,12 +748,12 @@ def get_std_meson_args(pkg):

 def parent_class_modules(cls):
     """
-    Get list of superclass modules that descend from spack.package.PackageBase
+    Get list of superclass modules that descend from spack.package_base.PackageBase

     Includes cls.__module__
     """
-    if (not issubclass(cls, spack.package.PackageBase) or
-            issubclass(spack.package.PackageBase, cls)):
+    if (not issubclass(cls, spack.package_base.PackageBase) or
+            issubclass(spack.package_base.PackageBase, cls)):
         return []
     result = []
     module = sys.modules.get(cls.__module__)
@@ -771,7 +771,7 @@ def load_external_modules(pkg):
     associated with them.

     Args:
-        pkg (spack.package.PackageBase): package to load deps for
+        pkg (spack.package_base.PackageBase): package to load deps for
     """
     for dep in list(pkg.spec.traverse()):
         external_modules = dep.external_modules or []
@@ -1109,7 +1109,7 @@ def start_build_process(pkg, function, kwargs):

     Args:

-        pkg (spack.package.PackageBase): package whose environment we should set up the
+        pkg (spack.package_base.PackageBase): package whose environment we should set up the
             child process for.
         function (typing.Callable): argless function to run in the child
             process.
@@ -1234,7 +1234,7 @@ def make_stack(tb, stack=None):
         if 'self' in frame.f_locals:
             # Find the first proper subclass of PackageBase.
             obj = frame.f_locals['self']
-            if isinstance(obj, spack.package.PackageBase):
+            if isinstance(obj, spack.package_base.PackageBase):
                 break

     # We found obj, the Package implementation we care about.

@@ -9,7 +9,7 @@

 from spack.build_systems.autotools import AutotoolsPackage
 from spack.directives import extends
-from spack.package import ExtensionError
+from spack.package_base import ExtensionError
 from spack.util.executable import which


@@ -16,7 +16,7 @@
 from spack.build_environment import InstallError
 from spack.directives import conflicts, depends_on
 from spack.operating_systems.mac_os import macos_version
-from spack.package import PackageBase, run_after, run_before
+from spack.package_base import PackageBase, run_after, run_before
 from spack.util.executable import Executable
 from spack.version import Version


@@ -8,7 +8,7 @@
 from llnl.util.filesystem import install, mkdirp

 from spack.build_systems.cmake import CMakePackage
-from spack.package import run_after
+from spack.package_base import run_after


 def cmake_cache_path(name, value, comment=""):
@@ -210,6 +210,10 @@ def std_initconfig_entries(self):
             "#------------------{0}\n".format("-" * 60),
         ]

+    def initconfig_package_entries(self):
+        """This method is to be overwritten by the package"""
+        return []
+
     def initconfig(self, spec, prefix):
         cache_entries = (self.std_initconfig_entries() +
                          self.initconfig_compiler_entries() +

@@ -18,7 +18,7 @@

 import spack.build_environment
 from spack.directives import conflicts, depends_on, variant
-from spack.package import InstallError, PackageBase, run_after
+from spack.package_base import InstallError, PackageBase, run_after
 from spack.util.path import convert_to_posix_path

 # Regex to extract the primary generator from the CMake generator


@@ -6,7 +6,7 @@
 import spack.variant
 from spack.directives import conflicts, depends_on, variant
 from spack.multimethod import when
-from spack.package import PackageBase
+from spack.package_base import PackageBase


 class CudaPackage(PackageBase):
@@ -37,6 +37,7 @@ class CudaPackage(PackageBase):
     variant('cuda_arch',
             description='CUDA architecture',
             values=spack.variant.any_combination_of(*cuda_arch_values),
+            sticky=True,
             when='+cuda')

     # https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#nvcc-examples

@@ -3,14 +3,16 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import spack.package
+from typing import Optional
+
+import spack.package_base
 import spack.util.url


-class GNUMirrorPackage(spack.package.PackageBase):
+class GNUMirrorPackage(spack.package_base.PackageBase):
     """Mixin that takes care of setting url and mirrors for GNU packages."""
     #: Path of the package in a GNU mirror
-    gnu_mirror_path = None
+    gnu_mirror_path = None  # type: Optional[str]

     #: List of GNU mirrors used by Spack
     base_mirrors = [

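Note: the `# type: Optional[str]` trailer seen here and in the hunks below is the comment form of a PEP 484 annotation, used because variable annotations are a syntax error on the Python 2 interpreters this code still supports. Minimal example:

from typing import Optional

class Example(object):
    # equivalent to `attr: Optional[str] = None` on Python 3 only
    attr = None  # type: Optional[str]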
@@ -26,7 +26,7 @@

 import spack.error
 from spack.build_environment import dso_suffix
-from spack.package import InstallError, PackageBase, run_after
+from spack.package_base import InstallError, PackageBase, run_after
 from spack.util.environment import EnvironmentModifications
 from spack.util.executable import Executable
 from spack.util.prefix import Prefix
@@ -1115,7 +1115,7 @@ def _setup_dependent_env_callback(
             raise InstallError('compilers_of_client arg required for MPI')

     def setup_dependent_package(self, module, dep_spec):
-        # https://spack.readthedocs.io/en/latest/spack.html#spack.package.PackageBase.setup_dependent_package
+        # https://spack.readthedocs.io/en/latest/spack.html#spack.package_base.PackageBase.setup_dependent_package
         # Reminder: "module" refers to Python module.
         # Called before the install() method of dependents.

@@ -1259,6 +1259,14 @@ def install(self, spec, prefix):
         for f in glob.glob('%s/intel*log' % tmpdir):
             install(f, dst)

+    @run_after('install')
+    def validate_install(self):
+        # Sometimes the installer exits with an error but doesn't pass a
+        # non-zero exit code to spack.  Check for the existence of a 'bin'
+        # directory to catch this error condition.
+        if not os.path.exists(self.prefix.bin):
+            raise InstallError('The installer has failed to install anything.')
+
     @run_after('install')
     def configure_rpath(self):
         if '+rpath' not in self.spec:

@@ -10,7 +10,7 @@

 from spack.directives import depends_on, extends
 from spack.multimethod import when
-from spack.package import PackageBase
+from spack.package_base import PackageBase
 from spack.util.executable import Executable


@@ -11,7 +11,7 @@
 from llnl.util.filesystem import working_dir

 from spack.directives import conflicts
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after


 class MakefilePackage(PackageBase):

@@ -7,7 +7,7 @@
 from llnl.util.filesystem import install_tree, working_dir

 from spack.directives import depends_on
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after
 from spack.util.executable import which


@@ -11,7 +11,7 @@
 from llnl.util.filesystem import working_dir

 from spack.directives import depends_on, variant
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after


 class MesonPackage(PackageBase):

@@ -6,7 +6,7 @@
 import inspect

 from spack.directives import extends
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after


 class OctavePackage(PackageBase):

@@ -14,7 +14,7 @@

 from llnl.util.filesystem import find_headers, find_libraries, join_path

-from spack.package import Package
+from spack.package_base import Package
 from spack.util.environment import EnvironmentModifications
 from spack.util.executable import Executable


@@ -10,7 +10,7 @@
 from llnl.util.filesystem import filter_file

 from spack.directives import extends
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after
 from spack.util.executable import Executable

@@ -6,6 +6,7 @@
 import os
 import re
 import shutil
+from typing import Optional

 import llnl.util.tty as tty
 from llnl.util.filesystem import (
@@ -19,13 +20,13 @@
 from llnl.util.lang import match_predicate

 from spack.directives import depends_on, extends
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after


 class PythonPackage(PackageBase):
     """Specialized class for packages that are built using pip."""
     #: Package name, version, and extension on PyPI
-    pypi = None
+    pypi = None  # type: Optional[str]

     maintainers = ['adamjstewart']

@@ -46,7 +47,7 @@ class PythonPackage(PackageBase):
     # package manually
     depends_on('py-wheel', type='build')

-    py_namespace = None
+    py_namespace = None  # type: Optional[str]

     @staticmethod
     def _std_args(cls):

@@ -9,7 +9,7 @@
 from llnl.util.filesystem import working_dir

 from spack.directives import depends_on
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after


 class QMakePackage(PackageBase):

@@ -5,9 +5,10 @@


 import inspect
+from typing import Optional

 from spack.directives import extends
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after


 class RPackage(PackageBase):
@@ -28,10 +29,10 @@ class RPackage(PackageBase):
     # package attributes that can be expanded to set the homepage, url,
     # list_url, and git values
     # For CRAN packages
-    cran = None
+    cran = None  # type: Optional[str]

     # For Bioconductor packages
-    bioc = None
+    bioc = None  # type: Optional[str]

     maintainers = ['glennpj']


@@ -3,13 +3,14 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
+from typing import Optional

 import llnl.util.tty as tty
 from llnl.util.filesystem import working_dir

 from spack.build_environment import SPACK_NO_PARALLEL_MAKE, determine_number_of_jobs
 from spack.directives import extends
-from spack.package import PackageBase
+from spack.package_base import PackageBase
 from spack.util.environment import env_flag
 from spack.util.executable import Executable, ProcessError

@@ -36,8 +37,8 @@ class RacketPackage(PackageBase):
     extends('racket')

     pkgs = False
-    subdirectory = None
-    name = None
+    subdirectory = None  # type: Optional[str]
+    name = None  # type: Optional[str]
     parallel = True

     @property

@@ -77,7 +77,7 @@

 import spack.variant
 from spack.directives import conflicts, depends_on, variant
-from spack.package import PackageBase
+from spack.package_base import PackageBase


 class ROCmPackage(PackageBase):

@@ -7,7 +7,7 @@
 import inspect

 from spack.directives import extends
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after


 class RubyPackage(PackageBase):

@@ -7,7 +7,7 @@
 import inspect

 from spack.directives import depends_on
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after


 class SConsPackage(PackageBase):

@@ -11,7 +11,7 @@
 from llnl.util.filesystem import find, join_path, working_dir

 from spack.directives import depends_on, extends
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after


 class SIPPackage(PackageBase):

@@ -3,15 +3,17 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import spack.package
+from typing import Optional
+
+import spack.package_base
 import spack.util.url


-class SourceforgePackage(spack.package.PackageBase):
+class SourceforgePackage(spack.package_base.PackageBase):
     """Mixin that takes care of setting url and mirrors for Sourceforge
     packages."""
     #: Path of the package in a Sourceforge mirror
-    sourceforge_mirror_path = None
+    sourceforge_mirror_path = None  # type: Optional[str]

     #: List of Sourceforge mirrors used by Spack
     base_mirrors = [

@@ -2,16 +2,17 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+from typing import Optional

-import spack.package
+import spack.package_base
 import spack.util.url


-class SourcewarePackage(spack.package.PackageBase):
+class SourcewarePackage(spack.package_base.PackageBase):
     """Mixin that takes care of setting url and mirrors for Sourceware.org
     packages."""
     #: Path of the package in a Sourceware mirror
-    sourceware_mirror_path = None
+    sourceware_mirror_path = None  # type: Optional[str]

     #: List of Sourceware mirrors used by Spack
     base_mirrors = [

@@ -9,7 +9,7 @@
 from llnl.util.filesystem import working_dir

 from spack.directives import depends_on
-from spack.package import PackageBase, run_after
+from spack.package_base import PackageBase, run_after


 class WafPackage(PackageBase):

@@ -3,15 +3,17 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import spack.package
+from typing import Optional
+
+import spack.package_base
 import spack.util.url


-class XorgPackage(spack.package.PackageBase):
+class XorgPackage(spack.package_base.PackageBase):
     """Mixin that takes care of setting url and mirrors for x.org
     packages."""
     #: Path of the package in a x.org mirror
-    xorg_mirror_path = None
+    xorg_mirror_path = None  # type: Optional[str]

     #: List of x.org mirrors used by Spack
     # Note: x.org mirrors are a bit tricky, since many are out-of-sync or off.

@@ -33,7 +33,6 @@
|
||||
import spack.util.executable as exe
|
||||
import spack.util.gpg as gpg_util
|
||||
import spack.util.spack_yaml as syaml
|
||||
import spack.util.url as url_util
|
||||
import spack.util.web as web_util
|
||||
from spack.error import SpackError
|
||||
from spack.spec import Spec
|
||||
@@ -42,10 +41,8 @@
|
||||
'always',
|
||||
]
|
||||
|
||||
SPACK_PR_MIRRORS_ROOT_URL = 's3://spack-binaries-prs'
|
||||
SPACK_SHARED_PR_MIRROR_URL = url_util.join(SPACK_PR_MIRRORS_ROOT_URL,
|
||||
'shared_pr_mirror')
|
||||
TEMP_STORAGE_MIRROR_NAME = 'ci_temporary_mirror'
|
||||
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
|
||||
|
||||
spack_gpg = spack.main.SpackCommand('gpg')
|
||||
spack_compiler = spack.main.SpackCommand('compiler')
|
||||
@@ -199,6 +196,11 @@ def _get_cdash_build_name(spec, build_group):
|
||||
spec.name, spec.version, spec.compiler, spec.architecture, build_group)
|
||||
|
||||
|
||||
def _remove_reserved_tags(tags):
|
||||
"""Convenience function to strip reserved tags from jobs"""
|
||||
return [tag for tag in tags if tag not in SPACK_RESERVED_TAGS]
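# Illustrative behavior of the helper above (not part of the diff): with
# SPACK_RESERVED_TAGS = ["public", "protected", "notary"] as defined earlier,
# _remove_reserved_tags(['public', 'x86_64', 'protected']) returns ['x86_64'],
# leaving only the runner-specific tags in place.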

def _get_spec_string(spec):
format_elements = [
'{name}{@version}',

@@ -231,8 +233,10 @@ def _add_dependency(spec_label, dep_label, deps):
deps[spec_label].add(dep_label)


def _get_spec_dependencies(specs, deps, spec_labels, check_index_only=False):
spec_deps_obj = _compute_spec_deps(specs, check_index_only=check_index_only)
def _get_spec_dependencies(specs, deps, spec_labels, check_index_only=False,
mirrors_to_check=None):
spec_deps_obj = _compute_spec_deps(specs, check_index_only=check_index_only,
mirrors_to_check=mirrors_to_check)

if spec_deps_obj:
dependencies = spec_deps_obj['dependencies']

@@ -249,7 +253,7 @@ def _get_spec_dependencies(specs, deps, spec_labels, check_index_only=False):
_add_dependency(entry['spec'], entry['depends'], deps)


def stage_spec_jobs(specs, check_index_only=False):
def stage_spec_jobs(specs, check_index_only=False, mirrors_to_check=None):
"""Take a set of release specs and generate a list of "stages", where the
jobs in any stage are dependent only on jobs in previous stages. This
allows us to maximize build parallelism within the gitlab-ci framework.

@@ -261,6 +265,8 @@ def stage_spec_jobs(specs, check_index_only=False):
are up to date on those mirrors. This flag limits that search to
the binary cache indices on those mirrors to speed the process up,
even though there is no guarantee the index is up to date.
mirrors_to_check: Optional mapping giving mirrors to check instead of
any configured mirrors.

Returns: A tuple of information objects describing the specs, dependencies
and stages:

@@ -297,8 +303,8 @@ def _remove_satisfied_deps(deps, satisfied_list):
deps = {}
spec_labels = {}

_get_spec_dependencies(
specs, deps, spec_labels, check_index_only=check_index_only)
_get_spec_dependencies(specs, deps, spec_labels, check_index_only=check_index_only,
mirrors_to_check=mirrors_to_check)

# Save the original deps, as we need to return them at the end of the
# function. In the while loop below, the "dependencies" variable is

@@ -340,7 +346,7 @@ def _print_staging_summary(spec_labels, dependencies, stages):
_get_spec_string(s)))


def _compute_spec_deps(spec_list, check_index_only=False):
def _compute_spec_deps(spec_list, check_index_only=False, mirrors_to_check=None):
"""
Computes all the dependencies for the spec(s) and generates a JSON
object which provides both a list of unique spec names as well as a

@@ -413,7 +419,7 @@ def append_dep(s, d):
continue

up_to_date_mirrors = bindist.get_mirrors_for_spec(
spec=s, index_only=check_index_only)
spec=s, mirrors_to_check=mirrors_to_check, index_only=check_index_only)

skey = _spec_deps_key(s)
spec_labels[skey] = {

@@ -602,8 +608,8 @@ def get_spec_filter_list(env, affected_pkgs, dependencies=True, dependents=True)
def generate_gitlab_ci_yaml(env, print_summary, output_file,
prune_dag=False, check_index_only=False,
run_optimizer=False, use_dependencies=False,
artifacts_root=None):
""" Generate a gitlab yaml file to run a dynamic chile pipeline from
artifacts_root=None, remote_mirror_override=None):
""" Generate a gitlab yaml file to run a dynamic child pipeline from
the spec matrix in the active environment.

Arguments:

@@ -629,6 +635,10 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
artifacts_root (str): Path where artifacts like logs, environment
files (spack.yaml, spack.lock), etc should be written. GitLab
requires this to be within the project directory.
remote_mirror_override (str): Typically only needed when one spack.yaml
is used to populate several mirrors with binaries, based on some
criteria. Spack protected pipelines populate different mirrors based
on branch name, facilitated by this option.
"""
with spack.concretize.disable_compiler_existence_check():
with env.write_transaction():

@@ -678,17 +688,19 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
for s in affected_specs:
tty.debug(' {0}'.format(s.name))

generate_job_name = os.environ.get('CI_JOB_NAME', None)
parent_pipeline_id = os.environ.get('CI_PIPELINE_ID', None)
# Downstream jobs will "need" (depend on, for both scheduling and
# artifacts, which include spack.lock file) this pipeline generation
# job by both name and pipeline id. If those environment variables
# do not exist, then maybe this is just running in a shell, in which
# case, there is no expectation gitlab will ever run the generated
# pipeline and those environment variables do not matter.
generate_job_name = os.environ.get('CI_JOB_NAME', 'job-does-not-exist')
parent_pipeline_id = os.environ.get('CI_PIPELINE_ID', 'pipeline-does-not-exist')

# Values: "spack_pull_request", "spack_protected_branch", or not set
spack_pipeline_type = os.environ.get('SPACK_PIPELINE_TYPE', None)
is_pr_pipeline = spack_pipeline_type == 'spack_pull_request'

spack_pr_branch = os.environ.get('SPACK_PR_BRANCH', None)
pr_mirror_url = None
if spack_pr_branch:
pr_mirror_url = url_util.join(SPACK_PR_MIRRORS_ROOT_URL,
spack_pr_branch)
spack_buildcache_copy = os.environ.get('SPACK_COPY_BUILDCACHE', None)

if 'mirrors' not in yaml_root or len(yaml_root['mirrors'].values()) < 1:
tty.die('spack ci generate requires an env containing a mirror')
@@ -743,14 +755,25 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
'strip-compilers': False,
})

# Add per-PR mirror (and shared PR mirror) if enabled, as some specs might
# be up to date in one of those and thus not need to be rebuilt.
if pr_mirror_url:
spack.mirror.add(
'ci_pr_mirror', pr_mirror_url, cfg.default_modify_scope())
spack.mirror.add('ci_shared_pr_mirror',
SPACK_SHARED_PR_MIRROR_URL,
cfg.default_modify_scope())
# If a remote mirror override (alternate buildcache destination) was
# specified, add it here in case it has already built hashes we might
# generate.
mirrors_to_check = None
if remote_mirror_override:
if spack_pipeline_type == 'spack_protected_branch':
# Overriding the main mirror in this case might result
# in skipping jobs on a release pipeline because specs are
# up to date in develop. Eventually we want to notice and take
# advantage of this by scheduling a job to copy the spec from
# develop to the release, but until we have that, this makes
# sure we schedule a rebuild job if the spec isn't already in
# override mirror.
mirrors_to_check = {
'override': remote_mirror_override
}
else:
spack.mirror.add(
'ci_pr_mirror', remote_mirror_override, cfg.default_modify_scope())

pipeline_artifacts_dir = artifacts_root
if not pipeline_artifacts_dir:

@@ -825,11 +848,13 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
phase_spec.concretize()
staged_phases[phase_name] = stage_spec_jobs(
concrete_phase_specs,
check_index_only=check_index_only)
check_index_only=check_index_only,
mirrors_to_check=mirrors_to_check)
finally:
# Clean up PR mirror if enabled
if pr_mirror_url:
spack.mirror.remove('ci_pr_mirror', cfg.default_modify_scope())
# Clean up remote mirror override if enabled
if remote_mirror_override:
if spack_pipeline_type != 'spack_protected_branch':
spack.mirror.remove('ci_pr_mirror', cfg.default_modify_scope())

all_job_names = []
output_object = {}

@@ -889,6 +914,14 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,

tags = [tag for tag in runner_attribs['tags']]

if spack_pipeline_type is not None:
# For spack pipelines "public" and "protected" are reserved tags
tags = _remove_reserved_tags(tags)
if spack_pipeline_type == 'spack_protected_branch':
tags.extend(['aws', 'protected'])
elif spack_pipeline_type == 'spack_pull_request':
tags.extend(['public'])

variables = {}
if 'variables' in runner_attribs:
variables.update(runner_attribs['variables'])

@@ -1174,6 +1207,10 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
service_job_config,
cleanup_job)

if 'tags' in cleanup_job:
service_tags = _remove_reserved_tags(cleanup_job['tags'])
cleanup_job['tags'] = service_tags

cleanup_job['stage'] = 'cleanup-temp-storage'
cleanup_job['script'] = [
'spack -d mirror destroy --mirror-url {0}/$CI_PIPELINE_ID'.format(

@@ -1181,9 +1218,74 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
]
cleanup_job['when'] = 'always'
cleanup_job['retry'] = service_job_retries
cleanup_job['interruptible'] = True

output_object['cleanup'] = cleanup_job

if ('signing-job-attributes' in gitlab_ci and
spack_pipeline_type == 'spack_protected_branch'):
# External signing: generate a job to check and sign binary pkgs
stage_names.append('stage-sign-pkgs')
signing_job_config = gitlab_ci['signing-job-attributes']
signing_job = {}

signing_job_attrs_to_copy = [
'image',
'tags',
'variables',
'before_script',
'script',
'after_script',
]

_copy_attributes(signing_job_attrs_to_copy,
signing_job_config,
signing_job)

signing_job_tags = []
if 'tags' in signing_job:
signing_job_tags = _remove_reserved_tags(signing_job['tags'])

for tag in ['aws', 'protected', 'notary']:
if tag not in signing_job_tags:
signing_job_tags.append(tag)
signing_job['tags'] = signing_job_tags

signing_job['stage'] = 'stage-sign-pkgs'
signing_job['when'] = 'always'
signing_job['retry'] = {
'max': 2,
'when': ['always']
}
signing_job['interruptible'] = True

output_object['sign-pkgs'] = signing_job

if spack_buildcache_copy:
# Generate a job to copy the contents from wherever the builds are getting
# pushed to the url specified in the "SPACK_BUILDCACHE_COPY" environment
# variable.
src_url = remote_mirror_override or remote_mirror_url
dest_url = spack_buildcache_copy

stage_names.append('stage-copy-buildcache')
copy_job = {
'stage': 'stage-copy-buildcache',
'tags': ['spack', 'public', 'medium', 'aws', 'x86_64'],
'image': 'ghcr.io/spack/python-aws-bash:0.0.1',
'when': 'on_success',
'interruptible': True,
'retry': service_job_retries,
'script': [
'. ./share/spack/setup-env.sh',
'spack --version',
'aws s3 sync --exclude *index.json* --exclude *pgp* {0} {1}'.format(
src_url, dest_url)
]
}

output_object['copy-mirror'] = copy_job

if rebuild_index_enabled:
# Add a final job to regenerate the index
stage_names.append('stage-rebuild-index')

@@ -1194,9 +1296,13 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
service_job_config,
final_job)

if 'tags' in final_job:
service_tags = _remove_reserved_tags(final_job['tags'])
final_job['tags'] = service_tags

index_target_mirror = mirror_urls[0]
if is_pr_pipeline:
index_target_mirror = pr_mirror_url
if remote_mirror_override:
index_target_mirror = remote_mirror_override

final_job['stage'] = 'stage-rebuild-index'
final_job['script'] = [

@@ -1205,6 +1311,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
]
final_job['when'] = 'always'
final_job['retry'] = service_job_retries
final_job['interruptible'] = True

output_object['rebuild-index'] = final_job

@@ -1237,8 +1344,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
'SPACK_PIPELINE_TYPE': str(spack_pipeline_type)
}

if pr_mirror_url:
output_object['variables']['SPACK_PR_MIRROR_URL'] = pr_mirror_url
if remote_mirror_override:
(output_object['variables']
['SPACK_REMOTE_MIRROR_OVERRIDE']) = remote_mirror_override

spack_stack_name = os.environ.get('SPACK_CI_STACK_NAME', None)
if spack_stack_name:

@@ -8,7 +8,10 @@
import argparse
import os
import re
import shlex
import sys
from textwrap import dedent
from typing import List, Tuple

import ruamel.yaml as yaml
import six

@@ -147,6 +150,58 @@ def get_command(cmd_name):
return getattr(get_module(cmd_name), pname)


class _UnquotedFlags(object):
"""Use a heuristic in `.extract()` to detect whether the user is trying to set
multiple flags like the docker ENV attribute allows (e.g. 'cflags=-Os -pipe').

If the heuristic finds a match (which can be checked with `__bool__()`), a warning
message explaining how to quote multiple flags correctly can be generated with
`.report()`.
"""

flags_arg_pattern = re.compile(
r'^({0})=([^\'"].*)$'.format(
'|'.join(spack.spec.FlagMap.valid_compiler_flags()),
))

def __init__(self, all_unquoted_flag_pairs):
# type: (List[Tuple[re.Match, str]]) -> None
self._flag_pairs = all_unquoted_flag_pairs

def __bool__(self):
# type: () -> bool
return bool(self._flag_pairs)

@classmethod
def extract(cls, sargs):
# type: (str) -> _UnquotedFlags
all_unquoted_flag_pairs = []  # type: List[Tuple[re.Match, str]]
prev_flags_arg = None
for arg in shlex.split(sargs):
if prev_flags_arg is not None:
all_unquoted_flag_pairs.append((prev_flags_arg, arg))
prev_flags_arg = cls.flags_arg_pattern.match(arg)
return cls(all_unquoted_flag_pairs)

def report(self):
# type: () -> str
single_errors = [
'({0}) {1} {2} => {3}'.format(
i + 1, match.group(0), next_arg,
'{0}="{1} {2}"'.format(match.group(1), match.group(2), next_arg),
)
for i, (match, next_arg) in enumerate(self._flag_pairs)
]
return dedent("""\
Some compiler or linker flags were provided without quoting their arguments,
which now causes spack to try to parse the *next* argument as a spec component
such as a variant instead of an additional compiler or linker flag. If the
intent was to set multiple flags, try quoting them together as described below.

Possible flag quotation errors (with the correctly-quoted version after the =>):
{0}""").format('\n'.join(single_errors))

def parse_specs(args, **kwargs):
"""Convenience function for parsing arguments from specs. Handles common
exceptions and dies if there are errors.

@@ -155,29 +210,28 @@ def parse_specs(args, **kwargs):
normalize = kwargs.get('normalize', False)
tests = kwargs.get('tests', False)

sargs = args
if not isinstance(args, six.string_types):
sargs = ' '.join(args)
unquoted_flags = _UnquotedFlags.extract(sargs)

try:
sargs = args
if not isinstance(args, six.string_types):
sargs = ' '.join(spack.util.string.quote(args))
specs = spack.spec.parse(sargs)
for spec in specs:
if concretize:
spec.concretize(tests=tests)  # implies normalize
elif normalize:
spec.normalize(tests=tests)

return specs

except spack.spec.SpecParseError as e:
msg = e.message + "\n" + str(e.string) + "\n"
msg += (e.pos + 2) * " " + "^"
raise spack.error.SpackError(msg)

except spack.error.SpecError as e:

msg = e.message
if e.long_message:
msg += e.long_message
if unquoted_flags:
msg += '\n\n'
msg += unquoted_flags.report()

raise spack.error.SpackError(msg)


@@ -6,7 +6,9 @@

import os.path
import shutil
import tempfile

import llnl.util.filesystem
import llnl.util.tty
import llnl.util.tty.color

@@ -15,6 +17,9 @@
import spack.cmd.common.arguments
import spack.config
import spack.main
import spack.mirror
import spack.spec
import spack.stage
import spack.util.path

description = "manage bootstrap configuration"

@@ -22,6 +27,38 @@
level = "long"


# Tarball to be downloaded if binary packages are requested in a local mirror
BINARY_TARBALL = 'https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.2/bootstrap-buildcache.tar.gz'

#: Subdirectory where to create the mirror
LOCAL_MIRROR_DIR = 'bootstrap_cache'

# Metadata for a generated binary mirror
BINARY_METADATA = {
'type': 'buildcache',
'description': ('Buildcache copied from a public tarball available on Github.'
'The sha256 checksum of binaries is checked before installation.'),
'info': {
'url': os.path.join('..', '..', LOCAL_MIRROR_DIR),
'homepage': 'https://github.com/spack/spack-bootstrap-mirrors',
'releases': 'https://github.com/spack/spack-bootstrap-mirrors/releases',
'tarball': BINARY_TARBALL
}
}

CLINGO_JSON = '$spack/share/spack/bootstrap/github-actions-v0.2/clingo.json'
GNUPG_JSON = '$spack/share/spack/bootstrap/github-actions-v0.2/gnupg.json'

# Metadata for a generated source mirror
SOURCE_METADATA = {
'type': 'install',
'description': 'Mirror with software needed to bootstrap Spack',
'info': {
'url': os.path.join('..', '..', LOCAL_MIRROR_DIR)
}
}


def _add_scope_option(parser):
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar

@@ -67,24 +104,61 @@ def setup_parser(subparser):
)

list = sp.add_parser(
'list', help='list the methods available for bootstrapping'
'list', help='list all the sources of software to bootstrap Spack'
)
_add_scope_option(list)

trust = sp.add_parser(
'trust', help='trust a bootstrapping method'
'trust', help='trust a bootstrapping source'
)
_add_scope_option(trust)
trust.add_argument(
'name', help='name of the method to be trusted'
'name', help='name of the source to be trusted'
)

untrust = sp.add_parser(
'untrust', help='untrust a bootstrapping method'
'untrust', help='untrust a bootstrapping source'
)
_add_scope_option(untrust)
untrust.add_argument(
'name', help='name of the method to be untrusted'
'name', help='name of the source to be untrusted'
)

add = sp.add_parser(
'add', help='add a new source for bootstrapping'
)
_add_scope_option(add)
add.add_argument(
'--trust', action='store_true',
help='trust the source immediately upon addition')
add.add_argument(
'name', help='name of the new source of software'
)
add.add_argument(
'metadata_dir', help='directory where to find metadata files'
)

remove = sp.add_parser(
'remove', help='remove a bootstrapping source'
)
remove.add_argument(
'name', help='name of the source to be removed'
)

mirror = sp.add_parser(
'mirror', help='create a local mirror to bootstrap Spack'
)
mirror.add_argument(
'--binary-packages', action='store_true',
help='download public binaries in the mirror'
)
mirror.add_argument(
'--dev', action='store_true',
help='download dev dependencies too'
)
mirror.add_argument(
metavar='DIRECTORY', dest='root_dir',
help='root directory in which to create the mirror and metadata'
)


@@ -137,10 +211,7 @@ def _root(args):


def _list(args):
sources = spack.config.get(
'bootstrap:sources', default=None, scope=args.scope
)

sources = spack.bootstrap.bootstrapping_sources(scope=args.scope)
if not sources:
llnl.util.tty.msg(
"No method available for bootstrapping Spack's dependencies"

@@ -249,6 +320,121 @@ def _status(args):
print()


def _add(args):
initial_sources = spack.bootstrap.bootstrapping_sources()
names = [s['name'] for s in initial_sources]

# If the name is already used error out
if args.name in names:
msg = 'a source named "{0}" already exist. Please choose a different name'
raise RuntimeError(msg.format(args.name))

# Check that the metadata file exists
metadata_dir = spack.util.path.canonicalize_path(args.metadata_dir)
if not os.path.exists(metadata_dir) or not os.path.isdir(metadata_dir):
raise RuntimeError(
'the directory "{0}" does not exist'.format(args.metadata_dir)
)

file = os.path.join(metadata_dir, 'metadata.yaml')
if not os.path.exists(file):
raise RuntimeError('the file "{0}" does not exist'.format(file))

# Insert the new source as the highest priority one
write_scope = args.scope or spack.config.default_modify_scope(section='bootstrap')
sources = spack.config.get('bootstrap:sources', scope=write_scope) or []
sources = [
{'name': args.name, 'metadata': args.metadata_dir}
] + sources
spack.config.set('bootstrap:sources', sources, scope=write_scope)

msg = 'New bootstrapping source "{0}" added in the "{1}" configuration scope'
llnl.util.tty.msg(msg.format(args.name, write_scope))
if args.trust:
_trust(args)


def _remove(args):
initial_sources = spack.bootstrap.bootstrapping_sources()
names = [s['name'] for s in initial_sources]
if args.name not in names:
msg = ('cannot find any bootstrapping source named "{0}". '
'Run `spack bootstrap list` to see available sources.')
raise RuntimeError(msg.format(args.name))

for current_scope in spack.config.scopes():
sources = spack.config.get('bootstrap:sources', scope=current_scope) or []
if args.name in [s['name'] for s in sources]:
sources = [s for s in sources if s['name'] != args.name]
spack.config.set('bootstrap:sources', sources, scope=current_scope)
msg = ('Removed the bootstrapping source named "{0}" from the '
'"{1}" configuration scope.')
llnl.util.tty.msg(msg.format(args.name, current_scope))
trusted = spack.config.get('bootstrap:trusted', scope=current_scope) or []
if args.name in trusted:
trusted.pop(args.name)
spack.config.set('bootstrap:trusted', trusted, scope=current_scope)
msg = 'Deleting information on "{0}" from list of trusted sources'
llnl.util.tty.msg(msg.format(args.name))


def _mirror(args):
mirror_dir = spack.util.path.canonicalize_path(
os.path.join(args.root_dir, LOCAL_MIRROR_DIR)
)

# TODO: Here we are adding gnuconfig manually, but this can be fixed
# TODO: as soon as we have an option to add to a mirror all the possible
# TODO: dependencies of a spec
root_specs = spack.bootstrap.all_root_specs(development=args.dev) + ['gnuconfig']
for spec_str in root_specs:
msg = 'Adding "{0}" and dependencies to the mirror at {1}'
llnl.util.tty.msg(msg.format(spec_str, mirror_dir))
# Suppress tty from the call below for terser messages
llnl.util.tty.set_msg_enabled(False)
spec = spack.spec.Spec(spec_str).concretized()
for node in spec.traverse():
spack.mirror.create(mirror_dir, [node])
llnl.util.tty.set_msg_enabled(True)

if args.binary_packages:
msg = 'Adding binary packages from "{0}" to the mirror at {1}'
llnl.util.tty.msg(msg.format(BINARY_TARBALL, mirror_dir))
llnl.util.tty.set_msg_enabled(False)
stage = spack.stage.Stage(BINARY_TARBALL, path=tempfile.mkdtemp())
stage.create()
stage.fetch()
stage.expand_archive()
build_cache_dir = os.path.join(stage.source_path, 'build_cache')
shutil.move(build_cache_dir, mirror_dir)
llnl.util.tty.set_msg_enabled(True)

def write_metadata(subdir, metadata):
metadata_rel_dir = os.path.join('metadata', subdir)
metadata_yaml = os.path.join(
args.root_dir, metadata_rel_dir, 'metadata.yaml'
)
llnl.util.filesystem.mkdirp(os.path.dirname(metadata_yaml))
with open(metadata_yaml, mode='w') as f:
spack.util.spack_yaml.dump(metadata, stream=f)
return os.path.dirname(metadata_yaml), metadata_rel_dir

instructions = ('\nTo register the mirror on the platform where it\'s supposed '
'to be used, move "{0}" to its final location and run the '
'following command(s):\n\n').format(args.root_dir)
cmd = ' % spack bootstrap add --trust {0} <final-path>/{1}\n'
_, rel_directory = write_metadata(subdir='sources', metadata=SOURCE_METADATA)
instructions += cmd.format('local-sources', rel_directory)
if args.binary_packages:
abs_directory, rel_directory = write_metadata(
subdir='binaries', metadata=BINARY_METADATA
)
shutil.copy(spack.util.path.canonicalize_path(CLINGO_JSON), abs_directory)
shutil.copy(spack.util.path.canonicalize_path(GNUPG_JSON), abs_directory)
instructions += cmd.format('local-binaries', rel_directory)
print(instructions)
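For orientation, with a hypothetical root directory of /tmp/bootstrap-mirror the printed instructions would read roughly as follows; the second command appears only when --binary-packages is given, since write_metadata() returns 'metadata/sources' and 'metadata/binaries' as the relative directories:

To register the mirror on the platform where it's supposed to be used, move "/tmp/bootstrap-mirror" to its final location and run the following command(s):

  % spack bootstrap add --trust local-sources <final-path>/metadata/sources
  % spack bootstrap add --trust local-binaries <final-path>/metadata/binaries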

def bootstrap(parser, args):
callbacks = {
'status': _status,

@@ -258,6 +444,9 @@ def bootstrap(parser, args):
'root': _root,
'list': _list,
'trust': _trust,
'untrust': _untrust
'untrust': _untrust,
'add': _add,
'remove': _remove,
'mirror': _mirror
}
callbacks[args.subcommand](args)

@@ -478,11 +478,12 @@ def save_specfile_fn(args):
if args.root_specfile:
with open(args.root_specfile) as fd:
root_spec_as_json = fd.read()
spec_format = 'yaml' if args.root_specfile.endswith('yaml') else 'json'
else:
root_spec = Spec(args.root_spec)
root_spec.concretize()
root_spec_as_json = root_spec.to_json(hash=ht.dag_hash)
spec_format = 'yaml' if args.root_specfile.endswith('yaml') else 'json'
spec_format = 'json'
save_dependency_specfiles(
root_spec_as_json, args.specfile_dir, args.specs.split(), spec_format)


@@ -14,7 +14,7 @@
import spack.repo
import spack.stage
import spack.util.crypto
from spack.package import preferred_version
from spack.package_base import preferred_version
from spack.util.naming import valid_fully_qualified_module_name
from spack.version import Version, ver


@@ -64,6 +64,11 @@ def setup_parser(subparser):
'--dependencies', action='store_true', default=False,
help="(Experimental) disable DAG scheduling; use "
' "plain" dependencies.')
generate.add_argument(
'--buildcache-destination', default=None,
help="Override the mirror configured in the environment (spack.yaml) " +
"in order to push binaries from the generated pipeline to a " +
"different location.")
prune_group = generate.add_mutually_exclusive_group()
prune_group.add_argument(
'--prune-dag', action='store_true', dest='prune_dag',

@@ -127,6 +132,7 @@ def ci_generate(args):
prune_dag = args.prune_dag
index_only = args.index_only
artifacts_root = args.artifacts_root
buildcache_destination = args.buildcache_destination

if not output_file:
output_file = os.path.abspath(".gitlab-ci.yml")

@@ -140,7 +146,8 @@ def ci_generate(args):
spack_ci.generate_gitlab_ci_yaml(
env, True, output_file, prune_dag=prune_dag,
check_index_only=index_only, run_optimizer=run_optimizer,
use_dependencies=use_dependencies, artifacts_root=artifacts_root)
use_dependencies=use_dependencies, artifacts_root=artifacts_root,
remote_mirror_override=buildcache_destination)

if copy_yaml_to:
copy_to_dir = os.path.dirname(copy_yaml_to)

@@ -180,6 +187,9 @@ def ci_rebuild(args):
if not gitlab_ci:
tty.die('spack ci rebuild requires an env containing gitlab-ci cfg')

tty.msg('SPACK_BUILDCACHE_DESTINATION={0}'.format(
os.environ.get('SPACK_BUILDCACHE_DESTINATION', None)))

# Grab the environment variables we need. These either come from the
# pipeline generation step ("spack ci generate"), where they were written
# out as variables, or else provided by GitLab itself.

@@ -196,7 +206,7 @@ def ci_rebuild(args):
compiler_action = get_env_var('SPACK_COMPILER_ACTION')
cdash_build_name = get_env_var('SPACK_CDASH_BUILD_NAME')
spack_pipeline_type = get_env_var('SPACK_PIPELINE_TYPE')
pr_mirror_url = get_env_var('SPACK_PR_MIRROR_URL')
remote_mirror_override = get_env_var('SPACK_REMOTE_MIRROR_OVERRIDE')
remote_mirror_url = get_env_var('SPACK_REMOTE_MIRROR_URL')

# Construct absolute paths relative to current $CI_PROJECT_DIR

@@ -244,6 +254,10 @@ def ci_rebuild(args):
tty.debug('Pipeline type - PR: {0}, develop: {1}'.format(
spack_is_pr_pipeline, spack_is_develop_pipeline))

# If no override url exists, then just push binary package to the
# normal remote mirror url.
buildcache_mirror_url = remote_mirror_override or remote_mirror_url

# Figure out what is our temporary storage mirror: Is it artifacts
# buildcache? Or temporary-storage-url-prefix? In some cases we need to
# force something or pipelines might not have a way to propagate build

@@ -373,7 +387,24 @@ def ci_rebuild(args):
cfg.default_modify_scope())

# Check configured mirrors for a built spec with a matching hash
matches = bindist.get_mirrors_for_spec(job_spec, index_only=False)
mirrors_to_check = None
if remote_mirror_override and spack_pipeline_type == 'spack_protected_branch':
# Passing "mirrors_to_check" below means we *only* look in the override
# mirror to see if we should skip building, which is what we want.
mirrors_to_check = {
'override': remote_mirror_override
}

# Adding this mirror to the list of configured mirrors means dependencies
# could be installed from either the override mirror or any other configured
# mirror (e.g. remote_mirror_url which is defined in the environment or
# pipeline_mirror_url), which is also what we want.
spack.mirror.add('mirror_override',
remote_mirror_override,
cfg.default_modify_scope())

matches = bindist.get_mirrors_for_spec(
job_spec, mirrors_to_check=mirrors_to_check, index_only=False)

if matches:
# Got a hash match on at least one configured mirror. All

@@ -517,13 +548,6 @@ def ci_rebuild(args):
# any logs from the staging directory to artifacts now
spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)

# Create buildcache on remote mirror, either on pr-specific mirror or
# on the main mirror defined in the gitlab-enabled spack environment
if spack_is_pr_pipeline:
buildcache_mirror_url = pr_mirror_url
else:
buildcache_mirror_url = remote_mirror_url

# If the install succeeded, create a buildcache entry for this job spec
# and push it to one or more mirrors. If the install did not succeed,
# print out some instructions on how to reproduce this build failure

@@ -57,7 +57,7 @@
# See the Spack documentation for more information on packaging.
# ----------------------------------------------------------------------------

from spack import *
from spack.package import *


class {class_name}({base_class_name}):

@@ -826,7 +826,7 @@ def get_versions(args, name):
spack.util.url.require_url_format(args.url)
if args.url.startswith('file://'):
valid_url = False # No point in spidering these
except ValueError:
except (ValueError, TypeError):
valid_url = False

if args.url is not None and args.template != 'bundle' and valid_url:

@@ -11,7 +11,7 @@
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
import spack.package
import spack.package_base
import spack.repo
import spack.store

@@ -57,7 +57,7 @@ def dependencies(parser, args):

else:
spec = specs[0]
dependencies = spack.package.possible_dependencies(
dependencies = spack.package_base.possible_dependencies(
spec,
transitive=args.transitive,
expand_virtuals=args.expand_virtuals,

@@ -125,7 +125,7 @@ def external_find(args):

# If the list of packages is empty, search for every possible package
if not args.tags and not packages_to_check:
packages_to_check = spack.repo.path.all_packages()
packages_to_check = list(spack.repo.path.all_packages())

detected_packages = spack.detection.by_executable(
packages_to_check, path_hints=args.path)
@@ -177,7 +177,10 @@ def _collect_and_consume_cray_manifest_files(

for directory in manifest_dirs:
for fname in os.listdir(directory):
manifest_files.append(os.path.join(directory, fname))
if fname.endswith('.json'):
fpath = os.path.join(directory, fname)
tty.debug("Adding manifest file: {0}".format(fpath))
manifest_files.append(os.path.join(directory, fpath))

if not manifest_files:
raise NoManifestFileError(

@@ -185,6 +188,7 @@ def _collect_and_consume_cray_manifest_files(
.format(cray_manifest.default_path))

for path in manifest_files:
tty.debug("Reading manifest file: " + path)
try:
cray_manifest.read(path, not dry_run)
except (spack.compilers.UnknownCompilerError, spack.error.SpackError) as e:

@@ -200,7 +204,7 @@ def external_list(args):
list(spack.repo.path.all_packages())
# Print all the detectable packages
tty.msg("Detectable packages per repository")
for namespace, pkgs in sorted(spack.package.detectable_packages.items()):
for namespace, pkgs in sorted(spack.package_base.detectable_packages.items()):
print("Repository:", namespace)
colify.colify(pkgs, indent=4, output=sys.stdout)


@@ -18,7 +18,7 @@
import spack.fetch_strategy as fs
import spack.repo
import spack.spec
from spack.package import has_test_method, preferred_version
from spack.package_base import has_test_method, preferred_version

description = 'get detailed information on a particular package'
section = 'basic'
@@ -269,14 +269,14 @@ def print_tests(pkg):
names = []
pkg_cls = pkg if inspect.isclass(pkg) else pkg.__class__
if has_test_method(pkg_cls):
pkg_base = spack.package.PackageBase
pkg_base = spack.package_base.PackageBase
test_pkgs = [str(cls.test) for cls in inspect.getmro(pkg_cls) if
issubclass(cls, pkg_base) and cls.test != pkg_base.test]
test_pkgs = list(set(test_pkgs))
names.extend([(test.split()[1]).lower() for test in test_pkgs])

# TODO Refactor START
# Use code from package.py's test_process IF this functionality is
# Use code from package_base.py's test_process IF this functionality is
# accepted.
v_names = list(set([vspec.name for vspec in pkg.virtuals_provided]))


@@ -302,7 +302,7 @@ def install(parser, args, **kwargs):
)

reporter = spack.report.collect_info(
spack.package.PackageInstaller, '_install_task', args.log_format, args)
spack.package_base.PackageInstaller, '_install_task', args.log_format, args)
if args.log_file:
reporter.filename = args.log_file


@@ -12,7 +12,7 @@
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.error
import spack.package
import spack.package_base
import spack.repo
import spack.store
from spack.database import InstallStatuses

@@ -15,8 +15,10 @@
import spack
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment
import spack.hash_types as ht
import spack.package
import spack.package_base
import spack.solver.asp as asp

description = "concretize a specs using an ASP solver"
@@ -74,6 +76,51 @@ def setup_parser(subparser):
spack.cmd.common.arguments.add_concretizer_args(subparser)


def _process_result(result, show, required_format, kwargs):
result.raise_if_unsat()
opt, _, _ = min(result.answers)
if ("opt" in show) and (not required_format):
tty.msg("Best of %d considered solutions." % result.nmodels)
tty.msg("Optimization Criteria:")

maxlen = max(len(s[2]) for s in result.criteria)
color.cprint(
"@*{ Priority Criterion %sInstalled ToBuild}" % ((maxlen - 10) * " ")
)

fmt = " @K{%%-8d} %%-%ds%%9s %%7s" % maxlen
for i, (installed_cost, build_cost, name) in enumerate(result.criteria, 1):
color.cprint(
fmt % (
i,
name,
"-" if build_cost is None else installed_cost,
installed_cost if build_cost is None else build_cost,
)
)
print()

# dump the solutions as concretized specs
if 'solutions' in show:
for spec in result.specs:
# With -y, just print YAML to output.
if required_format == 'yaml':
# use write because to_yaml already has a newline.
sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
elif required_format == 'json':
sys.stdout.write(spec.to_json(hash=ht.dag_hash))
else:
sys.stdout.write(
spec.tree(color=sys.stdout.isatty(), **kwargs))
print()

if result.unsolved_specs and "solutions" in show:
tty.msg("Unsolved specs")
for spec in result.unsolved_specs:
print(spec)
print()


def solve(parser, args):
# these are the same options as `spack spec`
name_fmt = '{namespace}.{name}' if args.namespaces else '{name}'
@@ -102,58 +149,42 @@ def solve(parser, args):
if models < 0:
tty.die("model count must be non-negative: %d")

specs = spack.cmd.parse_specs(args.specs)
# Format required for the output (JSON, YAML or None)
required_format = args.format

# If we have an active environment, pick the specs from there
env = spack.environment.active_environment()
if env and args.specs:
msg = "cannot give explicit specs when an environment is active"
raise RuntimeError(msg)

specs = list(env.user_specs) if env else spack.cmd.parse_specs(args.specs)

# set up solver parameters
# Note: reuse and other concretizer prefs are passed as configuration
solver = asp.Solver()
output = sys.stdout if "asp" in show else None
result = solver.solve(
specs,
out=output,
models=models,
timers=args.timers,
stats=args.stats,
setup_only=(set(show) == {'asp'})
)
if 'solutions' not in show:
return

# die if no solution was found
result.raise_if_unsat()

# show the solutions as concretized specs
if 'solutions' in show:
opt, _, _ = min(result.answers)

if ("opt" in show) and (not args.format):
tty.msg("Best of %d considered solutions." % result.nmodels)
tty.msg("Optimization Criteria:")

maxlen = max(len(s[2]) for s in result.criteria)
color.cprint(
"@*{ Priority Criterion %sInstalled ToBuild}" % ((maxlen - 10) * " ")
)

fmt = " @K{%%-8d} %%-%ds%%9s %%7s" % maxlen
for i, (idx, build_idx, name) in enumerate(result.criteria, 1):
color.cprint(
fmt % (
i,
name,
"-" if build_idx is None else opt[idx],
opt[idx] if build_idx is None else opt[build_idx],
)
)
print()

for spec in result.specs:
# With -y, just print YAML to output.
if args.format == 'yaml':
# use write because to_yaml already has a newline.
sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
elif args.format == 'json':
sys.stdout.write(spec.to_json(hash=ht.dag_hash))
setup_only = set(show) == {'asp'}
unify = spack.config.get('concretizer:unify')
if unify != 'when_possible':
# set up solver parameters
# Note: reuse and other concretizer prefs are passed as configuration
result = solver.solve(
specs,
out=output,
models=models,
timers=args.timers,
stats=args.stats,
setup_only=setup_only
)
if not setup_only:
_process_result(result, show, required_format, kwargs)
else:
for idx, result in enumerate(solver.solve_in_rounds(
specs, out=output, models=models, timers=args.timers, stats=args.stats
)):
if "solutions" in show:
tty.msg("ROUND {0}".format(idx))
tty.msg("")
else:
sys.stdout.write(
spec.tree(color=sys.stdout.isatty(), **kwargs))
print("% END ROUND {0}\n".format(idx))
if not setup_only:
_process_result(result, show, required_format, kwargs)

@@ -80,7 +80,8 @@ def spec(parser, args):
# Use command line specified specs, otherwise try to use environment specs.
if args.specs:
input_specs = spack.cmd.parse_specs(args.specs)
specs = [(s, s.concretized()) for s in input_specs]
concretized_specs = spack.cmd.parse_specs(args.specs, concretize=True)
specs = list(zip(input_specs, concretized_specs))
else:
env = ev.active_environment()
if env:

@@ -27,12 +27,6 @@ def setup_parser(subparser):


def stage(parser, args):
# We temporarily modify the working directory when setting up a stage, so we need to
# convert this to an absolute path here in order for it to remain valid later.
custom_path = os.path.abspath(args.path) if args.path else None
if custom_path:
spack.stage.create_stage_root(custom_path)

if not args.specs:
env = ev.active_environment()
if env:

@@ -54,6 +48,10 @@ def stage(parser, args):

specs = spack.cmd.parse_specs(args.specs, concretize=False)

# We temporarily modify the working directory when setting up a stage, so we need to
# convert this to an absolute path here in order for it to remain valid later.
custom_path = os.path.abspath(args.path) if args.path else None

# prevent multiple specs from extracting in the same folder
if len(specs) > 1 and custom_path:
tty.die("`--path` requires a single spec, but multiple were provided")

@@ -65,7 +65,7 @@ def is_package(f):
packages, since we allow `from spack import *` and poking globals
into packages.
"""
return f.startswith("var/spack/repos/")
return f.startswith("var/spack/repos/") and f.endswith('package.py')


#: decorator for adding tools to the list

@@ -94,16 +94,16 @@ def changed_files(base="develop", untracked=True, all_files=False, root=None):
git = which("git", required=True)

# ensure base is in the repo
git("show-ref", "--verify", "--quiet", "refs/heads/%s" % base,
fail_on_error=False)
base_sha = git("rev-parse", "--quiet", "--verify", "--revs-only", base,
fail_on_error=False, output=str)
if git.returncode != 0:
tty.die(
"This repository does not have a '%s' branch." % base,
"This repository does not have a '%s' revision." % base,
"spack style needs this branch to determine which files changed.",
"Ensure that '%s' exists, or specify files to check explicitly." % base
)

range = "{0}...".format(base)
range = "{0}...".format(base_sha.strip())

git_args = [
# Add changed files committed since branching off of develop

@@ -236,7 +236,7 @@ def translate(match):
continue
if not args.root_relative and re_obj:
line = re_obj.sub(translate, line)
print(" " + line)
print(line)


def print_style_header(file_list, args):

@@ -290,20 +290,26 @@ def run_flake8(flake8_cmd, file_list, args):
@tool("mypy")
def run_mypy(mypy_cmd, file_list, args):
# always run with config from running spack prefix
mypy_args = [
common_mypy_args = [
"--config-file", os.path.join(spack.paths.prefix, "pyproject.toml"),
"--package", "spack",
"--package", "llnl",
"--show-error-codes",
]
# not yet, need other updates to enable this
# if any([is_package(f) for f in file_list]):
# mypy_args.extend(["--package", "packages"])
mypy_arg_sets = [common_mypy_args + [
"--package", "spack",
"--package", "llnl",
]]
if 'SPACK_MYPY_CHECK_PACKAGES' in os.environ:
mypy_arg_sets.append(common_mypy_args + [
'--package', 'packages',
'--disable-error-code', 'no-redef',
])

output = mypy_cmd(*mypy_args, fail_on_error=False, output=str)
returncode = mypy_cmd.returncode
returncode = 0
for mypy_args in mypy_arg_sets:
output = mypy_cmd(*mypy_args, fail_on_error=False, output=str)
returncode |= mypy_cmd.returncode

rewrite_and_print_output(output, args)
rewrite_and_print_output(output, args)

print_tool_result("mypy", returncode)
return returncode
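# Note on the aggregation above (illustrative, not part of the diff): OR-ing
# exit statuses keeps a single failure "sticky" while every arg set still runs.
# A standalone sketch of the pattern:
combined = 0
for code in (0, 1, 0):  # hypothetical per-invocation exit codes
    combined |= code
assert combined == 1  # non-zero as soon as any run failed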
@@ -318,16 +324,29 @@ def run_isort(isort_cmd, file_list, args):

pat = re.compile("ERROR: (.*) Imports are incorrectly sorted")
replacement = "ERROR: {0} Imports are incorrectly sorted"
returncode = 0
for chunk in grouper(file_list, 100):
packed_args = isort_args + tuple(chunk)
output = isort_cmd(*packed_args, fail_on_error=False, output=str, error=str)
returncode |= isort_cmd.returncode
returncode = [0]

rewrite_and_print_output(output, args, pat, replacement)
def process_files(file_list, is_args):
for chunk in grouper(file_list, 100):
packed_args = is_args + tuple(chunk)
output = isort_cmd(*packed_args, fail_on_error=False, output=str, error=str)
returncode[0] |= isort_cmd.returncode

print_tool_result("isort", returncode)
return returncode
rewrite_and_print_output(output, args, pat, replacement)

packages_isort_args = ('--rm', 'spack', '--rm', 'spack.pkgkit', '--rm',
'spack.package_defs', '-a', 'from spack.package import *')
packages_isort_args = packages_isort_args + isort_args

# packages
process_files(filter(is_package, file_list),
packages_isort_args)
# non-packages
process_files(filter(lambda f: not is_package(f), file_list),
isort_args)

print_tool_result("isort", returncode[0])
return returncode[0]
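The switch from a plain integer to the one-element list `returncode = [0]` is what lets the nested process_files() helper accumulate exit codes: this code base still ran under Python 2, which has no `nonlocal`, so a nested function cannot rebind an outer local but can mutate one. A minimal sketch of the idiom (hypothetical names):

def outer():
    rc = [0]  # mutable cell standing in for a `nonlocal` variable

    def accumulate(code):
        rc[0] |= code  # mutation is visible in the enclosing scope

    accumulate(2)
    accumulate(1)
    return rc[0]

print(outer())  # prints 3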
|
||||
|
||||
|
||||
@tool("black")
|
||||
|
||||
@@ -20,7 +20,7 @@
|
||||
import spack.cmd.common.arguments as arguments
|
||||
import spack.environment as ev
|
||||
import spack.install_test
|
||||
import spack.package
|
||||
import spack.package_base
|
||||
import spack.repo
|
||||
import spack.report
|
||||
|
||||
@@ -189,7 +189,7 @@ def test_run(args):
|
||||
# Set up reporter
|
||||
setattr(args, 'package', [s.format() for s in test_suite.specs])
|
||||
reporter = spack.report.collect_info(
|
||||
spack.package.PackageBase, 'do_test', args.log_format, args)
|
||||
spack.package_base.PackageBase, 'do_test', args.log_format, args)
|
||||
if not reporter.filename:
|
||||
if args.log_file:
|
||||
if os.path.isabs(args.log_file):
|
||||
@@ -217,7 +217,7 @@ def test_list(args):
|
||||
else set()
|
||||
|
||||
def has_test_and_tags(pkg_class):
|
||||
return spack.package.has_test_method(pkg_class) and \
|
||||
return spack.package_base.has_test_method(pkg_class) and \
|
||||
(not args.tag or pkg_class.name in tagged)
|
||||
|
||||
if args.list_all:
|
||||
|
||||
@@ -24,7 +24,7 @@
|
||||
|
||||
|
||||
# tutorial configuration parameters
|
||||
tutorial_branch = "releases/v0.17"
|
||||
tutorial_branch = "releases/v0.18"
|
||||
tutorial_mirror = "file:///mirror"
|
||||
tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")
|
||||
|
||||
|
||||
@@ -15,7 +15,7 @@
|
||||
import spack.cmd.common.arguments as arguments
|
||||
import spack.environment as ev
|
||||
import spack.error
|
||||
import spack.package
|
||||
import spack.package_base
|
||||
import spack.repo
|
||||
import spack.store
|
||||
from spack.database import InstallStatuses
|
||||
@@ -221,7 +221,7 @@ def do_uninstall(env, specs, force):
|
||||
except spack.repo.UnknownEntityError:
|
||||
# The package.py file has gone away -- but still
|
||||
# want to uninstall.
|
||||
spack.package.Package.uninstall_by_spec(item, force=True)
|
||||
spack.package_base.Package.uninstall_by_spec(item, force=True)
|
||||
|
||||
# A package is ready to be uninstalled when nothing else references it,
|
||||
# unless we are requested to force uninstall it.
|
||||
@@ -235,7 +235,7 @@ def is_ready(dag_hash):
|
||||
|
||||
# If this spec is only used as a build dependency, we can uninstall
|
||||
return all(
|
||||
dspec.deptypes == ("build",)
|
||||
dspec.deptypes == ("build",) or not dspec.parent.installed
|
||||
for dspec in record.spec.edges_from_dependents()
|
||||
)
|
||||
|
||||
|
||||
@@ -422,7 +422,7 @@ def url_list_parsing(args, urls, url, pkg):
|
||||
urls (set): List of URLs that have already been added
|
||||
url (str or None): A URL to potentially add to ``urls`` depending on
|
||||
``args``
|
||||
pkg (spack.package.PackageBase): The Spack package
|
||||
pkg (spack.package_base.PackageBase): The Spack package
|
||||
|
||||
Returns:
|
||||
set: The updated set of ``urls``
|
||||
@@ -470,7 +470,7 @@ def name_parsed_correctly(pkg, name):
|
||||
"""Determine if the name of a package was correctly parsed.
|
||||
|
||||
Args:
|
||||
pkg (spack.package.PackageBase): The Spack package
|
||||
pkg (spack.package_base.PackageBase): The Spack package
|
||||
name (str): The name that was extracted from the URL
|
||||
|
||||
Returns:
|
||||
@@ -487,7 +487,7 @@ def version_parsed_correctly(pkg, version):
|
||||
"""Determine if the version of a package was correctly parsed.
|
||||
|
||||
Args:
|
||||
pkg (spack.package.PackageBase): The Spack package
|
||||
pkg (spack.package_base.PackageBase): The Spack package
|
||||
version (str): The version that was extracted from the URL
|
||||
|
||||
Returns:
|
||||
|
||||
@@ -766,7 +766,8 @@ def name_matches(name, name_list):
|
||||
toolchains.add(compiler_cls.__name__)
|
||||
|
||||
if len(toolchains) > 1:
|
||||
if toolchains == set(['Clang', 'AppleClang', 'Aocc']):
|
||||
if toolchains == set(['Clang', 'AppleClang', 'Aocc']) or \
|
||||
toolchains == set(['Dpcpp', 'Oneapi']):
|
||||
return False
|
||||
tty.debug("[TOOLCHAINS] {0}".format(toolchains))
|
||||
return True
|
||||
|
||||
@@ -88,7 +88,7 @@
|
||||
|
||||
#: Path to the default configuration
|
||||
configuration_defaults_path = (
|
||||
'defaults', os.path.join(spack.paths.etc_path, 'spack', 'defaults')
|
||||
'defaults', os.path.join(spack.paths.etc_path, 'defaults')
|
||||
)
|
||||
|
||||
#: Hard-coded default values for some key configuration options.
|
||||
@@ -104,12 +104,13 @@
|
||||
'build_jobs': min(16, cpus_available()),
|
||||
'build_stage': '$tempdir/spack-stage',
|
||||
'concretizer': 'clingo',
|
||||
'license_dir': spack.paths.default_license_dir,
|
||||
}
|
||||
}
|
||||
|
||||
#: metavar to use for commands that accept scopes
|
||||
#: this is shorter and more readable than listing all choices
|
||||
scopes_metavar = '{defaults,system,site,user}[/PLATFORM]'
|
||||
scopes_metavar = '{defaults,system,site,user}[/PLATFORM] or env:ENVIRONMENT'
|
||||
|
||||
#: Base name for the (internal) overrides scope.
|
||||
overrides_base_name = 'overrides-'
|
||||
@@ -815,7 +816,7 @@ def _config():
|
||||
# Site configuration is per spack instance, for sites or projects
|
||||
# No site-level configs should be checked into spack by default.
|
||||
configuration_paths.append(
|
||||
('site', os.path.join(spack.paths.etc_path, 'spack')),
|
||||
('site', os.path.join(spack.paths.etc_path)),
|
||||
)
|
||||
|
||||
# User configuration can override both spack defaults and site config
|
||||
|
||||
@@ -20,6 +20,7 @@
|
||||
|
||||
compiler_name_translation = {
|
||||
'nvidia': 'nvhpc',
|
||||
'rocm': 'rocmcc',
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -356,10 +356,10 @@ def __init__(self, root, db_dir=None, upstream_dbs=None,
         self.prefix_fail_path = os.path.join(self._db_dir, 'prefix_failures')

         # Create needed directories and files
-        if not os.path.exists(self._db_dir):
+        if not is_upstream and not os.path.exists(self._db_dir):
             fs.mkdirp(self._db_dir)

-        if not os.path.exists(self._failure_dir) and not is_upstream:
+        if not is_upstream and not os.path.exists(self._failure_dir):
             fs.mkdirp(self._failure_dir)

         self.is_upstream = is_upstream
@@ -1064,9 +1064,7 @@ def _read(self):
             self._state_is_inconsistent = False
             return
         elif self.is_upstream:
-            raise UpstreamDatabaseLockingError(
-                "No database index file is present, and upstream"
-                " databases cannot generate an index file")
+            tty.warn('upstream not found: {0}'.format(self._index_path))

     def _add(
             self,

@@ -240,7 +240,7 @@ def compute_windows_program_path_for_package(pkg):
     program files location, return list of best guesses

     Args:
-        pkg (spack.package.Package): package for which
+        pkg (spack.package_base.Package): package for which
             Program Files location is to be computed
     """
     if not is_windows:

@@ -21,6 +21,7 @@
 from llnl.util.lang import dedupe
 from llnl.util.symlink import symlink

+import spack.binary_distribution
 import spack.bootstrap
 import spack.compilers
 import spack.concretize
@@ -79,8 +80,9 @@
 env_subdir_name = '.spack-env'


-#: default spack.yaml file to put in new environments
-default_manifest_yaml = """\
+def default_manifest_yaml():
+    """default spack.yaml file to put in new environments"""
+    return """\
 # This is a Spack Environment file.
 #
 # It describes a set of packages to be installed, along with
@@ -89,7 +91,11 @@
   # add package specs to the `specs` list
   specs: []
   view: true
-"""
+  concretizer:
+    unify: {}
+""".format('true' if spack.config.get('concretizer:unify') else 'false')


 #: regex for validating enviroment names
 valid_environment_name_re = r'^\w[\w-]*$'

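Note: turning `default_manifest_yaml` from a module-level string into a function means the `concretizer:unify` default is read when an environment is created, not once at import time. A runnable sketch of the idea, with a stub standing in for `spack.config.get` (the stub and its values are assumptions):

    # Minimal standalone sketch; `get_config` stands in for spack.config.get.
    def get_config(key, default=False):
        return {'concretizer:unify': True}.get(key, default)

    def default_manifest_yaml():
        """default spack.yaml file to put in new environments"""
        return """\
    spack:
      specs: []
      view: true
      concretizer:
        unify: {}
    """.format('true' if get_config('concretizer:unify') else 'false')

    print(default_manifest_yaml())  # reflects the config at call time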
@@ -623,7 +629,7 @@ def __init__(self, path, init_file=None, with_view=None, keep_relative=False):

         # This attribute will be set properly from configuration
         # during concretization
-        self.concretization = None
+        self.unify = None
         self.clear()

         if init_file:
@@ -632,11 +638,11 @@ def __init__(self, path, init_file=None, with_view=None, keep_relative=False):
             # the init file.
             with fs.open_if_filename(init_file) as f:
                 if hasattr(f, 'name') and f.name.endswith('.lock'):
-                    self._read_manifest(default_manifest_yaml)
+                    self._read_manifest(default_manifest_yaml())
                     self._read_lockfile(f)
                     self._set_user_specs_from_lockfile()
                 else:
-                    self._read_manifest(f, raw_yaml=default_manifest_yaml)
+                    self._read_manifest(f, raw_yaml=default_manifest_yaml())

         # Rewrite relative develop paths when initializing a new
         # environment in a different location from the spack.yaml file.
@@ -700,7 +706,7 @@ def _read(self):
         default_manifest = not os.path.exists(self.manifest_path)
         if default_manifest:
             # No manifest, use default yaml
-            self._read_manifest(default_manifest_yaml)
+            self._read_manifest(default_manifest_yaml())
         else:
             with open(self.manifest_path) as f:
                 self._read_manifest(f)
@@ -766,8 +772,15 @@ def _read_manifest(self, f, raw_yaml=None):
         self.views = {}
         # Retrieve the current concretization strategy
         configuration = config_dict(self.yaml)
-        # default concretization to separately
-        self.concretization = configuration.get('concretization', 'separately')
+
+        # Let `concretization` overrule `concretize:unify` config for now,
+        # but use a translation table to have internally a representation
+        # as if we were using the new configuration
+        translation = {'separately': False, 'together': True}
+        try:
+            self.unify = translation[configuration['concretization']]
+        except KeyError:
+            self.unify = spack.config.get('concretizer:unify', False)

         # Retrieve dev-build packages:
         self.dev_specs = configuration.get('develop', {})
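Note: the legacy `concretization` key still wins when it is present; otherwise the new `concretizer:unify` setting is consulted. A small runnable sketch of that precedence (plain dicts stand in for the parsed manifest and the global config):

    def resolve_unify(configuration, global_unify=False):
        # Legacy 'concretization' wins when present; otherwise fall back
        # to the (assumed) global concretizer:unify setting.
        translation = {'separately': False, 'together': True}
        try:
            return translation[configuration['concretization']]
        except KeyError:
            return global_unify

    assert resolve_unify({'concretization': 'together'}) is True
    assert resolve_unify({}, global_unify='when_possible') == 'when_possible'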
@@ -1148,14 +1161,44 @@ def concretize(self, force=False, tests=False):
             self.specs_by_hash = {}

         # Pick the right concretization strategy
-        if self.concretization == 'together':
+        if self.unify == 'when_possible':
+            return self._concretize_together_where_possible(tests=tests)
+
+        if self.unify is True:
             return self._concretize_together(tests=tests)

-        if self.concretization == 'separately':
+        if self.unify is False:
             return self._concretize_separately(tests=tests)

         msg = 'concretization strategy not implemented [{0}]'
-        raise SpackEnvironmentError(msg.format(self.concretization))
+        raise SpackEnvironmentError(msg.format(self.unify))
+
+    def _concretize_together_where_possible(self, tests=False):
+        # Avoid cyclic dependency
+        import spack.solver.asp
+
+        # Exit early if the set of concretized specs is the set of user specs
+        user_specs_did_not_change = not bool(
+            set(self.user_specs) - set(self.concretized_user_specs)
+        )
+        if user_specs_did_not_change:
+            return []
+
+        # Proceed with concretization
+        self.concretized_user_specs = []
+        self.concretized_order = []
+        self.specs_by_hash = {}
+
+        result_by_user_spec = {}
+        solver = spack.solver.asp.Solver()
+        for result in solver.solve_in_rounds(self.user_specs, tests=tests):
+            result_by_user_spec.update(result.specs_by_input)
+
+        result = []
+        for abstract, concrete in sorted(result_by_user_spec.items()):
+            self._add_concrete_spec(abstract, concrete)
+            result.append((abstract, concrete))
+        return result

     def _concretize_together(self, tests=False):
         """Concretization strategy that concretizes all the specs
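Note: `_concretize_together_where_possible` merges one answer per input spec across solver rounds, so a later round can refine an earlier one. A toy sketch of that accumulation pattern (the `Result` shape and `solve_in_rounds` stub are assumptions for illustration, not the documented `spack.solver.asp` API):

    class Result(object):
        def __init__(self, specs_by_input):
            self.specs_by_input = specs_by_input

    def solve_in_rounds(user_specs):
        # stand-in: each round unifies a subset of the inputs
        yield Result({'zlib': 'zlib@1.2.12'})
        yield Result({'cmake': 'cmake@3.23.1', 'zlib': 'zlib@1.2.12'})

    result_by_user_spec = {}
    for result in solve_in_rounds(['zlib', 'cmake']):
        result_by_user_spec.update(result.specs_by_input)
    print(sorted(result_by_user_spec.items()))  # last round to mention a spec wins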
@@ -1240,6 +1283,10 @@ def _concretize_separately(self, tests=False):
         # processes try to write the config file in parallel
         _ = spack.compilers.get_compiler_config()

+        # Ensure that buildcache index is updated if reuse is on
+        if spack.config.get('config:reuse', False):
+            spack.binary_distribution.binary_index.update()
+
         # Early return if there is nothing to do
         if len(arguments) == 0:
             return []
@@ -1308,7 +1355,7 @@ def concretize_and_add(self, user_spec, concrete_spec=None, tests=False):
             concrete_spec: if provided, then it is assumed that it is the
                 result of concretizing the provided ``user_spec``
         """
-        if self.concretization == 'together':
+        if self.unify is True:
             msg = 'cannot install a single spec in an environment that is ' \
                   'configured to be concretized together. Run instead:\n\n' \
                   '    $ spack add <spec>\n' \
@@ -1611,7 +1658,14 @@ def all_specs(self):
         """Return all specs, even those a user spec would shadow."""
         all_specs = set()
         for h in self.concretized_order:
-            all_specs.update(self.specs_by_hash[h].traverse())
+            try:
+                spec = self.specs_by_hash[h]
+            except KeyError:
+                tty.warn(
+                    'Environment %s appears to be corrupt: missing spec '
+                    '"%s"' % (self.name, h))
+                continue
+            all_specs.update(spec.traverse())

         return sorted(all_specs)

@@ -1869,17 +1923,15 @@ def write(self, regenerate=True):
             regenerate (bool): regenerate views and run post-write hooks as
                 well as writing if True.
         """
-        # Intercept environment not using the latest schema format and prevent
-        # them from being modified
-        manifest_exists = os.path.exists(self.manifest_path)
-        if manifest_exists and not is_latest_format(self.manifest_path):
-            msg = ('The environment "{0}" needs to be written to disk, but '
-                   'is currently using a deprecated format. Please update it '
-                   'using:\n\n'
-                   '\tspack env update {0}\n\n'
-                   'Note that previous versions of Spack will not be able to '
+        # Warn that environments are not in the latest format.
+        if not is_latest_format(self.manifest_path):
+            ver = '.'.join(str(s) for s in spack.spack_version_info[:2])
+            msg = ('The environment "{}" is written to disk in a deprecated format. '
+                   'Please update it using:\n\n'
+                   '\tspack env update {}\n\n'
+                   'Note that versions of Spack older than {} may not be able to '
                    'use the updated configuration.')
-            raise RuntimeError(msg.format(self.name))
+            tty.warn(msg.format(self.name, self.name, ver))

         # ensure path in var/spack/environments
         fs.mkdirp(self.path)
@@ -2231,14 +2283,16 @@ def _top_level_key(data):


 def is_latest_format(manifest):
-    """Return True if the manifest file is at the latest schema format,
-    False otherwise.
+    """Return False if the manifest file exists and is not in the latest schema format.

     Args:
         manifest (str): manifest file to be analyzed
     """
-    with open(manifest) as f:
-        data = syaml.load(f)
+    try:
+        with open(manifest) as f:
+            data = syaml.load(f)
+    except (OSError, IOError):
+        return True
     top_level_key = _top_level_key(data)
     changed = spack.schema.env.update(data[top_level_key])
     return not changed

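Note: with the try/except above, a manifest that does not exist yet counts as being in the latest format, which is what lets `write()` warn instead of crash for fresh environments. A condensed runnable sketch (here `json` and the `update` callback stand in for `syaml.load` and `spack.schema.env.update`):

    import json

    def is_latest_format(manifest, update=lambda data: False):
        # `update` stands in for spack.schema.env.update; it returns True
        # when it had to modify `data` to reach the latest schema.
        try:
            with open(manifest) as f:
                data = json.load(f)
        except (OSError, IOError):
            return True  # missing manifest: nothing to upgrade
        return not update(data)

    print(is_latest_format('/nonexistent/spack.yaml'))  # -> True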
@@ -10,9 +10,9 @@

 import llnl.util.tty as tty

-#: whether we should write stack traces or short error messages
+#: at what level we should write stack traces or short error messages
 #: this is module-scoped because it needs to be set very early
-debug = False
+debug = 0


 class SpackError(Exception):

@@ -35,6 +35,7 @@
 import six.moves.urllib.parse as urllib_parse

+import llnl.util
 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
 from llnl.util.filesystem import (
     get_single_file,

@@ -119,6 +120,11 @@ def __init__(self, **kwargs):
         # 'no_cache' option from version directive.
         self.cache_enabled = not kwargs.pop('no_cache', False)

+        self.package = None
+
+    def set_package(self, package):
+        self.package = package
+
     # Subclasses need to implement these methods
     def fetch(self):
         """Fetch source code archive or repo.

@@ -242,6 +248,10 @@ def source_id(self):
         if all(component_ids):
             return component_ids

+    def set_package(self, package):
+        for item in self:
+            item.package = package
+

 @fetcher
 class URLFetchStrategy(FetchStrategy):
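Note: fetchers now carry a reference to their owning package, and composite strategies forward it to every member; this is what later lets the Git fetcher evaluate a package-defined `submodules` callable. A minimal runnable sketch of the forwarding pattern (class names are simplified stand-ins):

    class FetchStrategy(object):
        def __init__(self):
            self.package = None

        def set_package(self, package):
            self.package = package

    class FetchStrategyComposite(list):
        # mirrors the composite hunk: push the package down to every member
        def set_package(self, package):
            for item in self:
                item.package = package

    composite = FetchStrategyComposite([FetchStrategy(), FetchStrategy()])
    composite.set_package('zlib')
    print([f.package for f in composite])  # -> ['zlib', 'zlib']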
@@ -520,7 +530,7 @@ def expand(self):
                 "Failed on expand() for URL %s" % self.url)

         if not self.extension:
-            self.extension = extension(self.archive_file)
+            self.extension = extension(self.url)

         if self.stage.expanded:
             tty.debug('Source already staged to %s' % self.stage.source_path)
@@ -528,50 +538,11 @@ def expand(self):

         decompress = decompressor_for(self.archive_file, self.extension)

-        # Expand all tarballs in their own directory to contain
-        # exploding tarballs.
-        tarball_container = os.path.join(self.stage.path,
-                                         "spack-expanded-archive")
-
-        # Below we assume that the command to decompress expand the
-        # archive in the current working directory
-        mkdirp(tarball_container)
-        with working_dir(tarball_container):
+        with fs.exploding_archive_catch(self.stage):
             decompress(self.archive_file)
-
-        # Check for an exploding tarball, i.e. one that doesn't expand to
-        # a single directory. If the tarball *didn't* explode, move its
-        # contents to the staging source directory & remove the container
-        # directory. If the tarball did explode, just rename the tarball
-        # directory to the staging source directory.
-        #
-        # NOTE: The tar program on Mac OS X will encode HFS metadata in
-        # hidden files, which can end up *alongside* a single top-level
-        # directory. We initially ignore presence of hidden files to
-        # accomodate these "semi-exploding" tarballs but ensure the files
-        # are copied to the source directory.
-        files = os.listdir(tarball_container)
-        non_hidden = [f for f in files if not f.startswith('.')]
-        if len(non_hidden) == 1:
-            src = os.path.join(tarball_container, non_hidden[0])
-            if os.path.isdir(src):
-                self.stage.srcdir = non_hidden[0]
-                shutil.move(src, self.stage.source_path)
-                if len(files) > 1:
-                    files.remove(non_hidden[0])
-                    for f in files:
-                        src = os.path.join(tarball_container, f)
-                        dest = os.path.join(self.stage.path, f)
-                        shutil.move(src, dest)
-                os.rmdir(tarball_container)
-            else:
-                # This is a non-directory entry (e.g., a patch file) so simply
-                # rename the tarball container to be the source path.
-                shutil.move(tarball_container, self.stage.source_path)
-        else:
-            shutil.move(tarball_container, self.stage.source_path)

     def archive(self, destination):
         """Just moves this archive to the destination."""
         if not self.archive_file:
@@ -1014,9 +985,20 @@ def clone(self, dest=None, commit=None, branch=None, tag=None, bare=False):
             git(*args)

         # Init submodules if the user asked for them.
-        if self.submodules:
-            with working_dir(dest):
-                args = ['submodule', 'update', '--init', '--recursive']
+        git_commands = []
+        submodules = self.submodules
+        if callable(submodules):
+            submodules = list(submodules(self.package))
+            git_commands.append(["submodule", "init", "--"] + submodules)
+            git_commands.append(['submodule', 'update', '--recursive'])
+        elif submodules:
+            git_commands.append(["submodule", "update", "--init", "--recursive"])
+
+        if not git_commands:
+            return
+
+        with working_dir(dest):
+            for args in git_commands:
                 if not spack.config.get('config:debug'):
                     args.insert(1, '--quiet')
                 git(*args)
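Note: `submodules` may now be a callable that receives the package and returns the submodule paths to initialize, in which case only those paths are set up. A runnable sketch of the command-building logic above (the lambda and the submodule path are hypothetical examples):

    def submodule_commands(submodules, package):
        git_commands = []
        if callable(submodules):
            paths = list(submodules(package))
            git_commands.append(["submodule", "init", "--"] + paths)
            git_commands.append(["submodule", "update", "--recursive"])
        elif submodules:
            git_commands.append(["submodule", "update", "--init", "--recursive"])
        return git_commands

    print(submodule_commands(lambda pkg: ["third_party/core"], package=None))
    print(submodule_commands(True, package=None))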
@@ -1556,7 +1538,7 @@ def _extrapolate(pkg, version):
     try:
         return URLFetchStrategy(pkg.url_for_version(version),
                                 fetch_options=pkg.fetch_options)
-    except spack.package.NoURLError:
+    except spack.package_base.NoURLError:
         msg = ("Can't extrapolate a URL for version %s "
                "because package %s defines no URLs")
         raise ExtrapolationError(msg % (version, pkg.name))

@@ -493,9 +493,11 @@ def write(self, spec, color=None, out=None):

                 # Replace node with its dependencies
                 self._frontier.pop(i)
-                deps = node.dependencies(deptype=self.deptype)
-                if deps:
-                    deps = sorted((d.dag_hash() for d in deps), reverse=True)
+                edges = sorted(
+                    node.edges_to_dependencies(deptype=self.deptype), reverse=True
+                )
+                if edges:
+                    deps = [e.spec.dag_hash() for e in edges]
                     self._connect_deps(i, deps, "new-deps")  # anywhere.

                 elif self._frontier:

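Note: the graph writer now sorts dependency edges rather than a flat list of dag hashes, keeping output deterministic while retaining edge metadata. A toy sketch of the ordering (the `Edge` and `Spec` classes are stand-ins for Spack's `DependencySpec` and `Spec`, which define their own comparison):

    import functools

    class Spec(str):
        def dag_hash(self):
            return 'hash-' + self

    @functools.total_ordering
    class Edge(object):
        def __init__(self, spec):
            self.spec = spec
        def __eq__(self, other):
            return self.spec == other.spec
        def __lt__(self, other):
            return self.spec < other.spec

    edges = sorted([Edge(Spec('a')), Edge(Spec('c')), Edge(Spec('b'))], reverse=True)
    print([e.spec.dag_hash() for e in edges])  # -> ['hash-c', 'hash-b', 'hash-a']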
@@ -50,7 +50,7 @@
 import spack.error
 import spack.hooks
 import spack.monitor
-import spack.package
+import spack.package_base
 import spack.package_prefs as prefs
 import spack.repo
 import spack.store

@@ -103,7 +103,7 @@ def _check_last_phase(pkg):
         package already.

     Args:
-        pkg (spack.package.PackageBase): the package being installed
+        pkg (spack.package_base.PackageBase): the package being installed

     Raises:
         ``BadInstallPhase`` if stop_before or last phase is invalid

@@ -125,7 +125,7 @@ def _handle_external_and_upstream(pkg, explicit):
     database if it is external package.

     Args:
-        pkg (spack.package.Package): the package whose installation is under
+        pkg (spack.package_base.Package): the package whose installation is under
             consideration
         explicit (bool): the package was explicitly requested by the user
     Return:

@@ -265,7 +265,7 @@ def _install_from_cache(pkg, cache_only, explicit, unsigned=False):
     Extract the package from binary cache

     Args:
-        pkg (spack.package.PackageBase): the package to install from the binary cache
+        pkg (spack.package_base.PackageBase): package to install from the binary cache
         cache_only (bool): only extract from binary cache
         explicit (bool): ``True`` if installing the package was explicitly
             requested by the user, otherwise, ``False``
@@ -350,27 +350,27 @@ def _process_external_package(pkg, explicit):


 def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned,
-                                  preferred_mirrors=None):
+                                  mirrors_for_spec=None):
     """
     Process the binary cache tarball.

     Args:
-        pkg (spack.package.PackageBase): the package being installed
+        pkg (spack.package_base.PackageBase): the package being installed
         binary_spec (spack.spec.Spec): the spec whose cache has been confirmed
         explicit (bool): the package was explicitly requested by the user
         unsigned (bool): ``True`` if binary package signatures to be checked,
             otherwise, ``False``
-        preferred_mirrors (list): Optional list of urls to prefer when
-            attempting to download the tarball
+        mirrors_for_spec (list): Optional list of concrete specs and mirrors
+            obtained by calling binary_distribution.get_mirrors_for_spec().

     Return:
         bool: ``True`` if the package was extracted from binary cache,
             else ``False``
     """
-    tarball = binary_distribution.download_tarball(
-        binary_spec, preferred_mirrors=preferred_mirrors)
+    download_result = binary_distribution.download_tarball(
+        binary_spec, unsigned, mirrors_for_spec=mirrors_for_spec)
     # see #10063 : install from source if tarball doesn't exist
-    if tarball is None:
+    if download_result is None:
         tty.msg('{0} exists in binary cache but with different hash'
                 .format(pkg.name))
         return False

@@ -380,9 +380,9 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned,

     # don't print long padded paths while extracting/relocating binaries
     with spack.util.path.filter_padding():
-        binary_distribution.extract_tarball(
-            binary_spec, tarball, allow_root=False, unsigned=unsigned, force=False
-        )
+        binary_distribution.extract_tarball(binary_spec, download_result,
+                                            allow_root=False, unsigned=unsigned,
+                                            force=False)

     pkg.installed_from_binary_cache = True
     spack.store.db.add(pkg.spec, spack.store.layout, explicit=explicit)
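Note: `download_tarball` now takes `unsigned` plus the full list of matches and returns a richer result object (still `None` when no usable tarball exists, per #10063). A control-flow sketch with stubbed callables; the match layout `{'mirror_url': ..., 'spec': ...}` follows the old code above, and the helper name is hypothetical:

    def try_binary_install(pkg_name, matches, download_tarball, extract_tarball):
        # stand-in for the _process_binary_cache_tarball control flow
        download_result = download_tarball(matches[0]['spec'], False,
                                           mirrors_for_spec=matches)
        if download_result is None:
            print('{0} exists in binary cache but with different hash'.format(pkg_name))
            return False
        extract_tarball(matches[0]['spec'], download_result)
        return True

    ok = try_binary_install(
        'zlib', [{'mirror_url': 'https://mirror.example', 'spec': 'zlib@1.2.12'}],
        download_tarball=lambda spec, unsigned, mirrors_for_spec: None,
        extract_tarball=lambda spec, result: None)
    print(ok)  # -> False: fall back to building from source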
@@ -394,7 +394,7 @@ def _try_install_from_binary_cache(pkg, explicit, unsigned=False):
     Try to extract the package from binary cache.

     Args:
-        pkg (spack.package.PackageBase): the package to be extracted from binary cache
+        pkg (spack.package_base.PackageBase): package to be extracted from binary cache
         explicit (bool): the package was explicitly requested by the user
         unsigned (bool): ``True`` if binary package signatures to be checked,
             otherwise, ``False``

@@ -406,12 +406,8 @@ def _try_install_from_binary_cache(pkg, explicit, unsigned=False):
     if not matches:
         return False

-    # In the absence of guidance from user or some other reason to prefer one
-    # mirror over another, any match will suffice, so just pick the first one.
-    preferred_mirrors = [match['mirror_url'] for match in matches]
-    binary_spec = matches[0]['spec']
-    return _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned,
-                                         preferred_mirrors=preferred_mirrors)
+    return _process_binary_cache_tarball(pkg, pkg.spec, explicit, unsigned,
+                                         mirrors_for_spec=matches)


 def clear_failures():
@@ -534,7 +530,7 @@ def log(pkg):
     Copy provenance into the install directory on success

     Args:
-        pkg (spack.package.Package): the package that was built and installed
+        pkg (spack.package_base.Package): the package that was built and installed
     """
     packages_dir = spack.store.layout.build_packages_path(pkg.spec)

@@ -620,7 +616,7 @@ def package_id(pkg):
     and packages for combinatorial environments.

     Args:
-        pkg (spack.package.PackageBase): the package from which the identifier is
+        pkg (spack.package_base.PackageBase): the package from which the identifier is
             derived
     """
     if not pkg.spec.concrete:

@@ -773,7 +769,7 @@ def _add_bootstrap_compilers(
         Args:
             compiler: the compiler to boostrap
             architecture: the architecture for which to bootstrap the compiler
-            pkgs (spack.package.PackageBase): the package with possible compiler
+            pkgs (spack.package_base.PackageBase): the package with possible compiler
                 dependencies
             request (BuildRequest): the associated install request
             all_deps (defaultdict(set)): dictionary of all dependencies and

@@ -790,7 +786,7 @@ def _add_init_task(self, pkg, request, is_compiler, all_deps):
        Creates and queus the initial build task for the package.

        Args:
-            pkg (spack.package.Package): the package to be built and installed
+            pkg (spack.package_base.Package): the package to be built and installed
            request (BuildRequest or None): the associated install request
                where ``None`` can be used to indicate the package was
                explicitly requested by the user

@@ -972,7 +968,7 @@ def _cleanup_task(self, pkg):
        Cleanup the build task for the spec

        Args:
-            pkg (spack.package.PackageBase): the package being installed
+            pkg (spack.package_base.PackageBase): the package being installed
        """
        self._remove_task(package_id(pkg))

@@ -986,7 +982,7 @@ def _ensure_install_ready(self, pkg):
        already locked.

        Args:
-            pkg (spack.package.PackageBase): the package being locally installed
+            pkg (spack.package_base.PackageBase): the package being locally installed
        """
        pkg_id = package_id(pkg)
        pre = "{0} cannot be installed locally:".format(pkg_id)

@@ -1018,7 +1014,8 @@ def _ensure_locked(self, lock_type, pkg):

        Args:
            lock_type (str): 'read' for a read lock, 'write' for a write lock
-            pkg (spack.package.PackageBase): the package whose spec is being installed
+            pkg (spack.package_base.PackageBase): the package whose spec is being
+                installed

        Return:
            (lock_type, lock) tuple where lock will be None if it could not

@@ -1232,7 +1229,7 @@ def _install_task(self, task):

        # Create a child process to do the actual installation.
        # Preserve verbosity settings across installs.
-        spack.package.PackageBase._verbose = (
+        spack.package_base.PackageBase._verbose = (
            spack.build_environment.start_build_process(
                pkg, build_process, install_args)
        )

@@ -1377,7 +1374,7 @@ def _setup_install_dir(self, pkg):
        Write a small metadata file with the current spack environment.

        Args:
-            pkg (spack.package.Package): the package to be built and installed
+            pkg (spack.package_base.Package): the package to be built and installed
        """
        if not os.path.exists(pkg.spec.prefix):
            tty.debug('Creating the installation directory {0}'.format(pkg.spec.prefix))

@@ -1451,7 +1448,7 @@ def _flag_installed(self, pkg, dependent_ids=None):
        known dependents.

        Args:
-            pkg (spack.package.Package): Package that has been installed locally,
+            pkg (spack.package_base.Package): Package that has been installed locally,
                externally or upstream
            dependent_ids (list or None): list of the package's
                dependent ids, or None if the dependent ids are limited to

@@ -1540,7 +1537,7 @@ def install(self):
        Install the requested package(s) and or associated dependencies.

        Args:
-            pkg (spack.package.Package): the package to be built and installed"""
+            pkg (spack.package_base.Package): the package to be built and installed"""

        self._init_queue()
        fail_fast_err = 'Terminating after first install failure'

@@ -1792,7 +1789,7 @@ def __init__(self, pkg, install_args):
        process in the build.

        Arguments:
-            pkg (spack.package.PackageBase) the package being installed.
+            pkg (spack.package_base.PackageBase) the package being installed.
            install_args (dict) arguments to do_install() from parent process.

        """

@@ -1852,8 +1849,8 @@ def run(self):

        # get verbosity from do_install() parameter or saved value
        self.echo = self.verbose
-        if spack.package.PackageBase._verbose is not None:
-            self.echo = spack.package.PackageBase._verbose
+        if spack.package_base.PackageBase._verbose is not None:
+            self.echo = spack.package_base.PackageBase._verbose

        self.pkg.stage.keep = self.keep_stage

@@ -2005,7 +2002,7 @@ def build_process(pkg, install_args):
    This function's return value is returned to the parent process.

    Arguments:
-        pkg (spack.package.PackageBase): the package being installed.
+        pkg (spack.package_base.PackageBase): the package being installed.
        install_args (dict): arguments to do_install() from parent process.

    """

@@ -2053,7 +2050,7 @@ def __init__(self, pkg, request, compiler, start, attempts, status,
        Instantiate a build task for a package.

        Args:
-            pkg (spack.package.Package): the package to be built and installed
+            pkg (spack.package_base.Package): the package to be built and installed
            request (BuildRequest or None): the associated install request
                where ``None`` can be used to indicate the package was
                explicitly requested by the user

@@ -2066,7 +2063,7 @@ def __init__(self, pkg, request, compiler, start, attempts, status,
        """

        # Ensure dealing with a package that has a concrete spec
-        if not isinstance(pkg, spack.package.PackageBase):
+        if not isinstance(pkg, spack.package_base.PackageBase):
            raise ValueError("{0} must be a package".format(str(pkg)))

        self.pkg = pkg

@@ -2244,11 +2241,11 @@ def __init__(self, pkg, install_args):
        Instantiate a build request for a package.

        Args:
-            pkg (spack.package.Package): the package to be built and installed
+            pkg (spack.package_base.Package): the package to be built and installed
            install_args (dict): the install arguments associated with ``pkg``
        """
        # Ensure dealing with a package that has a concrete spec
-        if not isinstance(pkg, spack.package.PackageBase):
+        if not isinstance(pkg, spack.package_base.PackageBase):
            raise ValueError("{0} must be a package".format(str(pkg)))

        self.pkg = pkg

@@ -2318,7 +2315,7 @@ def get_deptypes(self, pkg):
    """Determine the required dependency types for the associated package.

    Args:
-        pkg (spack.package.PackageBase): explicit or implicit package being
+        pkg (spack.package_base.PackageBase): explicit or implicit package being
            installed

    Returns:

@@ -2341,7 +2338,7 @@ def run_tests(self, pkg):
    """Determine if the tests should be run for the provided packages

    Args:
-        pkg (spack.package.PackageBase): explicit or implicit package being
+        pkg (spack.package_base.PackageBase): explicit or implicit package being
            installed

    Returns:

@@ -375,13 +375,6 @@ def make_argument_parser(**kwargs):
     # stat names in groups of 7, for nice wrapping.
     stat_lines = list(zip(*(iter(stat_names),) * 7))

-    # help message for --show-cores
-    show_cores_help = 'provide additional information on concretization failures\n'
-    show_cores_help += 'off (default): show only the violated rule\n'
-    show_cores_help += 'full: show raw unsat cores from clingo\n'
-    show_cores_help += 'minimized: show subset-minimal unsat cores '
-    show_cores_help += '(Warning: this may take hours for some specs)'
-
     parser.add_argument(
         '-h', '--help',
         dest='help', action='store_const', const='short', default=None,

@@ -405,9 +398,6 @@ def make_argument_parser(**kwargs):
         '-d', '--debug', action='count', default=0,
         help="write out debug messages "
              "(more d's for more verbosity: -d, -dd, -ddd, etc.)")
-    parser.add_argument(
-        '--show-cores', choices=["off", "full", "minimized"], default="off",
-        help=show_cores_help)
     parser.add_argument(
         '--timestamp', action='store_true',
         help="Add a timestamp to tty output")

@@ -490,18 +480,11 @@ def setup_main_options(args):
     # errors raised by spack.config.

     if args.debug:
-        spack.error.debug = True
+        spack.error.debug = args.debug
         spack.util.debug.register_interrupt_handler()
         spack.config.set('config:debug', True, scope='command_line')
         spack.util.environment.tracing_enabled = True

-    if args.show_cores != "off":
-        # minimize_cores defaults to true, turn it off if we're showing full core
-        # but don't want to wait to minimize it.
-        spack.solver.asp.full_cores = True
-        if args.show_cores == 'full':
-            spack.solver.asp.minimize_cores = False
-
     if args.timestamp:
         tty.set_timestamp(True)

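Note: because `--debug` is declared with `action='count'`, `spack.error.debug` now records how many `-d` flags were given instead of a plain boolean. A runnable argparse sketch:

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('-d', '--debug', action='count', default=0)

    args = parser.parse_args(['-ddd'])
    print(args.debug)  # -> 3, e.g. `spack -ddd ...` selects the most verbose level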
@@ -196,6 +196,14 @@ def provides(self):
         if self.spec.name == 'llvm-amdgpu':
             provides['compiler'] = spack.spec.CompilerSpec(str(self.spec))
             provides['compiler'].name = 'rocmcc'
+        # Special case for oneapi
+        if self.spec.name == 'intel-oneapi-compilers':
+            provides['compiler'] = spack.spec.CompilerSpec(str(self.spec))
+            provides['compiler'].name = 'oneapi'
+        # Special case for oneapi classic
+        if self.spec.name == 'intel-oneapi-compilers-classic':
+            provides['compiler'] = spack.spec.CompilerSpec(str(self.spec))
+            provides['compiler'].name = 'intel'

         # All the other tokens in the hierarchy must be virtual dependencies
         for x in self.hierarchy_tokens:

@@ -4,6 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import platform as py_platform
 import re
+from subprocess import check_output

 from spack.version import Version

@@ -51,6 +52,17 @@ def __init__(self):

         if 'ubuntu' in distname:
             version = '.'.join(version[0:2])
+        # openSUSE Tumbleweed is a rolling release which can change
+        # more than once in a week, so set version to tumbleweed$GLIBVERS
+        elif 'opensuse-tumbleweed' in distname or 'opensusetumbleweed' in distname:
+            distname = 'opensuse'
+            output = check_output(["ldd", "--version"]).decode()
+            libcvers = re.findall(r'ldd \(GNU libc\) (.*)', output)
+            if len(libcvers) == 1:
+                version = 'tumbleweed' + libcvers[0]
+            else:
+                version = 'tumbleweed' + version[0]
+
         else:
             version = version[0]

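Note: on Tumbleweed the platform version becomes `tumbleweed` plus the glibc version scraped from `ldd --version`. A runnable sketch of the parsing on an assumed sample output line:

    import re

    output = "ldd (GNU libc) 2.35\nCopyright (C) 2022 ..."  # assumed sample output
    libcvers = re.findall(r'ldd \(GNU libc\) (.*)', output)
    version = 'tumbleweed' + libcvers[0] if len(libcvers) == 1 else 'tumbleweed'
    print(version)  # -> tumbleweed2.35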
File diff suppressed because it is too large.
Some files were not shown because too many files have changed in this diff.