Compare commits

445 commits: develop-20 ... develop-20
SHA1:

82df0e549d, f5591f9068, 98c08d277d, facca4e2c8, 764029bcd1, 44cb4eca93, 39888d4df6, f68ea49e54,
78b5e4cdfa, 26515b8871, 74640987c7, d6154645c7, faed43704b, 6fba31ce34, 112cead00b, 9e2558bd56,
019058226f, ac0040f67d, 38f341f12d, 26ad22743f, 46c2b8a565, 5cbb59f2b8, f29fa1cfdf, c69951d6e1,
f406f27d9c, 36ea208e12, 17e0774189, 3162c2459d, 7cad6c62a3, eb2ddf6fa2, 2bc2902fed, b362362291,
32bb5c7523, a2b76c68a0, 62132919e1, b06929f6df, 0f33de157b, 03a074ebe7, 4d12b6a4fd, 26bb15e1fb,
1bf92c7881, eefe0b2eec, de6c6f0cd9, 309d3aa1ec, feff11f914, de3b324983, 747cd374df, 8b3ac40436,
28e9be443c, 1381bede80, 6502785908, 53257408a3, 28d02dff60, 9d60b42a97, 9ff5a30574, 9a6c013365,
9f62a3e819, e380e9a0ab, 8415ea9ada, 6960766e0c, 0c2ca8c841, 273960fdbb, 0cd2a1102c, e40676e901,
4ddb07e94f, 50585d55c5, 5d6b5f3f6f, 2351c19489, 08d49361f0, c3c63e5ca4, e72d4075bd, f9f97bf22b,
8033455d5f, 50a5a6fea4, 0de8a0e3f3, 0a26e74cc8, 9dfd91efbb, 1a7baadbff, afcfd56ae5, 7eb2e704b6,
564b4fa263, 0a941b43ca, 35ff24ddea, 7019e4e3cb, cb16b8a047, 381acb3726, d87ea0b256, 1a757e7f70,
704e2c53a8, 478d8a668c, 7903f9fcfd, 670d3d3fdc, e8aab6b31c, 1ce408ecc5, dc81a2dcdb, b10f51f020,
4f4e3f5607, 00fb80e766, 057603cad8, 5b8b6e492d, 763279cd61, e4237b9153, d288658cf0, 2c22ae0576,
fc3fc94689, b5013c1372, e220674c4d, 7f13518225, 96a13a97e6, 6d244b3f67, 6bc66db141, acfb2b9270,
d92a2c31fb, e32561aff6, 4b0479159f, 03bfd36926, 4d30c8dce4, 49d4104f22, 07fb83b493, 263007ba81,
3b6e99381f, a30af1ac54, 294742ab7b, 6391559fb6, d4d4f813a9, 4667163dc4, 439f105285, f65b1fd7b6,
d23e06c27e, b76e9a887b, 55ffd439ce, d8a7b88e7b, aaa1bb1d98, 0d94b8044b, 5a52780f7c, dd0a8452ee,
c467bba73e, d680a0cb99, efadee26ef, 2077b3a006, 8e0c659b51, 863ab5a597, db4e76ab27, 6728a46a84,
5a09459dd5, 7e14ff806a, 7e88cf795c, 1536e3d422, 1fe8e63481, dfca2c285e, 2686f778fa, 925e9c73b1,
aba447e885, 1113de0dad, 4110225166, 24c839c837, 42c6a6b189, b0ea1c6f24, 735102eb2b, 2e3cdb349b,
05c8030119, bbcd4224fa, 4c0cdb99b3, f22d009c6d, c5a3e36ad0, 1c76ba1c3e, b969f739bd, 4788c4774c,
34de028dbc, a69254fd79, af5f205759, 77f9100a59, 386bb71392, 0676d6457f, 0b80e36867, 4c9816f10c,
fb6741cf85, 3f2fa256fc, d5c8864942, b3cef1072d, e8ae9a403c, 1a8ef161c8, d3913938bc, 4179880fe6,
125dd0368e, fd68f8916c, 93e6f5fa4e, 54acda3f11, 663e20fcc4, 6428132ebb, 171958cf09, 0d0f7ab030,
35f8b43a54, 6f7eb3750c, 2121eb31ba, c68d739825, c468697b35, c4094cf051, 9ff9ca61e6, 826e0c0405,
1b86a842ea, 558a28bf52, 411576e1fa, cab4f92960, c6c13f6782, cf11fab5ad, 1d8b35c840, 5dc46a976d,
05f5596cdd, 6942c7f35b, 18f0ac0f94, d9196ee3f8, ef0bb6fe6b, 3fed320013, 1aa77e695d, 3a0efeecf1,
5ffb5657c9, 2b3e7fd10a, cb315e18f0, 10c637aca0, fb4e1cad45, 3054b71e2e, 47163f7435, e322a8382f,
53fb4795ca, 4517c7fa9b, efaed17f91, 2c17cd365d, dfe537f688, be0002b460, 743ee5f3de, b6caf0156f,
ec00ffc244, f020256b9f, 04377e39e0, ba2703fea6, 92b1c8f763, 2b29ecd9b6, 5b43bf1b58, 37d9770e02,
0e016ba6f5, 7afa949da1, b81d7d0aac, e78484f501, 6fd43b4e75, 14edb55288, f062f1c5b3, 7756c8f4fc,
69c8a9e4ba, 47c0736952, 8b89287084, 8bd6283b52, 179e4f3ad1, e97787691b, 5932ee901c, 3bdebeba3c,
d390ee1902, 4f9fe6f9bf, df6d6d9b5c, e57d33b29f, 85c6d6dbab, 5f9228746e, 9f2451ddff, a05eb11b7b,
ae2d0ff1cd, 7e906ced75, 647e89f6bc, 3239c29fb0, abced0e87d, 300fc2ee42, 13c4258e54, f29cb7f953,
826b8f25c5, ebaeea7820, f76eb993aa, 0b2c370a83, 6a9ee480bf, cc80d52b62, b9c7d3b89b, c1be6a5483,
42550208c3, be231face6, 89ac747a76, 5d8f36d667, 6c3fed351f, b9cbd15674, b8f633246a, a2f3e98ab9,
acffe37313, 249e5415e8, e2a942d07e, 32deca2a4c, e4c64865f1, 1175f37577, faa183331f, bbac33871c,
6d4dd33c46, 579bad05a8, 27a8eb0f68, 4cd993070f, 4c55c6a268, a4a27fb1e4, 66345e7185, 8f76f1b0d8,
4cab6f3af5, 0d4665583b, 5d0ef9e4f4, e145baf619, 6c912b30a2, f4da453f6b, 7e9caed8c2, 69509a6d9a,
0841050d20, 321ffd732b, 22922323e3, 0b5b192c18, 1275c57d88, 29a39ac6a0, ae9c86a930, 83199a981d,
ed40c3210e, be96460ab2, 95caf55fe7, 960af24270, 899bef2aa8, f0f092d9f1, 6eaac2270d, a9f3f6c007,
08a04ebd46, d8e642ecb7, 669ed69d8e, 7ebb21a0da, 93ffa9ba5d, e5fdb90496, 303a0b3653, 9f07544bde,
9b046a39a8, 0c9a53ba3a, 1fd4353289, fcb8ed6409, 2f11862832, bff11ce8e7, 218693431c, e036cd9ef6,
cd5bef6780, 159e9a20d1, 99bb288db7, 99744a766b, ddd8be51a0, bba66b1063, 1c3c21d9c7, cbe9b3d01c,
0abf5ba43c, 9ab3c1332b, b6425da50f, 937a4dbf69, cd779ee54d, 7ddcb13325, 7666046ce3, 8e89e61402,
d0dbfaa5d6, 26f562b5a7, 2967804da1, c3eaf4d6cf, 397334a4be, 434836be81, 7b9b976f40, 4746e8a048,
69c684fef9, 2314aeb884, d33e10a695, 7668a0889a, d7a74bde9f, fedf8128ae, f70af2cc57, 50562e6a0e,
4ac51b2127, 81c9e346dc, 73e16a7881, af8868fa47, cfd4e356f8, fc87dcad4c, 65472159c7, d1f9d8f06d,
67ac9c46a8, aa39465188, 09810a5e7c, 446c0f2325, c4ce51c9be, 1f63a764ac, 384e198304, 2303332415,
0eb1957999, de1f9593c6, 65fa71c1b4, 9802649716, 8d9d721f07, ecef72c471, 485b6e2170, ba02c6b70f,
7028669d50, 2f0a73f7ef, 7cb0dbf77a, ac8800ffc7, eb11fa7d18, 4d8381a775, de5e20fc21, c33af49ed5,
3addda6c4d, 33f6f55d6b, 41d20d3731, dde8fa5561, 588a94bc8c, 06392f2c01, f16e29559e, ea96403157,
b659eac453, ab590cc03a, 1a007a842b, 9756354998, 3984dd750c, d5c1e16e43, 56ace9a087, 6e0bab1706,
193386f6ac, 755131fcdf, 9a71733adb, cd919d51ea, 12adf66d07, c02f58da8f, 9662d181a0, 282df7aecc,
b4c0e6f03b, 4cd8488139, 69a052841c, a3f39890c2, 02d126ce2b, 339a63370f, fef6aed627, 3445da807e,
429c3598af, 3d8136493a, 8cd160db85, a7dd756b34, 53be280681
4 .devcontainer/devcontainer.json Normal file

@@ -0,0 +1,4 @@
+ {
+     "image": "ghcr.io/spack/ubuntu20.04-runner-amd64-gcc-11.4:2023.08.01",
+     "postCreateCommand": "./.devcontainer/postCreateCommand.sh"
+ }
20 .devcontainer/postCreateCommand.sh Executable file

@@ -0,0 +1,20 @@
+ #!/bin/bash
+
+ # Load spack environment at terminal startup
+ cat <<EOF >> /root/.bashrc
+ . /workspaces/spack/share/spack/setup-env.sh
+ EOF
+
+ # Load spack environment in this script
+ . /workspaces/spack/share/spack/setup-env.sh
+
+ # Ensure generic targets for maximum matching with buildcaches
+ spack config --scope site add "packages:all:require:[target=x86_64_v3]"
+ spack config --scope site add "concretizer:targets:granularity:generic"
+
+ # Find compiler and install gcc-runtime
+ spack compiler find --scope site
+
+ # Setup buildcaches
+ spack mirror add --scope site develop https://binaries.spack.io/develop
+ spack buildcache keys --install --trust
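Once the container is up, the effect of this script can be sanity-checked from an interactive terminal. A minimal sketch using only commands that appear above (output omitted):

```console
$ . /workspaces/spack/share/spack/setup-env.sh
$ spack mirror list             # the "develop" binary mirror should be listed
$ spack config get concretizer  # targets granularity should read "generic"
```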
8 .github/workflows/audit.yaml vendored

@@ -22,8 +22,8 @@ jobs:
      matrix:
        operating_system: ["ubuntu-latest", "macos-latest"]
    steps:
-   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # @v2
-   - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
+   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
+   - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
      with:
        python-version: ${{inputs.python_version}}
    - name: Install Python packages
@@ -43,7 +43,9 @@ jobs:
        . share/spack/setup-env.sh
        $(which spack) audit packages
+       $(which spack) audit externals
-   - uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab # @v2.1.0
+   - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
      if: ${{ inputs.with_coverage == 'true' }}
      with:
        flags: unittests,audits
+       token: ${{ secrets.CODECOV_TOKEN }}
        verbose: true
2 .github/workflows/bootstrap.yml vendored

@@ -159,7 +159,7 @@ jobs:
        brew install cmake bison@2.7 tree
    - name: Checkout
      uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
-   - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
+   - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
      with:
        python-version: "3.12"
    - name: Bootstrap clingo
6 .github/workflows/build-containers.yml vendored

@@ -55,7 +55,7 @@ jobs:
    if: github.repository == 'spack/spack'
    steps:
    - name: Checkout
-     uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # @v2
+     uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633

    - uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
      id: docker_meta
@@ -96,7 +96,7 @@ jobs:
      uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3

    - name: Set up Docker Buildx
-     uses: docker/setup-buildx-action@0d103c3126aa41d772a8362f6aa67afac040f80c
+     uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb

    - name: Log in to GitHub Container Registry
      uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
@@ -113,7 +113,7 @@ jobs:
        password: ${{ secrets.DOCKERHUB_TOKEN }}

    - name: Build & Deploy ${{ matrix.dockerfile[0] }}
-     uses: docker/build-push-action@af5a7ed5ba88268d5278f7203fb52cd833f66d6e
+     uses: docker/build-push-action@2cdde995de11925a030ce8070c3d77a52ffcf1c0
      with:
        context: dockerfiles/${{ matrix.dockerfile[0] }}
        platforms: ${{ matrix.dockerfile[1] }}
6 .github/workflows/ci.yaml vendored

@@ -18,6 +18,7 @@ jobs:
  prechecks:
    needs: [ changes ]
    uses: ./.github/workflows/valid-style.yml
+   secrets: inherit
    with:
      with_coverage: ${{ needs.changes.outputs.core }}
  all-prechecks:
@@ -35,7 +36,7 @@ jobs:
      core: ${{ steps.filter.outputs.core }}
      packages: ${{ steps.filter.outputs.packages }}
    steps:
-   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # @v2
+   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
      if: ${{ github.event_name == 'push' }}
      with:
        fetch-depth: 0
@@ -70,14 +71,17 @@ jobs:
    if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.bootstrap == 'true' }}
    needs: [ prechecks, changes ]
    uses: ./.github/workflows/bootstrap.yml
+   secrets: inherit
  unit-tests:
    if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
    needs: [ prechecks, changes ]
    uses: ./.github/workflows/unit_tests.yaml
+   secrets: inherit
  windows:
    if: ${{ github.repository == 'spack/spack' && needs.changes.outputs.core == 'true' }}
    needs: [ prechecks ]
    uses: ./.github/workflows/windows_python.yml
+   secrets: inherit
  all:
    needs: [ windows, unit-tests, bootstrap ]
    runs-on: ubuntu-latest
2 .github/workflows/nightly-win-builds.yml vendored

@@ -17,7 +17,7 @@ jobs:
    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
+   - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
      with:
        python-version: 3.9
    - name: Install Python packages
2 .github/workflows/style/requirements.txt vendored

@@ -1,4 +1,4 @@
- black==24.3.0
+ black==24.4.0
  clingo==5.7.1
  flake8==7.0.0
  isort==5.13.2
37 .github/workflows/unit_tests.yaml vendored

@@ -51,10 +51,10 @@ jobs:
        on_develop: false

    steps:
-   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # @v2
+   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
+   - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install System packages
@@ -91,17 +91,19 @@ jobs:
        UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
      run: |
        share/spack/qa/run-unit-tests
-   - uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab
+   - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
      with:
        flags: unittests,linux,${{ matrix.concretizer }}
+       token: ${{ secrets.CODECOV_TOKEN }}
        verbose: true
  # Test shell integration
  shell:
    runs-on: ubuntu-latest
    steps:
-   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # @v2
+   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
+   - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
      with:
        python-version: '3.11'
    - name: Install System packages
@@ -122,9 +124,11 @@ jobs:
        COVERAGE: true
      run: |
        share/spack/qa/run-shell-tests
-   - uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab
+   - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
      with:
        flags: shelltests,linux
+       token: ${{ secrets.CODECOV_TOKEN }}
        verbose: true

  # Test RHEL8 UBI with platform Python. This job is run
  # only on PRs modifying core Spack
@@ -137,7 +141,7 @@ jobs:
        dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
-   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # @v2
+   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
    - name: Setup repo and non-root user
      run: |
        git --version
@@ -156,10 +160,10 @@ jobs:
  clingo-cffi:
    runs-on: ubuntu-latest
    steps:
-   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # @v2
+   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
+   - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
      with:
        python-version: '3.11'
    - name: Install System packages
@@ -181,20 +185,23 @@ jobs:
        SPACK_TEST_SOLVER: clingo
      run: |
        share/spack/qa/run-unit-tests
-   - uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab # @v2.1.0
+   - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
      with:
        flags: unittests,linux,clingo
+       token: ${{ secrets.CODECOV_TOKEN }}
        verbose: true
  # Run unit tests on MacOS
  macos:
-   runs-on: macos-latest
+   runs-on: ${{ matrix.os }}
    strategy:
      matrix:
+       os: [macos-latest, macos-14]
        python-version: ["3.11"]
    steps:
-   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # @v2
+   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # @v2
+   - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install Python packages
@@ -216,6 +223,8 @@ jobs:
        $(which spack) solve zlib
        common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
        $(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-   - uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab
+   - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
      with:
        flags: unittests,macos
+       token: ${{ secrets.CODECOV_TOKEN }}
        verbose: true
7 .github/workflows/valid-style.yml vendored

@@ -19,7 +19,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
-   - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
+   - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
      with:
        python-version: '3.11'
        cache: 'pip'
@@ -38,7 +38,7 @@ jobs:
    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
+   - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
      with:
        python-version: '3.11'
        cache: 'pip'
@@ -56,6 +56,7 @@ jobs:
        share/spack/qa/run-style-tests
  audit:
    uses: ./.github/workflows/audit.yaml
+   secrets: inherit
    with:
      with_coverage: ${{ inputs.with_coverage }}
      python_version: '3.11'
@@ -69,7 +70,7 @@ jobs:
        dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
-   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # @v2
+   - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
    - name: Setup repo and non-root user
      run: |
        git --version
14 .github/workflows/windows_python.yml vendored

@@ -18,7 +18,7 @@ jobs:
    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
+   - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
      with:
        python-version: 3.9
    - name: Install Python packages
@@ -33,16 +33,18 @@ jobs:
        ./share/spack/qa/validate_last_exit.ps1
        coverage combine -a
        coverage xml
-   - uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab
+   - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
      with:
        flags: unittests,windows
+       token: ${{ secrets.CODECOV_TOKEN }}
        verbose: true
  unit-tests-cmd:
    runs-on: windows-latest
    steps:
    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
+   - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
      with:
        python-version: 3.9
    - name: Install Python packages
@@ -57,16 +59,18 @@ jobs:
        ./share/spack/qa/validate_last_exit.ps1
        coverage combine -a
        coverage xml
-   - uses: codecov/codecov-action@54bcd8715eee62d40e33596ef5e8f0f48dbbccab
+   - uses: codecov/codecov-action@84508663e988701840491b86de86b666e8a86bed
      with:
        flags: unittests,windows
+       token: ${{ secrets.CODECOV_TOKEN }}
        verbose: true
  build-abseil:
    runs-on: windows-latest
    steps:
    - uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
      with:
        fetch-depth: 0
-   - uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c
+   - uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
      with:
        python-version: 3.9
    - name: Install Python packages
@@ -88,7 +88,7 @@ Resources:
    [bridged](https://github.com/matrix-org/matrix-appservice-slack#matrix-appservice-slack) to Slack.
  * [**Github Discussions**](https://github.com/spack/spack/discussions):
    for Q&A and discussions. Note the pinned discussions for announcements.
- * **Twitter**: [@spackpm](https://twitter.com/spackpm). Be sure to
+ * **X**: [@spackpm](https://twitter.com/spackpm). Be sure to
    `@mention` us!
  * **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack):
    only for announcements. Please use other venues for discussions.
@@ -42,3 +42,8 @@ concretizer:
    # "minimal": allows the duplication of 'build-tools' nodes only (e.g. py-setuptools, cmake etc.)
    # "full" (experimental): allows separation of the entire build-tool stack (e.g. the entire "cmake" subDAG)
    strategy: minimal
+   # Option to specify compatibility between operating systems for reuse of compilers and packages
+   # Specified as a key: [list] where the key is the OS that is being targeted, and the list contains the OSes
+   # it can reuse. Note this is a directional compatibility, so mutual compatibility between two OSes
+   # requires two entries, i.e. os_compatible: {sonoma: [monterey], monterey: [sonoma]}
+   os_compatible: {}
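For example, to let Sonoma reuse Monterey binaries from the command line, the same `spack config add` path syntax used elsewhere in this changeset should work; a sketch (the OS names are illustrative, and the nested-path form is an assumption):

```console
$ spack config add "concretizer:os_compatible:sonoma:[monterey]"
```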
@@ -101,6 +101,12 @@ config:
    verify_ssl: true

+   # This is where custom certs for proxy/firewall are stored.
+   # It can be a path or environment variable. To match ssl env configuration
+   # the default is the environment variable SSL_CERT_FILE
+   ssl_certs: $SSL_CERT_FILE

    # Suppress gpg warnings from binary package verification
    # Only suppresses warnings, gpg failure will still fail the install
    # Potential rationale to set True: users have already explicitly trusted the
@@ -24,6 +24,7 @@ packages:
    elf: [elfutils]
    fftw-api: [fftw, amdfftw]
    flame: [libflame, amdlibflame]
+   fortran-rt: [gcc-runtime, intel-oneapi-runtime]
    fuse: [libfuse]
    gl: [glx, osmesa]
    glu: [mesa-glu, openglu]
@@ -34,7 +35,9 @@ packages:
    java: [openjdk, jdk, ibm-java]
    jpeg: [libjpeg-turbo, libjpeg]
    lapack: [openblas, amdlibflame]
+   libgfortran: [ gcc-runtime ]
    libglx: [mesa+glx, mesa18+glx]
+   libifcore: [ intel-oneapi-runtime ]
    libllvm: [llvm]
    libosmesa: [mesa+osmesa, mesa18+osmesa]
    lua-lang: [lua, lua-luajit-openresty, lua-luajit]
@@ -1119,6 +1119,9 @@
 and ``3.4.2``. Similarly, ``@4.2:`` means any version above and including
 ``4.2``. As a short-hand, ``@3`` is equivalent to the range ``@3:3`` and
 includes any version with major version ``3``.

+Versions are ordered lexicographically by their components. For more details
+on the order, see :ref:`the packaging guide <version-comparison>`.

 Notice that you can distinguish between the specific version ``@=3.2`` and
 the range ``@3.2``. This is useful for packages that follow a versioning
 scheme that omits the zero patch version number: ``3.2``, ``3.2.1``,
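The range/specific-version distinction can be exercised directly on the command line; a minimal sketch using ``zlib`` as a stand-in package:

```console
$ spack spec "zlib@1.2:"   # a range: version 1.2 or any newer version, e.g. 1.2.1
$ spack spec "zlib@=1.2"   # exactly version 1.2, nothing else
```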
@@ -220,6 +220,40 @@ section of the configuration:

.. _binary_caches_oci:

---------------------------------
Automatic push to a build cache
---------------------------------

Sometimes it is convenient to push packages to a build cache as soon as they are installed. Spack can do this by setting the ``autopush`` flag when adding a mirror:

.. code-block:: console

   $ spack mirror add --autopush <name> <url or path>

Or the autopush flag can be set for an existing mirror:

.. code-block:: console

   $ spack mirror set --autopush <name>      # enable automatic push for an existing mirror
   $ spack mirror set --no-autopush <name>   # disable automatic push for an existing mirror

Then, after installing a package, it is automatically pushed to all mirrors with ``autopush: true``. The command

.. code-block:: console

   $ spack install <package>

will have the same effect as

.. code-block:: console

   $ spack install <package>
   $ spack buildcache push <cache> <package>  # for all caches with autopush: true

.. note::

   Packages are automatically pushed to a build cache only if they are built from source.
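Putting the above together, an end-to-end sketch with a hypothetical local filesystem mirror:

```console
$ spack mirror add --autopush local /tmp/spack-mirror   # hypothetical path
$ spack install zlib   # if built from source, zlib is then pushed to "local" automatically
```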
-----------------------------------------
OCI / Docker V2 registries as build cache
-----------------------------------------
@@ -250,7 +250,7 @@ generator is Ninja. To switch to the Ninja generator, simply add:

 .. code-block:: python

-    generator = "Ninja"
+    generator("ninja")


 ``CMakePackage`` defaults to "Unix Makefiles". If you switch to the
@@ -145,6 +145,22 @@
 hosts when making ``ssl`` connections. Set to ``false`` to disable, and
 tools like ``curl`` will use their ``--insecure`` options. Disabling
 this can expose you to attacks. Use at your own risk.

+--------------------
+``ssl_certs``
+--------------------
+
+Path to custom certificates for SSL verification. The value can be a
+filesystem path, or an environment variable that expands to a file path.
+The default value is set to the environment variable ``SSL_CERT_FILE``
+to use the same syntax used by many other applications that automatically
+detect custom certificates.
+When ``url_fetch_method:curl``, the ``config:ssl_certs`` should resolve to
+a single file. Spack will then set the environment variable ``CURL_CA_BUNDLE``
+in the subprocess calling ``curl``.
+If ``url_fetch_method:urllib``, then files and directories are supported, i.e.
+``config:ssl_certs:$SSL_CERT_FILE`` or ``config:ssl_certs:$SSL_CERT_DIR``
+will work.
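As a concrete illustration, pointing Spack at a proxy's CA bundle might look like the following sketch (the certificate path is hypothetical):

```console
$ spack config add "config:ssl_certs:/etc/ssl/certs/proxy-ca.pem"   # hypothetical cert file
$ spack config add "config:url_fetch_method:curl"   # curl then receives CURL_CA_BUNDLE
```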
--------------------
``checksum``
--------------------
@@ -1071,9 +1071,9 @@ Announcing a release

 We announce releases in all of the major Spack communication channels.
 Publishing the release takes care of GitHub. The remaining channels are
-Twitter, Slack, and the mailing list. Here are the steps:
+X, Slack, and the mailing list. Here are the steps:

-#. Announce the release on Twitter.
+#. Announce the release on X.

    * Compose the tweet on the ``@spackpm`` account per the
      ``spack-twitter`` slack channel.
@@ -1572,6 +1572,8 @@ Microsoft Visual Studio
 """""""""""""""""""""""

 Microsoft Visual Studio provides the only Windows C/C++ compiler that is currently supported by Spack.
+Spack additionally requires that the Windows SDK (including WGL) be installed as part of your
+Visual Studio installation, as it is required to build many packages from source.

 We require several specific components to be included in the Visual Studio installation.
 One is the C/C++ toolset, which can be selected as "Desktop development with C++" or "C++ build tools,"
@@ -1579,6 +1581,7 @@ depending on installation type (Professional, Build Tools, etc.) The other requ
 "C++ CMake tools for Windows," which can be selected from among the optional packages.
 This provides CMake and Ninja for use during Spack configuration.
+
 If you already have Visual Studio installed, you can make sure these components are installed by
 rerunning the installer. Next to your installation, select "Modify" and look at the
 "Installation details" pane on the right.
@@ -893,26 +893,50 @@
 as an option to the ``version()`` directive. Example situations would be a
 "snapshot"-like Version Control System (VCS) tag, a VCS branch such as
 ``v6-16-00-patches``, or a URL specifying a regularly updated snapshot tarball.

+.. _version-comparison:

 ^^^^^^^^^^^^^^^^^^
 Version comparison
 ^^^^^^^^^^^^^^^^^^

 Spack imposes a generic total ordering on the set of versions,
 independently from the package they are associated with.

 Most Spack versions are numeric, a tuple of integers; for example,
-``0.1``, ``6.96`` or ``1.2.3.1``. Spack knows how to compare and sort
-numeric versions.
+``0.1``, ``6.96`` or ``1.2.3.1``. In this very basic case, version
+comparison is lexicographical on the numeric components:
+``1.2 < 1.2.1 < 1.2.2 < 1.10``.

-Some Spack versions involve slight extensions of numeric syntax; for
-example, ``py-sphinx-rtd-theme@=0.1.10a0``. In this case, numbers are
-always considered to be "newer" than letters. This is for consistency
-with `RPM <https://bugzilla.redhat.com/show_bug.cgi?id=50977>`_.
+Spack also supports string components such as ``1.1.1a`` and
+``1.y.0``. String components are considered less than numeric
+components, so ``1.y.0 < 1.0``. This is for consistency with
+`RPM <https://bugzilla.redhat.com/show_bug.cgi?id=50977>`_. String
+components do not have to be separated by dots or any other delimiter.
+So, the contrived version ``1y0`` is identical to ``1.y.0``.

-Spack versions may also be arbitrary non-numeric strings, for example
-``develop``, ``master``, ``local``.
+Pre-release suffixes also contain string parts, but they are handled
+in a special way. For example ``1.2.3alpha1`` is parsed as a pre-release
+of the version ``1.2.3``. This allows Spack to order it before the
+actual release: ``1.2.3alpha1 < 1.2.3``. Spack supports alpha, beta and
+release candidate suffixes: ``1.2alpha1 < 1.2beta1 < 1.2rc1 < 1.2``. Any
+suffix not recognized as a pre-release is treated as an ordinary
+string component, so ``1.2 < 1.2-mysuffix``.

-The order on versions is defined as follows. A version string is split
-into a list of components based on delimiters such as ``.``, ``-`` etc.
-Lists are then ordered lexicographically, where components are ordered
-as follows:
+Finally, there are a few special string components that are considered
+"infinity versions". They include ``develop``, ``main``, ``master``,
+``head``, ``trunk``, and ``stable``. For example: ``1.2 < develop``.
+These are useful for specifying the most recent development version of
+a package (often a moving target like a git branch), without assigning
+a specific version number. Infinity versions are not automatically used
+when determining the latest version of a package unless explicitly
+required by another package or user.
+
+More formally, the order on versions is defined as follows. A version
+string is split into a list of components based on delimiters such as
+``.`` and ``-`` and string boundaries. The components are split into
+the **release** and a possible **pre-release** (if the last component
+is numeric and the second to last is a string ``alpha``, ``beta`` or ``rc``).
+The release components are ordered lexicographically, with comparison
+between different types of components as follows:

 #. The following special strings are considered larger than any other
    numeric or non-numeric version component, and satisfy the following
@@ -925,6 +949,9 @@ as follows:
 #. All other non-numeric components are less than numeric components,
    and are ordered alphabetically.

+Finally, if the release components are equal, the pre-release components
+are used to break the tie, in the obvious way.
+
 The logic behind this sort order is two-fold:

 #. Non-numeric versions are usually used for special cases while
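The ordering rules above can be checked interactively with ``spack python``; a minimal sketch, assuming the ordering is exposed by the ``spack.version.Version`` class:

```console
$ spack python -c 'from spack.version import Version as V; print(V("1.2alpha1") < V("1.2") < V("1.10") < V("develop"))'
True
```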
@@ -2,12 +2,12 @@ sphinx==7.2.6
 sphinxcontrib-programoutput==0.17
 sphinx_design==0.5.0
 sphinx-rtd-theme==2.0.0
-python-levenshtein==0.25.0
+python-levenshtein==0.25.1
 docutils==0.20.1
 pygments==2.17.2
 urllib3==2.2.1
 pytest==8.1.1
 isort==5.13.2
-black==24.3.0
+black==24.4.0
 flake8==7.0.0
 mypy==1.9.0
249 lib/spack/env/cc vendored

@@ -47,7 +47,8 @@ SPACK_F77_RPATH_ARG
  SPACK_FC_RPATH_ARG
  SPACK_LINKER_ARG
  SPACK_SHORT_SPEC
- SPACK_SYSTEM_DIRS"
+ SPACK_SYSTEM_DIRS
+ SPACK_MANAGED_DIRS"

  # Optional parameters that aren't required to be set

@@ -173,21 +174,17 @@ preextend() {
      unset IFS
  }

- # system_dir PATH
- # test whether a path is a system directory
- system_dir() {
-     IFS=':' # SPACK_SYSTEM_DIRS is colon-separated
-     path="$1"
-     for sd in $SPACK_SYSTEM_DIRS; do
-         if [ "${path}" = "${sd}" ] || [ "${path}" = "${sd}/" ]; then
-             # success if path starts with a system prefix
-             unset IFS
-             return 0
-         fi
-     done
-     unset IFS
-     return 1 # fail if path starts no system prefix
- }
+ # eval this because SPACK_MANAGED_DIRS and SPACK_SYSTEM_DIRS are inputs we don't wanna loop over.
+ # moving the eval inside the function would eval it every call.
+ eval "\
+ path_order() {
+ case \"\$1\" in
+     $SPACK_MANAGED_DIRS) return 0 ;;
+     $SPACK_SYSTEM_DIRS) return 2 ;;
+     /*) return 1 ;;
+ esac
+ }
+ "

  # Fail with a clear message if the input contains any bell characters.
  if eval "[ \"\${*#*${lsep}}\" != \"\$*\" ]"; then
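To make the eval trick above concrete, here is a sketch of the function it defines, assuming the hypothetical inputs SPACK_MANAGED_DIRS='"/opt/spack/opt/"*' and SPACK_SYSTEM_DIRS='"/usr/lib"* | "/usr/local/"*'; the variables are spliced into the case patterns once, instead of being looped over on every call:

```sh
# Hypothetical expansion of the eval'd function body:
path_order() {
    case "$1" in
        "/opt/spack/opt/"*) return 0 ;;            # Spack store: ordered first
        "/usr/lib"* | "/usr/local/"*) return 2 ;;  # system dirs: ordered last
        /*) return 1 ;;                            # other absolute paths: in between
    esac
}
path_order "/usr/local/lib"; echo $?   # prints 2
```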
@@ -248,7 +245,7 @@ case "$command" in
      lang_flags=C
      debug_flags="-g"
      ;;
- c++|CC|g++|clang++|armclang++|icpc|icpx|dpcpp|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++|crayCC)
+ c++|CC|g++|clang++|armclang++|icpc|icpx|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++|crayCC)
      command="$SPACK_CXX"
      language="C++"
      comp="CXX"
@@ -420,11 +417,12 @@ input_command="$*"
  parse_Wl() {
      while [ $# -ne 0 ]; do
          if [ "$wl_expect_rpath" = yes ]; then
-             if system_dir "$1"; then
-                 append return_system_rpath_dirs_list "$1"
-             else
-                 append return_rpath_dirs_list "$1"
-             fi
+             path_order "$1"
+             case $? in
+                 0) append return_spack_store_rpath_dirs_list "$1" ;;
+                 1) append return_rpath_dirs_list "$1" ;;
+                 2) append return_system_rpath_dirs_list "$1" ;;
+             esac
              wl_expect_rpath=no
          else
              case "$1" in
@@ -432,21 +430,25 @@ parse_Wl() {
                  arg="${1#-rpath=}"
                  if [ -z "$arg" ]; then
                      shift; continue
-                 elif system_dir "$arg"; then
-                     append return_system_rpath_dirs_list "$arg"
-                 else
-                     append return_rpath_dirs_list "$arg"
                  fi
+                 path_order "$arg"
+                 case $? in
+                     0) append return_spack_store_rpath_dirs_list "$arg" ;;
+                     1) append return_rpath_dirs_list "$arg" ;;
+                     2) append return_system_rpath_dirs_list "$arg" ;;
+                 esac
                  ;;
              --rpath=*)
                  arg="${1#--rpath=}"
                  if [ -z "$arg" ]; then
                      shift; continue
-                 elif system_dir "$arg"; then
-                     append return_system_rpath_dirs_list "$arg"
-                 else
-                     append return_rpath_dirs_list "$arg"
                  fi
+                 path_order "$arg"
+                 case $? in
+                     0) append return_spack_store_rpath_dirs_list "$arg" ;;
+                     1) append return_rpath_dirs_list "$arg" ;;
+                     2) append return_system_rpath_dirs_list "$arg" ;;
+                 esac
                  ;;
              -rpath|--rpath)
                  wl_expect_rpath=yes
@@ -473,12 +475,20 @@ categorize_arguments() {

      return_other_args_list=""
      return_isystem_was_used=""

+     return_isystem_spack_store_include_dirs_list=""
      return_isystem_system_include_dirs_list=""
      return_isystem_include_dirs_list=""

+     return_spack_store_include_dirs_list=""
      return_system_include_dirs_list=""
      return_include_dirs_list=""

+     return_spack_store_lib_dirs_list=""
      return_system_lib_dirs_list=""
      return_lib_dirs_list=""

+     return_spack_store_rpath_dirs_list=""
      return_system_rpath_dirs_list=""
      return_rpath_dirs_list=""

@@ -526,7 +536,7 @@ categorize_arguments() {
              continue
          fi

-         replaced="$after$stripped"
+         replaced="$after$stripped"

          # it matched, remove it
          shift
@@ -546,29 +556,32 @@ categorize_arguments() {
              arg="${1#-isystem}"
              return_isystem_was_used=true
              if [ -z "$arg" ]; then shift; arg="$1"; fi
-             if system_dir "$arg"; then
-                 append return_isystem_system_include_dirs_list "$arg"
-             else
-                 append return_isystem_include_dirs_list "$arg"
-             fi
+             path_order "$arg"
+             case $? in
+                 0) append return_isystem_spack_store_include_dirs_list "$arg" ;;
+                 1) append return_isystem_include_dirs_list "$arg" ;;
+                 2) append return_isystem_system_include_dirs_list "$arg" ;;
+             esac
              ;;
          -I*)
              arg="${1#-I}"
              if [ -z "$arg" ]; then shift; arg="$1"; fi
-             if system_dir "$arg"; then
-                 append return_system_include_dirs_list "$arg"
-             else
-                 append return_include_dirs_list "$arg"
-             fi
+             path_order "$arg"
+             case $? in
+                 0) append return_spack_store_include_dirs_list "$arg" ;;
+                 1) append return_include_dirs_list "$arg" ;;
+                 2) append return_system_include_dirs_list "$arg" ;;
+             esac
              ;;
          -L*)
              arg="${1#-L}"
              if [ -z "$arg" ]; then shift; arg="$1"; fi
-             if system_dir "$arg"; then
-                 append return_system_lib_dirs_list "$arg"
-             else
-                 append return_lib_dirs_list "$arg"
-             fi
+             path_order "$arg"
+             case $? in
+                 0) append return_spack_store_lib_dirs_list "$arg" ;;
+                 1) append return_lib_dirs_list "$arg" ;;
+                 2) append return_system_lib_dirs_list "$arg" ;;
+             esac
              ;;
          -l*)
              # -loopopt=0 is generated erroneously in autoconf <= 2.69,
@@ -601,29 +614,32 @@ categorize_arguments() {
                  break
              elif [ "$xlinker_expect_rpath" = yes ]; then
                  # Register the path of -Xlinker -rpath <other args> -Xlinker <path>
-                 if system_dir "$1"; then
-                     append return_system_rpath_dirs_list "$1"
-                 else
-                     append return_rpath_dirs_list "$1"
-                 fi
+                 path_order "$1"
+                 case $? in
+                     0) append return_spack_store_rpath_dirs_list "$1" ;;
+                     1) append return_rpath_dirs_list "$1" ;;
+                     2) append return_system_rpath_dirs_list "$1" ;;
+                 esac
                  xlinker_expect_rpath=no
              else
                  case "$1" in
                      -rpath=*)
                          arg="${1#-rpath=}"
-                         if system_dir "$arg"; then
-                             append return_system_rpath_dirs_list "$arg"
-                         else
-                             append return_rpath_dirs_list "$arg"
-                         fi
+                         path_order "$arg"
+                         case $? in
+                             0) append return_spack_store_rpath_dirs_list "$arg" ;;
+                             1) append return_rpath_dirs_list "$arg" ;;
+                             2) append return_system_rpath_dirs_list "$arg" ;;
+                         esac
                          ;;
                      --rpath=*)
                          arg="${1#--rpath=}"
-                         if system_dir "$arg"; then
-                             append return_system_rpath_dirs_list "$arg"
-                         else
-                             append return_rpath_dirs_list "$arg"
-                         fi
+                         path_order "$arg"
+                         case $? in
+                             0) append return_spack_store_rpath_dirs_list "$arg" ;;
+                             1) append return_rpath_dirs_list "$arg" ;;
+                             2) append return_system_rpath_dirs_list "$arg" ;;
+                         esac
                          ;;
                      -rpath|--rpath)
                          xlinker_expect_rpath=yes
@@ -661,16 +677,25 @@ categorize_arguments() {
  }

  categorize_arguments "$@"
- include_dirs_list="$return_include_dirs_list"
- lib_dirs_list="$return_lib_dirs_list"
- rpath_dirs_list="$return_rpath_dirs_list"
- system_include_dirs_list="$return_system_include_dirs_list"
- system_lib_dirs_list="$return_system_lib_dirs_list"
- system_rpath_dirs_list="$return_system_rpath_dirs_list"
- isystem_was_used="$return_isystem_was_used"
- isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
- isystem_include_dirs_list="$return_isystem_include_dirs_list"
- other_args_list="$return_other_args_list"

+ spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
+ system_include_dirs_list="$return_system_include_dirs_list"
+ include_dirs_list="$return_include_dirs_list"
+
+ spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
+ system_lib_dirs_list="$return_system_lib_dirs_list"
+ lib_dirs_list="$return_lib_dirs_list"
+
+ spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
+ system_rpath_dirs_list="$return_system_rpath_dirs_list"
+ rpath_dirs_list="$return_rpath_dirs_list"
+
+ isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
+ isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
+ isystem_include_dirs_list="$return_isystem_include_dirs_list"
+
+ isystem_was_used="$return_isystem_was_used"
+ other_args_list="$return_other_args_list"

  #
  # Add flags from Spack's cppflags, cflags, cxxflags, fcflags, fflags, and
@@ -738,16 +763,25 @@ esac
  IFS="$lsep"
      categorize_arguments $spack_flags_list
  unset IFS
- spack_flags_include_dirs_list="$return_include_dirs_list"
- spack_flags_lib_dirs_list="$return_lib_dirs_list"
- spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
- spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
- spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
- spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
- spack_flags_isystem_was_used="$return_isystem_was_used"
- spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
- spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"
- spack_flags_other_args_list="$return_other_args_list"

+ spack_flags_isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
+ spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
+ spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"
+
+ spack_flags_spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
+ spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
+ spack_flags_include_dirs_list="$return_include_dirs_list"
+
+ spack_flags_spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
+ spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
+ spack_flags_lib_dirs_list="$return_lib_dirs_list"
+
+ spack_flags_spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
+ spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
+ spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
+
+ spack_flags_isystem_was_used="$return_isystem_was_used"
+ spack_flags_other_args_list="$return_other_args_list"


  # On macOS insert headerpad_max_install_names linker flag
@@ -767,11 +801,13 @@ if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
          # Append RPATH directories. Note that in the case of the
          # top-level package these directories may not exist yet. For dependencies
          # it is assumed that paths have already been confirmed.
+         extend spack_store_rpath_dirs_list SPACK_STORE_RPATH_DIRS
          extend rpath_dirs_list SPACK_RPATH_DIRS
      fi
  fi

  if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
+     extend spack_store_lib_dirs_list SPACK_STORE_LINK_DIRS
      extend lib_dirs_list SPACK_LINK_DIRS
  fi

@@ -798,38 +834,50 @@ case "$mode" in
      ;;
  esac

+ case "$mode" in
+     cpp|cc|as|ccld)
+         if [ "$spack_flags_isystem_was_used" = "true" ] || [ "$isystem_was_used" = "true" ]; then
+             extend isystem_spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
+             extend isystem_include_dirs_list SPACK_INCLUDE_DIRS
+         else
+             extend spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
+             extend include_dirs_list SPACK_INCLUDE_DIRS
+         fi
+         ;;
+ esac

  #
  # Finally, reassemble the command line.
  #
  args_list="$flags_list"

- # Insert include directories just prior to any system include directories
+ # Include search paths partitioned by (in store, non-system, system)
+ # NOTE: adding ${lsep} to the prefix here turns every added element into two
- extend args_list spack_flags_include_dirs_list "-I"
- extend args_list include_dirs_list "-I"
+ extend args_list spack_flags_spack_store_include_dirs_list -I
+ extend args_list spack_store_include_dirs_list -I
+
+ extend args_list spack_flags_include_dirs_list -I
+ extend args_list include_dirs_list -I

+ extend args_list spack_flags_isystem_spack_store_include_dirs_list "-isystem${lsep}"
+ extend args_list isystem_spack_store_include_dirs_list "-isystem${lsep}"

  extend args_list spack_flags_isystem_include_dirs_list "-isystem${lsep}"
  extend args_list isystem_include_dirs_list "-isystem${lsep}"

- case "$mode" in
-     cpp|cc|as|ccld)
-         if [ "$spack_flags_isystem_was_used" = "true" ]; then
-             extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
-         elif [ "$isystem_was_used" = "true" ]; then
-             extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
-         else
-             extend args_list SPACK_INCLUDE_DIRS "-I"
-         fi
-         ;;
- esac

  extend args_list spack_flags_system_include_dirs_list -I
  extend args_list system_include_dirs_list -I

  extend args_list spack_flags_isystem_system_include_dirs_list "-isystem${lsep}"
  extend args_list isystem_system_include_dirs_list "-isystem${lsep}"

- # Library search paths
+ # Library search paths partitioned by (in store, non-system, system)
+ extend args_list spack_flags_spack_store_lib_dirs_list "-L"
+ extend args_list spack_store_lib_dirs_list "-L"

  extend args_list spack_flags_lib_dirs_list "-L"
  extend args_list lib_dirs_list "-L"

  extend args_list spack_flags_system_lib_dirs_list "-L"
  extend args_list system_lib_dirs_list "-L"

@@ -839,8 +887,12 @@ case "$mode" in
          if [ -n "$dtags_to_add" ] ; then
              append args_list "$linker_arg$dtags_to_add"
          fi
+         extend args_list spack_flags_spack_store_rpath_dirs_list "$rpath"
+         extend args_list spack_store_rpath_dirs_list "$rpath"

          extend args_list spack_flags_rpath_dirs_list "$rpath"
          extend args_list rpath_dirs_list "$rpath"

          extend args_list spack_flags_system_rpath_dirs_list "$rpath"
          extend args_list system_rpath_dirs_list "$rpath"
          ;;
@@ -848,8 +900,12 @@ case "$mode" in
          if [ -n "$dtags_to_add" ] ; then
              append args_list "$dtags_to_add"
          fi
+         extend args_list spack_flags_spack_store_rpath_dirs_list "-rpath${lsep}"
+         extend args_list spack_store_rpath_dirs_list "-rpath${lsep}"

          extend args_list spack_flags_rpath_dirs_list "-rpath${lsep}"
          extend args_list rpath_dirs_list "-rpath${lsep}"

          extend args_list spack_flags_system_rpath_dirs_list "-rpath${lsep}"
          extend args_list system_rpath_dirs_list "-rpath${lsep}"
          ;;
@@ -913,4 +969,3 @@ fi
  # Execute the full command, preserving spaces with IFS set
  # to the alarm bell separator.
  IFS="$lsep"; exec $full_command_list
|
@@ -497,7 +497,7 @@ def copy_attributes(self, t, memo=None):
|
||||
Tag.attrib, merge_attrib]:
|
||||
if hasattr(self, a):
|
||||
if memo is not None:
|
||||
setattr(t, a, copy.deepcopy(getattr(self, a, memo)))
|
||||
setattr(t, a, copy.deepcopy(getattr(self, a), memo))
|
||||
else:
|
||||
setattr(t, a, getattr(self, a))
|
||||
# fmt: on
|
||||
|
13 lib/spack/external/patches/ruamelyaml.patch vendored Normal file

@@ -0,0 +1,13 @@
+ diff --git a/lib/spack/external/_vendoring/ruamel/yaml/comments.py b/lib/spack/external/_vendoring/ruamel/yaml/comments.py
+ index 1badeda585..892c868af3 100644
+ --- a/lib/spack/external/_vendoring/ruamel/yaml/comments.py
+ +++ b/lib/spack/external/_vendoring/ruamel/yaml/comments.py
+ @@ -497,7 +497,7 @@ def copy_attributes(self, t, memo=None):
+                   Tag.attrib, merge_attrib]:
+          if hasattr(self, a):
+              if memo is not None:
+ -                setattr(t, a, copy.deepcopy(getattr(self, a, memo)))
+ +                setattr(t, a, copy.deepcopy(getattr(self, a), memo))
+          else:
+              setattr(t, a, getattr(self, a))
+  # fmt: on
@@ -12,7 +12,7 @@
 # Archive extensions allowed in Spack
 PREFIX_EXTENSIONS = ("tar", "TAR")
 EXTENSIONS = ("gz", "bz2", "xz", "Z")
-NO_TAR_EXTENSIONS = ("zip", "tgz", "tbz2", "tbz", "txz")
+NO_TAR_EXTENSIONS = ("zip", "tgz", "tbz2", "tbz", "txz", "whl")

 # Add PREFIX_EXTENSIONS and EXTENSIONS last so that .tar.gz is matched *before* .tar or .gz
 ALLOWED_ARCHIVE_TYPES = (
@@ -357,10 +357,8 @@ def strip_version_suffixes(path_or_url: str) -> str:
         r"i[36]86",
         r"ppc64(le)?",
         r"armv?(7l|6l|64)?",
-        # PyPI
-        r"[._-]py[23].*\.whl",
-        r"[._-]cp[23].*\.whl",
-        r"[._-]win.*\.exe",
+        # PyPI wheels
+        r"-(?:py|cp)[23].*",
     ]

     for regex in suffix_regexes:
@@ -403,7 +401,7 @@ def expand_contracted_extension_in_path(
 def compression_ext_from_compressed_archive(extension: str) -> Optional[str]:
     """Returns compression extension for a compressed archive"""
     extension = expand_contracted_extension(extension)
-    for ext in [*EXTENSIONS]:
+    for ext in EXTENSIONS:
         if ext in extension:
             return ext
     return None
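With ``whl`` recognized as an archive extension and the wheel-suffix regex in place, wheel URLs can be fed to Spack's URL parser; a sketch with a hypothetical wheel URL (the ``-py3-none-any`` tag should be stripped by the new ``-(?:py|cp)[23].*`` rule, leaving ``1.2.3`` as the version):

```console
$ spack url parse https://pypi.io/packages/py3/f/foo/foo-1.2.3-py3-none-any.whl
```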
@@ -198,15 +198,32 @@ def getuid():
         return os.getuid()


+def _win_rename(src, dst):
+    # os.replace will still fail on Windows (but not POSIX) if the dst
+    # is a symlink to a directory (all other cases have parity Windows <-> POSIX)
+    if os.path.islink(dst) and os.path.isdir(os.path.realpath(dst)):
+        if os.path.samefile(src, dst):
+            # src and dst are the same
+            # do nothing and exit early
+            return
+        # If dst exists and is a symlink to a directory
+        # we need to remove dst and then perform rename/replace
+        # this is safe to do as there's no chance src == dst now
+        os.remove(dst)
+    os.replace(src, dst)
+
+
 @system_path_filter
 def rename(src, dst):
     # On Windows, os.rename will fail if the destination file already exists
     # os.replace is the same as os.rename on POSIX and is MoveFileExW w/
     # the MOVEFILE_REPLACE_EXISTING flag on Windows
+    # Windows invocation is abstracted behind additional logic handling
+    # remaining cases of divergent behavior across platforms
     if sys.platform == "win32":
-        # Windows path existence checks will sometimes fail on junctions/links/symlinks
-        # so check for that case
-        if os.path.exists(dst) or islink(dst):
-            os.remove(dst)
-        os.rename(src, dst)
+        _win_rename(src, dst)
     else:
         os.replace(src, dst)


 @system_path_filter
@@ -1217,10 +1234,12 @@ def windows_sfn(path: os.PathLike):
     import ctypes

     k32 = ctypes.WinDLL("kernel32", use_last_error=True)
+    # Method with null values returns size of short path name
+    sz = k32.GetShortPathNameW(path, None, 0)
     # stub Windows types TCHAR[LENGTH]
-    TCHAR_arr = ctypes.c_wchar * len(path)
+    TCHAR_arr = ctypes.c_wchar * sz
     ret_str = TCHAR_arr()
-    k32.GetShortPathNameW(path, ret_str, len(path))
+    k32.GetShortPathNameW(path, ctypes.byref(ret_str), sz)
     return ret_str.value
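The corrected `windows_sfn` follows the standard Win32 two-call pattern: first call with a null buffer to learn the required size, then call again to fill the buffer. A standalone sketch of that pattern (illustrative only; Spack's helper differs in details):

    import ctypes
    import sys

    def short_path(path: str) -> str:
        k32 = ctypes.WinDLL("kernel32", use_last_error=True)
        # First call: null buffer, zero length -> returns required buffer
        # size in characters, including the terminating null.
        sz = k32.GetShortPathNameW(path, None, 0)
        buf = ctypes.create_unicode_buffer(sz)
        # Second call: fill the buffer of exactly that size.
        k32.GetShortPathNameW(path, buf, sz)
        return buf.value

    if sys.platform == "win32":
        print(short_path(r"C:\Program Files"))  # e.g. C:\PROGRA~1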
@@ -17,7 +17,6 @@
 import tarfile
 import tempfile
 import time
-import traceback
 import urllib.error
 import urllib.parse
 import urllib.request
@@ -111,10 +110,6 @@ def __init__(self, errors):
         super().__init__(self.message)


-class ListMirrorSpecsError(spack.error.SpackError):
-    """Raised when unable to retrieve list of specs from the mirror"""
-
-
 class BinaryCacheIndex:
     """
     The BinaryCacheIndex tracks what specs are available on (usually remote)
@@ -541,83 +536,6 @@ def binary_index_location():
 BINARY_INDEX: BinaryCacheIndex = llnl.util.lang.Singleton(BinaryCacheIndex)  # type: ignore


-class NoOverwriteException(spack.error.SpackError):
-    """Raised when a file would be overwritten"""
-
-    def __init__(self, file_path):
-        super().__init__(f"Refusing to overwrite the following file: {file_path}")
-
-
-class NoGpgException(spack.error.SpackError):
-    """
-    Raised when gpg2 is not in PATH
-    """
-
-    def __init__(self, msg):
-        super().__init__(msg)
-
-
-class NoKeyException(spack.error.SpackError):
-    """
-    Raised when gpg has no default key added.
-    """
-
-    def __init__(self, msg):
-        super().__init__(msg)
-
-
-class PickKeyException(spack.error.SpackError):
-    """
-    Raised when multiple keys can be used to sign.
-    """
-
-    def __init__(self, keys):
-        err_msg = "Multiple keys available for signing\n%s\n" % keys
-        err_msg += "Use spack buildcache create -k <key hash> to pick a key."
-        super().__init__(err_msg)
-
-
-class NoVerifyException(spack.error.SpackError):
-    """
-    Raised if file fails signature verification.
-    """
-
-    pass
-
-
-class NoChecksumException(spack.error.SpackError):
-    """
-    Raised if file fails checksum verification.
-    """
-
-    def __init__(self, path, size, contents, algorithm, expected, computed):
-        super().__init__(
-            f"{algorithm} checksum failed for {path}",
-            f"Expected {expected} but got {computed}. "
-            f"File size = {size} bytes. Contents = {contents!r}",
-        )
-
-
-class NewLayoutException(spack.error.SpackError):
-    """
-    Raised if directory layout is different from buildcache.
-    """
-
-    def __init__(self, msg):
-        super().__init__(msg)
-
-
-class InvalidMetadataFile(spack.error.SpackError):
-    pass
-
-
-class UnsignedPackageException(spack.error.SpackError):
-    """
-    Raised if installation of unsigned package is attempted without
-    the use of ``--no-check-signature``.
-    """
-
-
 def compute_hash(data):
     if isinstance(data, str):
         data = data.encode("utf-8")
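The hunk above ends at the start of `compute_hash`, which normalizes text to bytes before hashing. A sketch of the usual shape of such a helper, assuming SHA-256 (the algorithm choice is an assumption; only the str-to-bytes normalization appears in the diff):

    import hashlib

    def compute_hash(data) -> str:
        # Accept either text or bytes; hash the UTF-8 encoding of text so
        # both spellings of the same content produce the same digest.
        if isinstance(data, str):
            data = data.encode("utf-8")
        return hashlib.sha256(data).hexdigest()

    assert compute_hash("abc") == compute_hash(b"abc")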
@@ -992,15 +910,10 @@ def url_read_method(url):
             if entry.endswith("spec.json") or entry.endswith("spec.json.sig")
         ]
         read_fn = url_read_method
-    except KeyError as inst:
-        msg = "No packages at {0}: {1}".format(cache_prefix, inst)
-        tty.warn(msg)
     except Exception as err:
-        # If we got some kind of S3 (access denied or other connection
-        # error), the first non boto-specific class in the exception
-        # hierarchy is Exception. Just print a warning and return
-        msg = "Encountered problem listing packages at {0}: {1}".format(cache_prefix, err)
-        tty.warn(msg)
+        # If we got some kind of S3 (access denied or other connection error), the first non
+        # boto-specific class in the exception is Exception. Just print a warning and return
+        tty.warn(f"Encountered problem listing packages at {cache_prefix}: {err}")

     return file_list, read_fn
@@ -1047,11 +960,10 @@ def generate_package_index(cache_prefix, concurrency=32):
     """
     try:
         file_list, read_fn = _spec_files_from_cache(cache_prefix)
-    except ListMirrorSpecsError as err:
-        tty.error("Unable to generate package index, {0}".format(err))
-        return
+    except ListMirrorSpecsError as e:
+        raise GenerateIndexError(f"Unable to generate package index: {e}") from e

-    tty.debug("Retrieving spec descriptor files from {0} to build index".format(cache_prefix))
+    tty.debug(f"Retrieving spec descriptor files from {cache_prefix} to build index")

     tmpdir = tempfile.mkdtemp()
@@ -1061,27 +973,22 @@ def generate_package_index(cache_prefix, concurrency=32):

     try:
         _read_specs_and_push_index(file_list, read_fn, cache_prefix, db, db_root_dir, concurrency)
-    except Exception as err:
-        msg = "Encountered problem pushing package index to {0}: {1}".format(cache_prefix, err)
-        tty.warn(msg)
-        tty.debug("\n" + traceback.format_exc())
+    except Exception as e:
+        raise GenerateIndexError(
+            f"Encountered problem pushing package index to {cache_prefix}: {e}"
+        ) from e
     finally:
-        shutil.rmtree(tmpdir)
+        shutil.rmtree(tmpdir, ignore_errors=True)


 def generate_key_index(key_prefix, tmpdir=None):
     """Create the key index page.

-    Creates (or replaces) the "index.json" page at the location given in
-    key_prefix. This page contains an entry for each key (.pub) under
-    key_prefix.
+    Creates (or replaces) the "index.json" page at the location given in key_prefix. This page
+    contains an entry for each key (.pub) under key_prefix.
     """

-    tty.debug(
-        " ".join(
-            ("Retrieving key.pub files from", url_util.format(key_prefix), "to build key index")
-        )
-    )
+    tty.debug(f"Retrieving key.pub files from {url_util.format(key_prefix)} to build key index")

     try:
         fingerprints = (
@@ -1089,17 +996,8 @@ def generate_key_index(key_prefix, tmpdir=None):
             for entry in web_util.list_url(key_prefix, recursive=False)
             if entry.endswith(".pub")
         )
-    except KeyError as inst:
-        msg = "No keys at {0}: {1}".format(key_prefix, inst)
-        tty.warn(msg)
-        return
-    except Exception as err:
-        # If we got some kind of S3 (access denied or other connection
-        # error), the first non boto-specific class in the exception
-        # hierarchy is Exception. Just print a warning and return
-        msg = "Encountered problem listing keys at {0}: {1}".format(key_prefix, err)
-        tty.warn(msg)
-        return
+    except Exception as e:
+        raise CannotListKeys(f"Encountered problem listing keys at {key_prefix}: {e}") from e

     remove_tmpdir = False
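These hunks replace warn-and-return error handling with typed exceptions raised via `raise ... from e`, so callers decide whether a failed index update is fatal. A standalone sketch of the pattern (hypothetical `list_keys` stand-in; not Spack's implementation):

    class GenerateIndexError(Exception):
        """Raised when unable to generate a key or package index."""

    def list_keys(prefix: str):
        raise OSError("access denied")  # stand-in for an S3/permission failure

    def generate_key_index(prefix: str):
        try:
            return list_keys(prefix)
        except Exception as e:
            # "raise ... from e" chains the original exception as __cause__,
            # so callers catch one precise type without losing the root cause.
            raise GenerateIndexError(f"problem listing keys at {prefix}: {e}") from e

    try:
        generate_key_index("s3://mirror/_pgp")
    except GenerateIndexError as err:
        print(err)                  # problem listing keys at s3://mirror/_pgp: access denied
        print(repr(err.__cause__))  # OSError('access denied')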
@@ -1124,12 +1022,13 @@ def generate_key_index(key_prefix, tmpdir=None):
                 keep_original=False,
                 extra_args={"ContentType": "application/json"},
             )
-        except Exception as err:
-            msg = "Encountered problem pushing key index to {0}: {1}".format(key_prefix, err)
-            tty.warn(msg)
+        except Exception as e:
+            raise GenerateIndexError(
+                f"Encountered problem pushing key index to {key_prefix}: {e}"
+            ) from e
         finally:
             if remove_tmpdir:
-                shutil.rmtree(tmpdir)
+                shutil.rmtree(tmpdir, ignore_errors=True)


 def tarfile_of_spec_prefix(tar: tarfile.TarFile, prefix: str) -> None:
@@ -1200,7 +1099,8 @@ def push_or_raise(spec: Spec, out_url: str, options: PushOptions):
         used at the mirror (following <tarball_directory_name>).

     This method raises :py:class:`NoOverwriteException` when ``force=False`` and the tarball or
-    spec.json file already exist in the buildcache.
+    spec.json file already exist in the buildcache. It raises :py:class:`PushToBuildCacheError`
+    when the tarball or spec.json file cannot be pushed to the buildcache.
     """
     if not spec.concrete:
         raise ValueError("spec must be concrete to build tarball")
@@ -1278,13 +1178,18 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
         key = select_signing_key(options.key)
         sign_specfile(key, options.force, specfile_path)

-    # push tarball and signed spec json to remote mirror
-    web_util.push_to_url(spackfile_path, remote_spackfile_path, keep_original=False)
-    web_util.push_to_url(
-        signed_specfile_path if not options.unsigned else specfile_path,
-        remote_signed_specfile_path if not options.unsigned else remote_specfile_path,
-        keep_original=False,
-    )
+    try:
+        # push tarball and signed spec json to remote mirror
+        web_util.push_to_url(spackfile_path, remote_spackfile_path, keep_original=False)
+        web_util.push_to_url(
+            signed_specfile_path if not options.unsigned else specfile_path,
+            remote_signed_specfile_path if not options.unsigned else remote_specfile_path,
+            keep_original=False,
+        )
+    except Exception as e:
+        raise PushToBuildCacheError(
+            f"Encountered problem pushing binary {remote_spackfile_path}: {e}"
+        ) from e

     # push the key to the build cache's _pgp directory so it can be
     # imported
@@ -1296,8 +1201,6 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
     if options.regenerate_index:
         generate_package_index(url_util.join(out_url, os.path.relpath(cache_prefix, stage_dir)))

-    return None
-

 class NotInstalledError(spack.error.SpackError):
     """Raised when a spec is not installed but picked to be packaged."""
@@ -1352,28 +1255,6 @@ def specs_to_be_packaged(
     return [s for s in itertools.chain(roots, deps) if not s.external]


-def push(spec: Spec, mirror_url: str, options: PushOptions):
-    """Create and push binary package for a single spec to the specified
-    mirror url.
-
-    Args:
-        spec: Spec to package and push
-        mirror_url: Desired destination url for binary package
-        options:
-
-    Returns:
-        True if package was pushed, False otherwise.
-
-    """
-    try:
-        push_or_raise(spec, mirror_url, options)
-    except NoOverwriteException as e:
-        warnings.warn(str(e))
-        return False
-
-    return True
-
-
 def try_verify(specfile_path):
     """Utility function to attempt to verify a local file. Assumes the
     file is a clearsigned signature file.
@@ -2706,3 +2587,96 @@ def conditional_fetch(self) -> FetchIndexResult:
         raise FetchIndexError(f"Remote index {url_manifest} is invalid")

     return FetchIndexResult(etag=None, hash=index_digest.digest, data=result, fresh=False)
+
+
+class NoOverwriteException(spack.error.SpackError):
+    """Raised when a file would be overwritten"""
+
+    def __init__(self, file_path):
+        super().__init__(f"Refusing to overwrite the following file: {file_path}")
+
+
+class NoGpgException(spack.error.SpackError):
+    """
+    Raised when gpg2 is not in PATH
+    """
+
+    def __init__(self, msg):
+        super().__init__(msg)
+
+
+class NoKeyException(spack.error.SpackError):
+    """
+    Raised when gpg has no default key added.
+    """
+
+    def __init__(self, msg):
+        super().__init__(msg)
+
+
+class PickKeyException(spack.error.SpackError):
+    """
+    Raised when multiple keys can be used to sign.
+    """
+
+    def __init__(self, keys):
+        err_msg = "Multiple keys available for signing\n%s\n" % keys
+        err_msg += "Use spack buildcache create -k <key hash> to pick a key."
+        super().__init__(err_msg)
+
+
+class NoVerifyException(spack.error.SpackError):
+    """
+    Raised if file fails signature verification.
+    """
+
+    pass
+
+
+class NoChecksumException(spack.error.SpackError):
+    """
+    Raised if file fails checksum verification.
+    """
+
+    def __init__(self, path, size, contents, algorithm, expected, computed):
+        super().__init__(
+            f"{algorithm} checksum failed for {path}",
+            f"Expected {expected} but got {computed}. "
+            f"File size = {size} bytes. Contents = {contents!r}",
+        )
+
+
+class NewLayoutException(spack.error.SpackError):
+    """
+    Raised if directory layout is different from buildcache.
+    """
+
+    def __init__(self, msg):
+        super().__init__(msg)
+
+
+class InvalidMetadataFile(spack.error.SpackError):
+    pass
+
+
+class UnsignedPackageException(spack.error.SpackError):
+    """
+    Raised if installation of unsigned package is attempted without
+    the use of ``--no-check-signature``.
+    """
+
+
+class ListMirrorSpecsError(spack.error.SpackError):
+    """Raised when unable to retrieve list of specs from the mirror"""
+
+
+class GenerateIndexError(spack.error.SpackError):
+    """Raised when unable to generate key or package index for mirror"""
+
+
+class CannotListKeys(GenerateIndexError):
+    """Raised when unable to list keys when generating key index"""
+
+
+class PushToBuildCacheError(spack.error.SpackError):
+    """Raised when unable to push objects to binary mirror"""
@@ -559,12 +559,49 @@ def ensure_patchelf_in_path_or_raise() -> spack.util.executable.Executable:
     )


+def ensure_winsdk_external_or_raise() -> None:
+    """Ensure the Windows SDK + WGL are available on system
+    If both of these packages are found, the Spack user or bootstrap
+    configuration (depending on where Spack is running)
+    will be updated to include all versions and variants detected.
+    If either the WDK or WSDK are not found, this method will raise
+    a RuntimeError.
+
+    **NOTE:** This modifies the Spack config in the current scope,
+    either user or environment depending on the calling context.
+    This is different from all other current bootstrap dependency
+    checks.
+    """
+    if set(["win-sdk", "wgl"]).issubset(spack.config.get("packages").keys()):
+        return
+    externals = spack.detection.by_path(["win-sdk", "wgl"])
+    if not set(["win-sdk", "wgl"]) == externals.keys():
+        missing_packages_lst = []
+        if "wgl" not in externals:
+            missing_packages_lst.append("wgl")
+        if "win-sdk" not in externals:
+            missing_packages_lst.append("win-sdk")
+        missing_packages = " & ".join(missing_packages_lst)
+        raise RuntimeError(
+            f"Unable to find the {missing_packages}, please install these packages "
+            "via the Visual Studio installer "
+            "before proceeding with Spack or provide the path to a non standard install via "
+            "'spack external find --path'"
+        )
+    # wgl/sdk are not required for bootstrapping Spack, but
+    # are required for building anything non trivial
+    # add to user config so they can be used by subsequent Spack ops
+    spack.detection.update_configuration(externals, buildable=False)
+
+
 def ensure_core_dependencies() -> None:
     """Ensure the presence of all the core dependencies."""
     if sys.platform.lower() == "linux":
         ensure_patchelf_in_path_or_raise()
+    if not IS_WINDOWS:
         ensure_gpg_in_path_or_raise()
+    else:
+        ensure_winsdk_external_or_raise()
     ensure_clingo_importable_or_raise()
@@ -57,8 +57,10 @@
 import spack.build_systems.meson
 import spack.build_systems.python
 import spack.builder
+import spack.compilers
 import spack.config
 import spack.deptypes as dt
 import spack.error
 import spack.main
 import spack.package_base
 import spack.paths
@@ -66,6 +68,7 @@
 import spack.repo
 import spack.schema.environment
 import spack.spec
+import spack.stage
 import spack.store
 import spack.subprocess_context
 import spack.user_environment
@@ -78,7 +81,7 @@
 from spack.installer import InstallError
 from spack.util.cpus import determine_number_of_jobs
 from spack.util.environment import (
-    SYSTEM_DIRS,
+    SYSTEM_DIR_CASE_ENTRY,
     EnvironmentModifications,
     env_flag,
     filter_system_paths,
@@ -101,9 +104,13 @@
 # Spack's compiler wrappers.
 #
 SPACK_ENV_PATH = "SPACK_ENV_PATH"
+SPACK_MANAGED_DIRS = "SPACK_MANAGED_DIRS"
 SPACK_INCLUDE_DIRS = "SPACK_INCLUDE_DIRS"
 SPACK_LINK_DIRS = "SPACK_LINK_DIRS"
 SPACK_RPATH_DIRS = "SPACK_RPATH_DIRS"
+SPACK_STORE_INCLUDE_DIRS = "SPACK_STORE_INCLUDE_DIRS"
+SPACK_STORE_LINK_DIRS = "SPACK_STORE_LINK_DIRS"
+SPACK_STORE_RPATH_DIRS = "SPACK_STORE_RPATH_DIRS"
 SPACK_RPATH_DEPS = "SPACK_RPATH_DEPS"
 SPACK_LINK_DEPS = "SPACK_LINK_DEPS"
 SPACK_PREFIX = "SPACK_PREFIX"
@@ -416,7 +423,7 @@ def set_compiler_environment_variables(pkg, env):

     env.set("SPACK_COMPILER_SPEC", str(spec.compiler))

-    env.set("SPACK_SYSTEM_DIRS", ":".join(SYSTEM_DIRS))
+    env.set("SPACK_SYSTEM_DIRS", SYSTEM_DIR_CASE_ENTRY)

     compiler.setup_custom_environment(pkg, env)
@@ -544,9 +551,23 @@ def update_compiler_args_for_dep(dep):
     include_dirs = list(dedupe(filter_system_paths(include_dirs)))
     rpath_dirs = list(dedupe(filter_system_paths(rpath_dirs)))

-    env.set(SPACK_LINK_DIRS, ":".join(link_dirs))
-    env.set(SPACK_INCLUDE_DIRS, ":".join(include_dirs))
-    env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs))
+    spack_managed_dirs: List[str] = [
+        spack.stage.get_stage_root(),
+        spack.store.STORE.db.root,
+        *(db.root for db in spack.store.STORE.db.upstream_dbs),
+    ]
+
+    env.set(SPACK_MANAGED_DIRS, "|".join(f'"{p}/"*' for p in spack_managed_dirs))
+    is_spack_managed = lambda p: any(p.startswith(store) for store in spack_managed_dirs)
+    link_dirs_spack, link_dirs_system = stable_partition(link_dirs, is_spack_managed)
+    include_dirs_spack, include_dirs_system = stable_partition(include_dirs, is_spack_managed)
+    rpath_dirs_spack, rpath_dirs_system = stable_partition(rpath_dirs, is_spack_managed)
+
+    env.set(SPACK_LINK_DIRS, ":".join(link_dirs_system))
+    env.set(SPACK_INCLUDE_DIRS, ":".join(include_dirs_system))
+    env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs_system))
+    env.set(SPACK_STORE_LINK_DIRS, ":".join(link_dirs_spack))
+    env.set(SPACK_STORE_INCLUDE_DIRS, ":".join(include_dirs_spack))
+    env.set(SPACK_STORE_RPATH_DIRS, ":".join(rpath_dirs_spack))


 def set_package_py_globals(pkg, context: Context = Context.BUILD):
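The hunk above splits each search-path list into Spack-managed and system directories so the compiler wrappers can export them separately (and order Spack store paths ahead of system paths). A standalone sketch of the partition semantics (simplified `stable_partition`; the real helper lives in `llnl.util.lang`, and the paths are made up):

    from typing import Callable, Iterable, List, Tuple

    def stable_partition(seq: Iterable, pred: Callable) -> Tuple[List, List]:
        """Split seq into (matching, non-matching), preserving order."""
        yes, no = [], []
        for item in seq:
            (yes if pred(item) else no).append(item)
        return yes, no

    managed = ["/opt/spack/store"]  # hypothetical store root
    is_managed = lambda p: any(p.startswith(m) for m in managed)

    link_dirs = ["/usr/lib", "/opt/spack/store/zlib/lib", "/opt/local/lib"]
    spack_dirs, system_dirs = stable_partition(link_dirs, is_managed)
    print(spack_dirs)   # ['/opt/spack/store/zlib/lib']
    print(system_dirs)  # ['/usr/lib', '/opt/local/lib']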
@@ -583,10 +604,22 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
         # Put spack compiler paths in module scope. (Some packages use it
         # in setup_run_environment etc, so don't put it context == build)
         link_dir = spack.paths.build_env_path
-        module.spack_cc = os.path.join(link_dir, pkg.compiler.link_paths["cc"])
-        module.spack_cxx = os.path.join(link_dir, pkg.compiler.link_paths["cxx"])
-        module.spack_f77 = os.path.join(link_dir, pkg.compiler.link_paths["f77"])
-        module.spack_fc = os.path.join(link_dir, pkg.compiler.link_paths["fc"])
+        pkg_compiler = None
+        try:
+            pkg_compiler = pkg.compiler
+        except spack.compilers.NoCompilerForSpecError as e:
+            tty.debug(f"cannot set 'spack_cc': {str(e)}")
+
+        if pkg_compiler is not None:
+            module.spack_cc = os.path.join(link_dir, pkg_compiler.link_paths["cc"])
+            module.spack_cxx = os.path.join(link_dir, pkg_compiler.link_paths["cxx"])
+            module.spack_f77 = os.path.join(link_dir, pkg_compiler.link_paths["f77"])
+            module.spack_fc = os.path.join(link_dir, pkg_compiler.link_paths["fc"])
+        else:
+            module.spack_cc = None
+            module.spack_cxx = None
+            module.spack_f77 = None
+            module.spack_fc = None

     # Useful directories within the prefix are encapsulated in
     # a Prefix object.
@@ -789,7 +822,7 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
         for mod in ["cray-mpich", "cray-libsci"]:
             module("unload", mod)

-    if target.module_name:
+    if target and target.module_name:
         load_module(target.module_name)

     load_external_modules(pkg)
@@ -434,11 +434,6 @@ def _do_patch_libtool(self):
                     r"crtendS\.o",
                 ]:
                     x.filter(regex=(rehead + o), repl="")
-            elif self.pkg.compiler.name == "dpcpp":
-                # Hack to filter out spurious predep_objects when building with Intel dpcpp
-                # (see https://github.com/spack/spack/issues/32863):
-                x.filter(regex=r"^(predep_objects=.*)/tmp/conftest-[0-9A-Fa-f]+\.o", repl=r"\1")
-                x.filter(regex=r"^(predep_objects=.*)/tmp/a-[0-9A-Fa-f]+\.o", repl=r"\1")
             elif self.pkg.compiler.name == "nag":
                 for tag in ["fc", "f77"]:
                     marker = markers[tag]
@@ -4,6 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import collections.abc
 import os
+import re
 from typing import Tuple

 import llnl.util.filesystem as fs
@@ -15,6 +16,12 @@
 from .cmake import CMakeBuilder, CMakePackage


+def spec_uses_toolchain(spec):
+    gcc_toolchain_regex = re.compile(".*gcc-toolchain.*")
+    using_toolchain = list(filter(gcc_toolchain_regex.match, spec.compiler_flags["cxxflags"]))
+    return using_toolchain
+
+
 def cmake_cache_path(name, value, comment="", force=False):
     """Generate a string for a cmake cache variable"""
     force_str = " FORCE" if force else ""
@@ -213,7 +220,7 @@ def initconfig_mpi_entries(self):
         else:
             # starting with cmake 3.10, FindMPI expects MPIEXEC_EXECUTABLE
             # vs the older versions which expect MPIEXEC
-            if self.pkg.spec["cmake"].satisfies("@3.10:"):
+            if spec["cmake"].satisfies("@3.10:"):
                 entries.append(cmake_cache_path("MPIEXEC_EXECUTABLE", mpiexec))
             else:
                 entries.append(cmake_cache_path("MPIEXEC", mpiexec))
@@ -248,12 +255,17 @@ def initconfig_hardware_entries(self):
             # Include the deprecated CUDA_TOOLKIT_ROOT_DIR for supporting BLT packages
             entries.append(cmake_cache_path("CUDA_TOOLKIT_ROOT_DIR", cudatoolkitdir))

-            archs = spec.variants["cuda_arch"].value
-            if archs[0] != "none":
-                arch_str = ";".join(archs)
-                entries.append(
-                    cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", "{0}".format(arch_str))
-                )
+            # CUDA_FLAGS
+            cuda_flags = []
+
+            if not spec.satisfies("cuda_arch=none"):
+                cuda_archs = ";".join(spec.variants["cuda_arch"].value)
+                entries.append(cmake_cache_string("CMAKE_CUDA_ARCHITECTURES", cuda_archs))
+
+            if spec_uses_toolchain(spec):
+                cuda_flags.append("-Xcompiler {}".format(spec_uses_toolchain(spec)[0]))
+
+            entries.append(cmake_cache_string("CMAKE_CUDA_FLAGS", " ".join(cuda_flags)))

         if "+rocm" in spec:
             entries.append("#------------------{0}".format("-" * 30))
@@ -262,9 +274,6 @@ def initconfig_hardware_entries(self):

             # Explicitly setting HIP_ROOT_DIR may be a patch that is no longer necessary
             entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
-            entries.append(
-                cmake_cache_path("HIP_CXX_COMPILER", "{0}".format(self.spec["hip"].hipcc))
-            )
             llvm_bin = spec["llvm-amdgpu"].prefix.bin
             llvm_prefix = spec["llvm-amdgpu"].prefix
             # Some ROCm systems seem to point to /<path>/rocm-<ver>/ and
@@ -277,11 +286,9 @@ def initconfig_hardware_entries(self):
             archs = self.spec.variants["amdgpu_target"].value
             if archs[0] != "none":
                 arch_str = ";".join(archs)
-                entries.append(
-                    cmake_cache_string("CMAKE_HIP_ARCHITECTURES", "{0}".format(arch_str))
-                )
-                entries.append(cmake_cache_string("AMDGPU_TARGETS", "{0}".format(arch_str)))
-                entries.append(cmake_cache_string("GPU_TARGETS", "{0}".format(arch_str)))
+                entries.append(cmake_cache_string("CMAKE_HIP_ARCHITECTURES", arch_str))
+                entries.append(cmake_cache_string("AMDGPU_TARGETS", arch_str))
+                entries.append(cmake_cache_string("GPU_TARGETS", arch_str))

         return entries
@@ -16,7 +16,7 @@

 class CargoPackage(spack.package_base.PackageBase):
-    """Specialized class for packages built using a Makefiles."""
+    """Specialized class for packages built using cargo."""

     #: This attribute is used in UI queries that need to know the build
     #: system base class
@@ -21,7 +21,7 @@

 class MakefilePackage(spack.package_base.PackageBase):
-    """Specialized class for packages built using a Makefiles."""
+    """Specialized class for packages built using Makefiles."""

     #: This attribute is used in UI queries that need to know the build
     #: system base class
@@ -14,7 +14,7 @@
 from llnl.util.link_tree import LinkTree

 from spack.build_environment import dso_suffix
-from spack.directives import conflicts, variant
+from spack.directives import conflicts, license, variant
 from spack.package_base import InstallError
 from spack.util.environment import EnvironmentModifications
 from spack.util.executable import Executable
@@ -26,6 +26,7 @@ class IntelOneApiPackage(Package):
     """Base class for Intel oneAPI packages."""

     homepage = "https://software.intel.com/oneapi"
+    license("https://intel.ly/393CijO")

     # oneAPI license does not allow mirroring outside of the
     # organization (e.g. University/Company).
@@ -75,6 +75,8 @@
 # does not like its directory structure.
 #

+import os
+
 import spack.variant
 from spack.directives import conflicts, depends_on, variant
 from spack.package_base import PackageBase
@@ -154,6 +156,32 @@ def hip_flags(amdgpu_target):
         archs = ",".join(amdgpu_target)
         return "--amdgpu-target={0}".format(archs)

+    # ASAN
+    @staticmethod
+    def asan_on(env, llvm_path):
+        env.set("CC", llvm_path + "/bin/clang")
+        env.set("CXX", llvm_path + "/bin/clang++")
+        env.set("ASAN_OPTIONS", "detect_leaks=0")
+
+        for root, dirs, files in os.walk(llvm_path):
+            if "libclang_rt.asan-x86_64.so" in files:
+                asan_lib_path = root
+        env.prepend_path("LD_LIBRARY_PATH", asan_lib_path)
+        SET_DWARF_VERSION_4 = ""
+        try:
+            # This will throw an error if imported on a non-Linux platform.
+            import distro
+
+            distname = distro.id()
+        except ImportError:
+            distname = "unknown"
+        if "rhel" in distname or "sles" in distname:
+            SET_DWARF_VERSION_4 = "-gdwarf-5"
+
+        env.set("CFLAGS", "-fsanitize=address -shared-libasan -g " + SET_DWARF_VERSION_4)
+        env.set("CXXFLAGS", "-fsanitize=address -shared-libasan -g " + SET_DWARF_VERSION_4)
+        env.set("LDFLAGS", "-Wl,--enable-new-dtags -fuse-ld=lld -fsanitize=address -g -Wl,")
+
     # HIP version vs Architecture

     # TODO: add a bunch of lines like:
(File diff suppressed because it is too large.)
@@ -334,8 +334,7 @@ def display_specs(specs, args=None, **kwargs):
         variants (bool): Show variants with specs
         indent (int): indent each line this much
         groups (bool): display specs grouped by arch/compiler (default True)
-        decorators (dict): dictionary mapping specs to decorators
-        header_callback (typing.Callable): called at start of arch/compiler groups
+        decorator (typing.Callable): function to call to decorate specs
         all_headers (bool): show headers even when arch/compiler aren't defined
         output (typing.IO): A file object to write to. Default is ``sys.stdout``

@@ -384,15 +383,13 @@ def get_arg(name, default=None):
     vfmt = "{variants}" if variants else ""
     format_string = nfmt + "{@version}" + ffmt + vfmt

-    transform = {"package": decorator, "fullpackage": decorator}
-
     def fmt(s, depth=0):
         """Formatter function for all output specs"""
         string = ""
         if hashes:
             string += gray_hash(s, hlen) + " "
         string += depth * " "
-        string += s.cformat(format_string, transform=transform)
+        string += decorator(s, s.cformat(format_string))
         return string

     def format_list(specs):
@@ -451,7 +448,7 @@ def filter_loaded_specs(specs):
     return [x for x in specs if x.dag_hash() in hashes]


-def print_how_many_pkgs(specs, pkg_type=""):
+def print_how_many_pkgs(specs, pkg_type="", suffix=""):
     """Given a list of specs, this will print a message about how many
     specs are in that list.

@@ -462,7 +459,7 @@ def print_how_many_pkgs(specs, pkg_type=""):
         category, e.g. if pkg_type is "installed" then the message
         would be "3 installed packages"
     """
-    tty.msg("%s" % llnl.string.plural(len(specs), pkg_type + " package"))
+    tty.msg("%s" % llnl.string.plural(len(specs), pkg_type + " package") + suffix)


 def spack_is_git_repo():
@@ -275,23 +275,37 @@ def setup_parser(subparser: argparse.ArgumentParser):

     # Sync buildcache entries from one mirror to another
     sync = subparsers.add_parser("sync", help=sync_fn.__doc__)
-    sync.add_argument(
-        "--manifest-glob", help="a quoted glob pattern identifying copy manifest files"
-    )
-    sync.add_argument(
+    sync_manifest_source = sync.add_argument_group(
+        "Manifest Source",
+        "Specify a list of build cache objects to sync using manifest file(s). "
+        'This option takes the place of the "source mirror" for synchronization '
+        'and optionally takes a "destination mirror".',
+    )
+    sync_manifest_source.add_argument(
+        "--manifest-glob", help="a quoted glob pattern identifying CI rebuild manifest files"
+    )
+    sync_source_mirror = sync.add_argument_group(
+        "Named Source",
+        "Specify a single registered source mirror to synchronize from. This option requires "
+        "the specification of a destination mirror.",
+    )
+    sync_source_mirror.add_argument(
         "src_mirror",
         metavar="source mirror",
-        type=arguments.mirror_name_or_url,
+        nargs="?",
+        type=arguments.mirror_name_or_url,
         help="source mirror name, path, or URL",
     )

     sync.add_argument(
         "dest_mirror",
         metavar="destination mirror",
-        type=arguments.mirror_name_or_url,
+        nargs="?",
+        type=arguments.mirror_name_or_url,
         help="destination mirror name, path, or URL",
     )

     sync.set_defaults(func=sync_fn)

     # Update buildcache index without copying any additional packages
@@ -1070,7 +1084,17 @@ def sync_fn(args):
     requires an active environment in order to know which specs to sync
     """
     if args.manifest_glob:
-        manifest_copy(glob.glob(args.manifest_glob))
+        # Passing the args.src_mirror here because it is not possible to
+        # have the destination be required when specifying a named source
+        # mirror and optional for the --manifest-glob argument. In the case
+        # of manifest glob sync, the source mirror positional argument is the
+        # destination mirror if it is specified. If there are two mirrors
+        # specified, the second is ignored and the first is the override
+        # destination.
+        if args.dest_mirror:
+            tty.warn(f"Ignoring unused argument: {args.dest_mirror.name}")
+
+        manifest_copy(glob.glob(args.manifest_glob), args.src_mirror)
         return 0

     if args.src_mirror is None or args.dest_mirror is None:
@@ -1121,7 +1145,7 @@ def sync_fn(args):
     shutil.rmtree(tmpdir)


-def manifest_copy(manifest_file_list):
+def manifest_copy(manifest_file_list, dest_mirror=None):
     """Read manifest files containing information about specific specs to copy
     from source to destination, remove duplicates since any binary package for
     a given hash should be the same as any other, and copy all files specified
@@ -1135,10 +1159,17 @@ def manifest_copy(manifest_file_list):
             # Last duplicate hash wins
             deduped_manifest[spec_hash] = copy_list

+    build_cache_dir = bindist.build_cache_relative_path()
     for spec_hash, copy_list in deduped_manifest.items():
         for copy_file in copy_list:
-            tty.debug("copying {0} to {1}".format(copy_file["src"], copy_file["dest"]))
-            copy_buildcache_file(copy_file["src"], copy_file["dest"])
+            dest = copy_file["dest"]
+            if dest_mirror:
+                src_relative_path = os.path.join(
+                    build_cache_dir, copy_file["src"].rsplit(build_cache_dir, 1)[1].lstrip("/")
+                )
+                dest = url_util.join(dest_mirror.push_url, src_relative_path)
+            tty.debug("copying {0} to {1}".format(copy_file["src"], dest))
+            copy_buildcache_file(copy_file["src"], dest)


 def update_index(mirror: spack.mirror.Mirror, update_keys=False):
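`manifest_copy` now rebases each manifest entry onto an optional destination mirror by splitting the source path on the build-cache directory name and re-joining the remainder onto the mirror's push URL. A standalone sketch of that path rejoin (`posixpath.join` stands in for Spack's `url_util.join`; all paths are made up):

    import posixpath

    build_cache_dir = "build_cache"

    def rebase_to_mirror(src: str, push_url: str) -> str:
        # Everything after the last "build_cache" component is the entry's
        # mirror-relative path; re-join it onto the destination push URL.
        rel = src.rsplit(build_cache_dir, 1)[1].lstrip("/")
        return posixpath.join(push_url, build_cache_dir, rel)

    src = "/tmp/stage/build_cache/linux-ubuntu22.04-x86_64/gcc-12/zlib.spack"
    print(rebase_to_mirror(src, "s3://mirror"))
    # -> s3://mirror/build_cache/linux-ubuntu22.04-x86_64/gcc-12/zlib.spack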
@@ -1165,14 +1196,18 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):
             url, bindist.build_cache_relative_path(), bindist.build_cache_keys_relative_path()
         )

-        bindist.generate_key_index(keys_url)
+        try:
+            bindist.generate_key_index(keys_url)
+        except bindist.CannotListKeys as e:
+            # Do not error out if listing keys went wrong. This usually means that the _gpg path
+            # does not exist. TODO: distinguish between this and other errors.
+            tty.warn(f"did not update the key index: {e}")


 def update_index_fn(args):
     """update a buildcache index"""
-    update_index(args.mirror, update_keys=args.keys)
+    return update_index(args.mirror, update_keys=args.keys)


 def buildcache(parser, args):
     if args.func:
-        args.func(args)
+        return args.func(args)
@@ -183,7 +183,7 @@ def checksum(parser, args):
         print()

     if args.add_to_package:
-        add_versions_to_package(pkg, version_lines)
+        add_versions_to_package(pkg, version_lines, args.batch)


 def print_checksum_status(pkg: PackageBase, version_hashes: dict):
@@ -229,7 +229,7 @@ def print_checksum_status(pkg: PackageBase, version_hashes: dict):
         tty.die("Invalid checksums found.")


-def add_versions_to_package(pkg: PackageBase, version_lines: str):
+def add_versions_to_package(pkg: PackageBase, version_lines: str, is_batch: bool):
     """
     Add checksummed versions to a package's instructions and open a user's
     editor so they may double check the work of the function.
@@ -282,5 +282,5 @@ def add_versions_to_package(pkg: PackageBase, version_lines: str):
     tty.msg(f"Added {num_versions_added} new versions to {pkg.name}")
     tty.msg(f"Open {filename} to review the additions.")

-    if sys.stdout.isatty():
+    if sys.stdout.isatty() and not is_batch:
         editor(filename)
@@ -14,6 +14,7 @@

 import spack.binary_distribution as bindist
 import spack.ci as spack_ci
+import spack.cmd
 import spack.cmd.buildcache as buildcache
 import spack.config as cfg
 import spack.environment as ev
@@ -32,6 +33,7 @@
 SPACK_COMMAND = "spack"
 MAKE_COMMAND = "make"
 INSTALL_FAIL_CODE = 1
+FAILED_CREATE_BUILDCACHE_CODE = 100


 def deindent(desc):
@@ -705,11 +707,9 @@ def ci_rebuild(args):
             cdash_handler.report_skipped(job_spec, reports_dir, reason=msg)
             cdash_handler.copy_test_results(reports_dir, job_test_dir)

-    # If the install succeeded, create a buildcache entry for this job spec
-    # and push it to one or more mirrors. If the install did not succeed,
-    # print out some instructions on how to reproduce this build failure
-    # outside of the pipeline environment.
     if install_exit_code == 0:
+        # If the install succeeded, push it to one or more mirrors. Failure to push to any mirror
+        # will result in a non-zero exit code. Pushing is best-effort.
         mirror_urls = [buildcache_mirror_url]

         # TODO: Remove this block in Spack 0.23
@@ -721,13 +721,12 @@ def ci_rebuild(args):
             destination_mirror_urls=mirror_urls,
             sign_binaries=spack_ci.can_sign_binaries(),
         ):
-            msg = tty.msg if result.success else tty.warn
-            msg(
-                "{} {} to {}".format(
-                    "Pushed" if result.success else "Failed to push",
-                    job_spec.format("{name}{@version}{/hash:7}", color=clr.get_color_when()),
-                    result.url,
-                )
-            )
+            if not result.success:
+                install_exit_code = FAILED_CREATE_BUILDCACHE_CODE
+            (tty.msg if result.success else tty.error)(
+                f'{"Pushed" if result.success else "Failed to push"} '
+                f'{job_spec.format("{name}{@version}{/hash:7}", color=clr.get_color_when())} '
+                f"to {result.url}"
+            )

         # If this is a develop pipeline, check if the spec that we just built is
@@ -748,22 +747,22 @@ def ci_rebuild(args):
             tty.warn(msg.format(broken_spec_path, err))

     else:
+        # If the install did not succeed, print out some instructions on how to reproduce this
+        # build failure outside of the pipeline environment.
         tty.debug("spack install exited non-zero, will not create buildcache")

         api_root_url = os.environ.get("CI_API_V4_URL")
         ci_project_id = os.environ.get("CI_PROJECT_ID")
         ci_job_id = os.environ.get("CI_JOB_ID")

-        repro_job_url = "{0}/projects/{1}/jobs/{2}/artifacts".format(
-            api_root_url, ci_project_id, ci_job_id
-        )
-
+        repro_job_url = f"{api_root_url}/projects/{ci_project_id}/jobs/{ci_job_id}/artifacts"
         # Control characters cause this to be printed in blue so it stands out
-        reproduce_msg = """
+        print(
+            f"""

 \033[34mTo reproduce this build locally, run:

-    spack ci reproduce-build {0} [--working-dir <dir>] [--autostart]
+    spack ci reproduce-build {repro_job_url} [--working-dir <dir>] [--autostart]

 If this project does not have public pipelines, you will need to first:

@@ -771,12 +770,9 @@ def ci_rebuild(args):

 ... then follow the printed instructions.\033[0;0m

-""".format(
-            repro_job_url
-        )
-
-        print(reproduce_msg)
+"""
+        )

     rebuild_timer.stop()
     try:
         with open("install_timers.json", "w") as timelog:
@@ -9,6 +9,7 @@
 import shutil
 import sys
 import tempfile
+from pathlib import Path
 from typing import Optional

 import llnl.string as string
@@ -44,6 +45,7 @@
     "deactivate",
     "create",
     ["remove", "rm"],
+    ["rename", "mv"],
     ["list", "ls"],
     ["status", "st"],
     "loads",
@@ -472,11 +474,82 @@ def env_remove(args):
     tty.msg(f"Successfully removed environment '{bad_env_name}'")


+#
+# env rename
+#
+def env_rename_setup_parser(subparser):
+    """rename an existing environment"""
+    subparser.add_argument(
+        "mv_from", metavar="from", help="name (or path) of existing environment"
+    )
+    subparser.add_argument(
+        "mv_to", metavar="to", help="new name (or path) for existing environment"
+    )
+    subparser.add_argument(
+        "-d",
+        "--dir",
+        action="store_true",
+        help="the specified arguments correspond to directory paths",
+    )
+    subparser.add_argument(
+        "-f", "--force", action="store_true", help="allow overwriting of an existing environment"
+    )
+
+
+def env_rename(args):
+    """Rename an environment.
+
+    This renames a managed environment or moves an anonymous environment.
+    """
+
+    # Directory option has been specified
+    if args.dir:
+        if not ev.is_env_dir(args.mv_from):
+            tty.die("The specified path does not correspond to a valid spack environment")
+        from_path = Path(args.mv_from)
+        if not args.force:
+            if ev.is_env_dir(args.mv_to):
+                tty.die(
+                    "The new path corresponds to an existing environment;"
+                    " specify the --force flag to overwrite it."
+                )
+            if Path(args.mv_to).exists():
+                tty.die("The new path already exists; specify the --force flag to overwrite it.")
+        to_path = Path(args.mv_to)
+
+    # Name option being used
+    elif ev.exists(args.mv_from):
+        from_path = ev.environment.environment_dir_from_name(args.mv_from)
+        if not args.force and ev.exists(args.mv_to):
+            tty.die(
+                "The new name corresponds to an existing environment;"
+                " specify the --force flag to overwrite it."
+            )
+        to_path = ev.environment.root(args.mv_to)
+
+    # Neither
+    else:
+        tty.die("The specified name does not correspond to a managed spack environment")
+
+    # Guard against renaming from or to an active environment
+    active_env = ev.active_environment()
+    if active_env:
+        from_env = ev.Environment(from_path)
+        if from_env.path == active_env.path:
+            tty.die("Cannot rename active environment")
+        if to_path == active_env.path:
+            tty.die(f"{args.mv_to} is an active environment")
+
+    shutil.rmtree(to_path, ignore_errors=True)
+    fs.rename(from_path, to_path)
+    tty.msg(f"Successfully renamed environment {args.mv_from} to {args.mv_to}")
+
+
 #
 # env list
 #
 def env_list_setup_parser(subparser):
-    """list available environments"""
+    """list managed environments"""


 def env_list(args):
@@ -3,7 +3,6 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

-import copy
 import sys

 import llnl.util.lang
@@ -14,6 +13,7 @@
 import spack.cmd as cmd
 import spack.environment as ev
 import spack.repo
+import spack.store
 from spack.cmd.common import arguments
 from spack.database import InstallStatuses
@@ -69,6 +69,12 @@ def setup_parser(subparser):

     arguments.add_common_arguments(subparser, ["long", "very_long", "tags", "namespaces"])

+    subparser.add_argument(
+        "-r",
+        "--only-roots",
+        action="store_true",
+        help="don't show full list of installed specs in an environment",
+    )
     subparser.add_argument(
         "-c",
         "--show-concretized",
@@ -140,6 +146,12 @@ def setup_parser(subparser):
     subparser.add_argument(
         "--only-deprecated", action="store_true", help="show only deprecated packages"
     )
+    subparser.add_argument(
+        "--install-tree",
+        action="store",
+        default="all",
+        help="Install trees to query: 'all' (default), 'local', 'upstream', upstream name or path",
+    )

     subparser.add_argument("--start-date", help="earliest date of installation [YYYY-MM-DD]")
     subparser.add_argument("--end-date", help="latest date of installation [YYYY-MM-DD]")
@@ -168,6 +180,12 @@ def query_arguments(args):

     q_args = {"installed": installed, "known": known, "explicit": explicit}

+    install_tree = args.install_tree
+    upstreams = spack.config.get("upstreams", {})
+    if install_tree in upstreams.keys():
+        install_tree = upstreams[install_tree]["install_tree"]
+    q_args["install_tree"] = install_tree
+
     # Time window of installation
     for attribute in ("start_date", "end_date"):
         date = getattr(args, attribute)
@@ -177,26 +195,22 @@ def query_arguments(args):
     return q_args


-def setup_env(env):
+def make_env_decorator(env):
     """Create a function for decorating specs when in an environment."""

-    def strip_build(seq):
-        return set(s.copy(deps=("link", "run")) for s in seq)
-
-    added = set(strip_build(env.added_specs()))
-    roots = set(strip_build(env.roots()))
-    removed = set(strip_build(env.removed_specs()))
+    roots = set(env.roots())
+    removed = set(env.removed_specs())

     def decorator(spec, fmt):
         # add +/-/* to show added/removed/root specs
         if any(spec.dag_hash() == r.dag_hash() for r in roots):
-            return color.colorize("@*{%s}" % fmt)
+            return color.colorize(f"@*{{{fmt}}}")
         elif spec in removed:
-            return color.colorize("@K{%s}" % fmt)
+            return color.colorize(f"@K{{{fmt}}}")
         else:
-            return "%s" % fmt
+            return fmt

-    return decorator, added, roots, removed
+    return decorator


 def display_env(env, args, decorator, results):
@@ -211,28 +225,51 @@ def display_env(env, args, decorator, results):
     """
     tty.msg("In environment %s" % env.name)

-    if not env.user_specs:
-        tty.msg("No root specs")
-    else:
-        tty.msg("Root specs")
+    num_roots = len(env.user_specs) or "No"
+    tty.msg(f"{num_roots} root specs")

-        # Root specs cannot be displayed with prefixes, since those are not
-        # set for abstract specs. Same for hashes
-        root_args = copy.copy(args)
-        root_args.paths = False
+    concrete_specs = {
+        root: concrete_root
+        for root, concrete_root in zip(env.concretized_user_specs, env.concrete_roots())
+    }

-        # Roots are displayed with variants, etc. so that we can see
-        # specifically what the user asked for.
-        cmd.display_specs(
-            env.user_specs,
-            root_args,
-            decorator=lambda s, f: color.colorize("@*{%s}" % f),
-            namespaces=True,
-            show_flags=True,
-            show_full_compiler=True,
-            variants=True,
-        )
-        print()
+    def root_decorator(spec, string):
+        """Decorate root specs with their install status if needed"""
+        concrete = concrete_specs.get(spec)
+        if concrete:
+            status = color.colorize(concrete.install_status().value)
+            hash = concrete.dag_hash()
+        else:
+            status = color.colorize(spack.spec.InstallStatus.absent.value)
+            hash = "-" * 32
+
+        # TODO: status has two extra spaces on the end of it, but fixing this and other spec
+        # TODO: space format idiosyncrasies is complicated. Fix this eventually
+        status = status[:-2]
+
+        if args.long or args.very_long:
+            hash = color.colorize(f"@K{{{hash[: 7 if args.long else None]}}}")
+            return f"{status} {hash} {string}"
+        else:
+            return f"{status} {string}"
+
+    with spack.store.STORE.db.read_transaction():
+        cmd.display_specs(
+            env.user_specs,
+            args,
+            # these are overrides of CLI args
+            paths=False,
+            long=False,
+            very_long=False,
+            # these enforce details in the root specs to show what the user asked for
+            namespaces=True,
+            show_flags=True,
+            show_full_compiler=True,
+            decorator=root_decorator,
+            variants=True,
+        )
+    print()

     if args.show_concretized:
         tty.msg("Concretized roots")
if args.show_concretized:
|
||||
tty.msg("Concretized roots")
|
||||
@@ -242,7 +279,7 @@ def display_env(env, args, decorator, results):
|
||||
# Display a header for the installed packages section IF there are installed
|
||||
# packages. If there aren't any, we'll just end up printing "0 installed packages"
|
||||
# later.
|
||||
if results:
|
||||
if results and not args.only_roots:
|
||||
tty.msg("Installed packages")
|
||||
|
||||
|
||||
@@ -251,9 +288,10 @@ def find(parser, args):
     results = args.specs(**q_args)

     env = ev.active_environment()
-    decorator = lambda s, f: f
-    if env:
-        decorator, _, roots, _ = setup_env(env)
+    if not env and args.only_roots:
+        tty.die("-r / --only-roots requires an active environment")
+
+    decorator = make_env_decorator(env) if env else lambda s, f: f

     # use groups by default except with format.
     if args.groups is None:
@@ -280,9 +318,12 @@ def find(parser, args):
     if env:
         display_env(env, args, decorator, results)

-    cmd.display_specs(results, args, decorator=decorator, all_headers=True)
+    count_suffix = " (not shown)"
+    if not args.only_roots:
+        cmd.display_specs(results, args, decorator=decorator, all_headers=True)
+        count_suffix = ""

     # print number of installed packages last (as the list may be long)
     if sys.stdout.isatty() and args.groups:
         pkg_type = "loaded" if args.loaded else "installed"
-        spack.cmd.print_how_many_pkgs(results, pkg_type)
+        spack.cmd.print_how_many_pkgs(results, pkg_type, suffix=count_suffix)
@@ -420,10 +420,9 @@ def install_with_active_env(env: ev.Environment, args, install_kwargs, reporter_
         with reporter_factory(specs_to_install):
             env.install_specs(specs_to_install, **install_kwargs)
     finally:
-        # TODO: this is doing way too much to trigger
-        # views and modules to be generated.
-        with env.write_transaction():
-            env.write(regenerate=True)
+        if env.views:
+            with env.write_transaction():
+                env.write(regenerate=True)


 def concrete_specs_from_cli(args, install_kwargs):
@@ -5,8 +5,6 @@

 import sys

-import llnl.util.tty as tty
-
 import spack.cmd
 import spack.cmd.find
 import spack.environment as ev
@@ -70,16 +68,6 @@ def setup_parser(subparser):
         help="load the first match if multiple packages match the spec",
     )

-    subparser.add_argument(
-        "--only",
-        default="package,dependencies",
-        dest="things_to_load",
-        choices=["package", "dependencies"],
-        help="select whether to load the package and its dependencies\n\n"
-        "the default is to load the package and all dependencies. alternatively, "
-        "one can decide to load only the package or only the dependencies",
-    )
-
     subparser.add_argument(
         "--list",
         action="store_true",
@@ -110,11 +98,6 @@ def load(parser, args):
         )
         return 1

-    if args.things_to_load != "package,dependencies":
-        tty.warn(
-            "The `--only` flag in spack load is deprecated and will be removed in Spack v0.22"
-        )
-
     with spack.store.STORE.db.read_transaction():
         env_mod = uenv.environment_modifications_for_specs(*specs)
         for spec in specs:
@@ -108,6 +108,11 @@ def setup_parser(subparser):
             "and source use `--type binary --type source` (default)"
         ),
     )
+    add_parser.add_argument(
+        "--autopush",
+        action="store_true",
+        help="set mirror to push automatically after installation",
+    )
 add_parser_signed = add_parser.add_mutually_exclusive_group(required=False)
     add_parser_signed.add_argument(
         "--unsigned",
@@ -175,6 +180,21 @@ def setup_parser(subparser):
         ),
     )
     set_parser.add_argument("--url", help="url of mirror directory from 'spack mirror create'")
+    set_parser_autopush = set_parser.add_mutually_exclusive_group(required=False)
+    set_parser_autopush.add_argument(
+        "--autopush",
+        help="set mirror to push automatically after installation",
+        action="store_true",
+        default=None,
+        dest="autopush",
+    )
+    set_parser_autopush.add_argument(
+        "--no-autopush",
+        help="set mirror to not push automatically after installation",
+        action="store_false",
+        default=None,
+        dest="autopush",
+    )
     set_parser_unsigned = set_parser.add_mutually_exclusive_group(required=False)
     set_parser_unsigned.add_argument(
         "--unsigned",
@@ -218,6 +238,7 @@ def mirror_add(args):
         or args.type
         or args.oci_username
         or args.oci_password
+        or args.autopush
         or args.signed is not None
     ):
         connection = {"url": args.url}
@@ -234,6 +255,8 @@ def mirror_add(args):
     if args.type:
         connection["binary"] = "binary" in args.type
         connection["source"] = "source" in args.type
+    if args.autopush:
+        connection["autopush"] = args.autopush
     if args.signed is not None:
         connection["signed"] = args.signed
     mirror = spack.mirror.Mirror(connection, name=args.name)
@@ -270,6 +293,8 @@ def _configure_mirror(args):
         changes["access_pair"] = [args.oci_username, args.oci_password]
     if getattr(args, "signed", None) is not None:
         changes["signed"] = args.signed
+    if getattr(args, "autopush", None) is not None:
+        changes["autopush"] = args.autopush

     # argparse cannot distinguish between --binary and --no-binary when same dest :(
     # notice that set-url does not have these args, so getattr
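The `--autopush`/`--no-autopush` pair above shares one `dest` with `default=None`, which yields a tri-state setting: leave the config untouched, enable, or disable. A standalone sketch of the argparse idiom:

    import argparse

    parser = argparse.ArgumentParser()
    group = parser.add_mutually_exclusive_group(required=False)
    # Both flags share dest="autopush" with default=None, giving a tri-state:
    # None (don't change the setting), True (--autopush), False (--no-autopush).
    group.add_argument("--autopush", action="store_true", dest="autopush", default=None)
    group.add_argument("--no-autopush", action="store_false", dest="autopush", default=None)

    print(parser.parse_args([]).autopush)                 # None
    print(parser.parse_args(["--autopush"]).autopush)     # True
    print(parser.parse_args(["--no-autopush"]).autopush)  # False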
@@ -116,39 +116,38 @@ def ipython_interpreter(args):

def python_interpreter(args):
"""A python interpreter is the default interpreter"""
# Fake a main python shell by setting __name__ to __main__.
console = code.InteractiveConsole({"__name__": "__main__", "spack": spack})
if "PYTHONSTARTUP" in os.environ:
startup_file = os.environ["PYTHONSTARTUP"]
if os.path.isfile(startup_file):
with open(startup_file) as startup:
console.runsource(startup.read(), startup_file, "exec")

if args.python_command:
propagate_exceptions_from(console)
console.runsource(args.python_command)
elif args.python_args:
propagate_exceptions_from(console)
if args.python_args and not args.python_command:
sys.argv = args.python_args
with open(args.python_args[0]) as file:
console.runsource(file.read(), args.python_args[0], "exec")
runpy.run_path(args.python_args[0], run_name="__main__")
else:
# Provides readline support, allowing user to use arrow keys
console.push("import readline")
# Provide tabcompletion
console.push("from rlcompleter import Completer")
console.push("readline.set_completer(Completer(locals()).complete)")
console.push('readline.parse_and_bind("tab: complete")')
# Fake a main python shell by setting __name__ to __main__.
console = code.InteractiveConsole({"__name__": "__main__", "spack": spack})
if "PYTHONSTARTUP" in os.environ:
startup_file = os.environ["PYTHONSTARTUP"]
if os.path.isfile(startup_file):
with open(startup_file) as startup:
console.runsource(startup.read(), startup_file, "exec")
if args.python_command:
propagate_exceptions_from(console)
console.runsource(args.python_command)
else:
# Provides readline support, allowing user to use arrow keys
console.push("import readline")
# Provide tabcompletion
console.push("from rlcompleter import Completer")
console.push("readline.set_completer(Completer(locals()).complete)")
console.push('readline.parse_and_bind("tab: complete")')

console.interact(
"Spack version %s\nPython %s, %s %s"
% (
spack.spack_version,
platform.python_version(),
platform.system(),
platform.machine(),
console.interact(
"Spack version %s\nPython %s, %s %s"
% (
spack.spack_version,
platform.python_version(),
platform.system(),
platform.machine(),
)
)
)


def propagate_exceptions_from(console):
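The notable change above is that running a script file now goes through runpy.run_path instead of feeding the file into the InteractiveConsole. A minimal sketch of why that matters (the script name and flag are hypothetical):

import runpy
import sys

# runpy executes the file as a fresh module named __main__, so
# `if __name__ == "__main__":` guards fire and tracebacks point at the
# real file, exactly as `python demo.py` would behave.
sys.argv = ["demo.py", "--verbose"]  # hypothetical script and flag
runpy.run_path("demo.py", run_name="__main__")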
@@ -91,7 +91,6 @@ def setup_parser(subparser):


def _process_result(result, show, required_format, kwargs):
result.raise_if_unsat()
opt, _, _ = min(result.answers)
if ("opt" in show) and (not required_format):
tty.msg("Best of %d considered solutions." % result.nmodels)
@@ -34,6 +34,13 @@ def setup_parser(subparser):
default=False,
help="show full pytest help, with advanced options",
)
subparser.add_argument(
"-n",
"--numprocesses",
type=int,
default=1,
help="run tests in parallel up to this wide, default 1 for sequential",
)

# extra spack arguments to list tests
list_group = subparser.add_argument_group("listing tests")
@@ -229,6 +236,16 @@ def unit_test(parser, args, unknown_args):
if args.extension:
pytest_root = spack.extensions.load_extension(args.extension)

if args.numprocesses is not None and args.numprocesses > 1:
pytest_args.extend(
[
"--dist",
"loadfile",
"--tx",
f"{args.numprocesses}*popen//python=spack-tmpconfig spack python",
]
)

# pytest.ini lives in the root of the spack repository.
with llnl.util.filesystem.working_dir(pytest_root):
if args.list:
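For illustration, with -n 4 the hunk above extends the pytest arguments with pytest-xdist options roughly like this (a sketch; it assumes pytest-xdist is installed):

# Distribute whole test files ("loadfile") across four workers, each
# spawned through `spack python` wrapped in a temporary config:
pytest_args = [
    "--dist", "loadfile",
    "--tx", "4*popen//python=spack-tmpconfig spack python",
]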
@@ -10,6 +10,7 @@
import itertools
import multiprocessing.pool
import os
import warnings
from typing import Dict, List, Optional, Tuple

import archspec.cpu
@@ -109,27 +110,33 @@ def _to_dict(compiler):
return {"compiler": d}


def get_compiler_config(scope=None, init_config=False):
def get_compiler_config(
configuration: "spack.config.Configuration",
*,
scope: Optional[str] = None,
init_config: bool = False,
) -> List[Dict]:
"""Return the compiler configuration for the specified architecture."""

config = spack.config.get("compilers", scope=scope) or []
config = configuration.get("compilers", scope=scope) or []
if config or not init_config:
return config

merged_config = spack.config.get("compilers")
merged_config = configuration.get("compilers")
if merged_config:
# Config is empty for this scope
# Do not init config because there is a non-empty scope
return config

_init_compiler_config(scope=scope)
config = spack.config.get("compilers", scope=scope)
_init_compiler_config(configuration, scope=scope)
config = configuration.get("compilers", scope=scope)
return config


def get_compiler_config_from_packages(scope=None):
def get_compiler_config_from_packages(
configuration: "spack.config.Configuration", *, scope: Optional[str] = None
) -> List[Dict]:
"""Return the compiler configuration from packages.yaml"""
config = spack.config.get("packages", scope=scope)
config = configuration.get("packages", scope=scope)
if not config:
return []

@@ -216,13 +223,15 @@ def _compiler_config_from_external(config):
return compiler_entry


def _init_compiler_config(*, scope):
def _init_compiler_config(
configuration: "spack.config.Configuration", *, scope: Optional[str]
) -> None:
"""Compiler search used when Spack has no compilers."""
compilers = find_compilers()
compilers_dict = []
for compiler in compilers:
compilers_dict.append(_to_dict(compiler))
spack.config.set("compilers", compilers_dict, scope=scope)
configuration.set("compilers", compilers_dict, scope=scope)


def compiler_config_files():
@@ -233,7 +242,7 @@ def compiler_config_files():
compiler_config = config.get("compilers", scope=name)
if compiler_config:
config_files.append(config.get_config_filename(name, "compilers"))
compiler_config_from_packages = get_compiler_config_from_packages(scope=name)
compiler_config_from_packages = get_compiler_config_from_packages(config, scope=name)
if compiler_config_from_packages:
config_files.append(config.get_config_filename(name, "packages"))
return config_files
@@ -246,7 +255,9 @@ def add_compilers_to_config(compilers, scope=None):
compilers: a list of Compiler objects.
scope: configuration scope to modify.
"""
compiler_config = get_compiler_config(scope, init_config=False)
compiler_config = get_compiler_config(
configuration=spack.config.CONFIG, scope=scope, init_config=False
)
for compiler in compilers:
if not compiler.cc:
tty.debug(f"{compiler.spec} does not have a C compiler")
@@ -295,7 +306,9 @@ def _remove_compiler_from_scope(compiler_spec, scope):
True if one or more compiler entries were actually removed, False otherwise
"""
assert scope is not None, "a specific scope is needed when calling this function"
compiler_config = get_compiler_config(scope, init_config=False)
compiler_config = get_compiler_config(
configuration=spack.config.CONFIG, scope=scope, init_config=False
)
filtered_compiler_config = [
compiler_entry
for compiler_entry in compiler_config
@@ -310,21 +323,28 @@ def _remove_compiler_from_scope(compiler_spec, scope):
# We need to preserve the YAML type for comments, hence we are copying the
# items in the list that has just been retrieved
compiler_config[:] = filtered_compiler_config
spack.config.set("compilers", compiler_config, scope=scope)
spack.config.CONFIG.set("compilers", compiler_config, scope=scope)
return True


def all_compilers_config(scope=None, init_config=True):
def all_compilers_config(
configuration: "spack.config.Configuration",
*,
scope: Optional[str] = None,
init_config: bool = True,
) -> List["spack.compiler.Compiler"]:
"""Return a set of specs for all the compiler versions currently
available to build with. These are instances of CompilerSpec.
"""
from_packages_yaml = get_compiler_config_from_packages(scope)
from_packages_yaml = get_compiler_config_from_packages(configuration, scope=scope)
if from_packages_yaml:
init_config = False
from_compilers_yaml = get_compiler_config(scope, init_config)
from_compilers_yaml = get_compiler_config(configuration, scope=scope, init_config=init_config)

result = from_compilers_yaml + from_packages_yaml
key = lambda c: _compiler_from_config_entry(c["compiler"])
# Dedupe entries by the compiler they represent
# If the entry is invalid, treat it as unique for deduplication
key = lambda c: _compiler_from_config_entry(c["compiler"] or id(c))
return list(llnl.util.lang.dedupe(result, key=key))


@@ -332,7 +352,7 @@ def all_compiler_specs(scope=None, init_config=True):
# Return compiler specs from the merged config.
return [
spack.spec.parse_with_version_concrete(s["compiler"]["spec"], compiler=True)
for s in all_compilers_config(scope, init_config)
for s in all_compilers_config(spack.config.CONFIG, scope=scope, init_config=init_config)
]


@@ -492,11 +512,20 @@ def find_specs_by_arch(compiler_spec, arch_spec, scope=None, init_config=True):


def all_compilers(scope=None, init_config=True):
config = all_compilers_config(scope, init_config=init_config)
compilers = list()
for items in config:
return all_compilers_from(
configuration=spack.config.CONFIG, scope=scope, init_config=init_config
)


def all_compilers_from(configuration, scope=None, init_config=True):
compilers = []
for items in all_compilers_config(
configuration=configuration, scope=scope, init_config=init_config
):
items = items["compiler"]
compilers.append(_compiler_from_config_entry(items))
compiler = _compiler_from_config_entry(items)  # can be None in error case
if compiler:
compilers.append(compiler)
return compilers


@@ -507,7 +536,7 @@ def compilers_for_spec(
"""This gets all compilers that satisfy the supplied CompilerSpec.
Returns an empty list if none are found.
"""
config = all_compilers_config(scope, init_config)
config = all_compilers_config(spack.config.CONFIG, scope=scope, init_config=init_config)

matches = set(find(compiler_spec, scope, init_config))
compilers = []
@@ -517,7 +546,7 @@ def compilers_for_spec(


def compilers_for_arch(arch_spec, scope=None):
config = all_compilers_config(scope)
config = all_compilers_config(spack.config.CONFIG, scope=scope)
return list(get_compilers(config, arch_spec=arch_spec))


@@ -603,7 +632,10 @@ def _compiler_from_config_entry(items):
compiler = _compiler_cache.get(config_id, None)

if compiler is None:
compiler = compiler_from_dict(items)
try:
compiler = compiler_from_dict(items)
except UnknownCompilerError as e:
warnings.warn(e.message)
_compiler_cache[config_id] = compiler

return compiler
@@ -656,7 +688,9 @@ def get_compilers(config, cspec=None, arch_spec=None):
raise ValueError(msg)
continue

compilers.append(_compiler_from_config_entry(items))
compiler = _compiler_from_config_entry(items)
if compiler:
compilers.append(compiler)

return compilers
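The theme of these hunks is threading an explicit Configuration object through the compiler-config API instead of reaching for the global spack.config module functions. A hedged usage sketch, with "user" as an example scope name:

import spack.config
import spack.compilers

# Any Configuration instance works, which is what makes these functions
# testable against a scratch configuration; here we pass the global one.
compilers_yaml = spack.compilers.get_compiler_config(
    spack.config.CONFIG, scope="user", init_config=False
)
compilers = spack.compilers.all_compilers_from(spack.config.CONFIG)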
@@ -1,34 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os

import spack.compilers.oneapi


class Dpcpp(spack.compilers.oneapi.Oneapi):
"""This is the same as the oneAPI compiler but uses dpcpp instead of
icpx (for DPC++ source files). It explicitly refers to dpcpp, so that
CMake test files which check the compiler name (e.g. CMAKE_CXX_COMPILER)
detect it as dpcpp.

Ideally we could switch out icpx for dpcpp where needed in the oneAPI
compiler definition, but two things are needed for that: (a) a way to
tell the compiler that it should be using dpcpp and (b) a way to
customize the link_paths

See also: https://www.intel.com/content/www/us/en/develop/documentation/oneapi-dpcpp-cpp-compiler-dev-guide-and-reference/top/compiler-setup/using-the-command-line/invoking-the-compiler.html
"""

# Subclasses use possible names of C++ compiler
cxx_names = ["dpcpp"]

# Named wrapper links within build_env_path
link_paths = {
"cc": os.path.join("oneapi", "icx"),
"cxx": os.path.join("oneapi", "dpcpp"),
"f77": os.path.join("oneapi", "ifx"),
"fc": os.path.join("oneapi", "ifx"),
}
@@ -8,7 +8,7 @@
import subprocess
import sys
import tempfile
from typing import Dict, List, Set
from typing import Dict, List

import archspec.cpu

@@ -20,15 +20,7 @@
from spack.error import SpackError
from spack.version import Version, VersionRange

avail_fc_version: Set[str] = set()
fc_path: Dict[str, str] = dict()

fortran_mapping = {
"2021.3.0": "19.29.30133",
"2021.2.1": "19.28.29913",
"2021.2.0": "19.28.29334",
"2021.1.0": "19.28.29333",
}
FC_PATH: Dict[str, str] = dict()


class CmdCall:
@@ -115,15 +107,13 @@ def command_str(self):
return f"{script} {self.arch} {self.sdk_ver} {self.vcvars_ver}"


def get_valid_fortran_pth(comp_ver):
cl_ver = str(comp_ver)
def get_valid_fortran_pth():
"""Assign maximum available fortran compiler version"""
# TODO (johnwparent): validate compatibility w/ try compiler
# functionality when added
sort_fn = lambda fc_ver: Version(fc_ver)
sort_fc_ver = sorted(list(avail_fc_version), key=sort_fn)
for ver in sort_fc_ver:
if ver in fortran_mapping:
if Version(cl_ver) <= Version(fortran_mapping[ver]):
return fc_path[ver]
return None
sort_fc_ver = sorted(list(FC_PATH.keys()), key=sort_fn)
return FC_PATH[sort_fc_ver[-1]] if sort_fc_ver else None


class Msvc(Compiler):
@@ -167,11 +157,9 @@ def __init__(self, *args, **kwargs):
# This positional argument "paths" is later parsed and processed by the base class
# via the call to `super` later in this method
paths = args[3]
# This positional argument "cspec" is also parsed and handled by the base class
# constructor
cspec = args[0]
new_pth = [pth if pth else get_valid_fortran_pth(cspec.version) for pth in paths]
paths[:] = new_pth
latest_fc = get_valid_fortran_pth()
new_pth = [pth if pth else latest_fc for pth in paths[2:]]
paths[2:] = new_pth
# Initialize, deferring to base class but then adding the vcvarsallfile
# file based on compiler executable path.
super().__init__(*args, **kwargs)
@@ -183,7 +171,7 @@ def __init__(self, *args, **kwargs):
# and stores their path, but their respective VCVARS
# file must be invoked before usage.
env_cmds = []
compiler_root = os.path.join(self.cc, "../../../../../../..")
compiler_root = os.path.join(os.path.dirname(self.cc), "../../../../../..")
vcvars_script_path = os.path.join(compiler_root, "Auxiliary", "Build", "vcvars64.bat")
# get current platform architecture and format for vcvars argument
arch = spack.platforms.real_host().default.lower()
@@ -198,11 +186,34 @@ def __init__(self, *args, **kwargs):
# paths[2] refers to the fc path and is a generic check
# for a fortran compiler
if paths[2]:

def get_oneapi_root(pth: str):
"""From within a prefix known to be a oneAPI path
determine the oneAPI root path from arbitrary point
under root

Args:
pth: path prefixed within oneAPI root
"""
if not pth:
return ""
while os.path.basename(pth) and os.path.basename(pth) != "oneAPI":
pth = os.path.dirname(pth)
return pth

# If this is found, it sets all the vars
oneapi_root = os.getenv("ONEAPI_ROOT")
oneapi_root = get_oneapi_root(self.fc)
if not oneapi_root:
raise RuntimeError(f"Non-oneAPI Fortran compiler {self.fc} assigned to MSVC")
oneapi_root_setvars = os.path.join(oneapi_root, "setvars.bat")
# some oneAPI exes return a version more precise than their
# install paths specify, so we determine path from
# the install path rather than the fc executable itself
numver = r"\d+\.\d+(?:\.\d+)?"
pattern = f"((?:{numver})|(?:latest))"
version_from_path = re.search(pattern, self.fc).group(1)
oneapi_version_setvars = os.path.join(
oneapi_root, "compiler", str(self.ifx_version), "env", "vars.bat"
oneapi_root, "compiler", version_from_path, "env", "vars.bat"
)
# order matters here, the specific version env must be invoked first,
# otherwise it will be ignored if the root setvars sets up the oneapi
@@ -314,23 +325,19 @@ def setup_custom_environment(self, pkg, env):

@classmethod
def fc_version(cls, fc):
# We're using intel for the Fortran compilers, which exist if
# ONEAPI_ROOT is a meaningful variable
if not sys.platform == "win32":
return "unknown"
fc_ver = cls.default_version(fc)
avail_fc_version.add(fc_ver)
fc_path[fc_ver] = fc
if os.getenv("ONEAPI_ROOT"):
try:
sps = spack.operating_systems.windows_os.WindowsOs().compiler_search_paths
except AttributeError:
raise SpackError("Windows compiler search paths not established")
clp = spack.util.executable.which_string("cl", path=sps)
ver = cls.default_version(clp)
else:
ver = fc_ver
return ver
FC_PATH[fc_ver] = fc
try:
sps = spack.operating_systems.windows_os.WindowsOs().compiler_search_paths
except AttributeError:
raise SpackError(
"Windows compiler search paths not established, "
"please report this behavior to github.com/spack/spack"
)
clp = spack.util.executable.which_string("cl", path=sps)
return cls.default_version(clp) if clp else fc_ver

@classmethod
def f77_version(cls, f77):
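The new get_oneapi_root helper simply walks the path upward with os.path.dirname until it reaches a directory literally named "oneAPI". A standalone sketch with a hypothetical install path:

import os

def get_oneapi_root(pth: str) -> str:
    # Walk up until the basename is "oneAPI" or the path is exhausted.
    if not pth:
        return ""
    while os.path.basename(pth) and os.path.basename(pth) != "oneAPI":
        pth = os.path.dirname(pth)
    return pth

# Hypothetical example (Windows-style path):
# get_oneapi_root(r"C:\Program Files (x86)\Intel\oneAPI\compiler\2023.1\windows\bin\ifx.exe")
# -> r"C:\Program Files (x86)\Intel\oneAPI"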
@@ -749,7 +749,6 @@ def _concretize_specs_together_new(*abstract_specs, **kwargs):
result = solver.solve(
abstract_specs, tests=kwargs.get("tests", False), allow_deprecated=allow_deprecated
)
result.raise_if_unsat()
return [s.copy() for s in result.specs]

@@ -107,7 +107,7 @@

#: metavar to use for commands that accept scopes
#: this is shorter and more readable than listing all choices
SCOPES_METAVAR = "{defaults,system,site,user}[/PLATFORM] or env:ENVIRONMENT"
SCOPES_METAVAR = "{defaults,system,site,user,command_line}[/PLATFORM] or env:ENVIRONMENT"

#: Base name for the (internal) overrides scope.
_OVERRIDES_BASE_NAME = "overrides-"
@@ -1562,8 +1562,9 @@ def ensure_latest_format_fn(section: str) -> Callable[[YamlConfigDict], bool]:
def use_configuration(
*scopes_or_paths: Union[ConfigScope, str]
) -> Generator[Configuration, None, None]:
"""Use the configuration scopes passed as arguments within the
context manager.
"""Use the configuration scopes passed as arguments within the context manager.

This function invalidates caches, and is therefore very slow.

Args:
*scopes_or_paths: scope objects or paths to be used
@@ -1621,15 +1621,32 @@ def query_local(self, *args, **kwargs):
query_local.__doc__ += _QUERY_DOCSTRING

def query(self, *args, **kwargs):
"""Query the Spack database including all upstream databases."""
"""Query the Spack database including all upstream databases.

Additional Arguments:
install_tree (str): query 'all' (default), 'local', 'upstream', or upstream path
"""
install_tree = kwargs.pop("install_tree", "all")
valid_trees = ["all", "upstream", "local", self.root] + [u.root for u in self.upstream_dbs]
if install_tree not in valid_trees:
msg = "Invalid install_tree argument to Database.query()\n"
msg += f"Try one of {', '.join(valid_trees)}"
tty.error(msg)
return []

upstream_results = []
for upstream_db in self.upstream_dbs:
upstreams = self.upstream_dbs
if install_tree not in ("all", "upstream"):
upstreams = [u for u in self.upstream_dbs if u.root == install_tree]
for upstream_db in upstreams:
# queries for upstream DBs need to *not* lock - we may not
# have permissions to do this and the upstream DBs won't know about
# us anyway (so e.g. they should never uninstall specs)
upstream_results.extend(upstream_db._query(*args, **kwargs) or [])

local_results = set(self.query_local(*args, **kwargs))
local_results = []
if install_tree in ("all", "local") or self.root == install_tree:
local_results = set(self.query_local(*args, **kwargs))

results = list(local_results) + list(x for x in upstream_results if x not in local_results)
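A hedged usage sketch of the new install_tree filter; the database handle and query string are placeholders:

# db is a spack.database.Database instance; "zlib" is an example query.
local_only = db.query("zlib", install_tree="local")
upstream_only = db.query("zlib", install_tree="upstream")
# A specific upstream can be selected by its root path (hypothetical path):
one_upstream = db.query("zlib", install_tree="/opt/shared-spack/opt/spack")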
@@ -9,8 +9,6 @@
import tempfile
from typing import Any, Deque, Dict, Generator, List, NamedTuple, Tuple

import jinja2

from llnl.util import filesystem

import spack.repo
@@ -85,6 +83,8 @@ def _mock_layout(self) -> Generator[List[str], None, None]:
self.tmpdir.cleanup()

def _create_executable_scripts(self, mock_executables: MockExecutables) -> List[pathlib.Path]:
import jinja2

relative_paths = mock_executables.executables
script = mock_executables.script
script_template = jinja2.Template("#!/bin/bash\n{{ script }}\n")
@@ -94,6 +94,9 @@ class OpenMpi(Package):
PatchesType = Optional[Union[Patcher, str, List[Union[Patcher, str]]]]


SUPPORTED_LANGUAGES = ("fortran", "cxx")


def _make_when_spec(value: WhenType) -> Optional["spack.spec.Spec"]:
"""Create a ``Spec`` that indicates when a directive should be applied.

@@ -585,6 +588,9 @@ def depends_on(
@see The section "Dependency specs" in the Spack Packaging Guide.

"""
if spack.spec.Spec(spec).name in SUPPORTED_LANGUAGES:
assert type == "build", "languages must be of 'build' type"
return _language(lang_spec_str=spec, when=when)

def _execute_depends_on(pkg: "spack.package_base.PackageBase"):
_depends_on(pkg, spec, when=when, type=type, patches=patches)
@@ -921,9 +927,9 @@ def maintainers(*names: str):
"""

def _execute_maintainer(pkg):
maintainers_from_base = getattr(pkg, "maintainers", [])
# Here it is essential to copy, otherwise we might add to an empty list in the parent
pkg.maintainers = list(sorted(set(maintainers_from_base + list(names))))
maintainers = set(getattr(pkg, "maintainers", []))
maintainers.update(names)
pkg.maintainers = sorted(maintainers)

return _execute_maintainer

@@ -967,7 +973,6 @@ def license(
checked_by: string or list of strings indicating which github user checked the
license (if any).
when: A spec specifying when the license applies.
when: A spec specifying when the license applies.
"""

return lambda pkg: _execute_license(pkg, license_identifier, when)
@@ -1014,6 +1019,21 @@ def _execute_requires(pkg: "spack.package_base.PackageBase"):
return _execute_requires


@directive("languages")
def _language(lang_spec_str: str, *, when: Optional[Union[str, bool]] = None):
"""Temporary implementation of language virtuals, until compilers are proper dependencies."""

def _execute_languages(pkg: "spack.package_base.PackageBase"):
when_spec = _make_when_spec(when)
if not when_spec:
return

languages = pkg.languages.setdefault(when_spec, set())
languages.add(lang_spec_str)

return _execute_languages


class DirectiveError(spack.error.SpackError):
"""This is raised when something is wrong with a package directive."""
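With the hunks above, depends_on reroutes "fortran" and "cxx" to the new _language directive. A hedged sketch of what this would look like in a package recipe (the package itself is hypothetical):

from spack.package import *

class MySolver(Package):  # hypothetical recipe, for illustration only
    """Example package declaring language virtuals."""
    # Routed through _language() because the names are in SUPPORTED_LANGUAGES;
    # the directive asserts that languages are build-type dependencies.
    depends_on("cxx", type="build")
    depends_on("fortran", type="build", when="+fortran")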
@@ -106,17 +106,16 @@ def environment_name(path: Union[str, pathlib.Path]) -> str:
return path_str


def check_disallowed_env_config_mods(scopes):
def ensure_no_disallowed_env_config_mods(scopes: List[spack.config.ConfigScope]) -> None:
for scope in scopes:
with spack.config.use_configuration(scope):
if spack.config.get("config:environments_root"):
raise SpackEnvironmentError(
"Spack environments are prohibited from modifying 'config:environments_root' "
"because it can make the definition of the environment ill-posed. Please "
"remove from your environment and place it in a permanent scope such as "
"defaults, system, site, etc."
)
return scopes
config = scope.get_section("config")
if config and "environments_root" in config["config"]:
raise SpackEnvironmentError(
"Spack environments are prohibited from modifying 'config:environments_root' "
"because it can make the definition of the environment ill-posed. Please "
"remove from your environment and place it in a permanent scope such as "
"defaults, system, site, etc."
)


def default_manifest_yaml():
@@ -1427,7 +1426,7 @@ def _concretize_separately(self, tests=False):

# Ensure we have compilers in compilers.yaml to avoid that
# processes try to write the config file in parallel
_ = spack.compilers.get_compiler_config(init_config=True)
_ = spack.compilers.get_compiler_config(spack.config.CONFIG, init_config=True)

# Early return if there is nothing to do
if len(args) == 0:
@@ -2463,6 +2462,10 @@ def __init__(self, manifest_dir: Union[pathlib.Path, str]) -> None:
self.scope_name = f"env:{environment_name(self.manifest_dir)}"
self.config_stage_dir = os.path.join(env_subdir_path(manifest_dir), "config")

#: Configuration scopes associated with this environment. Note that these are not
#: invalidated by a re-read of the manifest file.
self._config_scopes: Optional[List[spack.config.ConfigScope]] = None

if not self.manifest_file.exists():
msg = f"cannot find '{manifest_name}' in {self.manifest_dir}"
raise SpackEnvironmentError(msg)
@@ -2808,16 +2811,19 @@ def included_config_scopes(self) -> List[spack.config.ConfigScope]:

@property
def env_config_scopes(self) -> List[spack.config.ConfigScope]:
"""A list of all configuration scopes for the environment manifest.

Returns: All configuration scopes associated with the environment
"""
config_name = self.scope_name
env_scope = spack.config.SingleFileScope(
config_name, str(self.manifest_file), spack.schema.env.schema, [TOP_LEVEL_KEY]
)

return check_disallowed_env_config_mods(self.included_config_scopes + [env_scope])
"""A list of all configuration scopes for the environment manifest. On the first call this
instantiates all the scopes, on subsequent calls it returns the cached list."""
if self._config_scopes is not None:
return self._config_scopes
scopes: List[spack.config.ConfigScope] = [
*self.included_config_scopes,
spack.config.SingleFileScope(
self.scope_name, str(self.manifest_file), spack.schema.env.schema, [TOP_LEVEL_KEY]
),
]
ensure_no_disallowed_env_config_mods(scopes)
self._config_scopes = scopes
return scopes

def prepare_config_scope(self) -> None:
"""Add the manifest's scopes to the global configuration search path."""
lib/spack/spack/hooks/autopush.py (new file, 27 lines)
@@ -0,0 +1,27 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import llnl.util.tty as tty

import spack.binary_distribution as bindist
import spack.mirror


def post_install(spec, explicit):
# Push package to all buildcaches with autopush==True

# Do nothing if package was not installed from source
pkg = spec.package
if pkg.installed_from_binary_cache:
return

# Push the package to all autopush mirrors
for mirror in spack.mirror.MirrorCollection(binary=True, autopush=True).values():
bindist.push_or_raise(
spec,
mirror.push_url,
bindist.PushOptions(force=True, regenerate_index=False, unsigned=not mirror.signed),
)
tty.msg(f"{spec.name}: Pushed to build cache: '{mirror.name}'")
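For reference, a hedged sketch of the mirrors.yaml data this hook reacts to, written as the equivalent Python dict; the mirror name and URL are illustrative, and the "autopush" key is the one added to the mirrors schema later in this changeset:

mirrors_section = {
    "my-cache": {                          # hypothetical mirror name
        "url": "file:///data/buildcache",  # hypothetical URL
        "binary": True,
        "autopush": True,
        "signed": False,                   # hook then pushes unsigned
    }
}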
@@ -119,7 +119,7 @@ def __init__(self, pkg_count: int):
self.pkg_ids: Set[str] = set()

def next_pkg(self, pkg: "spack.package_base.PackageBase"):
pkg_id = package_id(pkg)
pkg_id = package_id(pkg.spec)

if pkg_id not in self.pkg_ids:
self.pkg_num += 1
@@ -221,12 +221,12 @@ def _handle_external_and_upstream(pkg: "spack.package_base.PackageBase", explici
# consists in module file generation and registration in the DB.
if pkg.spec.external:
_process_external_package(pkg, explicit)
_print_installed_pkg(f"{pkg.prefix} (external {package_id(pkg)})")
_print_installed_pkg(f"{pkg.prefix} (external {package_id(pkg.spec)})")
return True

if pkg.spec.installed_upstream:
tty.verbose(
f"{package_id(pkg)} is installed in an upstream Spack instance at "
f"{package_id(pkg.spec)} is installed in an upstream Spack instance at "
f"{pkg.spec.prefix}"
)
_print_installed_pkg(pkg.prefix)
@@ -403,7 +403,7 @@ def _install_from_cache(
return False
t.stop()

pkg_id = package_id(pkg)
pkg_id = package_id(pkg.spec)
tty.debug(f"Successfully extracted {pkg_id} from binary cache")

_write_timer_json(pkg, t, True)
@@ -484,7 +484,7 @@ def _process_binary_cache_tarball(
if download_result is None:
return False

tty.msg(f"Extracting {package_id(pkg)} from binary cache")
tty.msg(f"Extracting {package_id(pkg.spec)} from binary cache")

with timer.measure("install"), spack.util.path.filter_padding():
binary_distribution.extract_tarball(pkg.spec, download_result, force=False, timer=timer)
@@ -513,7 +513,7 @@ def _try_install_from_binary_cache(
if not spack.mirror.MirrorCollection(binary=True):
return False

tty.debug(f"Searching for binary cache of {package_id(pkg)}")
tty.debug(f"Searching for binary cache of {package_id(pkg.spec)}")

with timer.measure("search"):
matches = binary_distribution.get_mirrors_for_spec(pkg.spec, index_only=True)
@@ -610,7 +610,7 @@ def get_dependent_ids(spec: "spack.spec.Spec") -> List[str]:

Returns: list of package ids
"""
return [package_id(d.package) for d in spec.dependents()]
return [package_id(d) for d in spec.dependents()]


def install_msg(name: str, pid: int, install_status: InstallStatus) -> str:
@@ -720,7 +720,7 @@ def log(pkg: "spack.package_base.PackageBase") -> None:
dump_packages(pkg.spec, packages_dir)


def package_id(pkg: "spack.package_base.PackageBase") -> str:
def package_id(spec: "spack.spec.Spec") -> str:
"""A "unique" package identifier for installation purposes

The identifier is used to track build tasks, locks, install, and
@@ -732,10 +732,10 @@ def package_id(pkg: "spack.package_base.PackageBase") -> str:
Args:
pkg: the package from which the identifier is derived
"""
if not pkg.spec.concrete:
if not spec.concrete:
raise ValueError("Cannot provide a unique, readable id when the spec is not concretized.")

return f"{pkg.name}-{pkg.version}-{pkg.spec.dag_hash()}"
return f"{spec.name}-{spec.version}-{spec.dag_hash()}"


class BuildRequest:
@@ -765,7 +765,7 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
self.pkg.last_phase = install_args.pop("stop_at", None)  # type: ignore[attr-defined]

# Cache the package id for convenience
self.pkg_id = package_id(pkg)
self.pkg_id = package_id(pkg.spec)

# Save off the original install arguments plus standard defaults
# since they apply to the requested package *and* dependencies.
@@ -780,9 +780,9 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
# are not able to return full dependents for all packages across
# environment specs.
self.dependencies = set(
package_id(d.package)
package_id(d)
for d in self.pkg.spec.dependencies(deptype=self.get_depflags(self.pkg))
if package_id(d.package) != self.pkg_id
if package_id(d) != self.pkg_id
)

def __repr__(self) -> str:
@@ -832,7 +832,7 @@ def get_depflags(self, pkg: "spack.package_base.PackageBase") -> int:
depflag = dt.LINK | dt.RUN
include_build_deps = self.install_args.get("include_build_deps")

if self.pkg_id == package_id(pkg):
if self.pkg_id == package_id(pkg.spec):
cache_only = self.install_args.get("package_cache_only")
else:
cache_only = self.install_args.get("dependencies_cache_only")
@@ -927,7 +927,7 @@ def __init__(
raise ValueError(f"{self.pkg.name} must have a concrete spec")

# The "unique" identifier for the task's package
self.pkg_id = package_id(self.pkg)
self.pkg_id = package_id(self.pkg.spec)

# The explicit build request associated with the package
if not isinstance(request, BuildRequest):
@@ -965,9 +965,9 @@ def __init__(
# if use traverse for transitive dependencies, then must remove
# transitive dependents on failure.
self.dependencies = set(
package_id(d.package)
package_id(d)
for d in self.pkg.spec.dependencies(deptype=self.request.get_depflags(self.pkg))
if package_id(d.package) != self.pkg_id
if package_id(d) != self.pkg_id
)

# Handle bootstrapped compiler
@@ -976,14 +976,18 @@ def __init__(
# a dependency of the build task. Here we add it to self.dependencies
compiler_spec = self.pkg.spec.compiler
arch_spec = self.pkg.spec.architecture
if not spack.compilers.compilers_for_spec(compiler_spec, arch_spec=arch_spec):
strict = spack.concretize.Concretizer().check_for_compiler_existence
if (
not spack.compilers.compilers_for_spec(compiler_spec, arch_spec=arch_spec)
and not strict
):
# The compiler is in the queue, identify it as dependency
dep = spack.compilers.pkg_spec_for_compiler(compiler_spec)
dep.constrain(f"platform={str(arch_spec.platform)}")
dep.constrain(f"os={str(arch_spec.os)}")
dep.constrain(f"target={arch_spec.target.microarchitecture.family.name}:")
dep.concretize()
dep_id = package_id(dep.package)
dep_id = package_id(dep)
self.dependencies.add(dep_id)

# List of uninstalled dependencies, which is used to establish
@@ -1194,7 +1198,7 @@ def _add_bootstrap_compilers(
"""
packages = _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs)
for comp_pkg, is_compiler in packages:
pkgid = package_id(comp_pkg)
pkgid = package_id(comp_pkg.spec)
if pkgid not in self.build_tasks:
self._add_init_task(comp_pkg, request, is_compiler, all_deps)
elif is_compiler:
@@ -1241,7 +1245,7 @@ def _add_init_task(
"""
task = BuildTask(pkg, request, is_compiler, 0, 0, STATUS_ADDED, self.installed)
for dep_id in task.dependencies:
all_deps[dep_id].add(package_id(pkg))
all_deps[dep_id].add(package_id(pkg.spec))

self._push_task(task)

@@ -1276,7 +1280,7 @@ def _check_deps_status(self, request: BuildRequest) -> None:
err = "Cannot proceed with {0}: {1}"
for dep in request.traverse_dependencies():
dep_pkg = dep.package
dep_id = package_id(dep_pkg)
dep_id = package_id(dep)

# Check for failure since a prefix lock is not required
if spack.store.STORE.failure_tracker.has_failed(dep):
@@ -1409,7 +1413,7 @@ def _cleanup_task(self, pkg: "spack.package_base.PackageBase") -> None:
Args:
pkg: the package being installed
"""
self._remove_task(package_id(pkg))
self._remove_task(package_id(pkg.spec))

# Ensure we have a read lock to prevent others from uninstalling the
# spec during our installation.
@@ -1423,7 +1427,7 @@ def _ensure_install_ready(self, pkg: "spack.package_base.PackageBase") -> None:
Args:
pkg: the package being locally installed
"""
pkg_id = package_id(pkg)
pkg_id = package_id(pkg.spec)
pre = f"{pkg_id} cannot be installed locally:"

# External packages cannot be installed locally.
@@ -1465,7 +1469,7 @@ def _ensure_locked(
"write",
], f'"{lock_type}" is not a supported package management lock type'

pkg_id = package_id(pkg)
pkg_id = package_id(pkg.spec)
ltype, lock = self.locks.get(pkg_id, (lock_type, None))
if lock and ltype == lock_type:
return ltype, lock
@@ -1601,7 +1605,7 @@ def _add_tasks(self, request: BuildRequest, all_deps):
for dep in request.traverse_dependencies():
dep_pkg = dep.package

dep_id = package_id(dep_pkg)
dep_id = package_id(dep)
if dep_id not in self.build_tasks:
self._add_init_task(dep_pkg, request, False, all_deps)

@@ -1913,7 +1917,7 @@ def _flag_installed(
dependent_ids: set of the package's dependent ids, or None if the dependent ids are
limited to those maintained in the package (dependency DAG)
"""
pkg_id = package_id(pkg)
pkg_id = package_id(pkg.spec)

if pkg_id in self.installed:
# Already determined the package has been installed
@@ -2274,11 +2278,15 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):
# whether to install source code with the package
self.install_source = install_args.get("install_source", False)

is_develop = pkg.spec.is_develop
# whether to keep the build stage after installation
self.keep_stage = install_args.get("keep_stage", False)

# Note: user commands do not have an explicit choice to disable
# keeping stages (i.e., we have a --keep-stage option, but not
# a --destroy-stage option), so we can override a default choice
# to destroy
self.keep_stage = is_develop or install_args.get("keep_stage", False)
# whether to restage
self.restage = install_args.get("restage", False)
self.restage = (not is_develop) and install_args.get("restage", False)

# whether to skip the patch phase
self.skip_patch = install_args.get("skip_patch", False)
@@ -2305,7 +2313,7 @@ def __init__(self, pkg: "spack.package_base.PackageBase", install_args: dict):

# info/debug information
self.pre = _log_prefix(pkg.name)
self.pkg_id = package_id(pkg)
self.pkg_id = package_id(pkg.spec)

def run(self) -> bool:
"""Main entry point from ``build_process`` to kick off install in child."""
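The recurring edit in this file is mechanical: package_id is now derived from a concrete Spec, so callers pass pkg.spec (or a spec they already hold) instead of a PackageBase. A hedged sketch of calling it (the spec string is an example):

import spack.spec

spec = spack.spec.Spec("zlib@1.3").concretized()  # example spec
pkg_id = package_id(spec)
# -> something like "zlib-1.3-<dag hash>", per the f-string in the hunk above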
@@ -137,6 +137,12 @@ def source(self):
def signed(self) -> bool:
return isinstance(self._data, str) or self._data.get("signed", True)

@property
def autopush(self) -> bool:
if isinstance(self._data, str):
return False
return self._data.get("autopush", False)

@property
def fetch_url(self):
"""Get the valid, canonicalized fetch URL"""
@@ -150,7 +156,7 @@ def push_url(self):
def _update_connection_dict(self, current_data: dict, new_data: dict, top_level: bool):
keys = ["url", "access_pair", "access_token", "profile", "endpoint_url"]
if top_level:
keys += ["binary", "source", "signed"]
keys += ["binary", "source", "signed", "autopush"]
changed = False
for key in keys:
if key in new_data and current_data.get(key) != new_data[key]:
@@ -286,6 +292,7 @@ def __init__(
scope=None,
binary: Optional[bool] = None,
source: Optional[bool] = None,
autopush: Optional[bool] = None,
):
"""Initialize a mirror collection.

@@ -297,21 +304,27 @@ def __init__(
If None, do not filter on binary mirrors.
source: If True, only include source mirrors.
If False, omit source mirrors.
If None, do not filter on source mirrors."""
self._mirrors = {
name: Mirror(data=mirror, name=name)
for name, mirror in (
mirrors.items()
if mirrors is not None
else spack.config.get("mirrors", scope=scope).items()
)
}
If None, do not filter on source mirrors.
autopush: If True, only include mirrors that have autopush enabled.
If False, omit mirrors that have autopush enabled.
If None, do not filter on autopush."""
mirrors_data = (
mirrors.items()
if mirrors is not None
else spack.config.get("mirrors", scope=scope).items()
)
mirrors = (Mirror(data=mirror, name=name) for name, mirror in mirrors_data)

if source is not None:
self._mirrors = {k: v for k, v in self._mirrors.items() if v.source == source}
def _filter(m: Mirror):
if source is not None and m.source != source:
return False
if binary is not None and m.binary != binary:
return False
if autopush is not None and m.autopush != autopush:
return False
return True

if binary is not None:
self._mirrors = {k: v for k, v in self._mirrors.items() if v.binary == binary}
self._mirrors = {m.name: m for m in mirrors if _filter(m)}

def __eq__(self, other):
return self._mirrors == other._mirrors
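A hedged usage sketch of the unified filter; passing None (the default) for any criterion skips that filter entirely:

import spack.mirror

# Only binary mirrors with autopush enabled -- the same selection the
# autopush hook above iterates over:
collection = spack.mirror.MirrorCollection(binary=True, autopush=True)
for name, mirror in collection.items():
    print(name, mirror.push_url)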
@@ -73,17 +73,24 @@ def vs_install_paths(self):
def msvc_paths(self):
return [os.path.join(path, "VC", "Tools", "MSVC") for path in self.vs_install_paths]

@property
def oneapi_root(self):
root = os.environ.get("ONEAPI_ROOT", "") or os.path.join(
os.environ.get("ProgramFiles(x86)", ""), "Intel", "oneAPI"
)
if os.path.exists(root):
return root

@property
def compiler_search_paths(self):
# First Strategy: Find MSVC directories using vswhere
_compiler_search_paths = []
for p in self.msvc_paths:
_compiler_search_paths.extend(glob.glob(os.path.join(p, "*", "bin", "Hostx64", "x64")))
if os.getenv("ONEAPI_ROOT"):
oneapi_root = self.oneapi_root
if oneapi_root:
_compiler_search_paths.extend(
glob.glob(
os.path.join(str(os.getenv("ONEAPI_ROOT")), "compiler", "*", "windows", "bin")
)
glob.glob(os.path.join(oneapi_root, "compiler", "**", "bin"), recursive=True)
)

# Second strategy: Find MSVC via the registry
@@ -567,6 +567,7 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
provided_together: Dict["spack.spec.Spec", List[Set[str]]]
patches: Dict["spack.spec.Spec", List["spack.patch.Patch"]]
variants: Dict[str, Tuple["spack.variant.Variant", "spack.spec.Spec"]]
languages: Dict["spack.spec.Spec", Set[str]]

#: By default, packages are not virtual
#: Virtual packages override this attribute
@@ -160,10 +160,15 @@ def detect(cls):
system, as the Cray compiler wrappers and other components of the Cray
programming environment are irrelevant without module support.
"""
craype_type, craype_version = cls.craype_type_and_version()
if craype_type == "EX" and craype_version >= spack.version.Version("21.10"):
if "opt/cray" not in os.environ.get("MODULEPATH", ""):
return False
return "opt/cray" in os.environ.get("MODULEPATH", "")

craype_type, craype_version = cls.craype_type_and_version()
if craype_type == "XC":
return True
if craype_type == "EX" and craype_version < spack.version.Version("21.10"):
return True
return False

def _default_target_from_env(self):
"""Set and return the default CrayPE target loaded in a clean login
@@ -14,7 +14,7 @@
import xml.sax.saxutils
from typing import Dict, Optional
from urllib.parse import urlencode
from urllib.request import HTTPHandler, Request, build_opener
from urllib.request import HTTPSHandler, Request, build_opener

import llnl.util.tty as tty
from llnl.util.filesystem import working_dir
@@ -27,6 +27,7 @@
from spack.error import SpackError
from spack.util.crypto import checksum
from spack.util.log_parse import parse_log_events
from spack.util.web import urllib_ssl_cert_handler

from .base import Reporter
from .extract import extract_test_parts
@@ -427,7 +428,7 @@ def upload(self, filename):
# Compute md5 checksum for the contents of this file.
md5sum = checksum(hashlib.md5, filename, block_size=8192)

opener = build_opener(HTTPHandler)
opener = build_opener(HTTPSHandler(context=urllib_ssl_cert_handler()))
with open(filename, "rb") as f:
params_dict = {
"build": self.buildname,
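For context, urllib's HTTPSHandler accepts an ssl.SSLContext, which is what lets the reporter honor a custom certificate configuration. A generic standalone sketch of the same pattern (this builds a default context rather than Spack's urllib_ssl_cert_handler helper):

import ssl
from urllib.request import HTTPSHandler, build_opener

ctx = ssl.create_default_context()  # extra CA certs could be loaded here
opener = build_opener(HTTPSHandler(context=ctx))
# opener.open(request) now performs TLS verification using `ctx`.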
@@ -34,6 +34,7 @@
"strategy": {"type": "string", "enum": ["none", "minimal", "full"]}
},
},
"os_compatible": {"type": "object", "additionalProperties": {"type": "array"}},
},
}
}
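The new os_compatible entry maps a newer operating system to older ones whose binaries may be reused. A hedged sketch of conforming data, written as the equivalent Python dict (the OS names are illustrative):

concretizer_section = {
    "concretizer": {
        "os_compatible": {
            # hypothetical mapping: reuse ubuntu20.04 binaries on ubuntu22.04
            "ubuntu22.04": ["ubuntu20.04"],
        }
    }
}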
@@ -73,6 +73,7 @@
"environments_root": {"type": "string"},
"connect_timeout": {"type": "integer", "minimum": 0},
"verify_ssl": {"type": "boolean"},
"ssl_certs": {"type": "string"},
"suppress_gpg_warnings": {"type": "boolean"},
"install_missing_compilers": {"type": "boolean"},
"debug": {"type": "boolean"},
@@ -46,6 +46,7 @@
"signed": {"type": "boolean"},
"fetch": fetch_and_push,
"push": fetch_and_push,
"autopush": {"type": "boolean"},
**connection,  # type: ignore
},
}
@@ -15,6 +15,7 @@
import types
import typing
import warnings
from contextlib import contextmanager
from typing import Callable, Dict, Iterator, List, NamedTuple, Optional, Set, Tuple, Type, Union

import archspec.cpu
@@ -119,6 +120,17 @@ def __str__(self):
return f"{self._name_.lower()}"


@contextmanager
def spec_with_name(spec, name):
"""Context manager to temporarily set the name of a spec"""
old_name = spec.name
spec.name = name
try:
yield spec
finally:
spec.name = old_name

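A hedged usage sketch of the new context manager; the finally clause guarantees the original name is restored even if fact generation raises:

import spack.spec

s = spack.spec.Spec("@1.2:")        # anonymous constraint, illustrative
with spec_with_name(s, "mypkg"):    # hypothetical package name
    assert s.name == "mypkg"        # facts can be emitted for "mypkg" here
# outside the block the original (empty) name is back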
class RequirementKind(enum.Enum):
"""Purpose / provenance of a requirement"""

@@ -267,8 +279,8 @@ def _create_counter(specs: List[spack.spec.Spec], tests: bool):
return NoDuplicatesCounter(specs, tests=tests)


def all_compilers_in_config():
return spack.compilers.all_compilers()
def all_compilers_in_config(configuration):
return spack.compilers.all_compilers_from(configuration)


def extend_flag_list(flag_list, new_flags):
@@ -541,6 +553,7 @@ def _concretization_version_order(version_info: Tuple[GitOrStandardVersion, dict
info.get("preferred", False),
not info.get("deprecated", False),
not version.isdevelop(),
not version.is_prerelease(),
version,
)

@@ -687,8 +700,9 @@ def on_model(model):
raise UnsatisfiableSpecError(msg)


#: Data class to collect information on a requirement
class RequirementRule(NamedTuple):
"""Data class to collect information on a requirement"""

pkg_name: str
policy: str
requirements: List["spack.spec.Spec"]
@@ -697,6 +711,27 @@ class RequirementRule(NamedTuple):
message: Optional[str]


class KnownCompiler(NamedTuple):
"""Data class to collect information on compilers"""

spec: "spack.spec.Spec"
os: str
target: str
available: bool
compiler_obj: Optional["spack.compiler.Compiler"]

def _key(self):
return self.spec, self.os, self.target

def __eq__(self, other: object):
if not isinstance(other, KnownCompiler):
return NotImplemented
return self._key() == other._key()

def __hash__(self):
return hash(self._key())

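Since equality and hashing are defined only on (spec, os, target), KnownCompiler entries that differ merely in availability or the attached compiler object compare equal and collapse in sets. A hedged sketch (gcc_spec and gcc_obj stand in for real CompilerSpec and Compiler objects):

# Two records that agree on the key dedupe to one set element:
a = KnownCompiler(spec=gcc_spec, os="ubuntu20.04", target="x86_64",
                  available=True, compiler_obj=gcc_obj)
b = KnownCompiler(spec=gcc_spec, os="ubuntu20.04", target="x86_64",
                  available=False, compiler_obj=None)
assert a == b and len({a, b}) == 1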
class PyclingoDriver:
def __init__(self, cores=True):
"""Driver for the Python clingo interface.
@@ -762,7 +797,6 @@ def solve(self, setup, specs, reuse=None, output=None, control=None, allow_depre
timer.stop("ground")

# With a grounded program, we can run the solve.
result = Result(specs)
models = []  # stable models if things go well
cores = []  # unsatisfiable cores if they do not

@@ -783,6 +817,7 @@ def on_model(model):
timer.stop("solve")

# once done, construct the solve result
result = Result(specs)
result.satisfiable = solve_result.satisfiable

if result.satisfiable:
@@ -823,6 +858,8 @@ def on_model(model):
print("Statistics:")
pprint.pprint(self.control.statistics)

result.raise_if_unsat()

if result.satisfiable and result.unsolved_specs and setup.concretize_everything:
unsolved_str = Result.format_unsolved(result.unsolved_specs)
raise InternalConcretizerError(
@@ -1039,41 +1076,52 @@ def conflict_rules(self, pkg):
)
self.gen.newline()

def package_languages(self, pkg):
for when_spec, languages in pkg.languages.items():
condition_msg = f"{pkg.name} needs the {', '.join(sorted(languages))} language"
if when_spec != spack.spec.Spec():
condition_msg += f" when {when_spec}"
condition_id = self.condition(when_spec, name=pkg.name, msg=condition_msg)
for language in sorted(languages):
self.gen.fact(fn.pkg_fact(pkg.name, fn.language(condition_id, language)))
self.gen.newline()

def config_compatible_os(self):
"""Facts about compatible os's specified in configs"""
self.gen.h2("Compatible OS from concretizer config file")
os_data = spack.config.get("concretizer:os_compatible", {})
for recent, reusable in os_data.items():
for old in reusable:
self.gen.fact(fn.os_compatible(recent, old))
self.gen.newline()

def compiler_facts(self):
"""Facts about available compilers."""

self.gen.h2("Available compilers")
indexed_possible_compilers = list(enumerate(self.possible_compilers))
for compiler_id, compiler in indexed_possible_compilers:
for compiler_id, compiler in enumerate(self.possible_compilers):
self.gen.fact(fn.compiler_id(compiler_id))
self.gen.fact(fn.compiler_name(compiler_id, compiler.spec.name))
self.gen.fact(fn.compiler_version(compiler_id, compiler.spec.version))

if compiler.operating_system:
self.gen.fact(fn.compiler_os(compiler_id, compiler.operating_system))

if compiler.target == "any":
compiler.target = None
if compiler.os:
self.gen.fact(fn.compiler_os(compiler_id, compiler.os))

if compiler.target is not None:
self.gen.fact(fn.compiler_target(compiler_id, compiler.target))

for flag_type, flags in compiler.flags.items():
for flag in flags:
self.gen.fact(fn.compiler_flag(compiler_id, flag_type, flag))
if compiler.compiler_obj is not None:
c = compiler.compiler_obj
for flag_type, flags in c.flags.items():
for flag in flags:
self.gen.fact(fn.compiler_flag(compiler_id, flag_type, flag))

if compiler.available:
self.gen.fact(fn.compiler_available(compiler_id))

self.gen.fact(fn.compiler_weight(compiler_id, compiler_id))
self.gen.newline()

# Set compiler defaults, given a list of possible compilers
self.gen.h2("Default compiler preferences (CompilerID, Weight)")

ppk = spack.package_prefs.PackagePrefs("all", "compiler", all=False)
matches = sorted(indexed_possible_compilers, key=lambda x: ppk(x[1].spec))

for weight, (compiler_id, cspec) in enumerate(matches):
f = fn.compiler_weight(compiler_id, weight)
self.gen.fact(f)

def package_requirement_rules(self, pkg):
parser = RequirementParser(spack.config.CONFIG)
self.emit_facts_from_requirement_rules(parser.rules(pkg))
@@ -1088,6 +1136,9 @@ def pkg_rules(self, pkg, tests):
self.pkg_version_rules(pkg)
self.gen.newline()

# languages
self.package_languages(pkg)

# variants
self.variant_rules(pkg)

@@ -1284,34 +1335,39 @@ def condition(
Returns:
int: id of the condition created by this function
"""
named_cond = required_spec.copy()
named_cond.name = named_cond.name or name
if not named_cond.name:
raise ValueError(f"Must provide a name for anonymous condition: '{named_cond}'")
name = required_spec.name or name
if not name:
raise ValueError(f"Must provide a name for anonymous condition: '{required_spec}'")

# Check if we can emit the requirements before updating the condition ID counter.
# In this way, if a condition can't be emitted but the exception is handled in the caller,
# we won't emit partial facts.
with spec_with_name(required_spec, name):

condition_id = next(self._id_counter)
self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition(condition_id)))
self.gen.fact(fn.condition_reason(condition_id, msg))
# Check if we can emit the requirements before updating the condition ID counter.
# In this way, if a condition can't be emitted but the exception is handled in the
# caller, we won't emit partial facts.

trigger_id = self._get_condition_id(
named_cond, cache=self._trigger_cache, body=True, transform=transform_required
)
self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition_trigger(condition_id, trigger_id)))
condition_id = next(self._id_counter)
self.gen.fact(fn.pkg_fact(required_spec.name, fn.condition(condition_id)))
self.gen.fact(fn.condition_reason(condition_id, msg))

trigger_id = self._get_condition_id(
required_spec, cache=self._trigger_cache, body=True, transform=transform_required
)
self.gen.fact(
fn.pkg_fact(required_spec.name, fn.condition_trigger(condition_id, trigger_id))
)

if not imposed_spec:
return condition_id

effect_id = self._get_condition_id(
imposed_spec, cache=self._effect_cache, body=False, transform=transform_imposed
)
self.gen.fact(
fn.pkg_fact(required_spec.name, fn.condition_effect(condition_id, effect_id))
)

if not imposed_spec:
return condition_id

effect_id = self._get_condition_id(
imposed_spec, cache=self._effect_cache, body=False, transform=transform_imposed
)
self.gen.fact(fn.pkg_fact(named_cond.name, fn.condition_effect(condition_id, effect_id)))

return condition_id

def impose(self, condition_id, imposed_spec, node=True, name=None, body=False):
imposed_constraints = self.spec_clauses(imposed_spec, body=body, required_from=name)
for pred in imposed_constraints:
@@ -1599,23 +1655,6 @@ def target_preferences(self):
for i, preferred in enumerate(package_targets):
self.gen.fact(fn.target_weight(str(preferred.architecture.target), i))

def flag_defaults(self):
self.gen.h2("Compiler flag defaults")

# types of flags that can be on specs
for flag in spack.spec.FlagMap.valid_compiler_flags():
self.gen.fact(fn.flag_type(flag))
self.gen.newline()

# flags from compilers.yaml
compilers = all_compilers_in_config()
for compiler in compilers:
for name, flags in compiler.flags.items():
for flag in flags:
self.gen.fact(
fn.compiler_version_flag(compiler.name, compiler.version, name, flag)
)

def spec_clauses(
self,
spec: spack.spec.Spec,
@@ -2021,9 +2060,16 @@ def target_defaults(self, specs):
candidate_targets.append(ancestor)

best_targets = {uarch.family.name}
for compiler_id, compiler in enumerate(self.possible_compilers):
for compiler_id, known_compiler in enumerate(self.possible_compilers):
if not known_compiler.available:
continue

compiler = known_compiler.compiler_obj
# Stub support for cross-compilation, to be expanded later
if compiler.target is not None and compiler.target != str(uarch.family):
if known_compiler.target is not None and compiler.target not in (
str(uarch.family),
"any",
):
self.gen.fact(fn.compiler_supports_target(compiler_id, compiler.target))
self.gen.newline()
continue
@@ -2079,58 +2125,6 @@ def virtual_providers(self):
self.gen.fact(fn.virtual(vspec))
self.gen.newline()

def generate_possible_compilers(self, specs):
compilers = all_compilers_in_config()

# Search for compilers which differs only by aspects that are
# not selectable by users using the spec syntax
seen, sanitized_list = set(), []
for compiler in compilers:
key = compiler.spec, compiler.operating_system, compiler.target
if key in seen:
warnings.warn(
f"duplicate found for {compiler.spec} on "
f"{compiler.operating_system}/{compiler.target}. "
f"Edit your compilers.yaml configuration to remove it."
)
continue
sanitized_list.append(compiler)
seen.add(key)

cspecs = set([c.spec for c in compilers])

# add compiler specs from the input line to possibilities if we
# don't require compilers to exist.
strict = spack.concretize.Concretizer().check_for_compiler_existence
for s in traverse.traverse_nodes(specs):
# we don't need to validate compilers for already-built specs
if s.concrete or not s.compiler:
continue

version = s.compiler.versions.concrete

if not version or any(c.satisfies(s.compiler) for c in cspecs):
continue

# Error when a compiler is not found and strict mode is enabled
if strict:
raise spack.concretize.UnavailableCompilerVersionError(s.compiler)

# Make up a compiler matching the input spec. This is for bootstrapping.
compiler_cls = spack.compilers.class_for_compiler_name(s.compiler.name)
compilers.append(
compiler_cls(s.compiler, operating_system=None, target=None, paths=[None] * 4)
)
self.gen.fact(fn.allow_compiler(s.compiler.name, version))

return list(
sorted(
compilers,
key=lambda compiler: (compiler.spec.name, compiler.spec.version),
reverse=True,
)
)

def define_version_constraints(self):
"""Define what version_satisfies(...) means in ASP logic."""
for pkg_name, versions in sorted(self.version_constraints):
|
||||
@@ -2158,7 +2152,7 @@ def versions_for(v):
|
||||
if isinstance(v, vn.StandardVersion):
|
||||
return [v]
|
||||
elif isinstance(v, vn.ClosedOpenRange):
|
||||
return [v.lo, vn.prev_version(v.hi)]
|
||||
return [v.lo, vn._prev_version(v.hi)]
|
||||
elif isinstance(v, vn.VersionList):
|
||||
return sum((versions_for(e) for e in v), [])
|
||||
else:
|
||||
@@ -2293,8 +2287,6 @@ def setup(
|
||||
self.possible_virtuals = node_counter.possible_virtuals()
|
||||
self.pkgs = node_counter.possible_dependencies()
|
||||
|
||||
self.pkgs.update(spack.repo.PATH.packages_with_tags("runtime"))
|
||||
|
||||
# Fail if we already know an unreachable node is requested
|
||||
for spec in specs:
|
||||
missing_deps = [
|
||||
@@ -2308,6 +2300,7 @@ def setup(
|
||||
self.explicitly_required_namespaces[node.name] = node.namespace
|
||||
|
||||
self.gen = ProblemInstanceBuilder()
|
||||
compiler_parser = CompilerParser(configuration=spack.config.CONFIG).with_input_specs(specs)
|
||||
|
||||
if not allow_deprecated:
|
||||
self.gen.fact(fn.deprecated_versions_not_allowed())
|
||||
@@ -2327,17 +2320,17 @@ def setup(
|
||||
)
|
||||
specs = tuple(specs) # ensure compatible types to add
|
||||
|
||||
# get possible compilers
|
||||
self.possible_compilers = self.generate_possible_compilers(specs)
|
||||
|
||||
self.gen.h1("Reusable concrete specs")
|
||||
self.define_concrete_input_specs(specs, self.pkgs)
|
||||
if reuse:
|
||||
self.gen.fact(fn.optimize_for_reuse())
|
||||
for reusable_spec in reuse:
|
||||
compiler_parser.add_compiler_from_concrete_spec(reusable_spec)
|
||||
self.register_concrete_spec(reusable_spec, self.pkgs)
|
||||
self.concrete_specs()
|
||||
|
||||
self.possible_compilers = compiler_parser.possible_compilers()
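
Taken together, the CompilerParser calls in this hunk follow one lifecycle: build the pool from configuration, fold in compilers mentioned on the input specs, account for compilers of reused concrete specs, then ask for the ranked list. A condensed restatement of that flow (a sketch; `config`, `input_specs`, and `reused` are hypothetical stand-ins, not names from the diff):

    # Condensed restatement of the calls made in setup() above.
    parser = CompilerParser(configuration=config).with_input_specs(input_specs)
    for reusable_spec in reused:
        parser.add_compiler_from_concrete_spec(reusable_spec)
    possible = parser.possible_compilers()  # available first, then by preference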

        self.gen.h1("Generic statements on possible packages")
        node_counter.possible_packages_facts(self.gen, fn)

@@ -2347,6 +2340,7 @@ def setup(
        self.gen.newline()

        self.gen.h1("General Constraints")
        self.config_compatible_os()
        self.compiler_facts()

        # architecture defaults

@@ -2437,15 +2431,18 @@ def visit(node):
    def define_runtime_constraints(self):
        """Define the constraints to be imposed on the runtimes"""
        recorder = RuntimePropertyRecorder(self)
        # TODO: Use only available compilers ?
        for compiler in self.possible_compilers:
            if compiler.name != "gcc":
                continue
            compiler_with_different_cls_names = {"oneapi": "intel-oneapi-compilers"}
            compiler_cls_name = compiler_with_different_cls_names.get(
                compiler.spec.name, compiler.spec.name
            )
            try:
                compiler_cls = spack.repo.PATH.get_pkg_class(compiler.name)
                compiler_cls = spack.repo.PATH.get_pkg_class(compiler_cls_name)
            except spack.repo.UnknownPackageError:
                continue
            if hasattr(compiler_cls, "runtime_constraints"):
                compiler_cls.runtime_constraints(compiler=compiler, pkg=recorder)
                compiler_cls.runtime_constraints(spec=compiler.spec, pkg=recorder)

        recorder.consume_facts()

@@ -2817,6 +2814,90 @@ def reject_requirement_constraint(
        return False


class CompilerParser:
    """Parses configuration files, and builds a list of possible compilers for the solve."""

    def __init__(self, configuration) -> None:
        self.compilers: Set[KnownCompiler] = set()
        for c in all_compilers_in_config(configuration):
            target = c.target if c.target != "any" else None
            candidate = KnownCompiler(
                spec=c.spec, os=c.operating_system, target=target, available=True, compiler_obj=c
            )
            if candidate in self.compilers:
                warnings.warn(
                    f"duplicate found for {c.spec} on {c.operating_system}/{c.target}. "
                    f"Edit your compilers.yaml configuration to remove it."
                )
                continue

            self.compilers.add(candidate)
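
KnownCompiler is referenced throughout CompilerParser but its definition falls outside this diff. A minimal sketch of what such a record presumably looks like, inferred from the keyword arguments used above (an assumption; the real definition may differ):

    import typing

    class KnownCompiler(typing.NamedTuple):
        # Hypothetical, inferred from the call sites above.
        spec: "spack.spec.CompilerSpec"
        os: str
        target: typing.Optional[str]
        available: bool
        compiler_obj: typing.Optional["spack.compiler.Compiler"]

A NamedTuple (or frozen dataclass) gives value-based equality and hashing, which is what makes the `candidate in self.compilers` duplicate check work against a set.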

    def with_input_specs(self, input_specs: List["spack.spec.Spec"]) -> "CompilerParser":
        """Accounts for input specs when building the list of possible compilers.

        Args:
            input_specs: specs to be concretized
        """
        strict = spack.concretize.Concretizer().check_for_compiler_existence
        default_os = str(spack.platforms.host().default_os)
        default_target = str(archspec.cpu.host().family)
        for s in traverse.traverse_nodes(input_specs):
            # we don't need to validate compilers for already-built specs
            if s.concrete or not s.compiler:
                continue

            version = s.compiler.versions.concrete

            if not version or any(item.spec.satisfies(s.compiler) for item in self.compilers):
                continue

            # Error when a compiler is not found and strict mode is enabled
            if strict:
                raise spack.concretize.UnavailableCompilerVersionError(s.compiler)

            # Make up a compiler matching the input spec. This is for bootstrapping.
            compiler_cls = spack.compilers.class_for_compiler_name(s.compiler.name)
            compiler_obj = compiler_cls(
                s.compiler, operating_system=default_os, target=default_target, paths=[None] * 4
            )
            self.compilers.add(
                KnownCompiler(
                    spec=s.compiler,
                    os=default_os,
                    target=default_target,
                    available=True,
                    compiler_obj=compiler_obj,
                )
            )

        return self

    def add_compiler_from_concrete_spec(self, spec: "spack.spec.Spec") -> None:
        """Account for compilers that are coming from concrete specs, through reuse.

        Args:
            spec: concrete spec to be reused
        """
        assert spec.concrete, "the spec argument must be concrete"
        candidate = KnownCompiler(
            spec=spec.compiler,
            os=str(spec.architecture.os),
            target=str(spec.architecture.target.microarchitecture.family),
            available=False,
            compiler_obj=None,
        )
        self.compilers.add(candidate)

    def possible_compilers(self) -> List[KnownCompiler]:
        # Here we have to sort two times, first sort by name and ascending version
        result = sorted(self.compilers, key=lambda x: (x.spec.name, x.spec.version), reverse=True)
        # Then stable sort to prefer available compilers and account for preferences
        ppk = spack.package_prefs.PackagePrefs("all", "compiler", all=False)
        result.sort(key=lambda x: (not x.available, ppk(x.spec)))
        return result
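
The two-pass ordering in possible_compilers() relies on Python's sort being stable: the second sort reorders only by availability and preference, keeping the name/version order from the first pass within each group. A self-contained illustration of the same pattern, with hypothetical tuples standing in for KnownCompiler records:

    # Hypothetical records: (name, version, available)
    compilers = [
        ("gcc", 12, False),
        ("gcc", 13, True),
        ("clang", 16, True),
        ("gcc", 11, True),
    ]

    # First pass: by (name, version), descending, so newer versions come first.
    ordered = sorted(compilers, key=lambda c: (c[0], c[1]), reverse=True)

    # Second, stable pass: unavailable compilers sink to the end while the
    # relative version order from the first pass is preserved.
    ordered.sort(key=lambda c: not c[2])

    print(ordered)
    # [('gcc', 13, True), ('gcc', 11, True), ('clang', 16, True), ('gcc', 12, False)]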


class RuntimePropertyRecorder:
    """An object of this class is injected in callbacks to compilers, to let them declare
    properties of the runtimes they support and of the runtimes they provide, and to add

@@ -2857,13 +2938,24 @@ def reset(self):
        """Resets the current state."""
        self.current_package = None

    def depends_on(self, dependency_str: str, *, when: str, type: str, description: str) -> None:
    def depends_on(
        self,
        dependency_str: str,
        *,
        when: str,
        type: str,
        description: str,
        languages: Optional[List[str]] = None,
    ) -> None:
        """Injects conditional dependencies on packages.

        Conditional dependencies can be either "real" packages or virtual dependencies.

        Args:
            dependency_str: the dependency spec to inject
            when: anonymous condition to be met on a package to have the dependency
            type: dependency type
            languages: languages needed by the package for the dependency to be considered
            description: human-readable description of the rule for adding the dependency
        """
        # TODO: The API for this function is not final, and is still subject to change. At

@@ -2889,26 +2981,45 @@ def depends_on(self, dependency_str: str, *, when: str, type: str, description:
            f"    not external({node_variable}),\n"
            f"    not runtime(Package)"
        ).replace(f'"{placeholder}"', f"{node_variable}")
        if languages:
            body_str += ",\n"
            for language in languages:
                body_str += f'  attr("language", {node_variable}, "{language}")'

        head_clauses = self._setup.spec_clauses(dependency_spec, body=False)

        runtime_pkg = dependency_spec.name

        is_virtual = head_clauses[0].args[0] == "virtual_node"
        main_rule = (
            f"% {description}\n"
            f'1 {{ attr("depends_on", {node_variable}, node(0..X-1, "{runtime_pkg}"), "{type}") :'
            f' max_dupes("gcc-runtime", X)}} 1:-\n'
            f' max_dupes("{runtime_pkg}", X)}} 1:-\n'
            f"{body_str}.\n\n"
        )
        if is_virtual:
            main_rule = (
                f"% {description}\n"
                f'attr("dependency_holds", {node_variable}, "{runtime_pkg}", "{type}") :-\n'
                f"{body_str}.\n\n"
            )

        self.rules.append(main_rule)
        for clause in head_clauses:
            if clause.args[0] == "node":
                continue
            runtime_node = f'node(RuntimeID, "{runtime_pkg}")'
            head_str = str(clause).replace(f'"{runtime_pkg}"', runtime_node)
            rule = (
                f"{head_str} :-\n"
            depends_on_constraint = (
                f'  attr("depends_on", {node_variable}, {runtime_node}, "{type}"),\n'
                f"{body_str}.\n\n"
            )
            if is_virtual:
                depends_on_constraint = (
                    f'  attr("depends_on", {node_variable}, ProviderNode, "{type}"),\n'
                    f"  provider(ProviderNode, {runtime_node}),\n"
                )

            rule = f"{head_str} :-\n" f"{depends_on_constraint}" f"{body_str}.\n\n"
            self.rules.append(rule)

        self.reset()
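
To make the string templating above concrete, here is a sketch that fills the non-virtual main_rule template with hypothetical values and prints the resulting ASP rule. The body is deliberately simplified (a real body also carries the `when` condition clauses), and `dep_type` stands in for the `type` parameter:

    # Hypothetical values, for illustration only.
    node_variable = "node(ID, Package)"
    runtime_pkg = "gcc-runtime"
    dep_type = "link"
    description = "A runtime is added to packages built with the matching compiler"
    body_str = '  attr("node", node(ID, Package)),\n  not runtime(Package)'

    main_rule = (
        f"% {description}\n"
        f'1 {{ attr("depends_on", {node_variable}, node(0..X-1, "{runtime_pkg}"), "{dep_type}") :'
        f' max_dupes("{runtime_pkg}", X)}} 1:-\n'
        f"{body_str}.\n\n"
    )
    print(main_rule)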

@@ -3126,7 +3237,9 @@ def reorder_flags(self):
        imposes order afterwards.
        """
        # reverse compilers so we get highest priority compilers that share a spec
        compilers = dict((c.spec, c) for c in reversed(all_compilers_in_config()))
        compilers = dict(
            (c.spec, c) for c in reversed(all_compilers_in_config(spack.config.CONFIG))
        )
        cmd_specs = dict((s.name, s) for spec in self._command_line_specs for s in spec.traverse())

        for spec in self._specs.values():

@@ -3318,7 +3431,7 @@ def _is_reusable(spec: spack.spec.Spec, packages, local: bool) -> bool:
        return False

    if not spec.external:
        return True
        return _has_runtime_dependencies(spec)

    # Cray external manifest externals are always reusable
    if local:

@@ -3343,6 +3456,19 @@ def _is_reusable(spec: spack.spec.Spec, packages, local: bool) -> bool:
    return False


def _has_runtime_dependencies(spec: spack.spec.Spec) -> bool:
    if not WITH_RUNTIME:
        return True

    if spec.compiler.name == "gcc" and not spec.dependencies("gcc-runtime"):
        return False

    if spec.compiler.name == "oneapi" and not spec.dependencies("intel-oneapi-runtime"):
        return False

    return True
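
A restatement of the predicate above over plain data, to make the gcc/oneapi cases explicit (hypothetical inputs, for illustration only):

    def has_runtime_dependencies(compiler_name: str, dependency_names: set) -> bool:
        # Mirrors _has_runtime_dependencies above, minus the Spec machinery.
        if compiler_name == "gcc" and "gcc-runtime" not in dependency_names:
            return False
        if compiler_name == "oneapi" and "intel-oneapi-runtime" not in dependency_names:
            return False
        return True

    assert has_runtime_dependencies("gcc", {"gcc-runtime", "zlib"})
    assert not has_runtime_dependencies("gcc", {"zlib"})   # built before runtimes existed
    assert has_runtime_dependencies("clang", {"zlib"})     # no runtime requirement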


class Solver:
    """This is the main external interface class for solving.

@@ -3484,9 +3610,14 @@ def solve_in_rounds(
            if not result.unsolved_specs:
                break

            # This means we cannot progress with solving the input
            if not result.satisfiable or not result.specs:
                break
            if not result.specs:
                # This is also a problem: no specs were solved for, which
                # means we would be in a loop if we tried again
                unsolved_str = Result.format_unsolved(result.unsolved_specs)
                raise InternalConcretizerError(
                    "Internal Spack error: a subset of input specs could not"
                    f" be solved for.\n\t{unsolved_str}"
                )

            input_specs = list(x for (x, y) in result.unsolved_specs)
            for spec in result.specs:

@@ -80,6 +80,7 @@ unification_set(SetID, VirtualNode)

#defined multiple_unification_sets/1.
#defined runtime/1.

%----
% Rules to break symmetry and speed-up searches

@@ -126,10 +127,12 @@ trigger_node(TriggerID, Node, Node) :-
    trigger_condition_holds(TriggerID, Node),
    literal(TriggerID).

% Since we trigger the existence of literal nodes from a condition, we need to construct
% the condition_set/2 manually below
% Since we trigger the existence of literal nodes from a condition, we need to construct the condition_set/2
mentioned_in_literal(Root, Mentioned) :- mentioned_in_literal(TriggerID, Root, Mentioned), solve_literal(TriggerID).
condition_set(node(min_dupe_id, Root), node(min_dupe_id, Mentioned)) :- mentioned_in_literal(Root, Mentioned).
condition_set(node(min_dupe_id, Root), node(min_dupe_id, Root)) :- mentioned_in_literal(Root, Root).

1 { condition_set(node(min_dupe_id, Root), node(0..Y-1, Mentioned)) : max_dupes(Mentioned, Y) } 1 :-
  mentioned_in_literal(Root, Mentioned), Mentioned != Root.

% Discriminate between "roots" that have been explicitly requested, and roots that are deduced from "virtual roots"
explicitly_requested_root(node(min_dupe_id, Package)) :-

@@ -137,6 +140,20 @@ explicitly_requested_root(node(min_dupe_id, Package)) :-
    trigger_and_effect(Package, TriggerID, EffectID),
    imposed_constraint(EffectID, "root", Package).


% Keep track of which nodes are associated with which root DAG
associated_with_root(RootNode, RootNode) :- attr("root", RootNode).

associated_with_root(RootNode, ChildNode) :-
  depends_on(ParentNode, ChildNode),
  associated_with_root(RootNode, ParentNode).

% We cannot have a node in the root condition set, that is not associated with that root
:- attr("root", RootNode),
   condition_set(RootNode, node(X, Package)),
   not virtual(Package),
   not associated_with_root(RootNode, node(X, Package)).

#defined concretize_everything/0.
#defined literal/1.
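
The associated_with_root/2 rules above compute a transitive closure over depends_on/2 edges, seeded with the root itself. The same computation in imperative form, over a hypothetical adjacency list (a sketch, for intuition only):

    from collections import deque

    # Hypothetical DAG: depends_on edges as an adjacency list.
    depends_on = {"root": ["a", "b"], "a": ["c"], "b": [], "c": []}

    def associated_with_root(root: str) -> set:
        # Base case: associated_with_root(R, R); then closure over edges.
        reachable = {root}
        queue = deque([root])
        while queue:
            parent = queue.popleft()
            for child in depends_on.get(parent, []):
                if child not in reachable:
                    reachable.add(child)
                    queue.append(child)
        return reachable

    assert associated_with_root("root") == {"root", "a", "b", "c"}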

@@ -158,6 +175,14 @@ error(100, multiple_values_error, Attribute, Package)
    attr_single_value(Attribute),
    2 { attr(Attribute, node(ID, Package), Value) }.

%-----------------------------------------------------------------------------
% Languages used
%-----------------------------------------------------------------------------

attr("language", node(X, Package), Language) :-
  condition_holds(ConditionID, node(X, Package)),
  pkg_fact(Package, language(ConditionID, Language)).

%-----------------------------------------------------------------------------
% Version semantics
%-----------------------------------------------------------------------------

@@ -514,6 +539,12 @@ attr("virtual_on_edge", PackageNode, ProviderNode, Virtual)
    provider(ProviderNode, node(_, Virtual)),
    not external(PackageNode).

% If a virtual node is in the answer set, it must be either a virtual root,
% or used somewhere
:- attr("virtual_node", node(_, Virtual)),
   not attr("virtual_on_incoming_edges", _, Virtual),
   not attr("virtual_root", node(_, Virtual)).

attr("virtual_on_incoming_edges", ProviderNode, Virtual)
  :- attr("virtual_on_edge", _, ProviderNode, Virtual).

@@ -876,12 +907,8 @@ error(100, "{0} variant '{1}' cannot have values '{2}' and '{3}' as they come fr
    Set1 < Set2, % see[1]
    build(node(ID, Package)).

% variant_set is an explicitly set variant value. If it's not 'set',
% we revert to the default value. If it is set, we force the set value
attr("variant_value", PackageNode, Variant, Value)
  :- attr("node", PackageNode),
     node_has_variant(PackageNode, Variant),
     attr("variant_set", PackageNode, Variant, Value).
:- attr("variant_set", node(ID, Package), Variant, Value),
   not attr("variant_value", node(ID, Package), Variant, Value).

% The rules below allow us to prefer default values for variants
% whenever possible. If a variant is set in a spec, or if it is

@@ -1011,16 +1038,6 @@ node_os_weight(PackageNode, Weight)
    attr("node_os", PackageNode, OS),
    os(OS, Weight).

% match semantics for OS's
node_os_match(PackageNode, DependencyNode) :-
  depends_on(PackageNode, DependencyNode),
  attr("node_os", PackageNode, OS),
  attr("node_os", DependencyNode, OS).

node_os_mismatch(PackageNode, DependencyNode) :-
  depends_on(PackageNode, DependencyNode),
  not node_os_match(PackageNode, DependencyNode).

% every OS is compatible with itself. We can use `os_compatible` to declare
os_compatible(OS, OS) :- os(OS).

@@ -1111,7 +1128,7 @@ error(100, "'{0} target={1}' is not compatible with this machine", Package, Targ
% Compiler semantics
%-----------------------------------------------------------------------------
% There must be only one compiler set per built node.
{ node_compiler(PackageNode, CompilerID) : compiler_id(CompilerID) } :-
{ node_compiler(PackageNode, CompilerID) : compiler_id(CompilerID), compiler_available(CompilerID) } :-
  attr("node", PackageNode),
  build(PackageNode).

@@ -1128,6 +1145,7 @@ attr("node_compiler_version", PackageNode, CompilerName, CompilerVersion)
  :- node_compiler(PackageNode, CompilerID),
     compiler_name(CompilerID, CompilerName),
     compiler_version(CompilerID, CompilerVersion),
     compiler_available(CompilerID),
     build(PackageNode).

attr("node_compiler", PackageNode, CompilerName)

@@ -1188,8 +1206,8 @@ error(100, "{0} compiler '%{1}@{2}' incompatible with 'os={3}'", Package, Compil
    node_compiler(node(X, Package), CompilerID),
    compiler_name(CompilerID, Compiler),
    compiler_version(CompilerID, Version),
    not compiler_os(CompilerID, OS),
    not allow_compiler(Compiler, Version),
    compiler_os(CompilerID, CompilerOS),
    not os_compatible(CompilerOS, OS),
    build(node(X, Package)).

% If a package and one of its dependencies don't have the

@@ -1210,7 +1228,6 @@ compiler_mismatch_required(PackageNode, DependencyNode)
    not compiler_match(PackageNode, DependencyNode).

#defined compiler_os/3.
#defined allow_compiler/2.

% compilers weighted by preference according to packages.yaml
node_compiler_weight(node(ID, Package), Weight)

@@ -1495,28 +1512,20 @@ opt_criterion(40, "compiler mismatches that are not from CLI").
#minimize{ 0@240: #true }.
#minimize{ 0@40: #true }.
#minimize{
    1@40+Priority,PackageNode,DependencyNode
    : compiler_mismatch(PackageNode, DependencyNode),
    build_priority(PackageNode, Priority)
    1@40+Priority,PackageNode,node(ID, Dependency)
    : compiler_mismatch(PackageNode, node(ID, Dependency)),
    build_priority(node(ID, Dependency), Priority),
    not runtime(Dependency)
}.

opt_criterion(39, "compiler mismatches that are not from CLI").
#minimize{ 0@239: #true }.
#minimize{ 0@39: #true }.
#minimize{
    1@39+Priority,PackageNode,DependencyNode
    : compiler_mismatch_required(PackageNode, DependencyNode),
    build_priority(PackageNode, Priority)
}.

% Try to minimize the number of compiler mismatches in the DAG.
opt_criterion(35, "OS mismatches").
#minimize{ 0@235: #true }.
#minimize{ 0@35: #true }.
#minimize{
    1@35+Priority,PackageNode,DependencyNode
    : node_os_mismatch(PackageNode, DependencyNode),
    build_priority(PackageNode, Priority)
    1@39+Priority,PackageNode,node(ID, Dependency)
    : compiler_mismatch_required(PackageNode, node(ID, Dependency)),
    build_priority(node(ID, Dependency), Priority),
    not runtime(Dependency)
}.

opt_criterion(30, "non-preferred OS's").

@@ -1533,9 +1542,10 @@ opt_criterion(25, "version badness").
#minimize{ 0@225: #true }.
#minimize{ 0@25: #true }.
#minimize{
    Weight@25+Priority,PackageNode
    : version_weight(PackageNode, Weight),
    build_priority(PackageNode, Priority)
    Weight@25+Priority,node(X, Package)
    : version_weight(node(X, Package), Weight),
    build_priority(node(X, Package), Priority),
    not runtime(Package)
}.

% Try to use all the default values of variants

@@ -1554,9 +1564,10 @@ opt_criterion(15, "non-preferred compilers").
#minimize{ 0@215: #true }.
#minimize{ 0@15: #true }.
#minimize{
    Weight@15+Priority,PackageNode
    : node_compiler_weight(PackageNode, Weight),
    build_priority(PackageNode, Priority)
    Weight@15+Priority,node(X, Package)
    : node_compiler_weight(node(X, Package), Weight),
    build_priority(node(X, Package), Priority),
    not runtime(Package)
}.

% Minimize the number of mismatches for targets in the DAG, try

@@ -1565,18 +1576,55 @@ opt_criterion(10, "target mismatches").
#minimize{ 0@210: #true }.
#minimize{ 0@10: #true }.
#minimize{
    1@10+Priority,PackageNode,Dependency
    : node_target_mismatch(PackageNode, Dependency),
    build_priority(PackageNode, Priority)
    1@10+Priority,PackageNode,node(ID, Dependency)
    : node_target_mismatch(PackageNode, node(ID, Dependency)),
    build_priority(node(ID, Dependency), Priority),
    not runtime(Dependency)
}.

opt_criterion(5, "non-preferred targets").
#minimize{ 0@205: #true }.
#minimize{ 0@5: #true }.
#minimize{
    Weight@5+Priority,PackageNode
    : node_target_weight(PackageNode, Weight),
    build_priority(PackageNode, Priority)
    Weight@5+Priority,node(X, Package)
    : node_target_weight(node(X, Package), Weight),
    build_priority(node(X, Package), Priority),
    not runtime(Package)
}.


% Minimize the number of compiler mismatches for runtimes
opt_criterion(4, "compiler mismatches (runtimes)").
#minimize{ 0@204: #true }.
#minimize{ 0@4: #true }.
#minimize{
    1@4,PackageNode,node(ID, Dependency)
    : compiler_mismatch(PackageNode, node(ID, Dependency)), runtime(Dependency)
}.
#minimize{
    1@4,PackageNode,node(ID, Dependency)
    : compiler_mismatch_required(PackageNode, node(ID, Dependency)), runtime(Dependency)
}.


% Choose more recent versions for runtimes
opt_criterion(3, "version badness (runtimes)").
#minimize{ 0@203: #true }.
#minimize{ 0@3: #true }.
#minimize{
    Weight@3,node(X, Package)
    : version_weight(node(X, Package), Weight),
    runtime(Package)
}.

% Choose best target for runtimes
opt_criterion(2, "non-preferred targets (runtimes)").
#minimize{ 0@202: #true }.
#minimize{ 0@2: #true }.
#minimize{
    Weight@2,node(X, Package)
    : node_target_weight(node(X, Package), Weight),
    runtime(Package)
}.
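
These numbered tiers implement lexicographic optimization: clingo only considers a lower #minimize priority once all higher priorities are tied, so the runtime-specific criteria (4, 3, 2) can never override the package-level criteria above them. A rough Python analogy with hypothetical per-tier scores (not solver output):

    # One score per tier, highest tier first; sorting by the tuple of tier
    # scores picks the same winner that level-by-level minimization would.
    candidates = {
        "answer_1": (0, 2, 5),   # (compiler mismatches, version badness, target badness)
        "answer_2": (0, 2, 3),
        "answer_3": (1, 0, 0),   # loses: worse at the highest tier
    }
    best = min(candidates, key=candidates.get)
    assert best == "answer_2"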

% Choose more recent versions for nodes

@@ -10,6 +10,7 @@
import spack.deptypes as dt
import spack.package_base
import spack.repo
import spack.spec

PossibleDependencies = Set[str]

@@ -24,7 +25,13 @@ class Counter:
    """

    def __init__(self, specs: List["spack.spec.Spec"], tests: bool) -> None:
        self.specs = specs
        runtime_pkgs = spack.repo.PATH.packages_with_tags("runtime")
        runtime_virtuals = set()
        for x in runtime_pkgs:
            pkg_class = spack.repo.PATH.get_pkg_class(x)
            runtime_virtuals.update(pkg_class.provided_virtual_names())

        self.specs = specs + [spack.spec.Spec(x) for x in runtime_pkgs]

        self.link_run_types: dt.DepFlag = dt.LINK | dt.RUN | dt.TEST
        self.all_types: dt.DepFlag = dt.ALL

@@ -33,7 +40,9 @@ def __init__(self, specs: List["spack.spec.Spec"], tests: bool) -> None:
            self.all_types = dt.LINK | dt.RUN | dt.BUILD

        self._possible_dependencies: PossibleDependencies = set()
        self._possible_virtuals: Set[str] = set(x.name for x in specs if x.virtual)
        self._possible_virtuals: Set[str] = (
            set(x.name for x in specs if x.virtual) | runtime_virtuals
        )

    def possible_dependencies(self) -> PossibleDependencies:
        """Returns the list of possible dependencies"""

@@ -911,6 +911,9 @@ def flags():
        yield flags

    def __str__(self):
        if not self:
            return ""

        sorted_items = sorted((k, v) for k, v in self.items() if v)

        result = ""

@@ -1408,6 +1411,13 @@ def external_path(self, ext_path):
    def external(self):
        return bool(self.external_path) or bool(self.external_modules)

    @property
    def is_develop(self):
        """Return whether the Spec represents a user-developed package
        in a Spack ``Environment`` (i.e. using `spack develop`).
        """
        return bool(self.variants.get("dev_path", False))
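
A quick illustration of the new property (hypothetical package name and path; assumes a normal Spack session so the spec string parses):

    from spack.spec import Spec

    # A develop spec carries a dev_path variant pointing at the local checkout.
    assert Spec("mypkg dev_path=/home/me/src/mypkg").is_develop
    assert not Spec("mypkg").is_develop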

    def clear_dependencies(self):
        """Trim the dependencies of this spec."""
        self._dependencies.clear()

@@ -2961,7 +2971,6 @@ def _new_concretize(self, tests=False):
        allow_deprecated = spack.config.get("config:deprecated", False)
        solver = spack.solver.asp.Solver()
        result = solver.solve([self], tests=tests, allow_deprecated=allow_deprecated)
        result.raise_if_unsat()

        # take the best answer
        opt, i, answer = min(result.answers)

@@ -4557,6 +4566,8 @@ def format_path(
        return str(path_ctor(*output_path_components))

    def __str__(self):
        if not self._dependencies:
            return self.format()
        root_str = [self.format()]
        sorted_dependencies = sorted(
            self.traverse(root=False), key=lambda x: (x.name, x.abstract_hash)

@@ -927,6 +927,10 @@ def destroy(self):
            shutil.rmtree(self.path)
        except FileNotFoundError:
            pass
        try:
            os.remove(self.reference_link)
        except FileNotFoundError:
            pass
        self.created = False

    def restage(self):

@@ -142,7 +142,7 @@ def optimization_flags(self, compiler):
        # custom spec.
        compiler_version = compiler.version
        version_number, suffix = archspec.cpu.version_components(compiler.version)
        if not version_number or suffix not in ("", "apple"):
        if not version_number or suffix:
            # Try to deduce the underlying version of the compiler, regardless
            # of its name in compilers.yaml. Depending on where this function
            # is called we might get either a CompilerSpec or a fully fledged

@@ -155,4 +155,6 @@ def optimization_flags(self, compiler):
            # log this and just return compiler.version instead
            tty.debug(str(e))

        return self.microarchitecture.optimization_flags(compiler.name, str(compiler_version))
        return self.microarchitecture.optimization_flags(
            compiler.name, compiler_version.dotted_numeric_string
        )
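
The switch to dotted_numeric_string matters for compilers whose versions carry non-numeric suffixes, which the previous str() call passed through to archspec unchanged. Assuming the property behaves as its name suggests, a sketch (the exact output format is an assumption, not confirmed by this diff):

    from spack.version import Version

    # A suffixed compiler version would otherwise confuse archspec's flag
    # tables, which are keyed on numeric versions.
    v = Version("4.4.0-special")
    print(v.dotted_numeric_string)  # expected to keep only the numeric part, e.g. 4.4.0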

@@ -8,13 +8,16 @@

import pytest

import archspec.cpu

import llnl.util.filesystem as fs

import spack.compilers
import spack.concretize
import spack.operating_systems
import spack.platforms
import spack.target
from spack.spec import ArchSpec, CompilerSpec, Spec
from spack.spec import ArchSpec, Spec


@pytest.fixture(scope="module")

@@ -123,52 +126,60 @@ def test_arch_spec_container_semantic(item, architecture_str):
@pytest.mark.parametrize(
    "compiler_spec,target_name,expected_flags",
    [
        # Check compilers with version numbers from a single toolchain
        # Homogeneous compilers
        ("gcc@4.7.2", "ivybridge", "-march=core-avx-i -mtune=core-avx-i"),
        # Check mixed toolchains
        ("clang@8.0.0", "broadwell", ""),
        ("clang@3.5", "x86_64", "-march=x86-64 -mtune=generic"),
        # Check Apple's Clang compilers
        ("apple-clang@9.1.0", "x86_64", "-march=x86-64"),
        # Mixed toolchain
        ("clang@8.0.0", "broadwell", ""),
    ],
)
@pytest.mark.filterwarnings("ignore:microarchitecture specific")
def test_optimization_flags(compiler_spec, target_name, expected_flags, config):
def test_optimization_flags(compiler_spec, target_name, expected_flags, compiler_factory):
    target = spack.target.Target(target_name)
    compiler = spack.compilers.compilers_for_spec(compiler_spec).pop()
    compiler_dict = compiler_factory(spec=compiler_spec, operating_system="")["compiler"]
    if compiler_spec == "clang@8.0.0":
        compiler_dict["paths"] = {
            "cc": "/path/to/clang-8",
            "cxx": "/path/to/clang++-8",
            "f77": "/path/to/gfortran-9",
            "fc": "/path/to/gfortran-9",
        }
    compiler = spack.compilers.compiler_from_dict(compiler_dict)

    opt_flags = target.optimization_flags(compiler)
    assert opt_flags == expected_flags


@pytest.mark.parametrize(
    "compiler,real_version,target_str,expected_flags",
    "compiler_str,real_version,target_str,expected_flags",
    [
        (CompilerSpec("gcc@=9.2.0"), None, "haswell", "-march=haswell -mtune=haswell"),
        ("gcc@=9.2.0", None, "haswell", "-march=haswell -mtune=haswell"),
        # Check that custom string versions are accepted
        (
            CompilerSpec("gcc@=10foo"),
            "9.2.0",
            "icelake",
            "-march=icelake-client -mtune=icelake-client",
        ),
        ("gcc@=10foo", "9.2.0", "icelake", "-march=icelake-client -mtune=icelake-client"),
        # Check that we run version detection (4.4.0 doesn't support icelake)
        (
            CompilerSpec("gcc@=4.4.0-special"),
            "9.2.0",
            "icelake",
            "-march=icelake-client -mtune=icelake-client",
        ),
        ("gcc@=4.4.0-special", "9.2.0", "icelake", "-march=icelake-client -mtune=icelake-client"),
        # Check that the special case for Apple's clang is treated correctly
        # i.e. it won't try to detect the version again
        (CompilerSpec("apple-clang@=9.1.0"), None, "x86_64", "-march=x86-64"),
        ("apple-clang@=9.1.0", None, "x86_64", "-march=x86-64"),
    ],
)
def test_optimization_flags_with_custom_versions(
    compiler, real_version, target_str, expected_flags, monkeypatch, config
    compiler_str,
    real_version,
    target_str,
    expected_flags,
    monkeypatch,
    mutable_config,
    compiler_factory,
):
    target = spack.target.Target(target_str)
    compiler_dict = compiler_factory(spec=compiler_str, operating_system="redhat6")
    mutable_config.set("compilers", [compiler_dict])
    if real_version:
        monkeypatch.setattr(spack.compiler.Compiler, "get_real_version", lambda x: real_version)
    compiler = spack.compilers.compiler_from_dict(compiler_dict["compiler"])

    opt_flags = target.optimization_flags(compiler)
    assert opt_flags == expected_flags

@@ -203,9 +214,10 @@ def test_satisfy_strict_constraint_when_not_concrete(architecture_tuple, constra
)
@pytest.mark.usefixtures("mock_packages", "config")
@pytest.mark.only_clingo("Fixing the parser broke this test for the original concretizer.")
@pytest.mark.skipif(
    str(archspec.cpu.host().family) != "x86_64", reason="tests are for x86_64 uarch ranges"
)
def test_concretize_target_ranges(root_target_range, dep_target_range, result, monkeypatch):
    # Monkeypatch so that all concretization is done as if the machine is core2
    monkeypatch.setattr(spack.platforms.test.Test, "default", "core2")
    spec = Spec(f"a %gcc@10 foobar=bar target={root_target_range} ^b target={dep_target_range}")
    with spack.concretize.disable_compiler_existence_check():
        spec.concretize()

@@ -19,6 +19,8 @@
import py
import pytest

import archspec.cpu

from llnl.util.filesystem import join_path, visit_directory_tree

import spack.binary_distribution as bindist

@@ -34,7 +36,7 @@
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
import spack.util.web as web_util
from spack.binary_distribution import get_buildfile_manifest
from spack.binary_distribution import CannotListKeys, GenerateIndexError, get_buildfile_manifest
from spack.directory_layout import DirectoryLayout
from spack.paths import test_path
from spack.spec import Spec

@@ -463,50 +465,57 @@ def test_generate_index_missing(monkeypatch, tmpdir, mutable_config):
    assert "libelf" not in cache_list


def test_generate_indices_key_error(monkeypatch, capfd):
def test_generate_key_index_failure(monkeypatch):
    def list_url(url, recursive=False):
        if "fails-listing" in url:
            raise Exception("Couldn't list the directory")
        return ["first.pub", "second.pub"]

    def push_to_url(*args, **kwargs):
        raise Exception("Couldn't upload the file")

    monkeypatch.setattr(web_util, "list_url", list_url)
    monkeypatch.setattr(web_util, "push_to_url", push_to_url)

    with pytest.raises(CannotListKeys, match="Encountered problem listing keys"):
        bindist.generate_key_index("s3://non-existent/fails-listing")

    with pytest.raises(GenerateIndexError, match="problem pushing .* Couldn't upload"):
        bindist.generate_key_index("s3://non-existent/fails-uploading")


def test_generate_package_index_failure(monkeypatch, capfd):
    def mock_list_url(url, recursive=False):
        print("mocked list_url({0}, {1})".format(url, recursive))
        raise KeyError("Test KeyError handling")
        raise Exception("Some HTTP error")

    monkeypatch.setattr(web_util, "list_url", mock_list_url)

    test_url = "file:///fake/keys/dir"

    # Make sure generate_key_index handles the KeyError
    bindist.generate_key_index(test_url)
    with pytest.raises(GenerateIndexError, match="Unable to generate package index"):
        bindist.generate_package_index(test_url)

    err = capfd.readouterr()[1]
    assert "Warning: No keys at {0}".format(test_url) in err

    # Make sure generate_package_index handles the KeyError
    bindist.generate_package_index(test_url)

    err = capfd.readouterr()[1]
    assert "Warning: No packages at {0}".format(test_url) in err
    assert (
        f"Warning: Encountered problem listing packages at {test_url}: Some HTTP error"
        in capfd.readouterr().err
    )


def test_generate_indices_exception(monkeypatch, capfd):
    def mock_list_url(url, recursive=False):
        print("mocked list_url({0}, {1})".format(url, recursive))
        raise Exception("Test Exception handling")

    monkeypatch.setattr(web_util, "list_url", mock_list_url)

    test_url = "file:///fake/keys/dir"
    url = "file:///fake/keys/dir"

    # Make sure generate_key_index handles the Exception
    bindist.generate_key_index(test_url)
    with pytest.raises(GenerateIndexError, match=f"Encountered problem listing keys at {url}"):
        bindist.generate_key_index(url)

    err = capfd.readouterr()[1]
    expect = "Encountered problem listing keys at {0}".format(test_url)
    assert expect in err
    with pytest.raises(GenerateIndexError, match="Unable to generate package index"):
        bindist.generate_package_index(url)

    # Make sure generate_package_index handles the Exception
    bindist.generate_package_index(test_url)

    err = capfd.readouterr()[1]
    expect = "Encountered problem listing packages at {0}".format(test_url)
    assert expect in err
    assert f"Encountered problem listing packages at {url}" in capfd.readouterr().err


@pytest.mark.usefixtures("mock_fetch", "install_mockery")

@@ -573,11 +582,20 @@ def test_update_sbang(tmpdir, test_mirror):
    uninstall_cmd("-y", "/%s" % new_spec.dag_hash())


def test_install_legacy_buildcache_layout(install_mockery_mutable_config):
@pytest.mark.skipif(
    str(archspec.cpu.host().family) != "x86_64",
    reason="test data uses gcc 4.5.0 which does not support aarch64",
)
def test_install_legacy_buildcache_layout(
    mutable_config, compiler_factory, install_mockery_mutable_config
):
    """Legacy buildcache layout involved a nested archive structure
    where the .spack file contained a repeated spec.json and another
    compressed archive file containing the install tree. This test
    makes sure we can still read that layout."""
    mutable_config.set(
        "compilers", [compiler_factory(spec="gcc@4.5.0", operating_system="debian6")]
    )
    legacy_layout_dir = os.path.join(test_path, "data", "mirrors", "legacy_layout")
    mirror_url = "file://{0}".format(legacy_layout_dir)
    filename = (
@@ -63,7 +63,8 @@ def build_environment(working_env):
    os.environ["SPACK_LINKER_ARG"] = "-Wl,"
    os.environ["SPACK_DTAGS_TO_ADD"] = "--disable-new-dtags"
    os.environ["SPACK_DTAGS_TO_STRIP"] = "--enable-new-dtags"
    os.environ["SPACK_SYSTEM_DIRS"] = "/usr/include /usr/lib"
    os.environ["SPACK_SYSTEM_DIRS"] = "/usr/include|/usr/lib"
    os.environ["SPACK_MANAGED_DIRS"] = f"{prefix}/opt/spack"
    os.environ["SPACK_TARGET_ARGS"] = ""

    if "SPACK_DEPENDENCIES" in os.environ:

@@ -9,6 +9,8 @@
import py.path
import pytest

import archspec.cpu

import llnl.util.filesystem as fs

import spack.build_systems.autotools

@@ -209,6 +211,9 @@ def test_autotools_gnuconfig_replacement_disabled(
        assert "gnuconfig version of config.guess" not in f.read()

    @pytest.mark.disable_clean_stage_check
    @pytest.mark.skipif(
        str(archspec.cpu.host().family) != "x86_64", reason="test data is specific for x86_64"
    )
    def test_autotools_gnuconfig_replacement_no_gnuconfig(self, mutable_database, monkeypatch):
        """
        Tests whether a useful error message is shown when patch_config_files is

@@ -15,7 +15,7 @@
import spack.config
import spack.spec
from spack.paths import build_env_path
from spack.util.environment import SYSTEM_DIRS, set_env
from spack.util.environment import SYSTEM_DIR_CASE_ENTRY, set_env
from spack.util.executable import Executable, ProcessError

#

@@ -159,7 +159,8 @@ def wrapper_environment(working_env):
        SPACK_DEBUG_LOG_ID="foo-hashabc",
        SPACK_COMPILER_SPEC="gcc@4.4.7",
        SPACK_SHORT_SPEC="foo@1.2 arch=linux-rhel6-x86_64 /hashabc",
        SPACK_SYSTEM_DIRS=":".join(SYSTEM_DIRS),
        SPACK_SYSTEM_DIRS=SYSTEM_DIR_CASE_ENTRY,
        SPACK_MANAGED_DIRS="/path/to/spack-1/opt/spack/*|/path/to/spack-2/opt/spack/*",
        SPACK_CC_RPATH_ARG="-Wl,-rpath,",
        SPACK_CXX_RPATH_ARG="-Wl,-rpath,",
        SPACK_F77_RPATH_ARG="-Wl,-rpath,",

@@ -907,3 +908,108 @@ def test_linker_strips_loopopt(wrapper_environment, wrapper_flags):
    result = cc(*(test_args + ["-loopopt=0", "-c", "x.c"]), output=str)
    result = result.strip().split("\n")
    assert "-loopopt=0" in result


def test_spack_managed_dirs_are_prioritized(wrapper_environment):
    # We have two different stores with 5 packages divided over them
    pkg1 = "/path/to/spack-1/opt/spack/linux-ubuntu22.04-zen2/gcc-13.2.0/pkg-1.0-abcdef"
    pkg2 = "/path/to/spack-1/opt/spack/linux-ubuntu22.04-zen2/gcc-13.2.0/pkg-2.0-abcdef"
    pkg3 = "/path/to/spack-2/opt/spack/linux-ubuntu22.04-zen2/gcc-13.2.0/pkg-3.0-abcdef"
    pkg4 = "/path/to/spack-2/opt/spack/linux-ubuntu22.04-zen2/gcc-13.2.0/pkg-4.0-abcdef"
    pkg5 = "/path/to/spack-2/opt/spack/linux-ubuntu22.04-zen2/gcc-13.2.0/pkg-5.0-abcdef"

    variables = {
        # cppflags, ldflags from the command line, config or package.py take highest priority
        "SPACK_CPPFLAGS": f"-I/usr/local/include -I/external-1/include -I{pkg1}/include",
        "SPACK_LDFLAGS": f"-L/usr/local/lib -L/external-1/lib -L{pkg1}/lib "
        f"-Wl,-rpath,/usr/local/lib -Wl,-rpath,/external-1/lib -Wl,-rpath,{pkg1}/lib",
        # automatic -L, -Wl,-rpath, -I flags from dependencies -- on the spack side they are
        # already partitioned into "spack owned prefixes" and "non-spack owned prefixes"
        "SPACK_STORE_LINK_DIRS": f"{pkg4}/lib:{pkg5}/lib",
        "SPACK_STORE_RPATH_DIRS": f"{pkg4}/lib:{pkg5}/lib",
        "SPACK_STORE_INCLUDE_DIRS": f"{pkg4}/include:{pkg5}/include",
        "SPACK_LINK_DIRS": "/external-3/lib:/external-4/lib",
        "SPACK_RPATH_DIRS": "/external-3/lib:/external-4/lib",
        "SPACK_INCLUDE_DIRS": "/external-3/include:/external-4/include",
    }

    with set_env(SPACK_TEST_COMMAND="dump-args", **variables):
        effective_call = (
            cc(
                # system paths
                "-I/usr/include",
                "-L/usr/lib",
                "-Wl,-rpath,/usr/lib",
                # some other externals
                "-I/external-2/include",
                "-L/external-2/lib",
                "-Wl,-rpath,/external-2/lib",
                # relative paths are considered "spack managed" since they are in the stage dir
                "-I..",
                "-L..",
                "-Wl,-rpath,..",  # pathological but simpler for the test.
                # spack store paths
                f"-I{pkg2}/include",
                f"-I{pkg3}/include",
                f"-L{pkg2}/lib",
                f"-L{pkg3}/lib",
                f"-Wl,-rpath,{pkg2}/lib",
                f"-Wl,-rpath,{pkg3}/lib",
                "hello.c",
                "-o",
                "hello",
                output=str,
            )
            .strip()
            .split("\n")
        )

    dash_I = [flag[2:] for flag in effective_call if flag.startswith("-I")]
    dash_L = [flag[2:] for flag in effective_call if flag.startswith("-L")]
    dash_Wl_rpath = [flag[11:] for flag in effective_call if flag.startswith("-Wl,-rpath")]

    assert dash_I == [
        # spack owned dirs from SPACK_*FLAGS
        f"{pkg1}/include",
        # spack owned dirs from command line & automatic flags for deps (in that order)
        "..",
        f"{pkg2}/include",  # from command line
        f"{pkg3}/include",  # from command line
        f"{pkg4}/include",  # from SPACK_STORE_INCLUDE_DIRS
        f"{pkg5}/include",  # from SPACK_STORE_INCLUDE_DIRS
        # non-system dirs from SPACK_*FLAGS
        "/external-1/include",
        # non-system dirs from command line & automatic flags for deps (in that order)
        "/external-2/include",  # from command line
        "/external-3/include",  # from SPACK_INCLUDE_DIRS
        "/external-4/include",  # from SPACK_INCLUDE_DIRS
        # system dirs from SPACK_*FLAGS
        "/usr/local/include",
        # system dirs from command line
        "/usr/include",
    ]

    assert (
        dash_L
        == dash_Wl_rpath
        == [
            # spack owned dirs from SPACK_*FLAGS
            f"{pkg1}/lib",
            # spack owned dirs from command line & automatic flags for deps (in that order)
            "..",
            f"{pkg2}/lib",  # from command line
            f"{pkg3}/lib",  # from command line
            f"{pkg4}/lib",  # from SPACK_STORE_LINK_DIRS
            f"{pkg5}/lib",  # from SPACK_STORE_LINK_DIRS
            # non-system dirs from SPACK_*FLAGS
            "/external-1/lib",
            # non-system dirs from command line & automatic flags for deps (in that order)
            "/external-2/lib",  # from command line
            "/external-3/lib",  # from SPACK_LINK_DIRS
            "/external-4/lib",  # from SPACK_LINK_DIRS
            # system dirs from SPACK_*FLAGS
            "/usr/local/lib",
            # system dirs from command line
            "/usr/lib",
        ]
    )
@@ -448,7 +448,7 @@ def _fail(self, args):
def test_ci_create_buildcache(tmpdir, working_env, config, mock_packages, monkeypatch):
    """Test that create_buildcache returns a list of objects with the correct
    keys and types."""
    monkeypatch.setattr(spack.ci, "push_mirror_contents", lambda a, b, c: True)
    monkeypatch.setattr(spack.ci, "_push_to_build_cache", lambda a, b, c: True)

    results = ci.create_buildcache(
        None, destination_mirror_urls=["file:///fake-url-one", "file:///fake-url-two"]

@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import errno
import json
import os
import shutil

@@ -168,6 +169,25 @@ def test_update_key_index(
    assert "index.json" in key_dir_list


def test_buildcache_autopush(tmp_path, install_mockery, mock_fetch):
    """Test buildcache with autopush"""
    mirror_dir = tmp_path / "mirror"
    mirror_autopush_dir = tmp_path / "mirror_autopush"

    mirror("add", "--unsigned", "mirror", mirror_dir.as_uri())
    mirror("add", "--autopush", "--unsigned", "mirror-autopush", mirror_autopush_dir.as_uri())

    s = Spec("libdwarf").concretized()

    # Install and generate build cache index
    s.package.do_install()

    metadata_file = spack.binary_distribution.tarball_name(s, ".spec.json")

    assert not (mirror_dir / "build_cache" / metadata_file).exists()
    assert (mirror_autopush_dir / "build_cache" / metadata_file).exists()


def test_buildcache_sync(
    mutable_mock_env_path,
    install_mockery_mutable_config,

@@ -234,10 +254,71 @@ def verify_mirror_contents():
    # Use mirror names to specify mirrors
    mirror("add", "src", src_mirror_url)
    mirror("add", "dest", dest_mirror_url)
    mirror("add", "ignored", "file:///dummy/io")

    buildcache("sync", "src", "dest")

    verify_mirror_contents()
    shutil.rmtree(dest_mirror_dir)

    def manifest_insert(manifest, spec, dest_url):
        manifest[spec.dag_hash()] = [
            {
                "src": spack.util.url.join(
                    src_mirror_url,
                    spack.binary_distribution.build_cache_relative_path(),
                    spack.binary_distribution.tarball_name(spec, ".spec.json"),
                ),
                "dest": spack.util.url.join(
                    dest_url,
                    spack.binary_distribution.build_cache_relative_path(),
                    spack.binary_distribution.tarball_name(spec, ".spec.json"),
                ),
            },
            {
                "src": spack.util.url.join(
                    src_mirror_url,
                    spack.binary_distribution.build_cache_relative_path(),
                    spack.binary_distribution.tarball_path_name(spec, ".spack"),
                ),
                "dest": spack.util.url.join(
                    dest_url,
                    spack.binary_distribution.build_cache_relative_path(),
                    spack.binary_distribution.tarball_path_name(spec, ".spack"),
                ),
            },
        ]

    manifest_file = os.path.join(tmpdir.strpath, "manifest_dest.json")
    with open(manifest_file, "w") as fd:
        test_env = ev.active_environment()

        manifest = {}
        for spec in test_env.specs_by_hash.values():
            manifest_insert(manifest, spec, dest_mirror_url)
        json.dump(manifest, fd)

    buildcache("sync", "--manifest-glob", manifest_file)

    verify_mirror_contents()
    shutil.rmtree(dest_mirror_dir)

    manifest_file = os.path.join(tmpdir.strpath, "manifest_bad_dest.json")
    with open(manifest_file, "w") as fd:
        manifest = {}
        for spec in test_env.specs_by_hash.values():
            manifest_insert(
                manifest, spec, spack.util.url.join(dest_mirror_url, "invalid_path")
            )
        json.dump(manifest, fd)

    # Trigger the warning
    output = buildcache("sync", "--manifest-glob", manifest_file, "dest", "ignored")

    assert "Ignoring unused arguemnt: ignored" in output

    verify_mirror_contents()
    shutil.rmtree(dest_mirror_dir)


def test_buildcache_create_install(
@@ -26,6 +26,7 @@
import spack.util.gpg
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
from spack.cmd.ci import FAILED_CREATE_BUILDCACHE_CODE
from spack.schema.buildcache_spec import schema as specfile_schema
from spack.schema.ci import schema as ci_schema
from spack.schema.database_index import schema as db_idx_schema

@@ -47,6 +48,8 @@
@pytest.fixture()
def ci_base_environment(working_env, tmpdir):
    os.environ["CI_PROJECT_DIR"] = tmpdir.strpath
    os.environ["CI_PIPELINE_ID"] = "7192"
    os.environ["CI_JOB_NAME"] = "mock"


@pytest.fixture(scope="function")

@@ -114,13 +117,13 @@ def test_specs_staging(config, tmpdir):
    with repo.use_repositories(builder.root):
        spec_a = Spec("a").concretized()

        spec_a_label = ci._spec_deps_key(spec_a)
        spec_b_label = ci._spec_deps_key(spec_a["b"])
        spec_c_label = ci._spec_deps_key(spec_a["c"])
        spec_d_label = ci._spec_deps_key(spec_a["d"])
        spec_e_label = ci._spec_deps_key(spec_a["e"])
        spec_f_label = ci._spec_deps_key(spec_a["f"])
        spec_g_label = ci._spec_deps_key(spec_a["g"])
        spec_a_label = ci._spec_ci_label(spec_a)
        spec_b_label = ci._spec_ci_label(spec_a["b"])
        spec_c_label = ci._spec_ci_label(spec_a["c"])
        spec_d_label = ci._spec_ci_label(spec_a["d"])
        spec_e_label = ci._spec_ci_label(spec_a["e"])
        spec_f_label = ci._spec_ci_label(spec_a["f"])
        spec_g_label = ci._spec_ci_label(spec_a["g"])

        spec_labels, dependencies, stages = ci.stage_spec_jobs([spec_a])

@@ -776,6 +779,43 @@ def test_ci_rebuild_mock_success(
    assert "Cannot copy test logs" in out


def test_ci_rebuild_mock_failure_to_push(
    tmpdir,
    working_env,
    mutable_mock_env_path,
    install_mockery_mutable_config,
    mock_gnupghome,
    mock_stage,
    mock_fetch,
    mock_binary_index,
    ci_base_environment,
    monkeypatch,
):
    pkg_name = "trivial-install-test-package"
    rebuild_env = create_rebuild_env(tmpdir, pkg_name)

    # Mock the install script success
    def mock_success(*args, **kwargs):
        return 0

    monkeypatch.setattr(spack.ci, "process_command", mock_success)

    # Mock failure to push to the build cache
    def mock_push_or_raise(*args, **kwargs):
        raise spack.binary_distribution.PushToBuildCacheError(
            "Encountered problem pushing binary <url>: <exception>"
        )

    monkeypatch.setattr(spack.binary_distribution, "push_or_raise", mock_push_or_raise)

    with rebuild_env.env_dir.as_cwd():
        activate_rebuild_env(tmpdir, pkg_name, rebuild_env)

        expect = f"Command exited with code {FAILED_CREATE_BUILDCACHE_CODE}"
        with pytest.raises(spack.main.SpackCommandError, match=expect):
            ci_cmd("rebuild", fail_on_error=True)


@pytest.mark.skip(reason="fails intermittently and covered by gitlab ci")
def test_ci_rebuild(
    tmpdir,

@@ -1063,7 +1103,7 @@ def test_ci_generate_mirror_override(


@pytest.mark.disable_clean_stage_check
def test_push_mirror_contents(
def test_push_to_build_cache(
    tmpdir,
    mutable_mock_env_path,
    install_mockery_mutable_config,

@@ -1124,7 +1164,7 @@ def test_push_mirror_contents(
        install_cmd("--add", "--keep-stage", json_path)

        for s in concrete_spec.traverse():
            ci.push_mirror_contents(s, mirror_url, True)
            ci.push_to_build_cache(s, mirror_url, True)

        buildcache_path = os.path.join(mirror_dir.strpath, "build_cache")

@@ -1217,21 +1257,16 @@ def test_push_mirror_contents(
    assert len(dl_dir_list) == 2


def test_push_mirror_contents_exceptions(monkeypatch, capsys):
    def failing_access(*args, **kwargs):
def test_push_to_build_cache_exceptions(monkeypatch, tmp_path, capsys):
    def _push_to_build_cache(spec, sign_binaries, mirror_url):
        raise Exception("Error: Access Denied")

    monkeypatch.setattr(spack.ci, "_push_mirror_contents", failing_access)
    monkeypatch.setattr(spack.ci, "_push_to_build_cache", _push_to_build_cache)

    # Input doesn't matter, as wwe are faking exceptional output
    url = "fakejunk"
    ci.push_mirror_contents(None, url, None)

    captured = capsys.readouterr()
    std_out = captured[0]
    expect_msg = "Permission problem writing to {0}".format(url)

    assert expect_msg in std_out
    # Input doesn't matter, as we are faking exceptional output
    url = tmp_path.as_uri()
    ci.push_to_build_cache(None, url, None)
    assert f"Permission problem writing to {url}" in capsys.readouterr().err


@pytest.mark.parametrize("match_behavior", ["first", "merge"])

@@ -1461,26 +1496,24 @@ def test_ci_rebuild_index(
    working_dir = tmpdir.join("working_dir")

    mirror_dir = working_dir.join("mirror")
    mirror_url = "file://{0}".format(mirror_dir.strpath)
    mirror_url = url_util.path_to_file_url(str(mirror_dir))

    spack_yaml_contents = """
    spack_yaml_contents = f"""
spack:
  specs:
    - callpath
  mirrors:
    test-mirror: {0}
  ci:
    pipeline-gen:
    - submapping:
      - match:
        - patchelf
        build-job:
          tags:
            - donotcare
          image: donotcare
""".format(
        mirror_url
    )
spack:
  specs:
    - callpath
  mirrors:
    test-mirror: {mirror_url}
  ci:
    pipeline-gen:
    - submapping:
      - match:
        - patchelf
        build-job:
          tags:
            - donotcare
          image: donotcare
"""

    filename = str(tmpdir.join("spack.yaml"))
    with open(filename, "w") as f:
@@ -112,10 +112,10 @@ def test_compiler_find_no_apple_gcc(no_compilers_yaml, working_env, mock_executa
@pytest.mark.regression("37996")
def test_compiler_remove(mutable_config, mock_packages):
    """Tests that we can remove a compiler from configuration."""
    assert spack.spec.CompilerSpec("gcc@=4.5.0") in spack.compilers.all_compiler_specs()
    args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@4.5.0", add_paths=[], scope=None)
    assert spack.spec.CompilerSpec("gcc@=9.4.0") in spack.compilers.all_compiler_specs()
    args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@9.4.0", add_paths=[], scope=None)
    spack.cmd.compiler.compiler_remove(args)
    assert spack.spec.CompilerSpec("gcc@=4.5.0") not in spack.compilers.all_compiler_specs()
    assert spack.spec.CompilerSpec("gcc@=9.4.0") not in spack.compilers.all_compiler_specs()


@pytest.mark.regression("37996")
@@ -124,10 +124,10 @@ def test_removing_compilers_from_multiple_scopes(mutable_config, mock_packages):
    site_config = spack.config.get("compilers", scope="site")
    spack.config.set("compilers", site_config, scope="user")

    assert spack.spec.CompilerSpec("gcc@=4.5.0") in spack.compilers.all_compiler_specs()
    args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@4.5.0", add_paths=[], scope=None)
    assert spack.spec.CompilerSpec("gcc@=9.4.0") in spack.compilers.all_compiler_specs()
    args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@9.4.0", add_paths=[], scope=None)
    spack.cmd.compiler.compiler_remove(args)
    assert spack.spec.CompilerSpec("gcc@=4.5.0") not in spack.compilers.all_compiler_specs()
    assert spack.spec.CompilerSpec("gcc@=9.4.0") not in spack.compilers.all_compiler_specs()


@pytest.mark.not_on_windows("Cannot execute bash script on Windows")
@@ -175,7 +175,9 @@ def test_compiler_find_mixed_suffixes(
    assert "clang@11.0.0" in output
    assert "gcc@8.4.0" in output

    config = spack.compilers.get_compiler_config("site", False)
    config = spack.compilers.get_compiler_config(
        no_compilers_yaml, scope="site", init_config=False
    )
    clang = next(c["compiler"] for c in config if c["compiler"]["spec"] == "clang@=11.0.0")
    gcc = next(c["compiler"] for c in config if c["compiler"]["spec"] == "gcc@=8.4.0")

@@ -210,7 +212,9 @@ def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, compiler
    assert "clang@11.0.0" in output
    assert "gcc@8.4.0" in output

    config = spack.compilers.get_compiler_config("site", False)
    config = spack.compilers.get_compiler_config(
        no_compilers_yaml, scope="site", init_config=False
    )
    clang = next(c["compiler"] for c in config if c["compiler"]["spec"] == "clang@=11.0.0")

    assert clang["paths"]["cc"] == str(compilers_dir / "clang")
@@ -229,7 +233,9 @@ def test_compiler_find_path_order(no_compilers_yaml, working_env, compilers_dir)

    compiler("find", "--scope=site")

    config = spack.compilers.get_compiler_config("site", False)
    config = spack.compilers.get_compiler_config(
        no_compilers_yaml, scope="site", init_config=False
    )
    gcc = next(c["compiler"] for c in config if c["compiler"]["spec"] == "gcc@=8.4.0")
    assert gcc["paths"] == {
        "cc": str(new_dir / "gcc-8"),

@@ -20,7 +20,10 @@
install = SpackCommand("install")
env = SpackCommand("env")

pytestmark = pytest.mark.not_on_windows("does not run on windows")
pytestmark = [
    pytest.mark.not_on_windows("does not run on windows"),
    pytest.mark.disable_clean_stage_check,
]


def test_dev_build_basics(tmpdir, install_mockery):
@@ -41,7 +44,6 @@ def test_dev_build_basics(tmpdir, install_mockery):
    assert os.path.exists(str(tmpdir))


@pytest.mark.disable_clean_stage_check
def test_dev_build_before(tmpdir, install_mockery):
    spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={tmpdir}").concretized()

@@ -58,7 +60,6 @@ def test_dev_build_before(tmpdir, install_mockery):
    assert not os.path.exists(spec.prefix)


@pytest.mark.disable_clean_stage_check
def test_dev_build_until(tmpdir, install_mockery):
    spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={tmpdir}").concretized()

@@ -76,7 +77,6 @@ def test_dev_build_until(tmpdir, install_mockery):
    assert not spack.store.STORE.db.query(spec, installed=True)


@pytest.mark.disable_clean_stage_check
def test_dev_build_until_last_phase(tmpdir, install_mockery):
    # Test that we ignore the last_phase argument if it is already last
    spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={tmpdir}").concretized()
@@ -96,7 +96,6 @@ def test_dev_build_until_last_phase(tmpdir, install_mockery):
    assert os.path.exists(str(tmpdir))


@pytest.mark.disable_clean_stage_check
def test_dev_build_before_until(tmpdir, install_mockery, capsys):
    spec = spack.spec.Spec(f"dev-build-test-install@0.0.0 dev_path={tmpdir}").concretized()

@@ -134,7 +133,6 @@ def mock_module_noop(*args):
    pass


@pytest.mark.disable_clean_stage_check
def test_dev_build_drop_in(tmpdir, mock_packages, monkeypatch, install_mockery, working_env):
    monkeypatch.setattr(os, "execvp", print_spack_cc)
    monkeypatch.setattr(spack.build_environment, "module", mock_module_noop)

@@ -188,6 +188,127 @@ def test_env_remove(capfd):
    assert "bar" not in out


def test_env_rename_managed(capfd):
    # Need real environment
    with pytest.raises(spack.main.SpackCommandError):
        env("rename", "foo", "bar")
    assert (
        "The specified name does not correspond to a managed spack environment"
        in capfd.readouterr()[0]
    )

    env("create", "foo")

    out = env("list")
    assert "foo" in out

    out = env("rename", "foo", "bar")
    assert "Successfully renamed environment foo to bar" in out

    out = env("list")
    assert "foo" not in out
    assert "bar" in out

    bar = ev.read("bar")
    with bar:
        # Cannot rename active environment
        with pytest.raises(spack.main.SpackCommandError):
            env("rename", "bar", "baz")
        assert "Cannot rename active environment" in capfd.readouterr()[0]

        env("create", "qux")

        # Cannot rename to an active environment (even with force flag)
        with pytest.raises(spack.main.SpackCommandError):
            env("rename", "-f", "qux", "bar")
        assert "bar is an active environment" in capfd.readouterr()[0]

        # Can rename inactive environment when another's active
        out = env("rename", "qux", "quux")
        assert "Successfully renamed environment qux to quux" in out

    out = env("list")
    assert "bar" in out
    assert "baz" not in out

    env("create", "baz")

    # Cannot rename to existing environment without --force
    with pytest.raises(spack.main.SpackCommandError):
        env("rename", "bar", "baz")
    errmsg = (
        "The new name corresponds to an existing environment;"
        " specify the --force flag to overwrite it."
    )
    assert errmsg in capfd.readouterr()[0]

    env("rename", "-f", "bar", "baz")
    out = env("list")
    assert "bar" not in out
    assert "baz" in out


def test_env_rename_anonymous(capfd, tmpdir):
    # Need real environment
    with pytest.raises(spack.main.SpackCommandError):
        env("rename", "-d", "./non-existing", "./also-non-existing")
    assert (
        "The specified path does not correspond to a valid spack environment"
        in capfd.readouterr()[0]
    )

    anon_foo = str(tmpdir / "foo")
    env("create", "-d", anon_foo)

    anon_bar = str(tmpdir / "bar")
    out = env("rename", "-d", anon_foo, anon_bar)
    assert f"Successfully renamed environment {anon_foo} to {anon_bar}" in out
    assert not ev.is_env_dir(anon_foo)
    assert ev.is_env_dir(anon_bar)

    # Cannot rename active environment
    anon_baz = str(tmpdir / "baz")
    env("activate", "--sh", "-d", anon_bar)
    with pytest.raises(spack.main.SpackCommandError):
        env("rename", "-d", anon_bar, anon_baz)
    assert "Cannot rename active environment" in capfd.readouterr()[0]
    env("deactivate", "--sh")

    assert ev.is_env_dir(anon_bar)
    assert not ev.is_env_dir(anon_baz)

    # Cannot rename to existing environment without --force
    env("create", "-d", anon_baz)
    with pytest.raises(spack.main.SpackCommandError):
        env("rename", "-d", anon_bar, anon_baz)
    errmsg = (
        "The new path corresponds to an existing environment;"
        " specify the --force flag to overwrite it."
    )
    assert errmsg in capfd.readouterr()[0]
    assert ev.is_env_dir(anon_bar)
    assert ev.is_env_dir(anon_baz)

    env("rename", "-f", "-d", anon_bar, anon_baz)
    assert not ev.is_env_dir(anon_bar)
    assert ev.is_env_dir(anon_baz)

    # Cannot rename to existing (non-environment) path without --force
    qux = tmpdir / "qux"
    qux.mkdir()
    anon_qux = str(qux)
    assert not ev.is_env_dir(anon_qux)

    with pytest.raises(spack.main.SpackCommandError):
        env("rename", "-d", anon_baz, anon_qux)
    errmsg = "The new path already exists; specify the --force flag to overwrite it."
    assert errmsg in capfd.readouterr()[0]

    env("rename", "-f", "-d", anon_baz, anon_qux)
    assert not ev.is_env_dir(anon_baz)
    assert ev.is_env_dir(anon_qux)


def test_concretize():
    e = ev.create("test")
    e.add("mpileaks")
@@ -737,8 +858,7 @@ def test_with_config_bad_include_activate(environment_from_manifest, tmpdir):
    """
    )

    e = ev.Environment(env_root)
    with e:
    with ev.Environment(env_root) as e:
        e.concretize()

    # we've created an environment with some included config files (which do
@@ -748,7 +868,7 @@ def test_with_config_bad_include_activate(environment_from_manifest, tmpdir):
    os.remove(abs_include_path)
    os.remove(include1)
    with pytest.raises(spack.config.ConfigFileError) as exc:
        ev.activate(e)
        ev.activate(ev.Environment(env_root))

    err = exc.value.message
    assert "missing include" in err
@@ -856,6 +976,7 @@ def test_env_with_included_config_file(mutable_mock_env_path, packages_file):
    assert any(x.satisfies("mpileaks@2.2") for x in e._get_environment_specs())


@pytest.mark.only_clingo("original concretizer does not support requirements")
def test_config_change_existing(mutable_mock_env_path, tmp_path, mock_packages, mutable_config):
    """Test ``config change`` with config in the ``spack.yaml`` as well as an
    included file scope.
@@ -931,6 +1052,7 @@ def test_config_change_existing(mutable_mock_env_path, tmp_path, mock_packages,
    spack.spec.Spec("bowtie@1.2.2").concretized()


@pytest.mark.only_clingo("original concretizer does not support requirements")
def test_config_change_new(mutable_mock_env_path, tmp_path, mock_packages, mutable_config):
    spack_yaml = tmp_path / ev.manifest_name
    spack_yaml.write_text(
@@ -940,8 +1062,7 @@ def test_config_change_new(mutable_mock_env_path, tmp_path, mock_packages, mutab
    """
    )

    e = ev.Environment(tmp_path)
    with e:
    with ev.Environment(tmp_path):
        config("change", "packages:mpich:require:~debug")
        with pytest.raises(spack.solver.asp.UnsatisfiableSpecError):
            spack.spec.Spec("mpich+debug").concretized()
@@ -958,7 +1079,7 @@ def test_config_change_new(mutable_mock_env_path, tmp_path, mock_packages, mutab
            require: "@3.0.3"
    """
    )
    with e:
    with ev.Environment(tmp_path):
        assert spack.spec.Spec("mpich").concretized().satisfies("@3.0.3")
        with pytest.raises(spack.config.ConfigError, match="not a list"):
            config("change", "packages:mpich:require:~debug")
@@ -3038,6 +3159,41 @@ def test_modules_exist_after_env_install(
        assert spec.prefix in contents


@pytest.mark.disable_clean_stage_check
def test_install_develop_keep_stage(
    environment_from_manifest, install_mockery, mock_fetch, monkeypatch, tmpdir
):
    """Develop a dependency of a package and make sure that the associated
    stage for the package is retained after a successful install.
    """
    environment_from_manifest(
        """
spack:
  specs:
  - mpileaks
"""
    )

    monkeypatch.setattr(spack.stage.DevelopStage, "destroy", _always_fail)

    with ev.read("test") as e:
        libelf_dev_path = tmpdir.ensure("libelf-test-dev-path", dir=True)
        develop(f"--path={libelf_dev_path}", "libelf@0.8.13")
        concretize()
        (libelf_spec,) = e.all_matching_specs("libelf")
        (mpileaks_spec,) = e.all_matching_specs("mpileaks")
        assert not os.path.exists(libelf_spec.package.stage.path)
        assert not os.path.exists(mpileaks_spec.package.stage.path)
        install()
        assert os.path.exists(libelf_spec.package.stage.path)
        assert not os.path.exists(mpileaks_spec.package.stage.path)


# Helper method for test_install_develop_keep_stage
def _always_fail(cls, *args, **kwargs):
    raise Exception("Restage or destruction of dev stage detected during install")


@pytest.mark.regression("24148")
def test_virtual_spec_concretize_together(tmpdir):
    # An environment should permit to concretize "mpi"
@@ -3131,7 +3287,7 @@ def test_create_and_activate_managed(tmp_path):
        env("deactivate")


def test_create_and_activate_unmanaged(tmp_path):
def test_create_and_activate_anonymous(tmp_path):
    with fs.working_dir(str(tmp_path)):
        env_dir = os.path.join(str(tmp_path), "foo")
        shell = env("activate", "--without-view", "--create", "--sh", "-d", env_dir)

@@ -64,6 +64,7 @@ def test_query_arguments():
        implicit=False,
        start_date="2018-02-23",
        end_date=None,
        install_tree="all",
    )

    q_args = query_arguments(args)
@@ -75,6 +76,7 @@ def test_query_arguments():
    assert q_args["explicit"] is any
    assert "start_date" in q_args
    assert "end_date" not in q_args
    assert q_args["install_tree"] == "all"

    # Check that explicit works correctly
    args.explicit = True

@@ -407,3 +407,27 @@ def test_mirror_add_set_signed(mutable_config):
    assert spack.config.get("mirrors:example") == {"url": "http://example.com", "signed": False}
    mirror("set", "--signed", "example")
    assert spack.config.get("mirrors:example") == {"url": "http://example.com", "signed": True}


def test_mirror_add_set_autopush(mutable_config):
    # Add mirror without autopush
    mirror("add", "example", "http://example.com")
    assert spack.config.get("mirrors:example") == "http://example.com"
    mirror("set", "--no-autopush", "example")
    assert spack.config.get("mirrors:example") == {"url": "http://example.com", "autopush": False}
    mirror("set", "--autopush", "example")
    assert spack.config.get("mirrors:example") == {"url": "http://example.com", "autopush": True}
    mirror("set", "--no-autopush", "example")
    assert spack.config.get("mirrors:example") == {"url": "http://example.com", "autopush": False}
    mirror("remove", "example")

    # Add mirror with autopush
    mirror("add", "--autopush", "example", "http://example.com")
    assert spack.config.get("mirrors:example") == {"url": "http://example.com", "autopush": True}
    mirror("set", "--autopush", "example")
    assert spack.config.get("mirrors:example") == {"url": "http://example.com", "autopush": True}
    mirror("set", "--no-autopush", "example")
    assert spack.config.get("mirrors:example") == {"url": "http://example.com", "autopush": False}
    mirror("set", "--autopush", "example")
    assert spack.config.get("mirrors:example") == {"url": "http://example.com", "autopush": True}
    mirror("remove", "example")

@@ -31,7 +31,7 @@ def test_spec():


@pytest.mark.only_clingo("Known failure of the original concretizer")
def test_spec_concretizer_args(mutable_config, mutable_database):
def test_spec_concretizer_args(mutable_config, mutable_database, do_not_check_runtimes_on_reuse):
    """End-to-end test of CLI concretizer prefs.

    It's here to make sure that everything works from CLI

@@ -62,10 +62,16 @@ def test_multiple_conflicting_compiler_definitions(mutable_config):
    assert cmp.f77 == "f77"


def test_get_compiler_duplicates(config):
def test_get_compiler_duplicates(mutable_config, compiler_factory):
    # In this case there is only one instance of the specified compiler in
    # the test configuration (so it is not actually a duplicate), but the
    # method behaves the same.
    cnl_compiler = compiler_factory(spec="gcc@4.5.0", operating_system="CNL")
    # CNL compiler has no target attribute, and this is essential to make detection pass
    del cnl_compiler["compiler"]["target"]
    mutable_config.set(
        "compilers", [compiler_factory(spec="gcc@4.5.0", operating_system="SuSE11"), cnl_compiler]
    )
    cfg_file_to_duplicates = spack.compilers.get_compiler_duplicates(
        "gcc@4.5.0", spack.spec.ArchSpec("cray-CNL-xeon")
    )
@@ -75,13 +81,6 @@ def test_get_compiler_duplicates(config):
    assert len(duplicates) == 1


def test_all_compilers(config):
    all_compilers = spack.compilers.all_compilers()
    filtered = [x for x in all_compilers if str(x.spec) == "clang@=3.3"]
    filtered = [x for x in filtered if x.operating_system == "SuSE11"]
    assert len(filtered) == 1


@pytest.mark.parametrize(
    "input_version,expected_version,expected_error",
    [(None, None, "Couldn't get version for compiler /usr/bin/gcc"), ("4.9", "4.9", None)],
@@ -654,7 +653,25 @@ def test_xl_r_flags():
    "compiler_spec,expected_result",
    [("gcc@4.7.2", False), ("clang@3.3", False), ("clang@8.0.0", True)],
)
def test_detecting_mixed_toolchains(compiler_spec, expected_result, config):
def test_detecting_mixed_toolchains(
    compiler_spec, expected_result, mutable_config, compiler_factory
):
    mixed_c = compiler_factory(spec="clang@8.0.0", operating_system="debian6")
    mixed_c["compiler"]["paths"] = {
        "cc": "/path/to/clang-8",
        "cxx": "/path/to/clang++-8",
        "f77": "/path/to/gfortran-9",
        "fc": "/path/to/gfortran-9",
    }
    mutable_config.set(
        "compilers",
        [
            compiler_factory(spec="gcc@4.7.2", operating_system="debian6"),
            compiler_factory(spec="clang@3.3", operating_system="debian6"),
            mixed_c,
        ],
    )

    compiler = spack.compilers.compilers_for_spec(compiler_spec).pop()
    assert spack.compilers.is_mixed_toolchain(compiler) is expected_result

@@ -683,7 +700,7 @@ def test_raising_if_compiler_target_is_over_specific(config):
    ]
    arch_spec = spack.spec.ArchSpec(("linux", "ubuntu18.04", "haswell"))
    with spack.config.override("compilers", compilers):
        cfg = spack.compilers.get_compiler_config()
        cfg = spack.compilers.get_compiler_config(config)
        with pytest.raises(ValueError):
            spack.compilers.get_compilers(cfg, spack.spec.CompilerSpec("gcc@9.0.1"), arch_spec)

@@ -120,14 +120,16 @@ def current_host(request, monkeypatch):
    # is_preference is not empty if we want to supply the
    # preferred target via packages.yaml
    cpu, _, is_preference = request.param.partition("-")
    target = archspec.cpu.TARGETS[cpu]

    monkeypatch.setattr(spack.platforms.Test, "default", cpu)
    monkeypatch.setattr(spack.platforms.Test, "front_end", cpu)
    if not is_preference:
        target = archspec.cpu.TARGETS[cpu]
        monkeypatch.setattr(archspec.cpu, "host", lambda: target)
        yield target
    else:
        target = archspec.cpu.TARGETS["sapphirerapids"]
        monkeypatch.setattr(archspec.cpu, "host", lambda: target)
        with spack.config.override("packages:all", {"target": [cpu]}):
            yield target

@@ -237,10 +239,24 @@ def change(self, changes=None):
    yield _changing_pkg


@pytest.fixture()
def clang12_with_flags(compiler_factory):
    c = compiler_factory(spec="clang@12.2.0", operating_system="redhat6")
    c["compiler"]["flags"] = {"cflags": "-O3", "cxxflags": "-O3"}
    return c


@pytest.fixture()
def gcc11_with_flags(compiler_factory):
    c = compiler_factory(spec="gcc@11.1.0", operating_system="redhat6")
    c["compiler"]["flags"] = {"cflags": "-O0 -g", "cxxflags": "-O0 -g", "fflags": "-O0 -g"}
    return c


# This must use the mutable_config fixture because the test
# adjusting_default_target_based_on_compiler uses the current_host fixture,
# which changes the config.
@pytest.mark.usefixtures("mutable_config", "mock_packages")
@pytest.mark.usefixtures("mutable_config", "mock_packages", "do_not_check_runtimes_on_reuse")
class TestConcretize:
    def test_concretize(self, spec):
        check_concretize(spec)
@@ -329,18 +345,34 @@ def test_provides_handles_multiple_providers_of_same_version(self):
        assert Spec("builtin.mock.multi-provider-mpi@1.10.0") in providers
        assert Spec("builtin.mock.multi-provider-mpi@1.8.8") in providers

    def test_different_compilers_get_different_flags(self):
    def test_different_compilers_get_different_flags(
        self, mutable_config, clang12_with_flags, gcc11_with_flags
    ):
        """Tests that nodes get the flags of the associated compiler."""
        mutable_config.set("compilers", [clang12_with_flags, gcc11_with_flags])
        client = Spec(
            "cmake-client %gcc@11.1.0 platform=test os=fe target=fe"
            + " ^cmake %clang@12.2.0 platform=test os=fe target=fe"
        )
        client.concretize()
            " ^cmake %clang@12.2.0 platform=test os=fe target=fe"
        ).concretized()
        cmake = client["cmake"]
        assert set(client.compiler_flags["cflags"]) == set(["-O0", "-g"])
        assert set(cmake.compiler_flags["cflags"]) == set(["-O3"])
        assert set(client.compiler_flags["fflags"]) == set(["-O0", "-g"])
        assert set(client.compiler_flags["cflags"]) == {"-O0", "-g"}
        assert set(cmake.compiler_flags["cflags"]) == {"-O3"}
        assert set(client.compiler_flags["fflags"]) == {"-O0", "-g"}
        assert not set(cmake.compiler_flags["fflags"])

    @pytest.mark.regression("9908")
    def test_spec_flags_maintain_order(self, mutable_config, gcc11_with_flags):
        """Tests that Spack assembles flags in a consistent way (i.e. with the same ordering),
        for successive concretizations.
        """
        mutable_config.set("compilers", [gcc11_with_flags])
        spec_str = "libelf %gcc@11.1.0 os=redhat6"
        for _ in range(3):
            s = Spec(spec_str).concretized()
            assert all(
                s.compiler_flags[x] == ["-O0", "-g"] for x in ("cflags", "cxxflags", "fflags")
            )

    @pytest.mark.xfail(reason="Broken, needs to be fixed")
    def test_compiler_flags_from_compiler_and_dependent(self):
        client = Spec("cmake-client %clang@12.2.0 platform=test os=fe target=fe cflags==-g")
@@ -349,7 +381,8 @@ def test_compiler_flags_from_compiler_and_dependent(self):
        for spec in [client, cmake]:
            assert spec.compiler_flags["cflags"] == ["-O3", "-g"]

    def test_compiler_flags_differ_identical_compilers(self):
    def test_compiler_flags_differ_identical_compilers(self, mutable_config, clang12_with_flags):
        mutable_config.set("compilers", [clang12_with_flags])
        # Correct arch to use test compiler that has flags
        spec = Spec("a %clang@12.2.0 platform=test os=fe target=fe")

@@ -404,25 +437,20 @@ def test_compiler_inherited_upwards(self):
        for dep in spec.traverse():
            assert "%clang" in dep

    def test_architecture_inheritance(self):
        """test_architecture_inheritance is likely to fail with an
        UnavailableCompilerVersionError if the architecture is concretized
        incorrectly.
        """
        spec = Spec("cmake-client %gcc@11.1.0 os=fe ^ cmake")
        spec.concretize()
        assert spec["cmake"].architecture == spec.architecture

    @pytest.mark.only_clingo("Fixing the parser broke this test for the original concretizer")
    def test_architecture_deep_inheritance(self, mock_targets):
    def test_architecture_deep_inheritance(self, mock_targets, compiler_factory):
        """Make sure that indirect dependencies receive architecture
        information from the root even when partial architecture information
        is provided by an intermediate dependency.
        """
        spec_str = "mpileaks %gcc@4.5.0 os=CNL target=nocona" " ^dyninst os=CNL ^callpath os=CNL"
        spec = Spec(spec_str).concretized()
        for s in spec.traverse(root=False):
            assert s.architecture.target == spec.architecture.target
        cnl_compiler = compiler_factory(spec="gcc@4.5.0", operating_system="CNL")
        # CNL compiler has no target attribute, and this is essential to make detection pass
        del cnl_compiler["compiler"]["target"]
        with spack.config.override("compilers", [cnl_compiler]):
            spec_str = "mpileaks %gcc@4.5.0 os=CNL target=nocona ^dyninst os=CNL ^callpath os=CNL"
            spec = Spec(spec_str).concretized()
            for s in spec.traverse(root=False):
                assert s.architecture.target == spec.architecture.target

    def test_compiler_flags_from_user_are_grouped(self):
        spec = Spec('a%gcc cflags="-O -foo-flag foo-val" platform=test')
@@ -588,7 +616,7 @@ def test_my_dep_depends_on_provider_of_my_virtual_dep(self):
        spec.normalize()
        spec.concretize()

    @pytest.mark.parametrize("compiler_str", ["clang", "gcc", "gcc@10.2.1", "clang@:12.0.0"])
    @pytest.mark.parametrize("compiler_str", ["clang", "gcc", "gcc@10.2.1", "clang@:15.0.0"])
    def test_compiler_inheritance(self, compiler_str):
        spec_str = "mpileaks %{0}".format(compiler_str)
        spec = Spec(spec_str).concretized()
@@ -848,18 +876,21 @@ def test_concretize_anonymous_dep(self, spec_str):
    @pytest.mark.parametrize(
        "spec_str,expected_str",
        [
            # Unconstrained versions select default compiler (gcc@4.5.0)
            # Unconstrained versions select default compiler (gcc@10.2.1)
            ("bowtie@1.4.0", "%gcc@10.2.1"),
            # Version with conflicts and no valid gcc select another compiler
            ("bowtie@1.3.0", "%clang@12.0.0"),
            ("bowtie@1.3.0", "%clang@15.0.0"),
            # If a higher gcc is available still prefer that
            ("bowtie@1.2.2 os=redhat6", "%gcc@11.1.0"),
        ],
    )
    @pytest.mark.only_clingo("Original concretizer cannot work around conflicts")
    def test_compiler_conflicts_in_package_py(self, spec_str, expected_str):
        s = Spec(spec_str).concretized()
        assert s.satisfies(expected_str)
    def test_compiler_conflicts_in_package_py(
        self, spec_str, expected_str, clang12_with_flags, gcc11_with_flags
    ):
        with spack.config.override("compilers", [clang12_with_flags, gcc11_with_flags]):
            s = Spec(spec_str).concretized()
        assert s.satisfies(expected_str)

    @pytest.mark.parametrize(
        "spec_str,expected,unexpected",
@@ -983,7 +1014,7 @@ def test_working_around_conflicting_defaults(self, spec_str, expected):
        [("cmake", ["%clang"]), ("cmake %gcc", ["%gcc"]), ("cmake %clang", ["%clang"])],
    )
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_external_package_and_compiler_preferences(self, spec_str, expected):
    def test_external_package_and_compiler_preferences(self, spec_str, expected, mutable_config):
        packages_yaml = {
            "all": {"compiler": ["clang", "gcc"]},
            "cmake": {
@@ -991,7 +1022,7 @@ def test_external_package_and_compiler_preferences(self, spec_str, expected):
                "buildable": False,
            },
        }
        spack.config.set("packages", packages_yaml)
        mutable_config.set("packages", packages_yaml)
        s = Spec(spec_str).concretized()

        assert s.external
@@ -1157,16 +1188,18 @@ def test_activating_test_dependencies(self, spec_str, tests_arg, with_dep, witho

    @pytest.mark.regression("20019")
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_compiler_match_is_preferred_to_newer_version(self):
    def test_compiler_match_is_preferred_to_newer_version(self, compiler_factory):
        # This spec depends on openblas. Openblas has a conflict
        # that doesn't allow newer versions with gcc@4.4.0. Check
        # that an old version of openblas is selected, rather than
        # a different compiler for just that node.
        spec_str = "simple-inheritance+openblas %gcc@10.1.0 os=redhat6"
        s = Spec(spec_str).concretized()

        assert "openblas@0.2.15" in s
        assert s["openblas"].satisfies("%gcc@10.1.0")
        with spack.config.override(
            "compilers", [compiler_factory(spec="gcc@10.1.0", operating_system="redhat6")]
        ):
            spec_str = "simple-inheritance+openblas %gcc@10.1.0 os=redhat6"
            s = Spec(spec_str).concretized()
            assert "openblas@0.2.15" in s
            assert s["openblas"].satisfies("%gcc@10.1.0")

    @pytest.mark.regression("19981")
    def test_target_ranges_in_conflicts(self):
@@ -1191,7 +1224,10 @@ def test_variant_not_default(self):

    @pytest.mark.regression("20055")
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_custom_compiler_version(self):
    def test_custom_compiler_version(self, mutable_config, compiler_factory):
        mutable_config.set(
            "compilers", [compiler_factory(spec="gcc@10foo", operating_system="redhat6")]
        )
        s = Spec("a %gcc@10foo os=redhat6").concretized()
        assert "%gcc@10foo" in s

@@ -1314,6 +1350,22 @@ def test_reuse_installed_packages_when_package_def_changes(
        # Structure and package hash will be different without reuse
        assert root.dag_hash() != new_root_without_reuse.dag_hash()

    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    @pytest.mark.regression("43663")
    def test_no_reuse_when_variant_condition_does_not_hold(self, mutable_database, mock_packages):
        spack.config.set("concretizer:reuse", True)

        # Install a spec for which the `version_based` variant condition does not hold
        old = Spec("conditional-variant-pkg @1").concretized()
        old.package.do_install(fake=True, explicit=True)

        # Then explicitly require a spec with `+version_based`, which shouldn't reuse previous spec
        new1 = Spec("conditional-variant-pkg +version_based").concretized()
        assert new1.satisfies("@2 +version_based")

        new2 = Spec("conditional-variant-pkg +two_whens").concretized()
        assert new2.satisfies("@2 +two_whens +version_based")

    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_reuse_with_flags(self, mutable_database, mutable_config):
        spack.config.set("concretizer:reuse", True)
@@ -1391,16 +1443,21 @@ def test_external_with_non_default_variant_as_dependency(self):
            ("mpileaks%gcc@10.2.1 platform=test os=redhat6", "os=redhat6"),
        ],
    )
    def test_os_selection_when_multiple_choices_are_possible(self, spec_str, expected_os):
        s = Spec(spec_str).concretized()

        for node in s.traverse():
            assert node.satisfies(expected_os)
    def test_os_selection_when_multiple_choices_are_possible(
        self, spec_str, expected_os, compiler_factory
    ):
        # GCC 10.2.1 is defined both for debian and for redhat
        with spack.config.override(
            "compilers", [compiler_factory(spec="gcc@10.2.1", operating_system="redhat6")]
        ):
            s = Spec(spec_str).concretized()
            for node in s.traverse():
                assert node.satisfies(expected_os)

    @pytest.mark.regression("22718")
    @pytest.mark.parametrize(
        "spec_str,expected_compiler",
        [("mpileaks", "%gcc@10.2.1"), ("mpileaks ^mpich%clang@12.0.0", "%clang@12.0.0")],
        [("mpileaks", "%gcc@10.2.1"), ("mpileaks ^mpich%clang@15.0.0", "%clang@15.0.0")],
    )
    def test_compiler_is_unique(self, spec_str, expected_compiler):
        s = Spec(spec_str).concretized()
@@ -1688,7 +1745,7 @@ def test_reuse_with_unknown_package_dont_raise(self, tmpdir, temporary_store, mo
        [
            (["libelf", "libelf@0.8.10"], 1),
            (["libdwarf%gcc", "libelf%clang"], 2),
            (["libdwarf%gcc", "libdwarf%clang"], 4),
            (["libdwarf%gcc", "libdwarf%clang"], 3),
            (["libdwarf^libelf@0.8.12", "libdwarf^libelf@0.8.13"], 4),
            (["hdf5", "zmpi"], 3),
            (["hdf5", "mpich"], 2),
@@ -1755,6 +1812,22 @@ def test_best_effort_coconcretize_preferences(self, specs, expected_spec, occura
            counter += 1
        assert counter == occurances, concrete_specs

    @pytest.mark.only_clingo("Original concretizer cannot concretize in rounds")
    def test_solve_in_rounds_all_unsolved(self, monkeypatch, mock_packages, config):
        specs = [Spec(x) for x in ["libdwarf%gcc", "libdwarf%clang"]]
        solver = spack.solver.asp.Solver()
        solver.reuse = False

        simulate_unsolved_property = list((x, None) for x in specs)
        monkeypatch.setattr(spack.solver.asp.Result, "unsolved_specs", simulate_unsolved_property)
        monkeypatch.setattr(spack.solver.asp.Result, "specs", list())

        with pytest.raises(
            spack.solver.asp.InternalConcretizerError,
            match="a subset of input specs could not be solved for",
        ):
            list(solver.solve_in_rounds(specs))

    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_coconcretize_reuse_and_virtuals(self):
        reusable_specs = []
@@ -1822,20 +1895,36 @@ def test_version_weight_and_provenance(self):

    @pytest.mark.regression("31169")
    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_not_reusing_incompatible_os_or_compiler(self):
    def test_not_reusing_incompatible_os(self):
        root_spec = Spec("b")
        s = root_spec.concretized()
        wrong_compiler, wrong_os = s.copy(), s.copy()
        wrong_compiler.compiler = spack.spec.CompilerSpec("gcc@12.1.0")
        wrong_os = s.copy()
        wrong_os.architecture = spack.spec.ArchSpec("test-ubuntu2204-x86_64")
        reusable_specs = [wrong_compiler, wrong_os]
        with spack.config.override("concretizer:reuse", True):
            solver = spack.solver.asp.Solver()
            setup = spack.solver.asp.SpackSolverSetup()
            result, _, _ = solver.driver.solve(setup, [root_spec], reuse=[wrong_os])
        concrete_spec = result.specs[0]
        assert concrete_spec.satisfies("os={}".format(s.architecture.os))

    @pytest.mark.only_clingo("Use case not supported by the original concretizer")
    def test_reuse_succeeds_with_config_compatible_os(self):
        root_spec = Spec("b")
        s = root_spec.concretized()
        other_os = s.copy()
        mock_os = "ubuntu2204"
        other_os.architecture = spack.spec.ArchSpec(
            "test-{os}-{target}".format(os=mock_os, target=str(s.architecture.target))
        )
        reusable_specs = [other_os]
        overrides = {"concretizer": {"reuse": True, "os_compatible": {s.os: [mock_os]}}}
        custom_scope = spack.config.InternalConfigScope("concretize_override", overrides)
        with spack.config.override(custom_scope):
            solver = spack.solver.asp.Solver()
            setup = spack.solver.asp.SpackSolverSetup()
            result, _, _ = solver.driver.solve(setup, [root_spec], reuse=reusable_specs)
        concrete_spec = result.specs[0]
        assert concrete_spec.satisfies("%{}".format(s.compiler))
        assert concrete_spec.satisfies("os={}".format(s.architecture.os))
        assert concrete_spec.satisfies("os={}".format(other_os.architecture.os))

    def test_git_hash_assigned_version_is_preferred(self):
        hash = "a" * 40
@@ -2286,6 +2375,29 @@ def test_select_lower_priority_package_from_repository_stack(
        assert s[name].concrete
        assert s[name].namespace == namespace

    @pytest.mark.only_clingo("Old concretizer cannot reuse")
    def test_reuse_specs_from_non_available_compilers(self, mutable_config, mutable_database):
        """Tests that we can reuse specs with compilers that are not configured locally."""
        # All the specs in the mutable DB have been compiled with %gcc@=10.2.1
        specs = mutable_database.query_local()
        assert all(s.satisfies("%gcc@=10.2.1") for s in specs)

        spack.compilers.remove_compiler_from_config("gcc@=10.2.1")
        assert not spack.compilers.compilers_for_spec("gcc@=10.2.1")
        mutable_config.set("concretizer:reuse", True)

        # mpileaks is in the database, it will be reused with gcc@=10.2.1
        root = Spec("mpileaks").concretized()
        for s in root.traverse():
            assert s.satisfies("%gcc@10.2.1")

        # fftw is not in the database, therefore the root will be compiled with gcc@=9.4.0,
        # while the mpi is reused from the database and is compiled with gcc@=10.2.1
        root = Spec("fftw").concretized()
        assert root.satisfies("%gcc@=9.4.0")
        for s in root.traverse(root=False):
            assert s.satisfies("%gcc@10.2.1")


@pytest.fixture()
def duplicates_test_repository():
@@ -2424,6 +2536,29 @@ def test_no_multiple_solutions_with_different_edges_same_nodes(self):
        assert len(edges) == 1
        assert edges[0].spec.satisfies("@=60")

    @pytest.mark.regression("43647")
    def test_specifying_different_versions_build_deps(self):
        """Tests that we can concretize a spec with nodes using the same build
        dependency pinned at different versions, when the constraint is specified
        in the root spec.

        o hdf5@1.0
        |\
        o | pinned-gmake@1.0
        o | gmake@3.0
         /
        o gmake@4.1

        """
        hdf5_str = "hdf5@1.0 ^gmake@4.1"
        pinned_str = "pinned-gmake@1.0 ^gmake@3.0"
        input_specs = [Spec(hdf5_str), Spec(pinned_str)]
        solver = spack.solver.asp.Solver()
        result = solver.solve(input_specs)

        assert any(x.satisfies(hdf5_str) for x in result.specs)
        assert any(x.satisfies(pinned_str) for x in result.specs)


@pytest.mark.parametrize(
    "v_str,v_opts,checksummed",
@@ -2613,3 +2748,28 @@ def test_reusable_externals_different_spec(mock_packages, tmpdir):
        {"mpich": {"externals": [{"spec": "mpich@4.1 +debug", "prefix": tmpdir.strpath}]}},
        local=False,
    )


def test_concretization_version_order():
    versions = [
        (Version("develop"), {}),
        (Version("1.0"), {}),
        (Version("2.0"), {"deprecated": True}),
        (Version("1.1"), {}),
        (Version("1.1alpha1"), {}),
        (Version("0.9"), {"preferred": True}),
    ]
    result = [
        v
        for v, _ in sorted(
            versions, key=spack.solver.asp._concretization_version_order, reverse=True
        )
    ]
    assert result == [
        Version("0.9"),  # preferred
        Version("1.1"),  # latest non-deprecated final version
        Version("1.0"),  # latest non-deprecated final version
        Version("1.1alpha1"),  # prereleases
        Version("develop"),  # likely development version
        Version("2.0"),  # deprecated
    ]

@@ -7,6 +7,8 @@

import pytest

import archspec.cpu

import spack.paths
import spack.repo
import spack.solver.asp
@@ -24,9 +26,7 @@ def _concretize_with_reuse(*, root_str, reused_str):
    reused_spec = spack.spec.Spec(reused_str).concretized()
    setup = spack.solver.asp.SpackSolverSetup(tests=False)
    driver = spack.solver.asp.PyclingoDriver()
    result, _, _ = driver.solve(
        setup, [spack.spec.Spec(f"{root_str} ^{reused_str}")], reuse=[reused_spec]
    )
    result, _, _ = driver.solve(setup, [spack.spec.Spec(f"{root_str}")], reuse=[reused_spec])
    root = result.specs[0]
    return root, reused_spec

@@ -47,7 +47,7 @@ def enable_runtimes():


def test_correct_gcc_runtime_is_injected_as_dependency(runtime_repo):
    s = spack.spec.Spec("a%gcc@10.2.1 ^b%gcc@4.5.0").concretized()
    s = spack.spec.Spec("a%gcc@10.2.1 ^b%gcc@9.4.0").concretized()
    a, b = s["a"], s["b"]

    # Both a and b should depend on the same gcc-runtime directly
@@ -78,9 +78,28 @@ def test_external_nodes_do_not_have_runtimes(runtime_repo, mutable_config, tmp_p
    "root_str,reused_str,expected,nruntime",
    [
        # The reused runtime is older than we need, thus we'll add a more recent one for a
        ("a%gcc@10.2.1", "b%gcc@4.5.0", {"a": "gcc-runtime@10.2.1", "b": "gcc-runtime@4.5.0"}, 2),
        ("a%gcc@10.2.1", "b%gcc@9.4.0", {"a": "gcc-runtime@10.2.1", "b": "gcc-runtime@9.4.0"}, 2),
        # The root is compiled with an older compiler, thus we'll reuse the runtime from b
        ("a%gcc@4.5.0", "b%gcc@10.2.1", {"a": "gcc-runtime@10.2.1", "b": "gcc-runtime@10.2.1"}, 1),
        ("a%gcc@9.4.0", "b%gcc@10.2.1", {"a": "gcc-runtime@10.2.1", "b": "gcc-runtime@10.2.1"}, 1),
        # Same as before, but tests that we can reuse from a more generic target
        pytest.param(
            "a%gcc@9.4.0",
            "b%gcc@10.2.1 target=x86_64",
            {"a": "gcc-runtime@10.2.1 target=x86_64", "b": "gcc-runtime@10.2.1 target=x86_64"},
            1,
            marks=pytest.mark.skipif(
                str(archspec.cpu.host().family) != "x86_64", reason="test data is x86_64 specific"
            ),
        ),
        pytest.param(
            "a%gcc@10.2.1",
            "b%gcc@9.4.0 target=x86_64",
            {"a": "gcc-runtime@10.2.1 target=x86_64", "b": "gcc-runtime@9.4.0 target=x86_64"},
            2,
            marks=pytest.mark.skipif(
                str(archspec.cpu.host().family) != "x86_64", reason="test data is x86_64 specific"
            ),
        ),
    ],
)
def test_reusing_specs_with_gcc_runtime(root_str, reused_str, expected, nruntime, runtime_repo):
@@ -104,8 +123,8 @@ def test_reusing_specs_with_gcc_runtime(root_str, reused_str, expected, nruntime
    [
        # Ensure that, whether we have multiple runtimes in the DAG or not,
        # we always link only the latest version
        ("a%gcc@10.2.1", "b%gcc@4.5.0", ["gcc-runtime@10.2.1"], ["gcc-runtime@4.5.0"]),
        ("a%gcc@4.5.0", "b%gcc@10.2.1", ["gcc-runtime@10.2.1"], ["gcc-runtime@4.5.0"]),
        ("a%gcc@10.2.1", "b%gcc@9.4.0", ["gcc-runtime@10.2.1"], ["gcc-runtime@9.4.0"]),
        ("a%gcc@9.4.0", "b%gcc@10.2.1", ["gcc-runtime@10.2.1"], ["gcc-runtime@9.4.0"]),
    ],
)
def test_views_can_handle_duplicate_runtime_nodes(

@@ -105,7 +105,7 @@ def test_preferred_variants_from_wildcard(self):

    @pytest.mark.parametrize(
        "compiler_str,spec_str",
        [("gcc@=4.5.0", "mpileaks"), ("clang@=12.0.0", "mpileaks"), ("gcc@=4.5.0", "openmpi")],
        [("gcc@=9.4.0", "mpileaks"), ("clang@=15.0.0", "mpileaks"), ("gcc@=9.4.0", "openmpi")],
    )
    def test_preferred_compilers(self, compiler_str, spec_str):
        """Test preferred compilers are applied correctly"""

@@ -977,7 +977,7 @@ def test_single_file_scope(config, env_yaml):
    # from the single-file config
    assert spack.config.get("config:verify_ssl") is False
    assert spack.config.get("config:dirty") is False
    assert spack.config.get("packages:all:compiler") == ["gcc@4.5.3"]
    assert spack.config.get("packages:all:compiler") == ["gcc@4.5.3", "gcc", "clang"]

    # from the lower config scopes
    assert spack.config.get("config:checksum") is True

@@ -22,6 +22,7 @@
import py
import pytest

import archspec.cpu
import archspec.cpu.microarchitecture
import archspec.cpu.schema

@@ -54,6 +55,7 @@
import spack.util.gpg
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
import spack.version
from spack.fetch_strategy import URLFetchStrategy
from spack.util.pattern import Bunch

@@ -710,7 +712,9 @@ def configuration_dir(tmpdir_factory, linux_os):
    t.write(content)

    compilers_yaml = test_config.join("compilers.yaml")
    content = "".join(compilers_yaml.read()).format(linux_os)
    content = "".join(compilers_yaml.read()).format(
        linux_os=linux_os, target=str(archspec.cpu.host().family)
    )
    t = tmpdir.join("site", "compilers.yaml")
    t.write(content)
    yield tmpdir
@@ -787,6 +791,7 @@ def no_compilers_yaml(mutable_config):
    compilers_yaml = os.path.join(local_config.path, "compilers.yaml")
    if os.path.exists(compilers_yaml):
        os.remove(compilers_yaml)
    return mutable_config


@pytest.fixture()
@@ -1436,6 +1441,15 @@ def mock_git_repository(git, tmpdir_factory):
    yield t


@pytest.fixture(scope="function")
def mock_git_test_package(mock_git_repository, mutable_mock_repo, monkeypatch):
    # install a fake git version in the package class
    pkg_class = spack.repo.PATH.get_pkg_class("git-test")
    monkeypatch.delattr(pkg_class, "git")
    monkeypatch.setitem(pkg_class.versions, spack.version.Version("git"), mock_git_repository.url)
    return pkg_class


@pytest.fixture(scope="session")
def mock_hg_repository(tmpdir_factory):
    """Creates a very simple hg repository with two commits."""
@@ -1986,3 +2000,36 @@ def create_test_repo(tmpdir, pkg_name_content_tuples):
        f.write(pkg_str)

    return spack.repo.Repo(repo_path)


@pytest.fixture()
def compiler_factory():
    """Factory for a compiler dict, taking a spec and an OS as arguments."""

    def _factory(*, spec, operating_system):
        return {
            "compiler": {
                "spec": spec,
                "operating_system": operating_system,
                "paths": {"cc": "/path/to/cc", "cxx": "/path/to/cxx", "f77": None, "fc": None},
                "modules": [],
                "target": str(archspec.cpu.host().family),
            }
        }

    return _factory


@pytest.fixture()
def host_architecture_str():
    """Returns the broad architecture family (x86_64, aarch64, etc.)"""
    return str(archspec.cpu.host().family)


def _true(x):
    return True


@pytest.fixture()
def do_not_check_runtimes_on_reuse(monkeypatch):
    monkeypatch.setattr(spack.solver.asp, "_has_runtime_dependencies", _true)

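For orientation, here is a minimal sketch of how the new `compiler_factory` fixture above is meant to be combined with `mutable_config`, mirroring the pattern used in the concretization hunks earlier in this diff. The test name and the chosen spec are illustrative, not taken from the diff; `Spec` is assumed to be imported as in those tests.

    def test_with_mock_compiler(mutable_config, compiler_factory):
        # Register a single mock compiler entry; the factory fills in fake
        # paths, an empty modules list, and the host target family.
        entry = compiler_factory(spec="gcc@10.1.0", operating_system="redhat6")
        mutable_config.set("compilers", [entry])
        # The mock compiler can now be requested like any configured one.
        s = Spec("mpileaks %gcc@10.1.0 os=redhat6").concretized()
        assert s.satisfies("%gcc@10.1.0")
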
@@ -1,353 +1,41 @@
compilers:
- compiler:
    spec: clang@3.3
    operating_system: {0.name}{0.version}
    spec: gcc@=9.4.0
    operating_system: {linux_os.name}{linux_os.version}
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: None
      fc: None
    modules: []
    target: {target}
- compiler:
    spec: gcc@=9.4.0
    operating_system: redhat6
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: None
      fc: None
    modules: []
    target: {target}
- compiler:
    spec: clang@=15.0.0
    operating_system: {linux_os.name}{linux_os.version}
    paths:
      cc: /path/to/clang
      cxx: /path/to/clang++
      f77: None
      fc: None
    modules: 'None'
    target: x86_64
    modules: []
    target: {target}
- compiler:
    spec: gcc@4.5.0
    operating_system: {0.name}{0.version}
    spec: gcc@=10.2.1
    operating_system: {linux_os.name}{linux_os.version}
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: None
      fc: None
    modules: 'None'
    target: x86_64
- compiler:
    spec: gcc@4.5.0
    operating_system: redhat6
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: None
      fc: None
    modules: 'None'
    target: x86_64
- compiler:
    spec: clang@3.3
    operating_system: CNL
    paths:
      cc: /path/to/clang
      cxx: /path/to/clang++
      f77: None
      fc: None
    modules: 'None'
- compiler:
    spec: clang@3.3
    operating_system: SuSE11
    paths:
      cc: /path/to/clang
      cxx: /path/to/clang++
      f77: None
      fc: None
    modules: 'None'
    target: x86_64
- compiler:
    spec: clang@3.3
    operating_system: yosemite
    paths:
      cc: /path/to/clang
      cxx: /path/to/clang++
      f77: None
      fc: None
    modules: 'None'
    target: x86_64
- compiler:
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: /path/to/gfortran
      fc: /path/to/gfortran
    operating_system: CNL
    spec: gcc@4.5.0
    modules: 'None'
- compiler:
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: /path/to/gfortran
      fc: /path/to/gfortran
    operating_system: SuSE11
    spec: gcc@4.5.0
    modules: 'None'
    target: x86_64
- compiler:
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: /path/to/gfortran
      fc: /path/to/gfortran
    operating_system: yosemite
    spec: gcc@4.5.0
    modules: 'None'
    target: x86_64
- compiler:
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: /path/to/gfortran
      fc: /path/to/gfortran
    operating_system: elcapitan
    spec: gcc@4.5.0
    modules: 'None'
    target: x86_64
- compiler:
    spec: clang@3.3
    operating_system: elcapitan
    paths:
      cc: /path/to/clang
      cxx: /path/to/clang++
      f77: None
      fc: None
    modules: 'None'
    target: x86_64
- compiler:
    spec: gcc@4.7.2
    operating_system: redhat6
    paths:
      cc: /path/to/gcc472
      cxx: /path/to/g++472
      f77: /path/to/gfortran472
      fc: /path/to/gfortran472
    flags:
      cflags: -O0 -g
      cxxflags: -O0 -g
      fflags: -O0 -g
    modules: 'None'
    target: x86_64
- compiler:
    spec: gcc@4.4.0
    operating_system: redhat6
    paths:
      cc: /path/to/gcc440
      cxx: /path/to/g++440
      f77: /path/to/gfortran440
      fc: /path/to/gfortran440
    modules: 'None'
    target: x86_64
- compiler:
    spec: clang@3.5
    operating_system: redhat6
    paths:
      cc: /path/to/clang35
      cxx: /path/to/clang++35
      f77: None
      fc: None
    flags:
      cflags: -O3
      cxxflags: -O3
    modules: 'None'
    target: x86_64
- compiler:
    spec: clang@8.0.0
    operating_system: redhat7
    paths:
      cc: /path/to/clang-8
      cxx: /path/to/clang++-8
      f77: /path/to/gfortran-9
      fc: /path/to/gfortran-9
    flags:
      cflags: -O3
      cxxflags: -O3
    modules: 'None'
    target: x86_64
- compiler:
    spec: apple-clang@9.1.0
    operating_system: elcapitan
    paths:
      cc: /path/to/clang
      cxx: /path/to/clang++
      f77: None
      fc: None
    modules: 'None'
    target: x86_64
- compiler:
    spec: gcc@10foo
    operating_system: redhat6
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: /path/to/gfortran
      fc: /path/to/gfortran
    modules: 'None'
    target: x86_64
- compiler:
    spec: gcc@4.4.0-special
    operating_system: redhat6
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: /path/to/gfortran
      fc: /path/to/gfortran
    modules: 'None'
    target: x86_64
- compiler:
    spec: clang@12.0.0
    operating_system: {0.name}{0.version}
    paths:
      cc: /path/to/clang
      cxx: /path/to/clang++
      f77: None
      fc: None
    modules: 'None'
    target: aarch64
- compiler:
    spec: gcc@10.2.1
    operating_system: {0.name}{0.version}
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: None
      fc: None
    modules: 'None'
    target: aarch64
- compiler:
    spec: clang@12.0.0
    operating_system: redhat6
    paths:
      cc: /path/to/clang
      cxx: /path/to/clang++
      f77: None
      fc: None
    modules: 'None'
    target: aarch64
- compiler:
    spec: gcc@10.2.1
    operating_system: redhat6
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: None
      fc: None
    modules: 'None'
    target: aarch64
- compiler:
    spec: gcc@10.1.0
    operating_system: redhat6
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: None
      fc: None
    modules: 'None'
    target: aarch64
- compiler:
    spec: gcc@11.1.0
    operating_system: redhat6
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: None
      fc: None
    flags:
      cflags: -O0 -g
      cxxflags: -O0 -g
      fflags: -O0 -g
    modules: 'None'
    target: aarch64
- compiler:
    spec: clang@12.2.0
    operating_system: redhat6
    paths:
      cc: /path/to/clang35
      cxx: /path/to/clang++35
      f77: None
      fc: None
    flags:
      cflags: -O3
      cxxflags: -O3
    modules: 'None'
    target: aarch64
- compiler:
    spec: gcc@10foo
    operating_system: redhat6
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: /path/to/gfortran
      fc: /path/to/gfortran
    modules: 'None'
    target: aarch64
- compiler:
    spec: clang@12.0.0
    operating_system: {0.name}{0.version}
    paths:
      cc: /path/to/clang
      cxx: /path/to/clang++
      f77: None
      fc: None
    modules: 'None'
    target: x86_64
- compiler:
    spec: gcc@10.2.1
    operating_system: {0.name}{0.version}
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: None
      fc: None
    modules: 'None'
    target: x86_64
- compiler:
    spec: clang@12.0.0
    operating_system: redhat6
    paths:
      cc: /path/to/clang
      cxx: /path/to/clang++
      f77: None
      fc: None
    modules: 'None'
    target: x86_64
- compiler:
    spec: gcc@10.2.1
    operating_system: redhat6
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: None
      fc: None
    modules: 'None'
    target: x86_64
- compiler:
    spec: gcc@10.1.0
    operating_system: redhat6
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: None
      fc: None
    modules: 'None'
    target: x86_64
- compiler:
    spec: gcc@11.1.0
    operating_system: redhat6
    paths:
      cc: /path/to/gcc
      cxx: /path/to/g++
      f77: None
      fc: None
    flags:
      cflags: -O0 -g
      cxxflags: -O0 -g
      fflags: -O0 -g
    modules: 'None'
    target: x86_64
- compiler:
    spec: clang@12.2.0
    operating_system: redhat6
    paths:
      cc: /path/to/clang35
      cxx: /path/to/clang++35
      f77: None
      fc: None
    flags:
      cflags: -O3
      cxxflags: -O3
    modules: 'None'
    target: x86_64
    modules: []
    target: {target}

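A note on the rewritten mock `compilers.yaml` above: the `{linux_os.name}{linux_os.version}` and `{target}` tokens are `str.format` placeholders (the old file used positional `{0.name}{0.version}` fields and hard-coded `x86_64`/`aarch64` targets). They are filled in by the `configuration_dir` fixture from the conftest.py hunk earlier in this diff, roughly as follows (names as in that fixture):

    content = "".join(compilers_yaml.read()).format(
        linux_os=linux_os, target=str(archspec.cpu.host().family)
    )
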
@@ -10,6 +10,7 @@ config:
  source_cache: $user_cache_path/source
  misc_cache: $user_cache_path/cache
  verify_ssl: true
  ssl_certs: $SSL_CERT_FILE
  checksum: true
  dirty: false
  concretizer: {0}

@@ -1,5 +1,6 @@
packages:
  all:
    compiler: [gcc, clang]
    providers:
      mpi: [openmpi, mpich, zmpi]
      lapack: [openblas-with-lapack]
@@ -16,7 +17,7 @@ packages:
  externalvirtual:
    buildable: False
    externals:
    - spec: externalvirtual@2.0%clang@12.0.0
    - spec: externalvirtual@2.0%clang@15.0.0
      prefix: /path/to/external_virtual_clang
    - spec: externalvirtual@1.0%gcc@10.2.1
      prefix: /path/to/external_virtual_gcc

@@ -4,7 +4,7 @@ lmod:
  hash_length: 0

  core_compilers:
  - 'clang@12.0.0'
  - 'clang@15.0.0'

  core_specs:
  - 'mpich@3.0.1'

@@ -2,4 +2,4 @@ enable:
- lmod
lmod:
  core_compilers:
  - 'clang@12.0.0'
  - 'clang@15.0.0'

Some files were not shown because too many files have changed in this diff.