Compare commits
406 Commits
packages/m...v0.22.0
38  .github/workflows/audit.yaml (vendored)
@@ -17,33 +17,51 @@ concurrency:
|
||||
jobs:
|
||||
# Run audits on all the packages in the built-in repository
|
||||
package-audits:
|
||||
runs-on: ${{ matrix.operating_system }}
|
||||
runs-on: ${{ matrix.system.os }}
|
||||
strategy:
|
||||
matrix:
|
||||
operating_system: ["ubuntu-latest", "macos-latest"]
|
||||
system:
|
||||
- { os: windows-latest, shell: 'powershell Invoke-Expression -Command "./share/spack/qa/windows_test_setup.ps1"; {0}' }
|
||||
- { os: ubuntu-latest, shell: bash }
|
||||
- { os: macos-latest, shell: bash }
|
||||
defaults:
|
||||
run:
|
||||
shell: ${{ matrix.system.shell }}
|
||||
steps:
|
||||
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
|
||||
- uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
|
||||
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
|
||||
with:
|
||||
python-version: ${{inputs.python_version}}
|
||||
- name: Install Python packages
|
||||
run: |
|
||||
pip install --upgrade pip setuptools pytest coverage[toml]
|
||||
- name: Setup for Windows run
|
||||
if: runner.os == 'Windows'
|
||||
run: |
|
||||
python -m pip install --upgrade pywin32
|
||||
- name: Package audits (with coverage)
|
||||
if: ${{ inputs.with_coverage == 'true' }}
|
||||
if: ${{ inputs.with_coverage == 'true' && runner.os != 'Windows' }}
|
||||
run: |
|
||||
. share/spack/setup-env.sh
|
||||
coverage run $(which spack) audit packages
|
||||
coverage run $(which spack) audit externals
|
||||
coverage run $(which spack) -d audit externals
|
||||
coverage combine
|
||||
coverage xml
|
||||
- name: Package audits (without coverage)
|
||||
if: ${{ inputs.with_coverage == 'false' }}
|
||||
if: ${{ inputs.with_coverage == 'false' && runner.os != 'Windows' }}
|
||||
run: |
|
||||
. share/spack/setup-env.sh
|
||||
$(which spack) audit packages
|
||||
$(which spack) audit externals
|
||||
- uses: codecov/codecov-action@c16abc29c95fcf9174b58eb7e1abf4c866893bc8
|
||||
. share/spack/setup-env.sh
|
||||
spack -d audit packages
|
||||
spack -d audit externals
|
||||
- name: Package audits (without coverage)
|
||||
if: ${{ runner.os == 'Windows' }}
|
||||
run: |
|
||||
. share/spack/setup-env.sh
|
||||
spack -d audit packages
|
||||
./share/spack/qa/validate_last_exit.ps1
|
||||
spack -d audit externals
|
||||
./share/spack/qa/validate_last_exit.ps1
|
||||
- uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
|
||||
if: ${{ inputs.with_coverage == 'true' }}
|
||||
with:
|
||||
flags: unittests,audits
|
||||
|
5  .github/workflows/bootstrap-test.sh (vendored)

```
@@ -1,7 +1,8 @@
#!/bin/bash
set -ex
set -e
source share/spack/setup-env.sh
$PYTHON bin/spack bootstrap disable github-actions-v0.4
$PYTHON bin/spack bootstrap disable spack-install
$PYTHON bin/spack -d solve zlib
$PYTHON bin/spack $SPACK_FLAGS solve zlib
tree $BOOTSTRAP/store
exit 0
```
344  .github/workflows/bootstrap.yml (vendored)
@@ -13,118 +13,22 @@ concurrency:
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
fedora-clingo-sources:
|
||||
distros-clingo-sources:
|
||||
runs-on: ubuntu-latest
|
||||
container: "fedora:latest"
|
||||
container: ${{ matrix.image }}
|
||||
strategy:
|
||||
matrix:
|
||||
image: ["fedora:latest", "opensuse/leap:latest"]
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
- name: Setup Fedora
|
||||
if: ${{ matrix.image == 'fedora:latest' }}
|
||||
run: |
|
||||
dnf install -y \
|
||||
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
|
||||
bzip2 curl file gcc-c++ gcc gcc-gfortran git gzip \
|
||||
make patch unzip which xz python3 python3-devel tree \
|
||||
cmake bison bison-devel libstdc++-static
|
||||
- name: Checkout
|
||||
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup non-root user
|
||||
run: |
|
||||
# See [1] below
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
useradd spack-test && mkdir -p ~spack-test
|
||||
chown -R spack-test . ~spack-test
|
||||
- name: Setup repo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
git --version
|
||||
. .github/workflows/setup_git.sh
|
||||
- name: Bootstrap clingo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack bootstrap disable github-actions-v0.5
|
||||
spack bootstrap disable github-actions-v0.4
|
||||
spack external find cmake bison
|
||||
spack -d solve zlib
|
||||
tree ~/.spack/bootstrap/store/
|
||||
|
||||
ubuntu-clingo-sources:
|
||||
runs-on: ubuntu-latest
|
||||
container: "ubuntu:latest"
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
env:
|
||||
DEBIAN_FRONTEND: noninteractive
|
||||
run: |
|
||||
apt-get update -y && apt-get upgrade -y
|
||||
apt-get install -y \
|
||||
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree \
|
||||
cmake bison
|
||||
- name: Checkout
|
||||
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup non-root user
|
||||
run: |
|
||||
# See [1] below
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
useradd spack-test && mkdir -p ~spack-test
|
||||
chown -R spack-test . ~spack-test
|
||||
- name: Setup repo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
git --version
|
||||
. .github/workflows/setup_git.sh
|
||||
- name: Bootstrap clingo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack bootstrap disable github-actions-v0.5
|
||||
spack bootstrap disable github-actions-v0.4
|
||||
spack external find cmake bison
|
||||
spack -d solve zlib
|
||||
tree ~/.spack/bootstrap/store/
|
||||
|
||||
ubuntu-clingo-binaries-and-patchelf:
|
||||
runs-on: ubuntu-latest
|
||||
container: "ubuntu:latest"
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
env:
|
||||
DEBIAN_FRONTEND: noninteractive
|
||||
run: |
|
||||
apt-get update -y && apt-get upgrade -y
|
||||
apt-get install -y \
|
||||
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree
|
||||
- name: Checkout
|
||||
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup non-root user
|
||||
run: |
|
||||
# See [1] below
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
useradd spack-test && mkdir -p ~spack-test
|
||||
chown -R spack-test . ~spack-test
|
||||
- name: Setup repo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
git --version
|
||||
. .github/workflows/setup_git.sh
|
||||
- name: Bootstrap clingo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack -d solve zlib
|
||||
tree ~/.spack/bootstrap/store/
|
||||
|
||||
opensuse-clingo-sources:
|
||||
runs-on: ubuntu-latest
|
||||
container: "opensuse/leap:latest"
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
- name: Setup OpenSUSE
|
||||
if: ${{ matrix.image == 'opensuse/leap:latest' }}
|
||||
run: |
|
||||
# Harden CI by applying the workaround described here: https://www.suse.com/support/kb/doc/?id=000019505
|
||||
zypper update -y || zypper update -y
|
||||
@@ -133,15 +37,9 @@ jobs:
|
||||
make patch unzip which xz python3 python3-devel tree \
|
||||
cmake bison
|
||||
- name: Checkout
|
||||
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
|
||||
uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup repo
|
||||
run: |
|
||||
# See [1] below
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
git --version
|
||||
. .github/workflows/setup_git.sh
|
||||
- name: Bootstrap clingo
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
@@ -151,77 +49,102 @@ jobs:
|
||||
spack -d solve zlib
|
||||
tree ~/.spack/bootstrap/store/
|
||||
|
||||
macos-clingo-sources:
|
||||
runs-on: macos-latest
|
||||
clingo-sources:
|
||||
runs-on: ${{ matrix.runner }}
|
||||
strategy:
|
||||
matrix:
|
||||
runner: ['macos-13', 'macos-14', "ubuntu-latest"]
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
- name: Setup macOS
|
||||
if: ${{ matrix.runner != 'ubuntu-latest' }}
|
||||
run: |
|
||||
brew install cmake bison@2.7 tree
|
||||
brew install cmake bison tree
|
||||
- name: Checkout
|
||||
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
|
||||
uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
|
||||
with:
|
||||
python-version: "3.12"
|
||||
- name: Bootstrap clingo
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
export PATH=/usr/local/opt/bison@2.7/bin:$PATH
|
||||
spack bootstrap disable github-actions-v0.5
|
||||
spack bootstrap disable github-actions-v0.4
|
||||
spack external find --not-buildable cmake bison
|
||||
spack -d solve zlib
|
||||
tree ~/.spack/bootstrap/store/
|
||||
|
||||
macos-clingo-binaries:
|
||||
runs-on: ${{ matrix.macos-version }}
|
||||
gnupg-sources:
|
||||
runs-on: ${{ matrix.runner }}
|
||||
strategy:
|
||||
matrix:
|
||||
macos-version: ['macos-11', 'macos-12']
|
||||
runner: [ 'macos-13', 'macos-14', "ubuntu-latest" ]
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
- name: Setup macOS
|
||||
if: ${{ matrix.runner != 'ubuntu-latest' }}
|
||||
run: |
|
||||
brew install tree
|
||||
- name: Checkout
|
||||
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
|
||||
- name: Bootstrap clingo
|
||||
# Remove GnuPG since we want to bootstrap it
|
||||
sudo rm -rf /usr/local/bin/gpg
|
||||
- name: Setup Ubuntu
|
||||
if: ${{ matrix.runner == 'ubuntu-latest' }}
|
||||
run: |
|
||||
set -ex
|
||||
for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
|
||||
not_found=1
|
||||
ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
|
||||
echo "Testing $ver_dir"
|
||||
if [[ -d "$ver_dir" ]] ; then
|
||||
if $ver_dir/python --version ; then
|
||||
export PYTHON="$ver_dir/python"
|
||||
not_found=0
|
||||
old_path="$PATH"
|
||||
export PATH="$ver_dir:$PATH"
|
||||
./bin/spack-tmpconfig -b ./.github/workflows/bootstrap-test.sh
|
||||
export PATH="$old_path"
|
||||
fi
|
||||
fi
|
||||
# NOTE: test all pythons that exist, not all do on 12
|
||||
done
|
||||
|
||||
ubuntu-clingo-binaries:
|
||||
runs-on: ubuntu-20.04
|
||||
steps:
|
||||
sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
|
||||
- name: Checkout
|
||||
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
|
||||
uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup repo
|
||||
- name: Bootstrap GnuPG
|
||||
run: |
|
||||
git --version
|
||||
. .github/workflows/setup_git.sh
|
||||
source share/spack/setup-env.sh
|
||||
spack solve zlib
|
||||
spack bootstrap disable github-actions-v0.5
|
||||
spack bootstrap disable github-actions-v0.4
|
||||
spack -d gpg list
|
||||
tree ~/.spack/bootstrap/store/
|
||||
|
||||
from-binaries:
|
||||
runs-on: ${{ matrix.runner }}
|
||||
strategy:
|
||||
matrix:
|
||||
runner: ['macos-13', 'macos-14', "ubuntu-latest"]
|
||||
steps:
|
||||
- name: Setup macOS
|
||||
if: ${{ matrix.runner != 'ubuntu-latest' }}
|
||||
run: |
|
||||
brew install tree
|
||||
# Remove GnuPG since we want to bootstrap it
|
||||
sudo rm -rf /usr/local/bin/gpg
|
||||
- name: Setup Ubuntu
|
||||
if: ${{ matrix.runner == 'ubuntu-latest' }}
|
||||
run: |
|
||||
sudo rm -rf $(which gpg) $(which gpg2) $(which patchelf)
|
||||
- name: Checkout
|
||||
uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
|
||||
with:
|
||||
python-version: |
|
||||
3.8
|
||||
3.9
|
||||
3.10
|
||||
3.11
|
||||
3.12
|
||||
- name: Set bootstrap sources
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack bootstrap disable github-actions-v0.4
|
||||
spack bootstrap disable spack-install
|
||||
- name: Bootstrap clingo
|
||||
run: |
|
||||
set -ex
|
||||
for ver in '3.7' '3.8' '3.9' '3.10' '3.11' ; do
|
||||
set -e
|
||||
for ver in '3.8' '3.9' '3.10' '3.11' '3.12' ; do
|
||||
not_found=1
|
||||
ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
|
||||
echo "Testing $ver_dir"
|
||||
if [[ -d "$ver_dir" ]] ; then
|
||||
echo "Testing $ver_dir"
|
||||
if $ver_dir/python --version ; then
|
||||
export PYTHON="$ver_dir/python"
|
||||
not_found=0
|
||||
@@ -236,122 +159,9 @@ jobs:
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
ubuntu-gnupg-binaries:
|
||||
runs-on: ubuntu-latest
|
||||
container: "ubuntu:latest"
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
env:
|
||||
DEBIAN_FRONTEND: noninteractive
|
||||
run: |
|
||||
apt-get update -y && apt-get upgrade -y
|
||||
apt-get install -y \
|
||||
bzip2 curl file g++ gcc patchelf gfortran git gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree
|
||||
- name: Checkout
|
||||
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup non-root user
|
||||
run: |
|
||||
# See [1] below
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
useradd spack-test && mkdir -p ~spack-test
|
||||
chown -R spack-test . ~spack-test
|
||||
- name: Setup repo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
git --version
|
||||
. .github/workflows/setup_git.sh
|
||||
- name: Bootstrap GnuPG
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack bootstrap disable github-actions-v0.4
|
||||
spack bootstrap disable spack-install
|
||||
spack -d gpg list
|
||||
tree ~/.spack/bootstrap/store/
|
||||
|
||||
ubuntu-gnupg-sources:
|
||||
runs-on: ubuntu-latest
|
||||
container: "ubuntu:latest"
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
env:
|
||||
DEBIAN_FRONTEND: noninteractive
|
||||
run: |
|
||||
apt-get update -y && apt-get upgrade -y
|
||||
apt-get install -y \
|
||||
bzip2 curl file g++ gcc patchelf gfortran git gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree \
|
||||
gawk
|
||||
- name: Checkout
|
||||
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup non-root user
|
||||
run: |
|
||||
# See [1] below
|
||||
git config --global --add safe.directory /__w/spack/spack
|
||||
useradd spack-test && mkdir -p ~spack-test
|
||||
chown -R spack-test . ~spack-test
|
||||
- name: Setup repo
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
git --version
|
||||
. .github/workflows/setup_git.sh
|
||||
- name: Bootstrap GnuPG
|
||||
shell: runuser -u spack-test -- bash {0}
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack solve zlib
|
||||
spack bootstrap disable github-actions-v0.5
|
||||
spack bootstrap disable github-actions-v0.4
|
||||
spack -d gpg list
|
||||
tree ~/.spack/bootstrap/store/
|
||||
|
||||
macos-gnupg-binaries:
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
brew install tree
|
||||
# Remove GnuPG since we want to bootstrap it
|
||||
sudo rm -rf /usr/local/bin/gpg
|
||||
- name: Checkout
|
||||
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
|
||||
- name: Bootstrap GnuPG
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack bootstrap disable github-actions-v0.4
|
||||
spack bootstrap disable spack-install
|
||||
spack -d gpg list
|
||||
tree ~/.spack/bootstrap/store/
|
||||
|
||||
macos-gnupg-sources:
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
brew install gawk tree
|
||||
# Remove GnuPG since we want to bootstrap it
|
||||
sudo rm -rf /usr/local/bin/gpg
|
||||
- name: Checkout
|
||||
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
|
||||
- name: Bootstrap GnuPG
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
spack solve zlib
|
||||
spack bootstrap disable github-actions-v0.5
|
||||
spack bootstrap disable github-actions-v0.4
|
||||
spack -d gpg list
|
||||
tree ~/.spack/bootstrap/store/
|
||||
|
||||
|
||||
# [1] Distros that have patched git to resolve CVE-2022-24765 (e.g. Ubuntu patching v2.25.1)
|
||||
# introduce breaking behaviorso we have to set `safe.directory` in gitconfig ourselves.
|
||||
# See:
|
||||
# - https://github.blog/2022-04-12-git-security-vulnerability-announced/
|
||||
# - https://github.com/actions/checkout/issues/760
|
||||
# - http://changelogs.ubuntu.com/changelogs/pool/main/g/git/git_2.25.1-1ubuntu3.3/changelog
|
||||
|
24  .github/workflows/build-containers.yml (vendored)
@@ -45,17 +45,18 @@ jobs:
|
||||
[leap15, 'linux/amd64,linux/arm64,linux/ppc64le', 'opensuse/leap:15'],
|
||||
[ubuntu-focal, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:20.04'],
|
||||
[ubuntu-jammy, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:22.04'],
|
||||
[ubuntu-noble, 'linux/amd64,linux/arm64,linux/ppc64le', 'ubuntu:24.04'],
|
||||
[almalinux8, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:8'],
|
||||
[almalinux9, 'linux/amd64,linux/arm64,linux/ppc64le', 'almalinux:9'],
|
||||
[rockylinux8, 'linux/amd64,linux/arm64', 'rockylinux:8'],
|
||||
[rockylinux9, 'linux/amd64,linux/arm64', 'rockylinux:9'],
|
||||
[fedora37, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:37'],
|
||||
[fedora38, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:38']]
|
||||
[fedora39, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:39'],
|
||||
[fedora40, 'linux/amd64,linux/arm64,linux/ppc64le', 'fedora:40']]
|
||||
name: Build ${{ matrix.dockerfile[0] }}
|
||||
if: github.repository == 'spack/spack'
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
|
||||
uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
|
||||
|
||||
- uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81
|
||||
id: docker_meta
|
||||
@@ -87,16 +88,16 @@ jobs:
|
||||
fi
|
||||
|
||||
- name: Upload Dockerfile
|
||||
uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32
|
||||
uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808
|
||||
with:
|
||||
name: dockerfiles
|
||||
name: dockerfiles_${{ matrix.dockerfile[0] }}
|
||||
path: dockerfiles
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@2b51285047da1547ffb1b2203d8be4c0af6b1f20
|
||||
uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb
|
||||
|
||||
- name: Log in to GitHub Container Registry
|
||||
uses: docker/login-action@e92390c5fb421da1463c202d546fed0ec5c39f20
|
||||
@@ -120,3 +121,14 @@ jobs:
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
tags: ${{ steps.docker_meta.outputs.tags }}
|
||||
labels: ${{ steps.docker_meta.outputs.labels }}
|
||||
|
||||
merge-dockerfiles:
|
||||
runs-on: ubuntu-latest
|
||||
needs: deploy-images
|
||||
steps:
|
||||
- name: Merge Artifacts
|
||||
uses: actions/upload-artifact/merge@65462800fd760344b1a7b4382951275a0abb4808
|
||||
with:
|
||||
name: dockerfiles
|
||||
pattern: dockerfiles_*
|
||||
delete-merged: true
|
||||
|
2  .github/workflows/ci.yaml (vendored)

```
@@ -36,7 +36,7 @@ jobs:
core: ${{ steps.filter.outputs.core }}
packages: ${{ steps.filter.outputs.packages }}
steps:
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
  if: ${{ github.event_name == 'push' }}
  with:
    fetch-depth: 0
```
2  .github/workflows/nightly-win-builds.yml (vendored)

```
@@ -14,7 +14,7 @@ jobs:
build-paraview-deps:
runs-on: windows-latest
steps:
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
  with:
    fetch-depth: 0
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
```
2  .github/workflows/style/requirements.txt (vendored)

```
@@ -1,4 +1,4 @@
black==24.3.0
black==24.4.2
clingo==5.7.1
flake8==7.0.0
isort==5.13.2
```
20  .github/workflows/unit_tests.yaml (vendored)
@@ -51,7 +51,7 @@ jobs:
|
||||
on_develop: false
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
|
||||
- uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
|
||||
@@ -91,7 +91,7 @@ jobs:
|
||||
UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
|
||||
run: |
|
||||
share/spack/qa/run-unit-tests
|
||||
- uses: codecov/codecov-action@c16abc29c95fcf9174b58eb7e1abf4c866893bc8
|
||||
- uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
|
||||
with:
|
||||
flags: unittests,linux,${{ matrix.concretizer }}
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
@@ -100,7 +100,7 @@ jobs:
|
||||
shell:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
|
||||
- uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
|
||||
@@ -124,7 +124,7 @@ jobs:
|
||||
COVERAGE: true
|
||||
run: |
|
||||
share/spack/qa/run-shell-tests
|
||||
- uses: codecov/codecov-action@c16abc29c95fcf9174b58eb7e1abf4c866893bc8
|
||||
- uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
|
||||
with:
|
||||
flags: shelltests,linux
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
@@ -141,7 +141,7 @@ jobs:
|
||||
dnf install -y \
|
||||
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
|
||||
make patch tcl unzip which xz
|
||||
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
|
||||
- uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
git --version
|
||||
@@ -160,7 +160,7 @@ jobs:
|
||||
clingo-cffi:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
|
||||
- uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
|
||||
@@ -185,7 +185,7 @@ jobs:
|
||||
SPACK_TEST_SOLVER: clingo
|
||||
run: |
|
||||
share/spack/qa/run-unit-tests
|
||||
- uses: codecov/codecov-action@c16abc29c95fcf9174b58eb7e1abf4c866893bc8
|
||||
- uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
|
||||
with:
|
||||
flags: unittests,linux,clingo
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
@@ -195,10 +195,10 @@ jobs:
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
matrix:
|
||||
os: [macos-latest, macos-14]
|
||||
os: [macos-13, macos-14]
|
||||
python-version: ["3.11"]
|
||||
steps:
|
||||
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
|
||||
- uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
|
||||
@@ -223,7 +223,7 @@ jobs:
|
||||
$(which spack) solve zlib
|
||||
common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
|
||||
$(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
|
||||
- uses: codecov/codecov-action@c16abc29c95fcf9174b58eb7e1abf4c866893bc8
|
||||
- uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
|
||||
with:
|
||||
flags: unittests,macos
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
6  .github/workflows/valid-style.yml (vendored)

```
@@ -18,7 +18,7 @@ jobs:
validate:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
  with:
    python-version: '3.11'
@@ -35,7 +35,7 @@ jobs:
style:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
  with:
    fetch-depth: 0
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
@@ -70,7 +70,7 @@ jobs:
dnf install -y \
  bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
  make patch tcl unzip which xz
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
- uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
- name: Setup repo and non-root user
  run: |
    git --version
```
10  .github/workflows/windows_python.yml (vendored)
@@ -15,7 +15,7 @@ jobs:
|
||||
unit-tests:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
|
||||
- uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
|
||||
@@ -33,7 +33,7 @@ jobs:
|
||||
./share/spack/qa/validate_last_exit.ps1
|
||||
coverage combine -a
|
||||
coverage xml
|
||||
- uses: codecov/codecov-action@c16abc29c95fcf9174b58eb7e1abf4c866893bc8
|
||||
- uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
|
||||
with:
|
||||
flags: unittests,windows
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
@@ -41,7 +41,7 @@ jobs:
|
||||
unit-tests-cmd:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
|
||||
- uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
|
||||
@@ -59,7 +59,7 @@ jobs:
|
||||
./share/spack/qa/validate_last_exit.ps1
|
||||
coverage combine -a
|
||||
coverage xml
|
||||
- uses: codecov/codecov-action@c16abc29c95fcf9174b58eb7e1abf4c866893bc8
|
||||
- uses: codecov/codecov-action@5ecb98a3c6b747ed38dc09f787459979aebb39be
|
||||
with:
|
||||
flags: unittests,windows
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
@@ -67,7 +67,7 @@ jobs:
|
||||
build-abseil:
|
||||
runs-on: windows-latest
|
||||
steps:
|
||||
- uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633
|
||||
- uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
|
||||
|
362  CHANGELOG.md
@@ -1,3 +1,365 @@

# v0.22.0 (2024-05-12)

`v0.22.0` is a major feature release.

## Features in this release

1. **Compiler dependencies**

   We are in the process of making compilers proper dependencies in Spack, and a number
   of changes in `v0.22` support that effort. You may notice nodes in your dependency
   graphs for compiler runtime libraries like `gcc-runtime` or `libgfortran`, and you
   may notice that Spack graphs now include `libc`. We've also begun moving compiler
   configuration from `compilers.yaml` to `packages.yaml` to make it consistent with
   other externals. We are trying to do this with the least disruption possible, so
   your existing `compilers.yaml` files should still work. We expect to be done with
   this transition by the `v0.23` release in November.

   * #41104: Packages compiled with `%gcc` on Linux, macOS and FreeBSD now depend on a
     new package `gcc-runtime`, which contains a copy of the shared compiler runtime
     libraries. This enables gcc runtime libraries to be installed and relocated when
     using a build cache. When building minimal Spack-generated container images it is
     no longer necessary to install libgfortran, libgomp etc. using the system package
     manager.

   * #42062: Packages compiled with `%oneapi` now depend on a new package
     `intel-oneapi-runtime`. This is similar to `gcc-runtime`, and the runtimes can
     provide virtuals and compilers can inject dependencies on virtuals into compiled
     packages. This allows us to model library soname compatibility and allows
     compilers like `%oneapi` to provide virtuals like `sycl` (which can also be
     provided by standalone libraries). Note that until we have an agreement in place
     with Intel, Intel packages are marked `redistribute(source=False, binary=False)`
     and must be downloaded outside of Spack.

   * #43272: changes to the optimization criteria of the solver improve the hit rate of
     buildcaches by a fair amount. The solver now uses more relaxed compatibility rules
     and will not try to strictly match compilers or targets of reused specs. Users can
     still enforce the previous strict behavior with `require:` sections in
     `packages.yaml`. Note that to enforce correct linking, Spack will *not* reuse old
     `%gcc` and `%oneapi` specs that do not have the runtime libraries as a dependency.

   * #43539: Spack will reuse specs built with compilers that are *not* explicitly
     configured in `compilers.yaml`. Because we can now keep runtime libraries in the
     build cache, we do not require you to also have a local configured compiler to
     *use* the runtime libraries. This improves reuse in buildcaches and avoids
     conflicts with OS updates that happen underneath Spack.

   * #43190: binary compatibility on `linux` is now based on the `libc` version,
     instead of on the `os` tag. Spack builds now detect the host `libc` (`glibc` or
     `musl`) and add it as an implicit external node in the dependency graph. Binaries
     with a `libc` with the same name and a version less than or equal to that of the
     detected `libc` can be reused. This is only on `linux`, not `macos` or `Windows`.

   * #43464: each package that can provide a compiler is now detectable using
     `spack external find`. External packages defining compiler paths are effectively
     used as compilers, and `spack external find -t compiler` can be used as a
     substitute for `spack compiler find`. More details on this transition are in
     [the docs](https://spack.readthedocs.io/en/latest/getting_started.html#manual-compiler-configuration)
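
   For illustration, here is a quick way to see both changes in action. The commands are
   the ones named above; the package name and the output shown are hypothetical and will
   differ on your system:

   ```console
   $ spack external find -t compiler     # register e.g. the system gcc in packages.yaml
   $ spack spec zlib-ng %gcc | grep -E "gcc-runtime|glibc"
       ^gcc-runtime@12.3.0
       ^glibc@2.39
   ```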

2. **Improved `spack find` UI for Environments**

   If you're working in an environment, you likely care about:

   * What are the roots
   * Which ones are installed / not installed
   * What's been added that still needs to be concretized

   We've tweaked `spack find` in environments to show this information much more
   clearly. Installation status is shown next to each root, so you can see what is
   installed. Roots are also shown in bold in the list of installed packages. There is
   also a new option for `spack find -r` / `--only-roots` that will only show env
   roots, if you don't want to look at all the installed specs.

   More details in #42334.
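
   For example (a sketch; `myenv` is a placeholder environment name):

   ```console
   $ spack -e myenv find               # roots listed first, each with its install status
   $ spack -e myenv find --only-roots  # show only the environment roots
   ```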

3. **Improved command-line string quoting**

   We are making some breaking changes to how Spack parses specs on the CLI in order to
   respect shell quoting instead of trying to fight it. If you (sadly) had to write
   something like this on the command line:

   ```
   spack install zlib cflags=\"-O2 -g\"
   ```

   That will now result in an error. The correct format (which you probably expected in
   the first place) is:

   ```
   spack install zlib cflags="-O2 -g"
   ```

   Quoted strings can now also include special characters, enabling commands like:

   ```
   spack install zlib ldflags='-Wl,-rpath=$ORIGIN/_libs'
   ```

   To reduce ambiguity in parsing, do *not* put spaces around `=` and `==` in
   flags or variants, as this will now result in an error:

   ```
   spack install zlib cflags = "-O2 -g"
   ```

   More details and discussion in #30634.

4. **Revert default `spack install` behavior to `--reuse`**

   We changed the default concretizer behavior from `--reuse` to `--reuse-deps` in
   #30990 (in `v0.20`), which meant that *every* `spack install` invocation would
   attempt to build a new version of the requested package / any environment roots.
   While this is a common ask for *upgrading* and for *developer* workflows, we don't
   think it should be the default for a package manager.

   We are going to try to stick to this policy:
   1. Prioritize reuse and build as little as possible by default.
   2. Only upgrade or install duplicates if they are explicitly asked for, or if there
      is a known security issue that necessitates an upgrade.

   With the install command you now have three options:

   * `--reuse` (default): reuse as many existing installations as possible.
   * `--reuse-deps` / `--fresh-roots`: upgrade (freshen) roots but reuse dependencies if possible.
   * `--fresh`: install fresh versions of requested packages (roots) and their dependencies.

   We've also introduced `--fresh-roots` as an alias for `--reuse-deps` to make it more clear
   that it may give you fresh versions. More details in #41302 and #43988.
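
   As a sketch, with `hdf5` standing in for any spec you might install:

   ```console
   $ spack install hdf5                # --reuse is the default: reuse whatever is compatible
   $ spack install --fresh-roots hdf5  # freshen the root, but still reuse dependencies
   $ spack install --fresh hdf5        # build fresh versions of the root and its dependencies
   ```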

5. **More control over reused specs**

   You can now control which packages to reuse and how. There is a new
   `concretizer:reuse` config option, which accepts the following properties:

   - `roots`: `true` to reuse roots, `false` to reuse just dependencies
   - `exclude`: list of constraints used to select which specs *not* to reuse
   - `include`: list of constraints used to select which specs *to* reuse
   - `from`: list of sources for reused specs (some combination of `local`,
     `buildcache`, or `external`)

   For example, to reuse only specs compiled with GCC, you could write:

   ```yaml
   concretizer:
     reuse:
       roots: true
       include:
       - "%gcc"
   ```

   Or, if `openmpi` must be used from externals, and it must be the only external used:

   ```yaml
   concretizer:
     reuse:
       roots: true
       from:
       - type: local
         exclude: ["openmpi"]
       - type: buildcache
         exclude: ["openmpi"]
       - type: external
         include: ["openmpi"]
   ```

6. **Add new `redistribute()` directive**

   Some packages can't be redistributed in source or binary form. We need an explicit
   way to say that in a package.

   Now there is a `redistribute()` directive so that package authors can write:

   ```python
   class MyPackage(Package):
       redistribute(source=False, binary=False)
   ```

   Like other directives, this works with `when=`:

   ```python
   class MyPackage(Package):
       # 12.0 and higher are proprietary
       redistribute(source=False, binary=False, when="@12.0:")

       # can't redistribute when we depend on some proprietary dependency
       redistribute(source=False, binary=False, when="^proprietary-dependency")
   ```

   More in #20185.

7. **New `conflict:` and `prefer:` syntax for package preferences**

   Previously, you could express conflicts and preferences in `packages.yaml` through
   some contortions with `require:`:

   ```yaml
   packages:
     zlib-ng:
       require:
       - one_of: ["%clang", "@:"]    # conflict on %clang
       - any_of: ["+shared", "@:"]   # strong preference for +shared
   ```

   You can now use `conflict:` and `prefer:` for a much more readable configuration:

   ```yaml
   packages:
     zlib-ng:
       conflict:
       - "%clang"
       prefer:
       - "+shared"
   ```

   See [the documentation](https://spack.readthedocs.io/en/latest/packages_yaml.html#conflicts-and-strong-preferences)
   and #41832 for more details.

8. **`include_concrete` in environments**

   You may want to build on the *concrete* contents of another environment without
   changing that environment. You can now include the concrete specs from another
   environment's `spack.lock` with `include_concrete`:

   ```yaml
   spack:
     specs: []
     concretizer:
       unify: true
     include_concrete:
     - /path/to/environment1
     - /path/to/environment2
   ```

   Now, when *this* environment is concretized, it will bring in the already concrete
   specs from `environment1` and `environment2`, and build on top of them without
   changing them. This is useful if you have phased deployments, where old deployments
   should not be modified but you want to use as many of them as possible. More details
   in #33768.

9. **`python-venv` isolation**

   Spack has unique requirements for Python because it:
   1. installs every package in its own independent directory, and
   2. allows users to register *external* python installations.

   External installations may contain their own installed packages that can interfere
   with Spack installations, and some distributions (Debian and Ubuntu) even change the
   `sysconfig` in ways that alter the installation layout of installed Python packages
   (e.g., with the addition of a `/local` prefix on Debian or Ubuntu). To isolate Spack
   from these and other issues, we now insert a small `python-venv` package in between
   `python` and packages that need to install Python code. This isolates Spack's build
   environment, isolates Spack from any issues with an external python, and resolves a
   large number of issues we've had with Python installations.

   See #40773 for further details.
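
   One way to see the new node in a dependency graph (the package name and the output
   shown are illustrative only):

   ```console
   $ spack spec py-numpy | grep python-venv
       ^python-venv@1.0
   ```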

## New commands, options, and directives

* Allow packages to be pushed to build cache after install from source (#42423)
* `spack develop`: stage build artifacts in same root as non-dev builds (#41373)
* Don't delete `spack develop` build artifacts after install (#43424)
* `spack find`: add options for local/upstream only (#42999)
* `spack logs`: print log files for packages (either partially built or installed) (#42202)
* `patch`: support reversing patches (#43040)
* `develop`: Add -b/--build-directory option to set build_directory package attribute (#39606)
* `spack list`: add `--namespace` / `--repo` option (#41948)
* directives: add `checked_by` field to `license()`, add some license checks
* `spack gc`: add options for environments and build dependencies (#41731)
* Add `--create` to `spack env activate` (#40896)
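
A couple of these in action (environment and spec names are placeholders, and the exact
output will differ):

```console
$ spack env activate --create demo   # create the environment if it does not exist yet
$ spack logs zlib-ng                 # print the build log of an installed or partially built package
```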

## Performance improvements

* environment.py: fix excessive re-reads (#43746)
* ruamel yaml: fix quadratic complexity bug (#43745)
* Refactor to improve `spec format` speed (#43712)
* Do not acquire a write lock on the env post install if no views (#43505)
* asp.py: fewer calls to `spec.copy()` (#43715)
* spec.py: early return in `__str__`
* avoid `jinja2` import at startup unless needed (#43237)

## Other new features of note

* `archspec`: update to `v0.2.4`: support for Windows, bugfixes for `neoverse-v1` and
  `neoverse-v2` detection.
* `spack config get`/`blame`: with no args, show entire config
* `spack env create <env>`: dir if dir-like (#44024)
* ASP-based solver: update os compatibility for macOS (#43862)
* Add handling of custom ssl certs in urllib ops (#42953)
* Add ability to rename environments (#43296)
* Add config option and compiler support to reuse across OS's (#42693)
* Support for prereleases (#43140)
* Only reuse externals when configured (#41707)
* Environments: Add support for including views (#42250)
* Make signed/unsigned a mirror configuration property (#41507)
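
For instance, the new no-argument forms (the output is your entire merged configuration,
so expect it to be long):

```console
$ spack config get     # print the full effective configuration
$ spack config blame   # same, annotated with the file each setting comes from
```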

## Removals, deprecations, and syntax changes

* remove `dpcpp` compiler and package (#43418)
* `spack load`: remove --only argument (#42120)

## Notable Bugfixes

* repo.py: drop deleted packages from provider cache (#43779)
* Allow `+` in module file names (#41999)
* `cmd/python`: use runpy to allow multiprocessing in scripts (#41789)
* Show extension commands with spack -h (#41726)
* Support environment variable expansion inside module projections (#42917)
* Alert user to failed concretizations (#42655)
* shell: fix zsh color formatting for PS1 in environments (#39497)
* spack mirror create --all: include patches (#41579)

## Spack community stats

* 7,994 total packages; 525 since `v0.21.0`
* 178 new Python packages, 5 new R packages
* 358 people contributed to this release
* 344 committers to packages
* 45 committers to core

# v0.21.2 (2024-03-01)

## Bugfixes

- Containerize: accommodate nested or pre-existing spack-env paths (#41558)
- Fix setup-env script, when going back and forth between instances (#40924)
- Fix using fully-qualified namespaces from root specs (#41957)
- Fix a bug when a required provider is requested for multiple virtuals (#42088)
- OCI buildcaches:
  - only push in parallel when forking (#42143)
  - use pickleable errors (#42160)
- Fix using sticky variants in externals (#42253)
- Fix a rare issue with conditional requirements and multi-valued variants (#42566)

## Package updates

- rust: add v1.75, rework a few variants (#41161, #41903)
- py-transformers: add v4.35.2 (#41266)
- mgard: fix OpenMP on AppleClang (#42933)

# v0.21.1 (2024-01-11)

## New features

- Add support for reading buildcaches created by Spack v0.22 (#41773)

## Bugfixes

- spack graph: fix coloring with environments (#41240)
- spack info: sort variants in --variants-by-name (#41389)
- Spec.format: error on old style format strings (#41934)
- ASP-based solver:
  - fix infinite recursion when computing concretization errors (#41061)
  - don't error for type mismatch on preferences (#41138)
  - don't emit spurious debug output (#41218)
- Improve the error message for deprecated preferences (#41075)
- Fix MSVC preview version breaking clingo build on Windows (#41185)
- Fix multi-word aliases (#41126)
- Add a warning for unconfigured compiler (#41213)
- environment: fix an issue with deconcretization/reconcretization of specs (#41294)
- buildcache: don't error if a patch is missing, when installing from binaries (#41986)
- Multiple improvements to unit-tests (#41215, #41369, #41495, #41359, #41361, #41345, #41342, #41308, #41226)

## Package updates

- root: add a webgui patch to address security issue (#41404)
- BerkeleyGW: update source urls (#38218)

# v0.21.0 (2023-11-11)

`v0.21.0` is a major feature release.
```
@@ -88,7 +88,7 @@ Resources:
[bridged](https://github.com/matrix-org/matrix-appservice-slack#matrix-appservice-slack) to Slack.
* [**Github Discussions**](https://github.com/spack/spack/discussions):
for Q&A and discussions. Note the pinned discussions for announcements.
* **Twitter**: [@spackpm](https://twitter.com/spackpm). Be sure to
* **X**: [@spackpm](https://twitter.com/spackpm). Be sure to
`@mention` us!
* **Mailing list**: [groups.google.com/d/forum/spack](https://groups.google.com/d/forum/spack):
only for announcements. Please use other venues for discussions.
```
```
@@ -15,7 +15,7 @@ concretizer:
# as possible, rather than building. If `false`, we'll always give you a fresh
# concretization. If `dependencies`, we'll only reuse dependencies but
# give you a fresh concretization for your root specs.
reuse: dependencies
reuse: true
# Options that tune which targets are considered for concretization. The
# concretization process is very sensitive to the number targets, and the time
# needed to reach a solution increases noticeably with the number of targets
```
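
If you prefer the previous default, a small user-level override restores it (a sketch;
place it in your own `concretizer.yaml` scope):

```yaml
concretizer:
  reuse: dependencies
```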
19  etc/spack/defaults/cray/packages.yaml (Normal file)
@@ -0,0 +1,19 @@
|
||||
# -------------------------------------------------------------------------
|
||||
# This file controls default concretization preferences for Spack.
|
||||
#
|
||||
# Settings here are versioned with Spack and are intended to provide
|
||||
# sensible defaults out of the box. Spack maintainers should edit this
|
||||
# file to keep it current.
|
||||
#
|
||||
# Users can override these settings by editing the following files.
|
||||
#
|
||||
# Per-spack-instance settings (overrides defaults):
|
||||
# $SPACK_ROOT/etc/spack/packages.yaml
|
||||
#
|
||||
# Per-user settings (overrides default and site settings):
|
||||
# ~/.spack/packages.yaml
|
||||
# -------------------------------------------------------------------------
|
||||
packages:
|
||||
all:
|
||||
providers:
|
||||
iconv: [glibc, musl, libiconv]
|
@@ -19,7 +19,6 @@ packages:
      - apple-clang
      - clang
      - gcc
      - intel
    providers:
      elf: [libelf]
      fuse: [macfuse]

etc/spack/defaults/linux/packages.yaml (new file, 19 lines)
@@ -0,0 +1,19 @@
# -------------------------------------------------------------------------
# This file controls default concretization preferences for Spack.
#
# Settings here are versioned with Spack and are intended to provide
# sensible defaults out of the box. Spack maintainers should edit this
# file to keep it current.
#
# Users can override these settings by editing the following files.
#
# Per-spack-instance settings (overrides defaults):
#   $SPACK_ROOT/etc/spack/packages.yaml
#
# Per-user settings (overrides default and site settings):
#   ~/.spack/packages.yaml
# -------------------------------------------------------------------------
packages:
  all:
    providers:
      iconv: [glibc, musl, libiconv]
@@ -15,9 +15,10 @@
# -------------------------------------------------------------------------
packages:
  all:
    compiler: [gcc, intel, pgi, clang, xl, nag, fj, aocc]
    compiler: [gcc, clang, oneapi, xl, nag, fj, aocc]
    providers:
      awk: [gawk]
      armci: [armcimpi]
      blas: [openblas, amdblis]
      D: [ldc]
      daal: [intel-oneapi-daal]
@@ -35,6 +36,7 @@ packages:
      java: [openjdk, jdk, ibm-java]
      jpeg: [libjpeg-turbo, libjpeg]
      lapack: [openblas, amdlibflame]
      libc: [glibc, musl]
      libgfortran: [ gcc-runtime ]
      libglx: [mesa+glx, mesa18+glx]
      libifcore: [ intel-oneapi-runtime ]
lib/spack/docs/_templates/layout.html (new vendored file, 12 lines)
@@ -0,0 +1,12 @@
{% extends "!layout.html" %}

{%- block extrahead %}
<!-- Google tag (gtag.js) -->
<script async src="https://www.googletagmanager.com/gtag/js?id=G-S0PQ7WV75K"></script>
<script>
  window.dataLayer = window.dataLayer || [];
  function gtag(){dataLayer.push(arguments);}
  gtag('js', new Date());
  gtag('config', 'G-S0PQ7WV75K');
</script>
{% endblock %}
@@ -865,7 +865,7 @@ There are several different ways to use Spack packages once you have
installed them. As you've seen, spack packages are installed into long
paths with hashes, and you need a way to get them into your path. The
easiest way is to use :ref:`spack load <cmd-spack-load>`, which is
described in the next section.
described in this section.

Some more advanced ways to use Spack packages include:

@@ -959,7 +959,86 @@ use ``spack find --loaded``.
You can also use ``spack load --list`` to get the same output, but it
does not have the full set of query options that ``spack find`` offers.

We'll learn more about Spack's spec syntax in the next section.
We'll learn more about Spack's spec syntax in :ref:`a later section <sec-specs>`.


.. _extensions:

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Python packages and virtual environments
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Spack can install a large number of Python packages. Their names are
typically prefixed with ``py-``. Installing and using them is no
different from any other package:

.. code-block:: console

   $ spack install py-numpy
   $ spack load py-numpy
   $ python3
   >>> import numpy

The ``spack load`` command sets the ``PATH`` variable so that the right Python
executable is used, and makes sure that ``numpy`` and its dependencies can be
located in the ``PYTHONPATH``.
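If you want to see exactly what ``spack load`` changes, you can print the shell
commands instead of applying them (this uses the ``--sh`` flag of ``spack load``;
``py-numpy`` matches the example above):

.. code-block:: console

   $ spack load --sh py-numpy                    # print, rather than apply, the changes
   $ spack load --sh py-numpy | grep PYTHONPATH  # inspect only the PYTHONPATH change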
Spack is different from other Python package managers in that it installs
every package into its *own* prefix. This is in contrast to ``pip``, which
installs all packages into the same prefix, be it in a virtual environment
or not.

For many users, **virtual environments** are more convenient than repeated
``spack load`` commands, particularly when working with multiple Python
packages. Fortunately Spack supports environments itself, which together
with a view are no different from Python virtual environments.

The recommended way of working with Python extensions such as ``py-numpy``
is through :ref:`Environments <environments>`. The following example creates
a Spack environment with ``numpy`` in the current working directory. It also
puts a filesystem view in ``./view``, which is a more traditional combined
prefix for all packages in the environment.

.. code-block:: console

   $ spack env create --with-view view --dir .
   $ spack -e . add py-numpy
   $ spack -e . concretize
   $ spack -e . install

Now you can activate the environment and start using the packages:

.. code-block:: console

   $ spack env activate .
   $ python3
   >>> import numpy

The environment view is also a virtual environment, which is useful if you are
sharing the environment with others who are unfamiliar with Spack. They can
either use the Python executable directly:

.. code-block:: console

   $ ./view/bin/python3
   >>> import numpy

or use the activation script:

.. code-block:: console

   $ source ./view/bin/activate
   $ python3
   >>> import numpy

In general, there should not be much difference between ``spack env activate``
and using the virtual environment. The main advantage of ``spack env activate``
is that it knows about more packages than just Python packages, and it may set
additional runtime variables that are not covered by the virtual environment
activation script.
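The difference is easy to inspect, since ``spack env activate`` can also print
the shell code it would run instead of applying it (the ``--sh`` flag and the
``diff`` below are just one way to compare; the view path matches the example above):

.. code-block:: console

   $ spack env activate --sh . > spack-activate.sh
   $ diff spack-activate.sh ./view/bin/activate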
See :ref:`environments` for a more in-depth description of Spack
environments and customizations to views.


.. _sec-specs:
@@ -1705,165 +1784,6 @@ check only local packages (as opposed to those used transparently from
|
||||
``upstream`` spack instances) and the ``-j,--json`` option to output
|
||||
machine-readable json data for any errors.
|
||||
|
||||
|
||||
.. _extensions:
|
||||
|
||||
---------------------------
|
||||
Extensions & Python support
|
||||
---------------------------
|
||||
|
||||
Spack's installation model assumes that each package will live in its
|
||||
own install prefix. However, certain packages are typically installed
|
||||
*within* the directory hierarchy of other packages. For example,
|
||||
`Python <https://www.python.org>`_ packages are typically installed in the
|
||||
``$prefix/lib/python-2.7/site-packages`` directory.
|
||||
|
||||
In Spack, installation prefixes are immutable, so this type of installation
|
||||
is not directly supported. However, it is possible to create views that
|
||||
allow you to merge install prefixes of multiple packages into a single new prefix.
|
||||
Views are a convenient way to get a more traditional filesystem structure.
|
||||
Using *extensions*, you can ensure that Python packages always share the
|
||||
same prefix in the view as Python itself. Suppose you have
|
||||
Python installed like so:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack find python
|
||||
==> 1 installed packages.
|
||||
-- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
|
||||
python@2.7.8
|
||||
|
||||
.. _cmd-spack-extensions:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
``spack extensions``
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
You can find extensions for your Python installation like this:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack extensions python
|
||||
==> python@2.7.8%gcc@4.4.7 arch=linux-debian7-x86_64-703c7a96
|
||||
==> 36 extensions:
|
||||
geos py-ipython py-pexpect py-pyside py-sip
|
||||
py-basemap py-libxml2 py-pil py-pytz py-six
|
||||
py-biopython py-mako py-pmw py-rpy2 py-sympy
|
||||
py-cython py-matplotlib py-pychecker py-scientificpython py-virtualenv
|
||||
py-dateutil py-mpi4py py-pygments py-scikit-learn
|
||||
py-epydoc py-mx py-pylint py-scipy
|
||||
py-gnuplot py-nose py-pyparsing py-setuptools
|
||||
py-h5py py-numpy py-pyqt py-shiboken
|
||||
|
||||
==> 12 installed:
|
||||
-- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
|
||||
py-dateutil@2.4.0 py-nose@1.3.4 py-pyside@1.2.2
|
||||
py-dateutil@2.4.0 py-numpy@1.9.1 py-pytz@2014.10
|
||||
py-ipython@2.3.1 py-pygments@2.0.1 py-setuptools@11.3.1
|
||||
py-matplotlib@1.4.2 py-pyparsing@2.0.3 py-six@1.9.0
|
||||
|
||||
The extensions are a subset of what's returned by ``spack list``, and
|
||||
they are packages like any other. They are installed into their own
|
||||
prefixes, and you can see this with ``spack find --paths``:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack find --paths py-numpy
|
||||
==> 1 installed packages.
|
||||
-- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
|
||||
py-numpy@1.9.1 ~/spack/opt/linux-debian7-x86_64/gcc@4.4.7/py-numpy@1.9.1-66733244
|
||||
|
||||
However, even though this package is installed, you cannot use it
|
||||
directly when you run ``python``:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack load python
|
||||
$ python
|
||||
Python 2.7.8 (default, Feb 17 2015, 01:35:25)
|
||||
[GCC 4.4.7 20120313 (Red Hat 4.4.7-11)] on linux2
|
||||
Type "help", "copyright", "credits" or "license" for more information.
|
||||
>>> import numpy
|
||||
Traceback (most recent call last):
|
||||
File "<stdin>", line 1, in <module>
|
||||
ImportError: No module named numpy
|
||||
>>>
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Using Extensions in Environments
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
The recommended way of working with extensions such as ``py-numpy``
|
||||
above is through :ref:`Environments <environments>`. For example,
|
||||
the following creates an environment in the current working directory
|
||||
with a filesystem view in the ``./view`` directory:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack env create --with-view view --dir .
|
||||
$ spack -e . add py-numpy
|
||||
$ spack -e . concretize
|
||||
$ spack -e . install
|
||||
|
||||
We recommend environments for two reasons. Firstly, environments
|
||||
can be activated (requires :ref:`shell-support`):
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack env activate .
|
||||
|
||||
which sets all the right environment variables such as ``PATH`` and
|
||||
``PYTHONPATH``. This ensures that
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ python
|
||||
>>> import numpy
|
||||
|
||||
works. Secondly, even without shell support, the view ensures
|
||||
that Python can locate its extensions:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ ./view/bin/python
|
||||
>>> import numpy
|
||||
|
||||
See :ref:`environments` for a more in-depth description of Spack
|
||||
environments and customizations to views.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
Using ``spack load``
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
A more traditional way of using Spack and extensions is ``spack load``
|
||||
(requires :ref:`shell-support`). This will add the extension to ``PYTHONPATH``
|
||||
in your current shell, and Python itself will be available in the ``PATH``:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack load py-numpy
|
||||
$ python
|
||||
>>> import numpy
|
||||
|
||||
The loaded packages can be checked using ``spack find --loaded``
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Loading Extensions via Modules
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Apart from ``spack env activate`` and ``spack load``, you can load numpy
|
||||
through your environment modules (using ``environment-modules`` or
|
||||
``lmod``). This will also add the extension to the ``PYTHONPATH`` in
|
||||
your current shell.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ module load <name of numpy module>
|
||||
|
||||
If you do not know the name of the specific numpy module you wish to
|
||||
load, you can use the ``spack module tcl|lmod loads`` command to get
|
||||
the name of the module from the Spack spec.
|
||||
|
||||
-----------------------
|
||||
Filesystem requirements
|
||||
-----------------------
|
||||
|
@@ -220,6 +220,40 @@ section of the configuration:

.. _binary_caches_oci:

---------------------------------
Automatic push to a build cache
---------------------------------

Sometimes it is convenient to push packages to a build cache as soon as they are installed.
Spack can do this by setting the ``autopush`` flag when adding a mirror:

.. code-block:: console

   $ spack mirror add --autopush <name> <url or path>

Or the ``autopush`` flag can be set for an existing mirror:

.. code-block:: console

   $ spack mirror set --autopush <name>      # enable automatic push for an existing mirror
   $ spack mirror set --no-autopush <name>   # disable automatic push for an existing mirror

Then, after installing a package, it is automatically pushed to all mirrors with ``autopush: true``. The command

.. code-block:: console

   $ spack install <package>

will have the same effect as

.. code-block:: console

   $ spack install <package>
   $ spack buildcache push <cache> <package>   # for all caches with autopush: true

.. note::

   Packages are automatically pushed to a build cache only if they are built from source.
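For reference, the resulting mirror entry in your configuration looks roughly
like the following (a sketch: the mirror name and path are illustrative, and the
exact keys written by ``spack mirror add --autopush`` may differ slightly):

.. code-block:: yaml

   mirrors:
     local-cache:
       url: file:///path/to/cache
       autopush: true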
-----------------------------------------
OCI / Docker V2 registries as build cache
-----------------------------------------
@@ -21,23 +21,86 @@ is the following:
Reuse already installed packages
--------------------------------

The ``reuse`` attribute controls whether Spack will prefer to use installed packages (``true``), or
whether it will do a "fresh" installation and prefer the latest settings from
``package.py`` files and ``packages.yaml`` (``false``).
You can use:
The ``reuse`` attribute controls how aggressively Spack reuses binary packages during concretization. The
attribute can either be a single value, or an object for more complex configurations.

In the former case ("single value") it allows Spack to:

1. Reuse installed packages and buildcaches for all the specs to be concretized, when ``true``
2. Reuse installed packages and buildcaches only for the dependencies of the root specs, when ``dependencies``
3. Disregard reusing installed packages and buildcaches, when ``false``

In case a finer control over which specs are reused is needed, then the value of this attribute can be
an object, with the following keys:

1. ``roots``: if ``true`` root specs are reused, if ``false`` only dependencies of root specs are reused
2. ``from``: list of sources from which reused specs are taken

Each source in ``from`` is itself an object:

.. list-table:: Attributes for a source of reusable specs
   :header-rows: 1

   * - Attribute name
     - Description
   * - type (mandatory, string)
     - Can be ``local``, ``buildcache``, or ``external``
   * - include (optional, list of specs)
     - If present, reusable specs must match at least one of the constraints in the list
   * - exclude (optional, list of specs)
     - If present, reusable specs must not match any of the constraints in the list.

For instance, the following configuration:

.. code-block:: yaml

   concretizer:
     reuse:
       roots: true
       from:
       - type: local
         include:
         - "%gcc"
         - "%clang"

tells the concretizer to reuse all specs compiled with either ``gcc`` or ``clang`` that are installed
in the local store. Any spec from remote buildcaches is disregarded.

To reduce the boilerplate in configuration files, default values for the ``include`` and
``exclude`` options can be pushed up one level:

.. code-block:: yaml

   concretizer:
     reuse:
       roots: true
       include:
       - "%gcc"
       from:
       - type: local
       - type: buildcache
       - type: local
         include:
         - "foo %oneapi"

In the example above we reuse all specs compiled with ``gcc`` from the local store
and remote buildcaches, and we also reuse ``foo %oneapi``. Note that the last source of
specs overrides the default ``include`` attribute.

For one-off concretizations, there are command line arguments for each of the simple "single value"
configurations. This means a user can:

.. code-block:: console

   % spack install --reuse <spec>

to enable reuse for a single installation, and you can use:
to enable reuse for a single installation, or:

.. code-block:: console

   % spack install --fresh <spec>

to do a fresh install if ``reuse`` is enabled by default.
``reuse: dependencies`` is the default.
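There is also a command-line counterpart for the ``dependencies`` value
(assuming your Spack version provides the ``--reuse-deps`` flag):

.. code-block:: console

   % spack install --reuse-deps <spec>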
.. seealso::
|
||||
|
||||
|
@@ -718,23 +718,45 @@ command-line tool, or C/C++/Fortran program with optional Python
modules? The former should be prepended with ``py-``, while the
latter should not.

""""""""""""""""""""""
extends vs. depends_on
""""""""""""""""""""""
""""""""""""""""""""""""""""""
``extends`` vs. ``depends_on``
""""""""""""""""""""""""""""""

This is very similar to the naming dilemma above, with a slight twist.
As mentioned in the :ref:`Packaging Guide <packaging_extensions>`,
``extends`` and ``depends_on`` are very similar, but ``extends`` ensures
that the extension and extendee share the same prefix in views.
This allows the user to import a Python module without
having to add that module to ``PYTHONPATH``.

When deciding between ``extends`` and ``depends_on``, the best rule of
thumb is to check the installation prefix. If Python libraries are
installed to ``<prefix>/lib/pythonX.Y/site-packages``, then you
should use ``extends``. If Python libraries are installed elsewhere
or the only files that get installed reside in ``<prefix>/bin``, then
don't use ``extends``.
Additionally, ``extends("python")`` adds a dependency on the package
``python-venv``. This improves isolation from the system, whether
it's during the build or at runtime: user and system site packages
cannot accidentally be used by any package that ``extends("python")``.

As a rule of thumb: if a package does not install any Python modules
of its own, and merely puts a Python script in the ``bin`` directory,
then there is no need for ``extends``. If the package installs modules
in the ``site-packages`` directory, it requires ``extends``.
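To make the rule of thumb concrete, here is a minimal sketch of a hypothetical
package that installs importable modules and therefore uses ``extends`` (the
package name, URL, and checksum are illustrative placeholders, not a real package):

.. code-block:: python

   from spack.package import *


   class Mytool(Package):
       """Hypothetical tool that installs a CLI script and Python modules."""

       homepage = "https://example.com/mytool"
       url = "https://example.com/mytool-1.0.tar.gz"

       # Placeholder checksum, for illustration only.
       version("1.0", sha256="0000000000000000000000000000000000000000000000000000000000000000")

       # Modules land in <prefix>/lib/pythonX.Y/site-packages, so use extends()
       # rather than a plain depends_on("python"); this also pulls in python-venv.
       extends("python")

       def install(self, spec, prefix):
           # Illustrative install step; a real package would use its build system.
           python("setup.py", "install", f"--prefix={prefix}")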
"""""""""""""""""""""""""""""""""""""
Executing ``python`` during the build
"""""""""""""""""""""""""""""""""""""

Whenever you need to execute a Python command or pass the path of the
Python interpreter to the build system, it is best to use the global
variable ``python`` directly. For example:

.. code-block:: python

   @run_before("install")
   def recythonize(self):
       python("setup.py", "clean")  # use the `python` global

As mentioned in the previous section, ``extends("python")`` adds an
automatic dependency on ``python-venv``, which is a virtual environment
that guarantees build isolation. The ``python`` global always refers to
the correct Python interpreter, whether the package uses ``extends("python")``
or ``depends_on("python")``.

^^^^^^^^^^^^^^^^^^^^^
Alternatives to Spack
@@ -150,7 +150,7 @@ this can expose you to attacks. Use at your own risk.
--------------------

Path to custom certificates for SSL verification. The value can be a
filesystem path, or an environment variable that expands to a file path.
filesystem path, or an environment variable that expands to an absolute file path.
The default value is set to the environment variable ``SSL_CERT_FILE``
to use the same syntax used by many other applications that automatically
detect custom certificates.
@@ -160,6 +160,9 @@ in the subprocess calling ``curl``.
If ``url_fetch_method:urllib`` then files and directories are supported, i.e.
``config:ssl_certs:$SSL_CERT_FILE`` or ``config:ssl_certs:$SSL_CERT_DIR``
will work.
In all cases the expanded path must be absolute for Spack to use the certificates.
Certificates relative to an environment can be created by prepending the path variable
with the Spack configuration variable ``$env``.
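A short sketch of the corresponding configuration (values are illustrative; the
``$env``-relative form follows the paragraph above and assumes the certificate
file is stored inside the environment directory):

.. code-block:: yaml

   config:
     ssl_certs: $SSL_CERT_FILE     # environment variable expanding to a file path
     # or, relative to an active environment:
     # ssl_certs: $env/cacert.pem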
--------------------
``checksum``
@@ -194,15 +194,15 @@ The OS that are currently supported are summarized in the table below:
   * - Operating System
     - Base Image
     - Spack Image
   * - Ubuntu 18.04
     - ``ubuntu:18.04``
     - ``spack/ubuntu-bionic``
   * - Ubuntu 20.04
     - ``ubuntu:20.04``
     - ``spack/ubuntu-focal``
   * - Ubuntu 22.04
     - ``ubuntu:22.04``
     - ``spack/ubuntu-jammy``
   * - Ubuntu 24.04
     - ``ubuntu:24.04``
     - ``spack/ubuntu-noble``
   * - CentOS 7
     - ``centos:7``
     - ``spack/centos7``
@@ -227,12 +227,12 @@ The OS that are currently supported are summarized in the table below:
   * - Rocky Linux 9
     - ``rockylinux:9``
     - ``spack/rockylinux9``
   * - Fedora Linux 37
     - ``fedora:37``
     - ``spack/fedora37``
   * - Fedora Linux 38
     - ``fedora:38``
     - ``spack/fedora38``
   * - Fedora Linux 39
     - ``fedora:39``
     - ``spack/fedora39``
   * - Fedora Linux 40
     - ``fedora:40``
     - ``spack/fedora40``
@@ -552,11 +552,11 @@ With either interpreter you can run a single command:

.. code-block:: console

   $ spack python -c 'import distro; distro.linux_distribution()'
   ('Ubuntu', '18.04', 'Bionic Beaver')
   $ spack python -c 'from spack.spec import Spec; Spec("python").concretized()'
   ...

   $ spack python -i ipython -c 'import distro; distro.linux_distribution()'
   Out[1]: ('Ubuntu', '18.04', 'Bionic Beaver')
   $ spack python -i ipython -c 'from spack.spec import Spec; Spec("python").concretized()'
   Out[1]: ...

or a file:
@@ -1071,9 +1071,9 @@ Announcing a release

We announce releases in all of the major Spack communication channels.
Publishing the release takes care of GitHub. The remaining channels are
Twitter, Slack, and the mailing list. Here are the steps:
X, Slack, and the mailing list. Here are the steps:

#. Announce the release on Twitter.
#. Announce the release on X.

   * Compose the tweet on the ``@spackpm`` account per the
     ``spack-twitter`` slack channel.
@@ -142,12 +142,8 @@ user's prompt to begin with the environment name in brackets.

   $ spack env activate -p myenv
   [myenv] $ ...

The ``activate`` command can also be used to create a new environment, if it is
not already defined, by adding the ``--create`` flag. Managed and anonymous
environments, anonymous environments are explained in the next section,
can both be created using the same flags that `spack env create` accepts.
If an environment already exists then spack will simply activate it and ignore the
create specific flags.
The ``activate`` command can also be used to create a new environment if it does not already
exist.

.. code-block:: console

@@ -176,21 +172,36 @@ environment will remove the view from the user environment.
Anonymous Environments
^^^^^^^^^^^^^^^^^^^^^^

Any directory can be treated as an environment if it contains a file
``spack.yaml``. To load an anonymous environment, use:
Apart from managed environments, Spack also supports anonymous environments.

Anonymous environments can be placed in any directory of choice.

.. note::

   When uninstalling packages, Spack asks the user to confirm the removal of packages
   that are still used in a managed environment. This is not the case for anonymous
   environments.

To create an anonymous environment, use one of the following commands:

.. code-block:: console

   $ spack env activate -d /path/to/directory
   $ spack env create --dir my_env
   $ spack env create ./my_env

Anonymous specs can be created in place using the command:
As a shorthand, you can also create an anonymous environment upon activation if it does not
already exist:

.. code-block:: console

   $ spack env create -d .
   $ spack env activate --create ./my_env

For convenience, Spack can also place an anonymous environment in a temporary directory for you:

.. code-block:: console

   $ spack env activate --temp

In this case Spack simply creates a ``spack.yaml`` file in the requested
directory.
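The generated ``spack.yaml`` starts out essentially empty. Roughly (the exact
template may differ between Spack versions):

.. code-block:: yaml

   spack:
     specs: []
     view: true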
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Environment Sensitive Commands
@@ -449,6 +460,125 @@ Sourcing that file in Bash will make the environment available to the
user; and can be included in ``.bashrc`` files, etc. The ``loads``
file may also be copied out of the environment, renamed, etc.


.. _environment_include_concrete:

------------------------------
Included Concrete Environments
------------------------------

Spack environments can create an environment based on information in already
established environments. You can think of it as a combination of existing
environments. It will gather information from the existing environment's
``spack.lock`` and use that during the creation of this included concrete
environment. When an included concrete environment is created, it will generate
a ``spack.lock`` file for the newly created environment.


^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Creating included environments
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
To create a combined concrete environment, you must have at least one existing
concrete environment. You will use the command ``spack env create`` with the
argument ``--include-concrete`` followed by the name or path of the environment
you'd like to include. Here is an example of how to create a combined environment
from the command line.

.. code-block:: console

   $ spack env create myenv
   $ spack -e myenv add python
   $ spack -e myenv concretize
   $ spack env create --include-concrete myenv included_env


You can also include an environment directly in the ``spack.yaml`` file. It
involves adding the ``include_concrete`` heading in the yaml followed by the
absolute path to the independent environments.

.. code-block:: yaml

   spack:
     specs: []
     concretizer:
       unify: true
     include_concrete:
     - /absolute/path/to/environment1
     - /absolute/path/to/environment2


Once the ``spack.yaml`` has been updated you must concretize the environment to
get the concrete specs from the included environments.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Updating an included environment
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
If changes were made to the base environment and you want that reflected in the
|
||||
included environment you will need to reconcretize both the base environment and the
|
||||
included environment for the change to be implemented. For example:
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack env create myenv
|
||||
$ spack -e myenv add python
|
||||
$ spack -e myenv concretize
|
||||
$ spack env create --include-concrete myenv included_env
|
||||
|
||||
|
||||
$ spack -e myenv find
|
||||
==> In environment myenv
|
||||
==> Root specs
|
||||
python
|
||||
|
||||
==> 0 installed packages
|
||||
|
||||
|
||||
$ spack -e included_env find
|
||||
==> In environment included_env
|
||||
==> No root specs
|
||||
==> Included specs
|
||||
python
|
||||
|
||||
==> 0 installed packages
|
||||
|
||||
Here we see that ``included_env`` has access to the python package through
|
||||
the ``myenv`` environment. But if we were to add another spec to ``myenv``,
|
||||
``included_env`` will not be able to access the new information.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack -e myenv add perl
|
||||
$ spack -e myenv concretize
|
||||
$ spack -e myenv find
|
||||
==> In environment myenv
|
||||
==> Root specs
|
||||
perl python
|
||||
|
||||
==> 0 installed packages
|
||||
|
||||
|
||||
$ spack -e included_env find
|
||||
==> In environment included_env
|
||||
==> No root specs
|
||||
==> Included specs
|
||||
python
|
||||
|
||||
==> 0 installed packages
|
||||
|
||||
It isn't until you run the ``spack concretize`` command that the combined
environment will get the updated information from the reconcretized base environment.
|
||||
|
||||
.. code-block:: console
|
||||
|
||||
$ spack -e included_env concretize
|
||||
$ spack -e included_env find
|
||||
==> In environment included_env
|
||||
==> No root specs
|
||||
==> Included specs
|
||||
perl python
|
||||
|
||||
==> 0 installed packages
|
||||
|
||||
.. _environment-configuration:
|
||||
|
||||
------------------------
|
||||
@@ -800,6 +930,7 @@ For example, the following environment has three root packages:
|
||||
This allows for a much-needed reduction in redundancy between packages
|
||||
and constraints.
|
||||
|
||||
|
||||
----------------
|
||||
Filesystem Views
|
||||
----------------
|
||||
@@ -1033,7 +1164,7 @@ other targets to depend on the environment installation.
|
||||
|
||||
A typical workflow is as follows:
|
||||
|
||||
.. code:: console
|
||||
.. code-block:: console
|
||||
|
||||
spack env create -d .
|
||||
spack -e . add perl
|
||||
@@ -1126,7 +1257,7 @@ its dependencies. This can be useful when certain flags should only apply to
|
||||
dependencies. Below we show a use case where a spec is installed with verbose
|
||||
output (``spack install --verbose``) while its dependencies are installed silently:
|
||||
|
||||
.. code:: console
|
||||
.. code-block:: console
|
||||
|
||||
$ spack env depfile -o Makefile
|
||||
|
||||
@@ -1148,7 +1279,7 @@ This can be accomplished through the generated ``[<prefix>/]SPACK_PACKAGE_IDS``
|
||||
variable. Assuming we have an active and concrete environment, we generate the
|
||||
associated ``Makefile`` with a prefix ``example``:
|
||||
|
||||
.. code:: console
|
||||
.. code-block:: console
|
||||
|
||||
$ spack env depfile -o env.mk --make-prefix example
|
||||
|
||||
|
@@ -478,6 +478,13 @@ prefix, you can add them to the ``extra_attributes`` field. Similarly,
all other fields from the compilers config can be added to the
``extra_attributes`` field for an external representing a compiler.

Note that the format for the ``paths`` field in the
``extra_attributes`` section is different than in the ``compilers``
config. For compilers configured as external packages, the section is
named ``compilers`` and the dictionary maps language names (``c``,
``cxx``, ``fortran``) to paths, rather than using the names ``cc``,
``fc``, and ``f77``.

.. code-block:: yaml

   packages:
@@ -493,11 +500,10 @@ all other fields from the compilers config can be added to the
     - spec: llvm+clang@15.0.0 arch=linux-rhel8-skylake
       prefix: /usr
       extra_attributes:
         paths:
           cc: /usr/bin/clang-with-suffix
         compilers:
           c: /usr/bin/clang-with-suffix
           cxx: /usr/bin/clang++-with-extra-info
           fc: /usr/bin/gfortran
           f77: /usr/bin/gfortran
           fortran: /usr/bin/gfortran
       extra_rpaths:
       - /usr/lib/llvm/
@@ -1572,6 +1578,8 @@ Microsoft Visual Studio
"""""""""""""""""""""""

Microsoft Visual Studio provides the only Windows C/C++ compiler that is currently supported by Spack.
Spack additionally requires the Windows SDK (including WGL) to be installed as part of your
Visual Studio installation, as it is required to build many packages from source.

We require several specific components to be included in the Visual Studio installation.
One is the C/C++ toolset, which can be selected as "Desktop development with C++" or "C++ build tools,"
@@ -1579,6 +1587,7 @@ depending on installation type (Professional, Build Tools, etc.) The other requ
"C++ CMake tools for Windows," which can be selected from among the optional packages.
This provides CMake and Ninja for use during Spack configuration.

If you already have Visual Studio installed, you can make sure these components are installed by
rerunning the installer. Next to your installation, select "Modify" and look at the
"Installation details" pane on the right.
@@ -6435,9 +6435,12 @@ the ``paths`` attribute:
          echo "Target: x86_64-pc-linux-gnu"
          echo "Thread model: posix"
          echo "InstalledDir: /usr/bin"
    platforms: ["linux", "darwin"]
    results:
    - spec: 'llvm@3.9.1 +clang~lld~lldb'

If the ``platforms`` attribute is present, tests are run only if the current host
matches one of the listed platforms.
Each test is performed by first creating a temporary directory structure as
specified in the corresponding ``layout`` and by then running
package detection and checking that the outcome matches the expected
@@ -6471,6 +6474,10 @@ package detection and checking that the outcome matches the expected
     - A spec that is expected from detection
     - Any valid spec
     - Yes
   * - ``results:[0]:extra_attributes``
     - Extra attributes expected on the associated Spec
     - Nested dictionary with strings as keys, and regular expressions as leaf values
     - No
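For illustration, a ``results`` entry that uses ``extra_attributes`` might look
roughly like the following (a sketch only: the spec matches the example above,
while the nesting and regular expressions are assumptions):

.. code-block:: yaml

   results:
   - spec: 'llvm@3.9.1 +clang~lld~lldb'
     extra_attributes:
       compilers:
         c: ".*/clang$"
         cxx: ".*/clang\\+\\+$"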
"""""""""""""""""""""""""""""""
Reuse tests from other packages
@@ -2,12 +2,12 @@ sphinx==7.2.6
sphinxcontrib-programoutput==0.17
sphinx_design==0.5.0
sphinx-rtd-theme==2.0.0
python-levenshtein==0.25.0
python-levenshtein==0.25.1
docutils==0.20.1
pygments==2.17.2
urllib3==2.2.1
pytest==8.1.1
pytest==8.2.0
isort==5.13.2
black==24.3.0
black==24.4.2
flake8==7.0.0
mypy==1.9.0
mypy==1.10.0
lib/spack/env/cc (vendored, 250 lines changed)
@@ -47,7 +47,8 @@ SPACK_F77_RPATH_ARG
|
||||
SPACK_FC_RPATH_ARG
|
||||
SPACK_LINKER_ARG
|
||||
SPACK_SHORT_SPEC
|
||||
SPACK_SYSTEM_DIRS"
|
||||
SPACK_SYSTEM_DIRS
|
||||
SPACK_MANAGED_DIRS"
|
||||
|
||||
# Optional parameters that aren't required to be set
|
||||
|
||||
@@ -173,22 +174,6 @@ preextend() {
|
||||
unset IFS
|
||||
}
|
||||
|
||||
# system_dir PATH
|
||||
# test whether a path is a system directory
|
||||
system_dir() {
|
||||
IFS=':' # SPACK_SYSTEM_DIRS is colon-separated
|
||||
path="$1"
|
||||
for sd in $SPACK_SYSTEM_DIRS; do
|
||||
if [ "${path}" = "${sd}" ] || [ "${path}" = "${sd}/" ]; then
|
||||
# success if path starts with a system prefix
|
||||
unset IFS
|
||||
return 0
|
||||
fi
|
||||
done
|
||||
unset IFS
|
||||
return 1 # fail if path starts no system prefix
|
||||
}
|
||||
|
||||
# Fail with a clear message if the input contains any bell characters.
|
||||
if eval "[ \"\${*#*${lsep}}\" != \"\$*\" ]"; then
|
||||
die "Compiler command line contains our separator ('${lsep}'). Cannot parse."
|
||||
@@ -201,6 +186,18 @@ for param in $params; do
|
||||
fi
|
||||
done
|
||||
|
||||
# eval this because SPACK_MANAGED_DIRS and SPACK_SYSTEM_DIRS are inputs we don't wanna loop over.
|
||||
# moving the eval inside the function would eval it every call.
|
||||
eval "\
|
||||
path_order() {
|
||||
case \"\$1\" in
|
||||
$SPACK_MANAGED_DIRS) return 0 ;;
|
||||
$SPACK_SYSTEM_DIRS) return 2 ;;
|
||||
/*) return 1 ;;
|
||||
esac
|
||||
}
|
||||
"
|
||||
|
||||
# Check if optional parameters are defined
|
||||
# If we aren't asking for debug flags, don't add them
|
||||
if [ -z "${SPACK_ADD_DEBUG_FLAGS:-}" ]; then
|
||||
@@ -420,11 +417,12 @@ input_command="$*"
|
||||
parse_Wl() {
|
||||
while [ $# -ne 0 ]; do
|
||||
if [ "$wl_expect_rpath" = yes ]; then
|
||||
if system_dir "$1"; then
|
||||
append return_system_rpath_dirs_list "$1"
|
||||
else
|
||||
append return_rpath_dirs_list "$1"
|
||||
fi
|
||||
path_order "$1"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$1" ;;
|
||||
1) append return_rpath_dirs_list "$1" ;;
|
||||
2) append return_system_rpath_dirs_list "$1" ;;
|
||||
esac
|
||||
wl_expect_rpath=no
|
||||
else
|
||||
case "$1" in
|
||||
@@ -432,21 +430,25 @@ parse_Wl() {
|
||||
arg="${1#-rpath=}"
|
||||
if [ -z "$arg" ]; then
|
||||
shift; continue
|
||||
elif system_dir "$arg"; then
|
||||
append return_system_rpath_dirs_list "$arg"
|
||||
else
|
||||
append return_rpath_dirs_list "$arg"
|
||||
fi
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$arg" ;;
|
||||
1) append return_rpath_dirs_list "$arg" ;;
|
||||
2) append return_system_rpath_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
--rpath=*)
|
||||
arg="${1#--rpath=}"
|
||||
if [ -z "$arg" ]; then
|
||||
shift; continue
|
||||
elif system_dir "$arg"; then
|
||||
append return_system_rpath_dirs_list "$arg"
|
||||
else
|
||||
append return_rpath_dirs_list "$arg"
|
||||
fi
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$arg" ;;
|
||||
1) append return_rpath_dirs_list "$arg" ;;
|
||||
2) append return_system_rpath_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
-rpath|--rpath)
|
||||
wl_expect_rpath=yes
|
||||
@@ -473,12 +475,20 @@ categorize_arguments() {
|
||||
|
||||
return_other_args_list=""
|
||||
return_isystem_was_used=""
|
||||
|
||||
return_isystem_spack_store_include_dirs_list=""
|
||||
return_isystem_system_include_dirs_list=""
|
||||
return_isystem_include_dirs_list=""
|
||||
|
||||
return_spack_store_include_dirs_list=""
|
||||
return_system_include_dirs_list=""
|
||||
return_include_dirs_list=""
|
||||
|
||||
return_spack_store_lib_dirs_list=""
|
||||
return_system_lib_dirs_list=""
|
||||
return_lib_dirs_list=""
|
||||
|
||||
return_spack_store_rpath_dirs_list=""
|
||||
return_system_rpath_dirs_list=""
|
||||
return_rpath_dirs_list=""
|
||||
|
||||
@@ -546,29 +556,32 @@ categorize_arguments() {
|
||||
arg="${1#-isystem}"
|
||||
return_isystem_was_used=true
|
||||
if [ -z "$arg" ]; then shift; arg="$1"; fi
|
||||
if system_dir "$arg"; then
|
||||
append return_isystem_system_include_dirs_list "$arg"
|
||||
else
|
||||
append return_isystem_include_dirs_list "$arg"
|
||||
fi
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_isystem_spack_store_include_dirs_list "$arg" ;;
|
||||
1) append return_isystem_include_dirs_list "$arg" ;;
|
||||
2) append return_isystem_system_include_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
-I*)
|
||||
arg="${1#-I}"
|
||||
if [ -z "$arg" ]; then shift; arg="$1"; fi
|
||||
if system_dir "$arg"; then
|
||||
append return_system_include_dirs_list "$arg"
|
||||
else
|
||||
append return_include_dirs_list "$arg"
|
||||
fi
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_include_dirs_list "$arg" ;;
|
||||
1) append return_include_dirs_list "$arg" ;;
|
||||
2) append return_system_include_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
-L*)
|
||||
arg="${1#-L}"
|
||||
if [ -z "$arg" ]; then shift; arg="$1"; fi
|
||||
if system_dir "$arg"; then
|
||||
append return_system_lib_dirs_list "$arg"
|
||||
else
|
||||
append return_lib_dirs_list "$arg"
|
||||
fi
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_lib_dirs_list "$arg" ;;
|
||||
1) append return_lib_dirs_list "$arg" ;;
|
||||
2) append return_system_lib_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
-l*)
|
||||
# -loopopt=0 is generated erroneously in autoconf <= 2.69,
|
||||
@@ -601,29 +614,32 @@ categorize_arguments() {
|
||||
break
|
||||
elif [ "$xlinker_expect_rpath" = yes ]; then
|
||||
# Register the path of -Xlinker -rpath <other args> -Xlinker <path>
|
||||
if system_dir "$1"; then
|
||||
append return_system_rpath_dirs_list "$1"
|
||||
else
|
||||
append return_rpath_dirs_list "$1"
|
||||
fi
|
||||
path_order "$1"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$1" ;;
|
||||
1) append return_rpath_dirs_list "$1" ;;
|
||||
2) append return_system_rpath_dirs_list "$1" ;;
|
||||
esac
|
||||
xlinker_expect_rpath=no
|
||||
else
|
||||
case "$1" in
|
||||
-rpath=*)
|
||||
arg="${1#-rpath=}"
|
||||
if system_dir "$arg"; then
|
||||
append return_system_rpath_dirs_list "$arg"
|
||||
else
|
||||
append return_rpath_dirs_list "$arg"
|
||||
fi
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$arg" ;;
|
||||
1) append return_rpath_dirs_list "$arg" ;;
|
||||
2) append return_system_rpath_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
--rpath=*)
|
||||
arg="${1#--rpath=}"
|
||||
if system_dir "$arg"; then
|
||||
append return_system_rpath_dirs_list "$arg"
|
||||
else
|
||||
append return_rpath_dirs_list "$arg"
|
||||
fi
|
||||
path_order "$arg"
|
||||
case $? in
|
||||
0) append return_spack_store_rpath_dirs_list "$arg" ;;
|
||||
1) append return_rpath_dirs_list "$arg" ;;
|
||||
2) append return_system_rpath_dirs_list "$arg" ;;
|
||||
esac
|
||||
;;
|
||||
-rpath|--rpath)
|
||||
xlinker_expect_rpath=yes
|
||||
@@ -661,16 +677,25 @@ categorize_arguments() {
|
||||
}
|
||||
|
||||
categorize_arguments "$@"
|
||||
include_dirs_list="$return_include_dirs_list"
|
||||
lib_dirs_list="$return_lib_dirs_list"
|
||||
rpath_dirs_list="$return_rpath_dirs_list"
|
||||
system_include_dirs_list="$return_system_include_dirs_list"
|
||||
system_lib_dirs_list="$return_system_lib_dirs_list"
|
||||
system_rpath_dirs_list="$return_system_rpath_dirs_list"
|
||||
isystem_was_used="$return_isystem_was_used"
|
||||
isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
|
||||
isystem_include_dirs_list="$return_isystem_include_dirs_list"
|
||||
other_args_list="$return_other_args_list"
|
||||
|
||||
spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
|
||||
system_include_dirs_list="$return_system_include_dirs_list"
|
||||
include_dirs_list="$return_include_dirs_list"
|
||||
|
||||
spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
|
||||
system_lib_dirs_list="$return_system_lib_dirs_list"
|
||||
lib_dirs_list="$return_lib_dirs_list"
|
||||
|
||||
spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
|
||||
system_rpath_dirs_list="$return_system_rpath_dirs_list"
|
||||
rpath_dirs_list="$return_rpath_dirs_list"
|
||||
|
||||
isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
|
||||
isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
|
||||
isystem_include_dirs_list="$return_isystem_include_dirs_list"
|
||||
|
||||
isystem_was_used="$return_isystem_was_used"
|
||||
other_args_list="$return_other_args_list"
|
||||
|
||||
#
|
||||
# Add flags from Spack's cppflags, cflags, cxxflags, fcflags, fflags, and
|
||||
@@ -730,7 +755,7 @@ esac
|
||||
|
||||
# Linker flags
|
||||
case "$mode" in
|
||||
ld|ccld)
|
||||
ccld)
|
||||
extend spack_flags_list SPACK_LDFLAGS
|
||||
;;
|
||||
esac
|
||||
@@ -738,16 +763,25 @@ esac
|
||||
IFS="$lsep"
|
||||
categorize_arguments $spack_flags_list
|
||||
unset IFS
|
||||
spack_flags_include_dirs_list="$return_include_dirs_list"
|
||||
spack_flags_lib_dirs_list="$return_lib_dirs_list"
|
||||
spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
|
||||
spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
|
||||
spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
|
||||
spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
|
||||
spack_flags_isystem_was_used="$return_isystem_was_used"
|
||||
spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
|
||||
spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"
|
||||
spack_flags_other_args_list="$return_other_args_list"
|
||||
|
||||
spack_flags_isystem_spack_store_include_dirs_list="$return_isystem_spack_store_include_dirs_list"
|
||||
spack_flags_isystem_system_include_dirs_list="$return_isystem_system_include_dirs_list"
|
||||
spack_flags_isystem_include_dirs_list="$return_isystem_include_dirs_list"
|
||||
|
||||
spack_flags_spack_store_include_dirs_list="$return_spack_store_include_dirs_list"
|
||||
spack_flags_system_include_dirs_list="$return_system_include_dirs_list"
|
||||
spack_flags_include_dirs_list="$return_include_dirs_list"
|
||||
|
||||
spack_flags_spack_store_lib_dirs_list="$return_spack_store_lib_dirs_list"
|
||||
spack_flags_system_lib_dirs_list="$return_system_lib_dirs_list"
|
||||
spack_flags_lib_dirs_list="$return_lib_dirs_list"
|
||||
|
||||
spack_flags_spack_store_rpath_dirs_list="$return_spack_store_rpath_dirs_list"
|
||||
spack_flags_system_rpath_dirs_list="$return_system_rpath_dirs_list"
|
||||
spack_flags_rpath_dirs_list="$return_rpath_dirs_list"
|
||||
|
||||
spack_flags_isystem_was_used="$return_isystem_was_used"
|
||||
spack_flags_other_args_list="$return_other_args_list"
|
||||
|
||||
|
||||
# On macOS insert headerpad_max_install_names linker flag
|
||||
@@ -767,11 +801,13 @@ if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
|
||||
# Append RPATH directories. Note that in the case of the
|
||||
# top-level package these directories may not exist yet. For dependencies
|
||||
# it is assumed that paths have already been confirmed.
|
||||
extend spack_store_rpath_dirs_list SPACK_STORE_RPATH_DIRS
|
||||
extend rpath_dirs_list SPACK_RPATH_DIRS
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ "$mode" = ccld ] || [ "$mode" = ld ]; then
|
||||
extend spack_store_lib_dirs_list SPACK_STORE_LINK_DIRS
|
||||
extend lib_dirs_list SPACK_LINK_DIRS
|
||||
fi
|
||||
|
||||
@@ -798,38 +834,50 @@ case "$mode" in
|
||||
;;
|
||||
esac
|
||||
|
||||
case "$mode" in
|
||||
cpp|cc|as|ccld)
|
||||
if [ "$spack_flags_isystem_was_used" = "true" ] || [ "$isystem_was_used" = "true" ]; then
|
||||
extend isystem_spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
|
||||
extend isystem_include_dirs_list SPACK_INCLUDE_DIRS
|
||||
else
|
||||
extend spack_store_include_dirs_list SPACK_STORE_INCLUDE_DIRS
|
||||
extend include_dirs_list SPACK_INCLUDE_DIRS
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
#
|
||||
# Finally, reassemble the command line.
|
||||
#
|
||||
args_list="$flags_list"
|
||||
|
||||
# Insert include directories just prior to any system include directories
|
||||
# Include search paths partitioned by (in store, non-system, system)
|
||||
# NOTE: adding ${lsep} to the prefix here turns every added element into two
|
||||
extend args_list spack_flags_include_dirs_list "-I"
|
||||
extend args_list include_dirs_list "-I"
|
||||
extend args_list spack_flags_spack_store_include_dirs_list -I
|
||||
extend args_list spack_store_include_dirs_list -I
|
||||
|
||||
extend args_list spack_flags_include_dirs_list -I
|
||||
extend args_list include_dirs_list -I
|
||||
|
||||
extend args_list spack_flags_isystem_spack_store_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list isystem_spack_store_include_dirs_list "-isystem${lsep}"
|
||||
|
||||
extend args_list spack_flags_isystem_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list isystem_include_dirs_list "-isystem${lsep}"
|
||||
|
||||
case "$mode" in
|
||||
cpp|cc|as|ccld)
|
||||
if [ "$spack_flags_isystem_was_used" = "true" ]; then
|
||||
extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
|
||||
elif [ "$isystem_was_used" = "true" ]; then
|
||||
extend args_list SPACK_INCLUDE_DIRS "-isystem${lsep}"
|
||||
else
|
||||
extend args_list SPACK_INCLUDE_DIRS "-I"
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
extend args_list spack_flags_system_include_dirs_list -I
|
||||
extend args_list system_include_dirs_list -I
|
||||
|
||||
extend args_list spack_flags_isystem_system_include_dirs_list "-isystem${lsep}"
|
||||
extend args_list isystem_system_include_dirs_list "-isystem${lsep}"
|
||||
|
||||
# Library search paths
|
||||
# Library search paths partitioned by (in store, non-system, system)
|
||||
extend args_list spack_flags_spack_store_lib_dirs_list "-L"
|
||||
extend args_list spack_store_lib_dirs_list "-L"
|
||||
|
||||
extend args_list spack_flags_lib_dirs_list "-L"
|
||||
extend args_list lib_dirs_list "-L"
|
||||
|
||||
extend args_list spack_flags_system_lib_dirs_list "-L"
|
||||
extend args_list system_lib_dirs_list "-L"
|
||||
|
||||
@@ -839,8 +887,12 @@ case "$mode" in
|
||||
if [ -n "$dtags_to_add" ] ; then
|
||||
append args_list "$linker_arg$dtags_to_add"
|
||||
fi
|
||||
extend args_list spack_flags_spack_store_rpath_dirs_list "$rpath"
|
||||
extend args_list spack_store_rpath_dirs_list "$rpath"
|
||||
|
||||
extend args_list spack_flags_rpath_dirs_list "$rpath"
|
||||
extend args_list rpath_dirs_list "$rpath"
|
||||
|
||||
extend args_list spack_flags_system_rpath_dirs_list "$rpath"
|
||||
extend args_list system_rpath_dirs_list "$rpath"
|
||||
;;
|
||||
@@ -848,8 +900,12 @@ case "$mode" in
|
||||
if [ -n "$dtags_to_add" ] ; then
|
||||
append args_list "$dtags_to_add"
|
||||
fi
|
||||
extend args_list spack_flags_spack_store_rpath_dirs_list "-rpath${lsep}"
|
||||
extend args_list spack_store_rpath_dirs_list "-rpath${lsep}"
|
||||
|
||||
extend args_list spack_flags_rpath_dirs_list "-rpath${lsep}"
|
||||
extend args_list rpath_dirs_list "-rpath${lsep}"
|
||||
|
||||
extend args_list spack_flags_system_rpath_dirs_list "-rpath${lsep}"
|
||||
extend args_list system_rpath_dirs_list "-rpath${lsep}"
|
||||
;;
|
||||
|
lib/spack/external/__init__.py (vendored, 2 lines changed)
@@ -18,7 +18,7 @@
|
||||
|
||||
* Homepage: https://pypi.python.org/pypi/archspec
|
||||
* Usage: Labeling, comparison and detection of microarchitectures
|
||||
* Version: 0.2.3 (commit 7b8fe60b69e2861e7dac104bc1c183decfcd3daf)
|
||||
* Version: 0.2.4 (commit 48b92512b9ce203ded0ebd1ac41b42593e931f7c)
|
||||
|
||||
astunparse
|
||||
----------------
|
||||
|
@@ -497,7 +497,7 @@ def copy_attributes(self, t, memo=None):
|
||||
Tag.attrib, merge_attrib]:
|
||||
if hasattr(self, a):
|
||||
if memo is not None:
|
||||
setattr(t, a, copy.deepcopy(getattr(self, a, memo)))
|
||||
setattr(t, a, copy.deepcopy(getattr(self, a), memo))
|
||||
else:
|
||||
setattr(t, a, getattr(self, a))
|
||||
# fmt: on
|
||||
|
lib/spack/external/archspec/__init__.py (vendored, 2 lines changed)
@@ -1,3 +1,3 @@
|
||||
"""Init file to avoid namespace packages"""
|
||||
|
||||
__version__ = "0.2.3"
|
||||
__version__ = "0.2.4"
|
||||
|
lib/spack/external/archspec/cpu/__init__.py (vendored, 9 lines changed)
@@ -5,9 +5,10 @@
|
||||
"""The "cpu" package permits to query and compare different
|
||||
CPU microarchitectures.
|
||||
"""
|
||||
from .detect import host
|
||||
from .detect import brand_string, host
|
||||
from .microarchitecture import (
|
||||
TARGETS,
|
||||
InvalidCompilerVersion,
|
||||
Microarchitecture,
|
||||
UnsupportedMicroarchitecture,
|
||||
generic_microarchitecture,
|
||||
@@ -15,10 +16,12 @@
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"brand_string",
|
||||
"host",
|
||||
"TARGETS",
|
||||
"InvalidCompilerVersion",
|
||||
"Microarchitecture",
|
||||
"UnsupportedMicroarchitecture",
|
||||
"TARGETS",
|
||||
"generic_microarchitecture",
|
||||
"host",
|
||||
"version_components",
|
||||
]
|
||||
|
lib/spack/external/archspec/cpu/detect.py (vendored, 42 lines changed)
@@ -155,6 +155,31 @@ def _is_bit_set(self, register: int, bit: int) -> bool:
|
||||
mask = 1 << bit
|
||||
return register & mask > 0
|
||||
|
||||
def brand_string(self) -> Optional[str]:
|
||||
"""Returns the brand string, if available."""
|
||||
if self.highest_extension_support < 0x80000004:
|
||||
return None
|
||||
|
||||
r1 = self.cpuid.registers_for(eax=0x80000002, ecx=0)
|
||||
r2 = self.cpuid.registers_for(eax=0x80000003, ecx=0)
|
||||
r3 = self.cpuid.registers_for(eax=0x80000004, ecx=0)
|
||||
result = struct.pack(
|
||||
"IIIIIIIIIIII",
|
||||
r1.eax,
|
||||
r1.ebx,
|
||||
r1.ecx,
|
||||
r1.edx,
|
||||
r2.eax,
|
||||
r2.ebx,
|
||||
r2.ecx,
|
||||
r2.edx,
|
||||
r3.eax,
|
||||
r3.ebx,
|
||||
r3.ecx,
|
||||
r3.edx,
|
||||
).decode("utf-8")
|
||||
return result.strip("\x00")
|
||||
|
||||
|
||||
@detection(operating_system="Windows")
|
||||
def cpuid_info():
|
||||
@@ -174,8 +199,8 @@ def _check_output(args, env):
|
||||
|
||||
|
||||
WINDOWS_MAPPING = {
|
||||
"AMD64": "x86_64",
|
||||
"ARM64": "aarch64",
|
||||
"AMD64": X86_64,
|
||||
"ARM64": AARCH64,
|
||||
}
|
||||
|
||||
|
||||
@@ -409,3 +434,16 @@ def compatibility_check_for_riscv64(info, target):
|
||||
return (target == arch_root or arch_root in target.ancestors) and (
|
||||
target.name == info.name or target.vendor == "generic"
|
||||
)
|
||||
|
||||
|
||||
def brand_string() -> Optional[str]:
|
||||
"""Returns the brand string of the host, if detected, or None."""
|
||||
if platform.system() == "Darwin":
|
||||
return _check_output(
|
||||
["sysctl", "-n", "machdep.cpu.brand_string"], env=_ensure_bin_usrbin_in_path()
|
||||
).strip()
|
||||
|
||||
if host().family == X86_64:
|
||||
return CpuidInfoCollector().brand_string()
|
||||
|
||||
return None
|
||||
|
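For illustration, the module-level helper added above can be used roughly like
this (a sketch; the printed values naturally depend on the host CPU):

.. code-block:: python

   import archspec.cpu

   # host() returns the detected Microarchitecture object.
   print(archspec.cpu.host())
   # brand_string() returns the marketing name of the CPU, or None if it
   # cannot be determined on this platform.
   print(archspec.cpu.brand_string())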
@@ -208,6 +208,8 @@ def optimization_flags(self, compiler, version):
"""Returns a string containing the optimization flags that needs
to be used to produce code optimized for this micro-architecture.

The version is expected to be a string of dot separated digits.

If there is no information on the compiler passed as argument the
function returns an empty string. If it is known that the compiler
version we want to use does not support this architecture the function
@@ -216,6 +218,11 @@ def optimization_flags(self, compiler, version):
Args:
compiler (str): name of the compiler to be used
version (str): version of the compiler to be used

Raises:
UnsupportedMicroarchitecture: if the requested compiler does not support
this micro-architecture.
ValueError: if the version doesn't match the expected format
"""
# If we don't have information on compiler at all return an empty string
if compiler not in self.family.compilers:
@@ -232,6 +239,14 @@ def optimization_flags(self, compiler, version):
msg = msg.format(compiler, best_target, best_target.family)
raise UnsupportedMicroarchitecture(msg)

# Check that the version matches the expected format
if not re.match(r"^(?:\d+\.)*\d+$", version):
msg = (
"invalid format for the compiler version argument. "
"Only dot separated digits are allowed."
)
raise InvalidCompilerVersion(msg)

# If we have information on this compiler we need to check the
# version being used
compiler_info = self.compilers[compiler]
@@ -292,7 +307,7 @@ def generic_microarchitecture(name):
Args:
name (str): name of the micro-architecture
"""
return Microarchitecture(name, parents=[], vendor="generic", features=[], compilers={})
return Microarchitecture(name, parents=[], vendor="generic", features=set(), compilers={})


def version_components(version):
@@ -367,7 +382,15 @@ def fill_target_from_dict(name, data, targets):
TARGETS = LazyDictionary(_known_microarchitectures)


class UnsupportedMicroarchitecture(ValueError):
class ArchspecError(Exception):
"""Base class for errors within archspec"""


class UnsupportedMicroarchitecture(ArchspecError, ValueError):
"""Raised if a compiler version does not support optimization for a given
micro-architecture.
"""


class InvalidCompilerVersion(ArchspecError, ValueError):
"""Raised when an invalid format is used for compiler versions in archspec."""
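The added check accepts only dot-separated digits and raises InvalidCompilerVersion otherwise. A standalone sketch of that validation, using the same regex as the diff with a plain ValueError standing in for the archspec exception:

    import re

    VERSION_RE = re.compile(r"^(?:\d+\.)*\d+$")

    def validate(version: str) -> str:
        # Only dot-separated digits are allowed, e.g. "13" or "13.2.0".
        if not VERSION_RE.match(version):
            raise ValueError(f"invalid compiler version format: {version!r}")
        return version

    print(validate("13.2.0"))                 # ok
    # validate("13.2.0-rc1") or validate("latest") would raise ValueError
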
@@ -2937,8 +2937,6 @@
"ilrcpc",
"flagm",
"ssbs",
"paca",
"pacg",
"dcpodp",
"svei8mm",
"svebf16",
@@ -3066,8 +3064,6 @@
"flagm",
"ssbs",
"sb",
"paca",
"pacg",
"dcpodp",
"sve2",
"sveaes",
@@ -3081,8 +3077,7 @@
"svebf16",
"i8mm",
"bf16",
"dgh",
"bti"
"dgh"
],
"compilers" : {
"gcc": [
lib/spack/external/patches/ruamelyaml.patch (vendored, new file)
@@ -0,0 +1,13 @@
diff --git a/lib/spack/external/_vendoring/ruamel/yaml/comments.py b/lib/spack/external/_vendoring/ruamel/yaml/comments.py
index 1badeda585..892c868af3 100644
--- a/lib/spack/external/_vendoring/ruamel/yaml/comments.py
+++ b/lib/spack/external/_vendoring/ruamel/yaml/comments.py
@@ -497,7 +497,7 @@ def copy_attributes(self, t, memo=None):
Tag.attrib, merge_attrib]:
if hasattr(self, a):
if memo is not None:
- setattr(t, a, copy.deepcopy(getattr(self, a, memo)))
+ setattr(t, a, copy.deepcopy(getattr(self, a), memo))
else:
setattr(t, a, getattr(self, a))
# fmt: on
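The vendored patch moves a misplaced parenthesis: the memo dict was being passed as getattr's default value instead of as copy.deepcopy's memo argument. A small illustration of the difference (not part of the patch itself):

    import copy

    class Box:
        pass

    src, memo = Box(), {}
    src.payload = [1, 2, 3]

    # Buggy form: memo is only a getattr() default, so deepcopy never sees it.
    before = copy.deepcopy(getattr(src, "payload", memo))
    # Patched form: memo is forwarded to deepcopy and gets populated.
    after = copy.deepcopy(getattr(src, "payload"), memo)
    print(before == after == [1, 2, 3], len(memo) > 0)  # True True
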
@@ -198,15 +198,32 @@ def getuid():
return os.getuid()


def _win_rename(src, dst):
# os.replace will still fail if on Windows (but not POSIX) if the dst
# is a symlink to a directory (all other cases have parity Windows <-> Posix)
if os.path.islink(dst) and os.path.isdir(os.path.realpath(dst)):
if os.path.samefile(src, dst):
# src and dst are the same
# do nothing and exit early
return
# If dst exists and is a symlink to a directory
# we need to remove dst and then perform rename/replace
# this is safe to do as there's no chance src == dst now
os.remove(dst)
os.replace(src, dst)


@system_path_filter
def rename(src, dst):
# On Windows, os.rename will fail if the destination file already exists
# os.replace is the same as os.rename on POSIX and is MoveFileExW w/
# the MOVEFILE_REPLACE_EXISTING flag on Windows
# Windows invocation is abstracted behind additonal logic handling
# remaining cases of divergent behavior accross platforms
if sys.platform == "win32":
# Windows path existence checks will sometimes fail on junctions/links/symlinks
# so check for that case
if os.path.exists(dst) or islink(dst):
os.remove(dst)
os.rename(src, dst)
_win_rename(src, dst)
else:
os.replace(src, dst)


@system_path_filter
@@ -1217,10 +1234,12 @@ def windows_sfn(path: os.PathLike):
import ctypes

k32 = ctypes.WinDLL("kernel32", use_last_error=True)
# Method with null values returns size of short path name
sz = k32.GetShortPathNameW(path, None, 0)
# stub Windows types TCHAR[LENGTH]
TCHAR_arr = ctypes.c_wchar * len(path)
TCHAR_arr = ctypes.c_wchar * sz
ret_str = TCHAR_arr()
k32.GetShortPathNameW(path, ret_str, len(path))
k32.GetShortPathNameW(path, ctypes.byref(ret_str), sz)
return ret_str.value
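The windows_sfn change above sizes its buffer with the usual two-call pattern: call GetShortPathNameW with a null buffer to learn the required length, then call again with a buffer of that size. A hedged, Windows-only sketch of the same pattern, using create_unicode_buffer instead of the raw c_wchar array:

    import ctypes
    import sys

    def short_path(path: str) -> str:
        """Return the 8.3 short name for an existing path (Windows only)."""
        assert sys.platform == "win32", "GetShortPathNameW only exists on Windows"
        k32 = ctypes.WinDLL("kernel32", use_last_error=True)
        size = k32.GetShortPathNameW(path, None, 0)   # first call: required buffer size
        buf = ctypes.create_unicode_buffer(size)
        k32.GetShortPathNameW(path, buf, size)        # second call: fill the buffer
        return buf.value
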
@@ -12,7 +12,7 @@
import traceback
from datetime import datetime
from sys import platform as _platform
from typing import NoReturn
from typing import Any, NoReturn

if _platform != "win32":
import fcntl
@@ -158,21 +158,22 @@ def get_timestamp(force=False):
return ""


def msg(message, *args, **kwargs):
def msg(message: Any, *args: Any, newline: bool = True) -> None:
if not msg_enabled():
return

if isinstance(message, Exception):
message = "%s: %s" % (message.__class__.__name__, str(message))
message = f"{message.__class__.__name__}: {message}"
else:
message = str(message)

newline = kwargs.get("newline", True)
st_text = ""
if _stacktrace:
st_text = process_stacktrace(2)
if newline:
cprint("@*b{%s==>} %s%s" % (st_text, get_timestamp(), cescape(_output_filter(message))))
else:
cwrite("@*b{%s==>} %s%s" % (st_text, get_timestamp(), cescape(_output_filter(message))))

nl = "\n" if newline else ""
cwrite(f"@*b{{{st_text}==>}} {get_timestamp()}{cescape(_output_filter(message))}{nl}")

for arg in args:
print(indent + _output_filter(str(arg)))
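The msg signature change turns newline into an explicit keyword-only argument, so misspelled keywords now fail instead of being silently swallowed by **kwargs. A tiny comparison of the two styles (toy functions, not the tty module itself):

    def old_msg(message, *args, **kwargs):
        return kwargs.get("newline", True)   # typo'd keywords vanish silently

    def new_msg(message, *args, newline=True):
        return newline

    print(old_msg("hi", newlne=False))   # True -- the typo is ignored
    print(new_msg("hi", newline=False))  # False
    # new_msg("hi", newlne=False) raises TypeError, surfacing the mistake
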
@@ -237,7 +237,6 @@ def transpose():
def colified(
elts: List[Any],
cols: int = 0,
output: Optional[IO] = None,
indent: int = 0,
padding: int = 2,
tty: Optional[bool] = None,
@@ -59,9 +59,11 @@

To output an @, use '@@'. To output a } inside braces, use '}}'.
"""
import os
import re
import sys
from contextlib import contextmanager
from typing import Optional


class ColorParseError(Exception):
@@ -95,14 +97,34 @@ def __init__(self, message):
} # white

# Regex to be used for color formatting
color_re = r"@(?:@|\.|([*_])?([a-zA-Z])?(?:{((?:[^}]|}})*)})?)"
COLOR_RE = re.compile(r"@(?:(@)|(\.)|([*_])?([a-zA-Z])?(?:{((?:[^}]|}})*)})?)")

# Mapping from color arguments to values for tty.set_color
color_when_values = {"always": True, "auto": None, "never": False}

# Force color; None: Only color if stdout is a tty
# True: Always colorize output, False: Never colorize output
_force_color = None

def _color_when_value(when):
"""Raise a ValueError for an invalid color setting.

Valid values are 'always', 'never', and 'auto', or equivalently,
True, False, and None.
"""
if when in color_when_values:
return color_when_values[when]
elif when not in color_when_values.values():
raise ValueError("Invalid color setting: %s" % when)
return when


def _color_from_environ() -> Optional[bool]:
try:
return _color_when_value(os.environ.get("SPACK_COLOR", "auto"))
except ValueError:
return None


#: When `None` colorize when stdout is tty, when `True` or `False` always or never colorize resp.
_force_color = _color_from_environ()


def try_enable_terminal_color_on_windows():
@@ -163,19 +185,6 @@ def _err_check(result, func, args):
debug("Unable to support color on Windows terminal")


def _color_when_value(when):
"""Raise a ValueError for an invalid color setting.

Valid values are 'always', 'never', and 'auto', or equivalently,
True, False, and None.
"""
if when in color_when_values:
return color_when_values[when]
elif when not in color_when_values.values():
raise ValueError("Invalid color setting: %s" % when)
return when


def get_color_when():
"""Return whether commands should print color or not."""
if _force_color is not None:
@@ -203,77 +212,64 @@ def color_when(value):
set_color_when(old_value)


class match_to_ansi:
def __init__(self, color=True, enclose=False, zsh=False):
self.color = _color_when_value(color)
self.enclose = enclose
self.zsh = zsh

def escape(self, s):
"""Returns a TTY escape sequence for a color"""
if self.color:
if self.zsh:
result = rf"\e[0;{s}m"
else:
result = f"\033[{s}m"

if self.enclose:
result = rf"\[{result}\]"

return result
def _escape(s: str, color: bool, enclose: bool, zsh: bool) -> str:
"""Returns a TTY escape sequence for a color"""
if color:
if zsh:
result = rf"\e[0;{s}m"
else:
return ""
result = f"\033[{s}m"

def __call__(self, match):
"""Convert a match object generated by ``color_re`` into an ansi
color code. This can be used as a handler in ``re.sub``.
"""
style, color, text = match.groups()
m = match.group(0)
if enclose:
result = rf"\[{result}\]"

if m == "@@":
return "@"
elif m == "@.":
return self.escape(0)
elif m == "@":
raise ColorParseError("Incomplete color format: '%s' in %s" % (m, match.string))

string = styles[style]
if color:
if color not in colors:
raise ColorParseError(
"Invalid color specifier: '%s' in '%s'" % (color, match.string)
)
string += ";" + str(colors[color])

colored_text = ""
if text:
colored_text = text + self.escape(0)

return self.escape(string) + colored_text
return result
else:
return ""


def colorize(string, **kwargs):
def colorize(
string: str, color: Optional[bool] = None, enclose: bool = False, zsh: bool = False
) -> str:
"""Replace all color expressions in a string with ANSI control codes.

Args:
string (str): The string to replace
string: The string to replace

Returns:
str: The filtered string
The filtered string

Keyword Arguments:
color (bool): If False, output will be plain text without control
codes, for output to non-console devices.
enclose (bool): If True, enclose ansi color sequences with
color: If False, output will be plain text without control codes, for output to
non-console devices (default: automatically choose color or not)
enclose: If True, enclose ansi color sequences with
square brackets to prevent misestimation of terminal width.
zsh (bool): If True, use zsh ansi codes instead of bash ones (for variables like PS1)
zsh: If True, use zsh ansi codes instead of bash ones (for variables like PS1)
"""
color = _color_when_value(kwargs.get("color", get_color_when()))
zsh = kwargs.get("zsh", False)
string = re.sub(color_re, match_to_ansi(color, kwargs.get("enclose")), string, zsh)
string = string.replace("}}", "}")
return string
color = color if color is not None else get_color_when()

def match_to_ansi(match):
"""Convert a match object generated by ``COLOR_RE`` into an ansi
color code. This can be used as a handler in ``re.sub``.
"""
escaped_at, dot, style, color_code, text = match.groups()

if escaped_at:
return "@"
elif dot:
return _escape(0, color, enclose, zsh)
elif not (style or color_code):
raise ColorParseError(
f"Incomplete color format: '{match.group(0)}' in '{match.string}'"
)

ansi_code = _escape(f"{styles[style]};{colors.get(color_code, '')}", color, enclose, zsh)
if text:
return f"{ansi_code}{text}{_escape(0, color, enclose, zsh)}"
else:
return ansi_code

return COLOR_RE.sub(match_to_ansi, string).replace("}}", "}")


def clen(string):
@@ -305,7 +301,7 @@ def cprint(string, stream=None, color=None):
cwrite(string + "\n", stream, color)


def cescape(string):
def cescape(string: str) -> str:
"""Escapes special characters needed for color codes.

Replaces the following symbols with their equivalent literal forms:
@@ -321,10 +317,7 @@ def cescape(string):
Returns:
(str): the string with color codes escaped
"""
string = str(string)
string = string.replace("@", "@@")
string = string.replace("}", "}}")
return string
return string.replace("@", "@@").replace("}", "}}")


class ColorStream:
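The rewritten colorize drives everything through the compiled COLOR_RE and a single nested match_to_ansi handler. A self-contained re-creation with a tiny style/color table (the real module has the full tables plus the enclose/zsh variants):

    import re

    COLOR_RE = re.compile(r"@(?:(@)|(\.)|([*_])?([a-zA-Z])?(?:{((?:[^}]|}})*)})?)")
    styles = {None: "0", "*": "1", "_": "4"}
    colors = {"b": 34, "r": 31, "g": 32}

    def colorize(string: str) -> str:
        def match_to_ansi(match):
            escaped_at, dot, style, color_code, text = match.groups()
            if escaped_at:
                return "@"                       # "@@" escapes a literal @
            if dot:
                return "\033[0m"                 # "@." resets all attributes
            code = f"\033[{styles[style]};{colors.get(color_code, '')}m"
            return f"{code}{text}\033[0m" if text else code
        return COLOR_RE.sub(match_to_ansi, string).replace("}}", "}")

    print(repr(colorize("@*b{==>} done")))  # '\x1b[1;34m==>\x1b[0m done'
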
@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
__version__ = "0.22.0.dev0"
__version__ = "0.22.0"
spack_version = __version__
@@ -254,8 +254,8 @@ def _search_duplicate_specs_in_externals(error_cls):

@config_packages
def _deprecated_preferences(error_cls):
"""Search package preferences deprecated in v0.21 (and slated for removal in v0.22)"""
# TODO (v0.22): remove this audit as the attributes will not be allowed in config
"""Search package preferences deprecated in v0.21 (and slated for removal in v0.23)"""
# TODO (v0.23): remove this audit as the attributes will not be allowed in config
errors = []
packages_yaml = spack.config.CONFIG.get_config("packages")

@@ -1046,7 +1046,7 @@ def _extracts_errors(triggers, summary):
group="externals",
tag="PKG-EXTERNALS",
description="Sanity checks for external software detection",
kwargs=("pkgs",),
kwargs=("pkgs", "debug_log"),
)


@@ -1069,7 +1069,7 @@ def packages_with_detection_tests():


@external_detection
def _test_detection_by_executable(pkgs, error_cls):
def _test_detection_by_executable(pkgs, debug_log, error_cls):
"""Test drive external detection for packages"""
import spack.detection

@@ -1095,6 +1095,7 @@ def _test_detection_by_executable(pkgs, error_cls):
for idx, test_runner in enumerate(
spack.detection.detection_tests(pkg_name, spack.repo.PATH)
):
debug_log(f"[{__file__}]: running test {idx} for package {pkg_name}")
specs = test_runner.execute()
expected_specs = test_runner.expected_specs

@@ -1111,4 +1112,75 @@ def _test_detection_by_executable(pkgs, error_cls):
details = [msg.format(s, idx) for s in sorted(not_expected)]
errors.append(error_cls(summary=summary, details=details))

matched_detection = []
for candidate in expected_specs:
try:
idx = specs.index(candidate)
matched_detection.append((candidate, specs[idx]))
except (AttributeError, ValueError):
pass

def _compare_extra_attribute(_expected, _detected, *, _spec):
result = []
# Check items are of the same type
if not isinstance(_detected, type(_expected)):
_summary = f'{pkg_name}: error when trying to detect "{_expected}"'
_details = [f"{_detected} was detected instead"]
return [error_cls(summary=_summary, details=_details)]

# If they are string expected is a regex
if isinstance(_expected, str):
try:
_regex = re.compile(_expected)
except re.error:
_summary = f'{pkg_name}: illegal regex in "{_spec}" extra attributes'
_details = [f"{_expected} is not a valid regex"]
return [error_cls(summary=_summary, details=_details)]

if not _regex.match(_detected):
_summary = (
f'{pkg_name}: error when trying to match "{_expected}" '
f"in extra attributes"
)
_details = [f"{_detected} does not match the regex"]
return [error_cls(summary=_summary, details=_details)]

if isinstance(_expected, dict):
_not_detected = set(_expected.keys()) - set(_detected.keys())
if _not_detected:
_summary = f"{pkg_name}: cannot detect some attributes for spec {_spec}"
_details = [
f'"{_expected}" was expected',
f'"{_detected}" was detected',
] + [f'attribute "{s}" was not detected' for s in sorted(_not_detected)]
result.append(error_cls(summary=_summary, details=_details))

_common = set(_expected.keys()) & set(_detected.keys())
for _key in _common:
result.extend(
_compare_extra_attribute(_expected[_key], _detected[_key], _spec=_spec)
)

return result

for expected, detected in matched_detection:
# We might not want to test all attributes, so avoid not_expected
not_detected = set(expected.extra_attributes) - set(detected.extra_attributes)
if not_detected:
summary = f"{pkg_name}: cannot detect some attributes for spec {expected}"
details = [
f'"{s}" was not detected [test_id={idx}]' for s in sorted(not_detected)
]
errors.append(error_cls(summary=summary, details=details))

common = set(expected.extra_attributes) & set(detected.extra_attributes)
for key in common:
errors.extend(
_compare_extra_attribute(
expected.extra_attributes[key],
detected.extra_attributes[key],
_spec=expected,
)
)

return errors
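The new _compare_extra_attribute helper walks expected vs. detected extra_attributes: strings are treated as regexes, dictionaries are compared key by key and recursed into. A standalone re-creation of the same logic that returns plain strings instead of audit error objects:

    import re

    def compare_extra_attribute(expected, detected, path="extra_attributes"):
        errors = []
        if not isinstance(detected, type(expected)):
            return [f"{path}: expected {expected!r}, detected {detected!r}"]
        if isinstance(expected, str):
            # Expected strings are regexes matched against the detected value.
            if not re.match(expected, detected):
                errors.append(f"{path}: {detected!r} does not match {expected!r}")
        elif isinstance(expected, dict):
            for key in sorted(set(expected) - set(detected)):
                errors.append(f"{path}.{key}: attribute was not detected")
            for key in set(expected) & set(detected):
                errors.extend(
                    compare_extra_attribute(expected[key], detected[key], f"{path}.{key}")
                )
        return errors

    expected = {"compilers": {"c": r".*/clang-\d+$", "cxx": r".*/clang\+\+-\d+$"}}
    detected = {"compilers": {"c": "/usr/bin/clang-15"}}
    print(compare_extra_attribute(expected, detected))
    # ['extra_attributes.compilers.cxx: attribute was not detected']
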
@@ -5,7 +5,13 @@
"""Function and classes needed to bootstrap Spack itself."""

from .config import ensure_bootstrap_configuration, is_bootstrapping, store_path
from .core import all_core_root_specs, ensure_core_dependencies, ensure_patchelf_in_path_or_raise
from .core import (
all_core_root_specs,
ensure_clingo_importable_or_raise,
ensure_core_dependencies,
ensure_gpg_in_path_or_raise,
ensure_patchelf_in_path_or_raise,
)
from .environment import BootstrapEnvironment, ensure_environment_dependencies
from .status import status_message

@@ -13,6 +19,8 @@
"is_bootstrapping",
"ensure_bootstrap_configuration",
"ensure_core_dependencies",
"ensure_gpg_in_path_or_raise",
"ensure_clingo_importable_or_raise",
"ensure_patchelf_in_path_or_raise",
"all_core_root_specs",
"ensure_environment_dependencies",
@@ -54,10 +54,14 @@ def _try_import_from_store(
installed_specs = spack.store.STORE.db.query(query_spec, installed=True)

for candidate_spec in installed_specs:
pkg = candidate_spec["python"].package
# previously bootstrapped specs may not have a python-venv dependency.
if candidate_spec.dependencies("python-venv"):
python, *_ = candidate_spec.dependencies("python-venv")
else:
python, *_ = candidate_spec.dependencies("python")
module_paths = [
os.path.join(candidate_spec.prefix, pkg.purelib),
os.path.join(candidate_spec.prefix, pkg.platlib),
os.path.join(candidate_spec.prefix, python.package.purelib),
os.path.join(candidate_spec.prefix, python.package.platlib),
]
path_before = list(sys.path)
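The lookup above prefers a python-venv dependency and only falls back to plain python for specs bootstrapped before python-venv existed. A minimal mock of that or-based fallback (stand-in objects, not Spack specs):

    class FakeSpec:
        def __init__(self, deps):
            self._deps = deps

        def dependencies(self, name):
            return [d for d in self._deps if d == name]

    old_style = FakeSpec(["python"])                 # pre python-venv bootstrap
    new_style = FakeSpec(["python-venv", "python"])  # current layout
    for spec in (old_style, new_style):
        python, *_ = spec.dependencies("python-venv") or spec.dependencies("python")
        print(python)   # "python", then "python-venv"
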
@@ -173,35 +173,14 @@ def _read_metadata(self, package_name: str) -> Any:
|
||||
return data
|
||||
|
||||
def _install_by_hash(
|
||||
self,
|
||||
pkg_hash: str,
|
||||
pkg_sha256: str,
|
||||
index: List[spack.spec.Spec],
|
||||
bincache_platform: spack.platforms.Platform,
|
||||
self, pkg_hash: str, pkg_sha256: str, bincache_platform: spack.platforms.Platform
|
||||
) -> None:
|
||||
index_spec = next(x for x in index if x.dag_hash() == pkg_hash)
|
||||
# Reconstruct the compiler that we need to use for bootstrapping
|
||||
compiler_entry = {
|
||||
"modules": [],
|
||||
"operating_system": str(index_spec.os),
|
||||
"paths": {
|
||||
"cc": "/dev/null",
|
||||
"cxx": "/dev/null",
|
||||
"f77": "/dev/null",
|
||||
"fc": "/dev/null",
|
||||
},
|
||||
"spec": str(index_spec.compiler),
|
||||
"target": str(index_spec.target.family),
|
||||
}
|
||||
with spack.platforms.use_platform(bincache_platform):
|
||||
with spack.config.override("compilers", [{"compiler": compiler_entry}]):
|
||||
spec_str = "/" + pkg_hash
|
||||
query = spack.binary_distribution.BinaryCacheQuery(all_architectures=True)
|
||||
matches = spack.store.find([spec_str], multiple=False, query_fn=query)
|
||||
for match in matches:
|
||||
spack.binary_distribution.install_root_node(
|
||||
match, unsigned=True, force=True, sha256=pkg_sha256
|
||||
)
|
||||
query = spack.binary_distribution.BinaryCacheQuery(all_architectures=True)
|
||||
for match in spack.store.find([f"/{pkg_hash}"], multiple=False, query_fn=query):
|
||||
spack.binary_distribution.install_root_node(
|
||||
match, unsigned=True, force=True, sha256=pkg_sha256
|
||||
)
|
||||
|
||||
def _install_and_test(
|
||||
self,
|
||||
@@ -232,7 +211,7 @@ def _install_and_test(
|
||||
continue
|
||||
|
||||
for _, pkg_hash, pkg_sha256 in item["binaries"]:
|
||||
self._install_by_hash(pkg_hash, pkg_sha256, index, bincache_platform)
|
||||
self._install_by_hash(pkg_hash, pkg_sha256, bincache_platform)
|
||||
|
||||
info: ConfigDictionary = {}
|
||||
if test_fn(query_spec=abstract_spec, query_info=info):
|
||||
@@ -291,10 +270,6 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
|
||||
with spack_python_interpreter():
|
||||
# Add hint to use frontend operating system on Cray
|
||||
concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python())
|
||||
# This is needed to help the old concretizer taking the `setuptools` dependency
|
||||
# only when bootstrapping from sources on Python 3.12
|
||||
if spec_for_current_python() == "python@3.12":
|
||||
concrete_spec.constrain("+force_setuptools")
|
||||
|
||||
if module == "clingo":
|
||||
# TODO: remove when the old concretizer is deprecated # pylint: disable=fixme
|
||||
@@ -559,6 +534,41 @@ def ensure_patchelf_in_path_or_raise() -> spack.util.executable.Executable:
|
||||
)
|
||||
|
||||
|
||||
def ensure_winsdk_external_or_raise() -> None:
|
||||
"""Ensure the Windows SDK + WGL are available on system
|
||||
If both of these package are found, the Spack user or bootstrap
|
||||
configuration (depending on where Spack is running)
|
||||
will be updated to include all versions and variants detected.
|
||||
If either the WDK or WSDK are not found, this method will raise
|
||||
a RuntimeError.
|
||||
|
||||
**NOTE:** This modifies the Spack config in the current scope,
|
||||
either user or environment depending on the calling context.
|
||||
This is different from all other current bootstrap dependency
|
||||
checks.
|
||||
"""
|
||||
if set(["win-sdk", "wgl"]).issubset(spack.config.get("packages").keys()):
|
||||
return
|
||||
externals = spack.detection.by_path(["win-sdk", "wgl"])
|
||||
if not set(["win-sdk", "wgl"]) == externals.keys():
|
||||
missing_packages_lst = []
|
||||
if "wgl" not in externals:
|
||||
missing_packages_lst.append("wgl")
|
||||
if "win-sdk" not in externals:
|
||||
missing_packages_lst.append("win-sdk")
|
||||
missing_packages = " & ".join(missing_packages_lst)
|
||||
raise RuntimeError(
|
||||
f"Unable to find the {missing_packages}, please install these packages \
|
||||
via the Visual Studio installer \
|
||||
before proceeding with Spack or provide the path to a non standard install with \
|
||||
'spack external find --path'"
|
||||
)
|
||||
# wgl/sdk are not required for bootstrapping Spack, but
|
||||
# are required for building anything non trivial
|
||||
# add to user config so they can be used by subsequent Spack ops
|
||||
spack.detection.update_configuration(externals, buildable=False)
|
||||
|
||||
|
||||
def ensure_core_dependencies() -> None:
|
||||
"""Ensure the presence of all the core dependencies."""
|
||||
if sys.platform.lower() == "linux":
|
||||
|
@@ -3,13 +3,11 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
"""Bootstrap non-core Spack dependencies from an environment."""
|
||||
import glob
|
||||
import hashlib
|
||||
import os
|
||||
import pathlib
|
||||
import sys
|
||||
import warnings
|
||||
from typing import List
|
||||
from typing import Iterable, List
|
||||
|
||||
import archspec.cpu
|
||||
|
||||
@@ -28,6 +26,16 @@
|
||||
class BootstrapEnvironment(spack.environment.Environment):
|
||||
"""Environment to install dependencies of Spack for a given interpreter and architecture"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
if not self.spack_yaml().exists():
|
||||
self._write_spack_yaml_file()
|
||||
super().__init__(self.environment_root())
|
||||
|
||||
# Remove python package roots created before python-venv was introduced
|
||||
for s in self.concrete_roots():
|
||||
if "python" in s.package.extendees and not s.dependencies("python-venv"):
|
||||
self.deconcretize(s)
|
||||
|
||||
@classmethod
|
||||
def spack_dev_requirements(cls) -> List[str]:
|
||||
"""Spack development requirements"""
|
||||
@@ -59,31 +67,19 @@ def view_root(cls) -> pathlib.Path:
|
||||
return cls.environment_root().joinpath("view")
|
||||
|
||||
@classmethod
|
||||
def pythonpaths(cls) -> List[str]:
|
||||
"""Paths to be added to sys.path or PYTHONPATH"""
|
||||
python_dir_part = f"python{'.'.join(str(x) for x in sys.version_info[:2])}"
|
||||
glob_expr = str(cls.view_root().joinpath("**", python_dir_part, "**"))
|
||||
result = glob.glob(glob_expr)
|
||||
if not result:
|
||||
msg = f"Cannot find any Python path in {cls.view_root()}"
|
||||
warnings.warn(msg)
|
||||
return result
|
||||
|
||||
@classmethod
|
||||
def bin_dirs(cls) -> List[pathlib.Path]:
|
||||
def bin_dir(cls) -> pathlib.Path:
|
||||
"""Paths to be added to PATH"""
|
||||
return [cls.view_root().joinpath("bin")]
|
||||
return cls.view_root().joinpath("bin")
|
||||
|
||||
def python_dirs(self) -> Iterable[pathlib.Path]:
|
||||
python = next(s for s in self.all_specs_generator() if s.name == "python-venv").package
|
||||
return {self.view_root().joinpath(p) for p in (python.platlib, python.purelib)}
|
||||
|
||||
@classmethod
|
||||
def spack_yaml(cls) -> pathlib.Path:
|
||||
"""Environment spack.yaml file"""
|
||||
return cls.environment_root().joinpath("spack.yaml")
|
||||
|
||||
def __init__(self) -> None:
|
||||
if not self.spack_yaml().exists():
|
||||
self._write_spack_yaml_file()
|
||||
super().__init__(self.environment_root())
|
||||
|
||||
def update_installations(self) -> None:
|
||||
"""Update the installations of this environment."""
|
||||
log_enabled = tty.is_debug() or tty.is_verbose()
|
||||
@@ -100,21 +96,13 @@ def update_installations(self) -> None:
|
||||
self.install_all()
|
||||
self.write(regenerate=True)
|
||||
|
||||
def update_syspath_and_environ(self) -> None:
|
||||
"""Update ``sys.path`` and the PATH, PYTHONPATH environment variables to point to
|
||||
the environment view.
|
||||
"""
|
||||
# Do minimal modifications to sys.path and environment variables. In particular, pay
|
||||
# attention to have the smallest PYTHONPATH / sys.path possible, since that may impact
|
||||
# the performance of the current interpreter
|
||||
sys.path.extend(self.pythonpaths())
|
||||
os.environ["PATH"] = os.pathsep.join(
|
||||
[str(x) for x in self.bin_dirs()] + os.environ.get("PATH", "").split(os.pathsep)
|
||||
)
|
||||
os.environ["PYTHONPATH"] = os.pathsep.join(
|
||||
os.environ.get("PYTHONPATH", "").split(os.pathsep)
|
||||
+ [str(x) for x in self.pythonpaths()]
|
||||
)
|
||||
def load(self) -> None:
|
||||
"""Update PATH and sys.path."""
|
||||
# Make executables available (shouldn't need PYTHONPATH)
|
||||
os.environ["PATH"] = f"{self.bin_dir()}{os.pathsep}{os.environ.get('PATH', '')}"
|
||||
|
||||
# Spack itself imports pytest
|
||||
sys.path.extend(str(p) for p in self.python_dirs())
|
||||
|
||||
def _write_spack_yaml_file(self) -> None:
|
||||
tty.msg(
|
||||
@@ -164,4 +152,4 @@ def ensure_environment_dependencies() -> None:
|
||||
_add_externals_if_missing()
|
||||
with BootstrapEnvironment() as env:
|
||||
env.update_installations()
|
||||
env.update_syspath_and_environ()
|
||||
env.load()
|
||||
|
@@ -43,7 +43,7 @@
|
||||
from collections import defaultdict
|
||||
from enum import Flag, auto
|
||||
from itertools import chain
|
||||
from typing import List, Tuple
|
||||
from typing import List, Set, Tuple
|
||||
|
||||
import llnl.util.tty as tty
|
||||
from llnl.string import plural
|
||||
@@ -57,8 +57,10 @@
|
||||
import spack.build_systems.meson
|
||||
import spack.build_systems.python
|
||||
import spack.builder
|
||||
import spack.compilers
|
||||
import spack.config
|
||||
import spack.deptypes as dt
|
||||
import spack.error
|
||||
import spack.main
|
||||
import spack.package_base
|
||||
import spack.paths
|
||||
@@ -66,6 +68,7 @@
|
||||
import spack.repo
|
||||
import spack.schema.environment
|
||||
import spack.spec
|
||||
import spack.stage
|
||||
import spack.store
|
||||
import spack.subprocess_context
|
||||
import spack.user_environment
|
||||
@@ -78,7 +81,7 @@
|
||||
from spack.installer import InstallError
|
||||
from spack.util.cpus import determine_number_of_jobs
|
||||
from spack.util.environment import (
|
||||
SYSTEM_DIRS,
|
||||
SYSTEM_DIR_CASE_ENTRY,
|
||||
EnvironmentModifications,
|
||||
env_flag,
|
||||
filter_system_paths,
|
||||
@@ -101,9 +104,13 @@
|
||||
# Spack's compiler wrappers.
|
||||
#
|
||||
SPACK_ENV_PATH = "SPACK_ENV_PATH"
|
||||
SPACK_MANAGED_DIRS = "SPACK_MANAGED_DIRS"
|
||||
SPACK_INCLUDE_DIRS = "SPACK_INCLUDE_DIRS"
|
||||
SPACK_LINK_DIRS = "SPACK_LINK_DIRS"
|
||||
SPACK_RPATH_DIRS = "SPACK_RPATH_DIRS"
|
||||
SPACK_STORE_INCLUDE_DIRS = "SPACK_STORE_INCLUDE_DIRS"
|
||||
SPACK_STORE_LINK_DIRS = "SPACK_STORE_LINK_DIRS"
|
||||
SPACK_STORE_RPATH_DIRS = "SPACK_STORE_RPATH_DIRS"
|
||||
SPACK_RPATH_DEPS = "SPACK_RPATH_DEPS"
|
||||
SPACK_LINK_DEPS = "SPACK_LINK_DEPS"
|
||||
SPACK_PREFIX = "SPACK_PREFIX"
|
||||
@@ -416,7 +423,7 @@ def set_compiler_environment_variables(pkg, env):
|
||||
|
||||
env.set("SPACK_COMPILER_SPEC", str(spec.compiler))
|
||||
|
||||
env.set("SPACK_SYSTEM_DIRS", ":".join(SYSTEM_DIRS))
|
||||
env.set("SPACK_SYSTEM_DIRS", SYSTEM_DIR_CASE_ENTRY)
|
||||
|
||||
compiler.setup_custom_environment(pkg, env)
|
||||
|
||||
@@ -544,9 +551,26 @@ def update_compiler_args_for_dep(dep):
|
||||
include_dirs = list(dedupe(filter_system_paths(include_dirs)))
|
||||
rpath_dirs = list(dedupe(filter_system_paths(rpath_dirs)))
|
||||
|
||||
env.set(SPACK_LINK_DIRS, ":".join(link_dirs))
|
||||
env.set(SPACK_INCLUDE_DIRS, ":".join(include_dirs))
|
||||
env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs))
|
||||
# Spack managed directories include the stage, store and upstream stores. We extend this with
|
||||
# their real paths to make it more robust (e.g. /tmp vs /private/tmp on macOS).
|
||||
spack_managed_dirs: Set[str] = {
|
||||
spack.stage.get_stage_root(),
|
||||
spack.store.STORE.db.root,
|
||||
*(db.root for db in spack.store.STORE.db.upstream_dbs),
|
||||
}
|
||||
spack_managed_dirs.update([os.path.realpath(p) for p in spack_managed_dirs])
|
||||
|
||||
env.set(SPACK_MANAGED_DIRS, "|".join(f'"{p}/"*' for p in sorted(spack_managed_dirs)))
|
||||
is_spack_managed = lambda p: any(p.startswith(store) for store in spack_managed_dirs)
|
||||
link_dirs_spack, link_dirs_system = stable_partition(link_dirs, is_spack_managed)
|
||||
include_dirs_spack, include_dirs_system = stable_partition(include_dirs, is_spack_managed)
|
||||
rpath_dirs_spack, rpath_dirs_system = stable_partition(rpath_dirs, is_spack_managed)
|
||||
env.set(SPACK_LINK_DIRS, ":".join(link_dirs_system))
|
||||
env.set(SPACK_INCLUDE_DIRS, ":".join(include_dirs_system))
|
||||
env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs_system))
|
||||
env.set(SPACK_STORE_LINK_DIRS, ":".join(link_dirs_spack))
|
||||
env.set(SPACK_STORE_INCLUDE_DIRS, ":".join(include_dirs_spack))
|
||||
env.set(SPACK_STORE_RPATH_DIRS, ":".join(rpath_dirs_spack))
|
||||
|
||||
|
||||
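The block above splits every link/include/rpath directory into a Spack-managed group (stage, store, and upstream stores) and a system group, exporting them through separate SPACK_STORE_* and SPACK_* variables. A standalone sketch of that prefix-based partition; stable_partition here is a local re-creation of the helper from llnl.util.lang, and the store path is an assumed example:

    import os

    def stable_partition(items, predicate):
        """Split items into (matching, non-matching) while keeping their order."""
        yes, no = [], []
        for item in items:
            (yes if predicate(item) else no).append(item)
        return yes, no

    managed = {"/opt/spack/store", os.path.realpath("/opt/spack/store")}
    link_dirs = ["/opt/spack/store/gcc-13.2.0/lib", "/usr/lib", "/opt/spack/store/zlib/lib"]

    is_managed = lambda p: any(p.startswith(root) for root in managed)
    store_dirs, system_dirs = stable_partition(link_dirs, is_managed)
    print(store_dirs)   # ['/opt/spack/store/gcc-13.2.0/lib', '/opt/spack/store/zlib/lib']
    print(system_dirs)  # ['/usr/lib']
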
def set_package_py_globals(pkg, context: Context = Context.BUILD):
|
||||
@@ -583,10 +607,22 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
|
||||
# Put spack compiler paths in module scope. (Some packages use it
|
||||
# in setup_run_environment etc, so don't put it context == build)
|
||||
link_dir = spack.paths.build_env_path
|
||||
module.spack_cc = os.path.join(link_dir, pkg.compiler.link_paths["cc"])
|
||||
module.spack_cxx = os.path.join(link_dir, pkg.compiler.link_paths["cxx"])
|
||||
module.spack_f77 = os.path.join(link_dir, pkg.compiler.link_paths["f77"])
|
||||
module.spack_fc = os.path.join(link_dir, pkg.compiler.link_paths["fc"])
|
||||
pkg_compiler = None
|
||||
try:
|
||||
pkg_compiler = pkg.compiler
|
||||
except spack.compilers.NoCompilerForSpecError as e:
|
||||
tty.debug(f"cannot set 'spack_cc': {str(e)}")
|
||||
|
||||
if pkg_compiler is not None:
|
||||
module.spack_cc = os.path.join(link_dir, pkg_compiler.link_paths["cc"])
|
||||
module.spack_cxx = os.path.join(link_dir, pkg_compiler.link_paths["cxx"])
|
||||
module.spack_f77 = os.path.join(link_dir, pkg_compiler.link_paths["f77"])
|
||||
module.spack_fc = os.path.join(link_dir, pkg_compiler.link_paths["fc"])
|
||||
else:
|
||||
module.spack_cc = None
|
||||
module.spack_cxx = None
|
||||
module.spack_f77 = None
|
||||
module.spack_fc = None
|
||||
|
||||
# Useful directories within the prefix are encapsulated in
|
||||
# a Prefix object.
|
||||
|
@@ -139,11 +139,6 @@ def initconfig_compiler_entries(self):
"endif()\n",
]

# We defined hipcc as top-level compiler for packages when +rocm.
# This avoid problems coming from rocm flags being applied to another compiler.
if "+rocm" in spec:
entries.insert(0, cmake_cache_path("CMAKE_CXX_COMPILER", self.spec["hip"].hipcc))

flags = spec.compiler_flags

# use global spack compiler flags
@@ -16,7 +16,7 @@


class CargoPackage(spack.package_base.PackageBase):
"""Specialized class for packages built using a Makefiles."""
"""Specialized class for packages built using cargo."""

#: This attribute is used in UI queries that need to know the build
#: system base class
@@ -39,16 +39,11 @@ def _maybe_set_python_hints(pkg: spack.package_base.PackageBase, args: List[str]
"""Set the PYTHON_EXECUTABLE, Python_EXECUTABLE, and Python3_EXECUTABLE CMake variables
if the package has Python as build or link dep and ``find_python_hints`` is set to True. See
``find_python_hints`` for context."""
if not getattr(pkg, "find_python_hints", False):
if not getattr(pkg, "find_python_hints", False) or not pkg.spec.dependencies(
"python", dt.BUILD | dt.LINK
):
return
pythons = pkg.spec.dependencies("python", dt.BUILD | dt.LINK)
if len(pythons) != 1:
return
try:
python_executable = pythons[0].package.command.path
except RuntimeError:
return

python_executable = pkg.spec["python"].command.path
args.extend(
[
CMakeBuilder.define("PYTHON_EXECUTABLE", python_executable),
lib/spack/spack/build_systems/compiler.py (new file)
@@ -0,0 +1,144 @@
|
||||
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import itertools
|
||||
import os
|
||||
import pathlib
|
||||
import re
|
||||
import sys
|
||||
from typing import Dict, List, Sequence, Tuple, Union
|
||||
|
||||
import llnl.util.tty as tty
|
||||
from llnl.util.lang import classproperty
|
||||
|
||||
import spack.compiler
|
||||
import spack.package_base
|
||||
|
||||
# Local "type" for type hints
|
||||
Path = Union[str, pathlib.Path]
|
||||
|
||||
|
||||
class CompilerPackage(spack.package_base.PackageBase):
|
||||
"""A Package mixin for all common logic for packages that implement compilers"""
|
||||
|
||||
# TODO: how do these play nicely with other tags
|
||||
tags: Sequence[str] = ["compiler"]
|
||||
|
||||
#: Optional suffix regexes for searching for this type of compiler.
|
||||
#: Suffixes are used by some frameworks, e.g. macports uses an '-mp-X.Y'
|
||||
#: version suffix for gcc.
|
||||
compiler_suffixes: List[str] = [r"-.*"]
|
||||
|
||||
#: Optional prefix regexes for searching for this compiler
|
||||
compiler_prefixes: List[str] = []
|
||||
|
||||
#: Compiler argument(s) that produces version information
|
||||
#: If multiple arguments, the earlier arguments must produce errors when invalid
|
||||
compiler_version_argument: Union[str, Tuple[str]] = "-dumpversion"
|
||||
|
||||
#: Regex used to extract version from compiler's output
|
||||
compiler_version_regex: str = "(.*)"
|
||||
|
||||
#: Static definition of languages supported by this class
|
||||
compiler_languages: Sequence[str] = ["c", "cxx", "fortran"]
|
||||
|
||||
def __init__(self, spec: "spack.spec.Spec"):
|
||||
super().__init__(spec)
|
||||
msg = f"Supported languages for {spec} are not a subset of possible supported languages"
|
||||
msg += f" supports: {self.supported_languages}, valid values: {self.compiler_languages}"
|
||||
assert set(self.supported_languages) <= set(self.compiler_languages), msg
|
||||
|
||||
@property
|
||||
def supported_languages(self) -> Sequence[str]:
|
||||
"""Dynamic definition of languages supported by this package"""
|
||||
return self.compiler_languages
|
||||
|
||||
@classproperty
|
||||
def compiler_names(cls) -> Sequence[str]:
|
||||
"""Construct list of compiler names from per-language names"""
|
||||
names = []
|
||||
for language in cls.compiler_languages:
|
||||
names.extend(getattr(cls, f"{language}_names"))
|
||||
return names
|
||||
|
||||
@classproperty
|
||||
def executables(cls) -> Sequence[str]:
|
||||
"""Construct executables for external detection from names, prefixes, and suffixes."""
|
||||
regexp_fmt = r"^({0}){1}({2})$"
|
||||
prefixes = [""] + cls.compiler_prefixes
|
||||
suffixes = [""] + cls.compiler_suffixes
|
||||
if sys.platform == "win32":
|
||||
ext = r"\.(?:exe|bat)"
|
||||
suffixes += [suf + ext for suf in suffixes]
|
||||
return [
|
||||
regexp_fmt.format(prefix, re.escape(name), suffix)
|
||||
for prefix, name, suffix in itertools.product(prefixes, cls.compiler_names, suffixes)
|
||||
]
|
||||
|
||||
@classmethod
|
||||
def determine_version(cls, exe: Path):
|
||||
version_argument = cls.compiler_version_argument
|
||||
if isinstance(version_argument, str):
|
||||
version_argument = (version_argument,)
|
||||
|
||||
for va in version_argument:
|
||||
try:
|
||||
output = spack.compiler.get_compiler_version_output(exe, va)
|
||||
match = re.search(cls.compiler_version_regex, output)
|
||||
if match:
|
||||
return ".".join(match.groups())
|
||||
except spack.util.executable.ProcessError:
|
||||
pass
|
||||
except Exception as e:
|
||||
tty.debug(
|
||||
f"[{__file__}] Cannot detect a valid version for the executable "
|
||||
f"{str(exe)}, for package '{cls.name}': {e}"
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def compiler_bindir(cls, prefix: Path) -> Path:
|
||||
"""Overridable method for the location of the compiler bindir within the preifx"""
|
||||
return os.path.join(prefix, "bin")
|
||||
|
||||
@classmethod
|
||||
def determine_compiler_paths(cls, exes: Sequence[Path]) -> Dict[str, Path]:
|
||||
"""Compute the paths to compiler executables associated with this package
|
||||
|
||||
This is a helper method for ``determine_variants`` to compute the ``extra_attributes``
|
||||
to include with each spec object."""
|
||||
# There are often at least two copies (not symlinks) of each compiler executable in the
|
||||
# same directory: one with a canonical name, e.g. "gfortran", and another one with the
|
||||
# target prefix, e.g. "x86_64-pc-linux-gnu-gfortran". There also might be a copy of "gcc"
|
||||
# with the version suffix, e.g. "x86_64-pc-linux-gnu-gcc-6.3.0". To ensure the consistency
|
||||
# of values in the "paths" dictionary (i.e. we prefer all of them to reference copies
|
||||
# with canonical names if possible), we iterate over the executables in the reversed sorted
|
||||
# order:
|
||||
# First pass over languages identifies exes that are perfect matches for canonical names
|
||||
# Second pass checks for names with prefix/suffix
|
||||
# Second pass is sorted by language name length because longer named languages
|
||||
# e.g. cxx can often contain the names of shorter named languages
|
||||
# e.g. c (e.g. clang/clang++)
|
||||
paths = {}
|
||||
exes = sorted(exes, reverse=True)
|
||||
languages = {
|
||||
lang: getattr(cls, f"{lang}_names")
|
||||
for lang in sorted(cls.compiler_languages, key=len, reverse=True)
|
||||
}
|
||||
for exe in exes:
|
||||
for lang, names in languages.items():
|
||||
if os.path.basename(exe) in names:
|
||||
paths[lang] = exe
|
||||
break
|
||||
else:
|
||||
for lang, names in languages.items():
|
||||
if any(name in os.path.basename(exe) for name in names):
|
||||
paths[lang] = exe
|
||||
break
|
||||
|
||||
return paths
|
||||
|
||||
@classmethod
|
||||
def determine_variants(cls, exes: Sequence[Path], version_str: str) -> Tuple:
|
||||
# path determination is separated so it can be reused in subclasses
|
||||
return "", {"compilers": cls.determine_compiler_paths(exes=exes)}
|
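CompilerPackage.executables above generates one anchored regex per (prefix, name, suffix) combination, which Spack's external detection then matches against candidate binaries. A standalone sketch of that construction, with assumed example prefixes and names:

    import itertools
    import re

    prefixes = ["", r"x86_64-pc-linux-gnu-"]
    names = ["gcc", "g++", "gfortran"]
    suffixes = ["", r"-.*"]

    # Same shape as the regexp_fmt used above: ^(prefix)name(suffix)$
    patterns = [
        rf"^({p}){re.escape(n)}({s})$"
        for p, n, s in itertools.product(prefixes, names, suffixes)
    ]
    candidate = "x86_64-pc-linux-gnu-gfortran-13.2.0"
    print(any(re.match(pat, candidate) for pat in patterns))  # True
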
@@ -21,7 +21,7 @@


class MakefilePackage(spack.package_base.PackageBase):
"""Specialized class for packages built using a Makefiles."""
"""Specialized class for packages built using Makefiles."""

#: This attribute is used in UI queries that need to know the build
#: system base class
@@ -14,7 +14,7 @@
from llnl.util.link_tree import LinkTree

from spack.build_environment import dso_suffix
from spack.directives import conflicts, license, variant
from spack.directives import conflicts, license, redistribute, variant
from spack.package_base import InstallError
from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable
@@ -30,7 +30,7 @@ class IntelOneApiPackage(Package):

# oneAPI license does not allow mirroring outside of the
# organization (e.g. University/Company).
redistribute_source = False
redistribute(source=False, binary=False)

for c in [
"target=ppc64:",
@@ -138,16 +138,21 @@ def view_file_conflicts(self, view, merge_map):
|
||||
return conflicts
|
||||
|
||||
def add_files_to_view(self, view, merge_map, skip_if_exists=True):
|
||||
# Patch up shebangs to the python linked in the view only if python is built by Spack.
|
||||
if not self.extendee_spec or self.extendee_spec.external:
|
||||
# Patch up shebangs if the package extends Python and we put a Python interpreter in the
|
||||
# view.
|
||||
if not self.extendee_spec:
|
||||
return super().add_files_to_view(view, merge_map, skip_if_exists)
|
||||
|
||||
python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
|
||||
|
||||
if python.external:
|
||||
return super().add_files_to_view(view, merge_map, skip_if_exists)
|
||||
|
||||
# We only patch shebangs in the bin directory.
|
||||
copied_files: Dict[Tuple[int, int], str] = {} # File identifier -> source
|
||||
delayed_links: List[Tuple[str, str]] = [] # List of symlinks from merge map
|
||||
|
||||
bin_dir = self.spec.prefix.bin
|
||||
python_prefix = self.extendee_spec.prefix
|
||||
|
||||
for src, dst in merge_map.items():
|
||||
if skip_if_exists and os.path.lexists(dst):
|
||||
continue
|
||||
@@ -168,7 +173,7 @@ def add_files_to_view(self, view, merge_map, skip_if_exists=True):
|
||||
copied_files[(s.st_dev, s.st_ino)] = dst
|
||||
shutil.copy2(src, dst)
|
||||
fs.filter_file(
|
||||
python_prefix, os.path.abspath(view.get_projection_for_spec(self.spec)), dst
|
||||
python.prefix, os.path.abspath(view.get_projection_for_spec(self.spec)), dst
|
||||
)
|
||||
else:
|
||||
view.link(src, dst)
|
||||
@@ -199,14 +204,13 @@ def remove_files_from_view(self, view, merge_map):
|
||||
ignore_namespace = True
|
||||
|
||||
bin_dir = self.spec.prefix.bin
|
||||
global_view = self.extendee_spec.prefix == view.get_projection_for_spec(self.spec)
|
||||
|
||||
to_remove = []
|
||||
for src, dst in merge_map.items():
|
||||
if ignore_namespace and namespace_init(dst):
|
||||
continue
|
||||
|
||||
if global_view or not fs.path_contains_subdirectory(src, bin_dir):
|
||||
if not fs.path_contains_subdirectory(src, bin_dir):
|
||||
to_remove.append(dst)
|
||||
else:
|
||||
os.remove(dst)
|
||||
@@ -362,6 +366,12 @@ def list_url(cls) -> Optional[str]: # type: ignore[override]
|
||||
return f"https://pypi.org/simple/{name}/"
|
||||
return None
|
||||
|
||||
@property
|
||||
def python_spec(self):
|
||||
"""Get python-venv if it exists or python otherwise."""
|
||||
python, *_ = self.spec.dependencies("python-venv") or self.spec.dependencies("python")
|
||||
return python
|
||||
|
||||
@property
|
||||
def headers(self) -> HeaderList:
|
||||
"""Discover header files in platlib."""
|
||||
@@ -371,8 +381,9 @@ def headers(self) -> HeaderList:
|
||||
|
||||
# Headers should only be in include or platlib, but no harm in checking purelib too
|
||||
include = self.prefix.join(self.spec["python"].package.include).join(name)
|
||||
platlib = self.prefix.join(self.spec["python"].package.platlib).join(name)
|
||||
purelib = self.prefix.join(self.spec["python"].package.purelib).join(name)
|
||||
python = self.python_spec
|
||||
platlib = self.prefix.join(python.package.platlib).join(name)
|
||||
purelib = self.prefix.join(python.package.purelib).join(name)
|
||||
|
||||
headers_list = map(fs.find_all_headers, [include, platlib, purelib])
|
||||
headers = functools.reduce(operator.add, headers_list)
|
||||
@@ -391,8 +402,9 @@ def libs(self) -> LibraryList:
|
||||
name = self.spec.name[3:]
|
||||
|
||||
# Libraries should only be in platlib, but no harm in checking purelib too
|
||||
platlib = self.prefix.join(self.spec["python"].package.platlib).join(name)
|
||||
purelib = self.prefix.join(self.spec["python"].package.purelib).join(name)
|
||||
python = self.python_spec
|
||||
platlib = self.prefix.join(python.package.platlib).join(name)
|
||||
purelib = self.prefix.join(python.package.purelib).join(name)
|
||||
|
||||
find_all_libraries = functools.partial(fs.find_all_libraries, recursive=True)
|
||||
libs_list = map(find_all_libraries, [platlib, purelib])
|
||||
@@ -504,6 +516,8 @@ def global_options(self, spec: Spec, prefix: Prefix) -> Iterable[str]:
|
||||
|
||||
def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
|
||||
"""Install everything from build directory."""
|
||||
pip = spec["python"].command
|
||||
pip.add_default_arg("-m", "pip")
|
||||
|
||||
args = PythonPipBuilder.std_args(pkg) + [f"--prefix={prefix}"]
|
||||
|
||||
@@ -519,14 +533,6 @@ def install(self, pkg: PythonPackage, spec: Spec, prefix: Prefix) -> None:
|
||||
else:
|
||||
args.append(".")
|
||||
|
||||
pip = spec["python"].command
|
||||
# Hide user packages, since we don't have build isolation. This is
|
||||
# necessary because pip / setuptools may run hooks from arbitrary
|
||||
# packages during the build. There is no equivalent variable to hide
|
||||
# system packages, so this is not reliable for external Python.
|
||||
pip.add_default_env("PYTHONNOUSERSITE", "1")
|
||||
pip.add_default_arg("-m")
|
||||
pip.add_default_arg("pip")
|
||||
with fs.working_dir(self.build_directory):
|
||||
pip(*args)
|
||||
|
||||
|
@@ -80,6 +80,7 @@
|
||||
import spack.variant
|
||||
from spack.directives import conflicts, depends_on, variant
|
||||
from spack.package_base import PackageBase
|
||||
from spack.util.environment import EnvironmentModifications
|
||||
|
||||
|
||||
class ROCmPackage(PackageBase):
|
||||
@@ -156,30 +157,23 @@ def hip_flags(amdgpu_target):
|
||||
archs = ",".join(amdgpu_target)
|
||||
return "--amdgpu-target={0}".format(archs)
|
||||
|
||||
# ASAN
|
||||
@staticmethod
|
||||
def asan_on(env, llvm_path):
|
||||
def asan_on(self, env: EnvironmentModifications):
|
||||
llvm_path = self.spec["llvm-amdgpu"].prefix
|
||||
env.set("CC", llvm_path + "/bin/clang")
|
||||
env.set("CXX", llvm_path + "/bin/clang++")
|
||||
env.set("ASAN_OPTIONS", "detect_leaks=0")
|
||||
|
||||
for root, dirs, files in os.walk(llvm_path):
|
||||
for root, _, files in os.walk(llvm_path):
|
||||
if "libclang_rt.asan-x86_64.so" in files:
|
||||
asan_lib_path = root
|
||||
env.prepend_path("LD_LIBRARY_PATH", asan_lib_path)
|
||||
SET_DWARF_VERSION_4 = ""
|
||||
try:
|
||||
# This will throw an error if imported on a non-Linux platform.
|
||||
import distro
|
||||
|
||||
distname = distro.id()
|
||||
except ImportError:
|
||||
distname = "unknown"
|
||||
if "rhel" in distname or "sles" in distname:
|
||||
if "rhel" in self.spec.os or "sles" in self.spec.os:
|
||||
SET_DWARF_VERSION_4 = "-gdwarf-5"
|
||||
else:
|
||||
SET_DWARF_VERSION_4 = ""
|
||||
|
||||
env.set("CFLAGS", "-fsanitize=address -shared-libasan -g " + SET_DWARF_VERSION_4)
|
||||
env.set("CXXFLAGS", "-fsanitize=address -shared-libasan -g " + SET_DWARF_VERSION_4)
|
||||
env.set("CFLAGS", f"-fsanitize=address -shared-libasan -g {SET_DWARF_VERSION_4}")
|
||||
env.set("CXXFLAGS", f"-fsanitize=address -shared-libasan -g {SET_DWARF_VERSION_4}")
|
||||
env.set("LDFLAGS", "-Wl,--enable-new-dtags -fuse-ld=lld -fsanitize=address -g -Wl,")
|
||||
|
||||
# HIP version vs Architecture
|
||||
|
@@ -16,8 +16,8 @@
|
||||
import tempfile
|
||||
import time
|
||||
import zipfile
|
||||
from collections import namedtuple
|
||||
from typing import List, Optional
|
||||
from collections import defaultdict, namedtuple
|
||||
from typing import Dict, List, Optional, Set, Tuple
|
||||
from urllib.error import HTTPError, URLError
|
||||
from urllib.parse import urlencode
|
||||
from urllib.request import HTTPHandler, Request, build_opener
|
||||
@@ -113,54 +113,24 @@ def _remove_reserved_tags(tags):
|
||||
return [tag for tag in tags if tag not in SPACK_RESERVED_TAGS]
|
||||
|
||||
|
||||
def _spec_deps_key(s):
|
||||
def _spec_ci_label(s):
|
||||
return f"{s.name}/{s.dag_hash(7)}"
|
||||
|
||||
|
||||
def _add_dependency(spec_label, dep_label, deps):
|
||||
if spec_label == dep_label:
|
||||
return
|
||||
if spec_label not in deps:
|
||||
deps[spec_label] = set()
|
||||
deps[spec_label].add(dep_label)
|
||||
PlainNodes = Dict[str, spack.spec.Spec]
|
||||
PlainEdges = Dict[str, Set[str]]
|
||||
|
||||
|
||||
def _get_spec_dependencies(specs, deps, spec_labels):
|
||||
spec_deps_obj = _compute_spec_deps(specs)
|
||||
|
||||
if spec_deps_obj:
|
||||
dependencies = spec_deps_obj["dependencies"]
|
||||
specs = spec_deps_obj["specs"]
|
||||
|
||||
for entry in specs:
|
||||
spec_labels[entry["label"]] = entry["spec"]
|
||||
|
||||
for entry in dependencies:
|
||||
_add_dependency(entry["spec"], entry["depends"], deps)
|
||||
|
||||
|
||||
def stage_spec_jobs(specs):
|
||||
"""Take a set of release specs and generate a list of "stages", where the
|
||||
jobs in any stage are dependent only on jobs in previous stages. This
|
||||
allows us to maximize build parallelism within the gitlab-ci framework.
|
||||
def stage_spec_jobs(specs: List[spack.spec.Spec]) -> Tuple[PlainNodes, PlainEdges, List[Set[str]]]:
|
||||
"""Turn a DAG into a list of stages (set of nodes), the list is ordered topologically, so that
|
||||
each node in a stage has dependencies only in previous stages.
|
||||
|
||||
Arguments:
|
||||
specs (Iterable): Specs to build
|
||||
|
||||
Returns: A tuple of information objects describing the specs, dependencies
|
||||
and stages:
|
||||
|
||||
spec_labels: A dictionary mapping the spec labels (which are formatted
|
||||
as pkg-name/hash-prefix) to concrete specs.
|
||||
|
||||
deps: A dictionary where the keys should also have appeared as keys in
|
||||
the spec_labels dictionary, and the values are the set of
|
||||
dependencies for that spec.
|
||||
|
||||
stages: An ordered list of sets, each of which contains all the jobs to
|
||||
built in that stage. The jobs are expressed in the same format as
|
||||
the keys in the spec_labels and deps objects.
|
||||
specs: Specs to build
|
||||
|
||||
Returns: A tuple (nodes, edges, stages) where ``nodes`` maps labels to specs, ``edges`` maps
|
||||
labels to a set of labels of dependencies, and ``stages`` is a topologically ordered list
|
||||
of sets of labels.
|
||||
"""
|
||||
|
||||
# The convenience method below, "_remove_satisfied_deps()", does not modify
|
||||
@@ -177,17 +147,12 @@ def _remove_satisfied_deps(deps, satisfied_list):

return new_deps

deps = {}
spec_labels = {}
nodes, edges = _extract_dag(specs)

_get_spec_dependencies(specs, deps, spec_labels)

# Save the original deps, as we need to return them at the end of the
# function. In the while loop below, the "dependencies" variable is
# overwritten rather than being modified each time through the loop,
# thus preserving the original value of "deps" saved here.
dependencies = deps
unstaged = set(spec_labels.keys())
# Save the original edges, as we need to return them at the end of the function. In the loop
# below, the "dependencies" variable is rebound rather than mutated, so "edges" is not mutated.
dependencies = edges
unstaged = set(nodes.keys())
stages = []

while dependencies:

@@ -203,7 +168,7 @@ def _remove_satisfied_deps(deps, satisfied_list):

if unstaged:
stages.append(unstaged.copy())

return spec_labels, deps, stages
return nodes, edges, stages


def _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisions):

@@ -235,87 +200,22 @@ def _print_staging_summary(spec_labels, stages, mirrors_to_check, rebuild_decisi

tty.msg(msg)


def _compute_spec_deps(spec_list):
"""
Computes all the dependencies for the spec(s) and generates a JSON
object which provides both a list of unique spec names as well as a
comprehensive list of all the edges in the dependency graph. For
example, given a single spec like 'readline@7.0', this function
generates the following JSON object:
def _extract_dag(specs: List[spack.spec.Spec]) -> Tuple[PlainNodes, PlainEdges]:
"""Extract a sub-DAG as plain old Python objects with external nodes removed."""
nodes: PlainNodes = {}
edges: PlainEdges = defaultdict(set)

.. code-block:: JSON
for edge in traverse.traverse_edges(specs, cover="edges"):
if (edge.parent and edge.parent.external) or edge.spec.external:
continue
child_id = _spec_ci_label(edge.spec)
nodes[child_id] = edge.spec
if edge.parent:
parent_id = _spec_ci_label(edge.parent)
nodes[parent_id] = edge.parent
edges[parent_id].add(child_id)

{
"dependencies": [
{
"depends": "readline/ip6aiun",
"spec": "readline/ip6aiun"
},
{
"depends": "ncurses/y43rifz",
"spec": "readline/ip6aiun"
},
{
"depends": "ncurses/y43rifz",
"spec": "readline/ip6aiun"
},
{
"depends": "pkgconf/eg355zb",
"spec": "ncurses/y43rifz"
},
{
"depends": "pkgconf/eg355zb",
"spec": "readline/ip6aiun"
}
],
"specs": [
{
"spec": "readline@7.0%apple-clang@9.1.0 arch=darwin-highs...",
"label": "readline/ip6aiun"
},
{
"spec": "ncurses@6.1%apple-clang@9.1.0 arch=darwin-highsi...",
"label": "ncurses/y43rifz"
},
{
"spec": "pkgconf@1.5.4%apple-clang@9.1.0 arch=darwin-high...",
"label": "pkgconf/eg355zb"
}
]
}

"""
spec_labels = {}

specs = []
dependencies = []

def append_dep(s, d):
dependencies.append({"spec": s, "depends": d})

for spec in spec_list:
for s in spec.traverse(deptype="all"):
if s.external:
tty.msg(f"Will not stage external pkg: {s}")
continue

skey = _spec_deps_key(s)
spec_labels[skey] = s

for d in s.dependencies(deptype="all"):
dkey = _spec_deps_key(d)
if d.external:
tty.msg(f"Will not stage external dep: {d}")
continue

append_dep(skey, dkey)

for spec_label, concrete_spec in spec_labels.items():
specs.append({"label": spec_label, "spec": concrete_spec})

deps_json_obj = {"specs": specs, "dependencies": dependencies}

return deps_json_obj
return nodes, edges


def _spec_matches(spec, match_string):

@@ -327,7 +227,7 @@ def _format_job_needs(

):
needs_list = []
for dep_job in dep_jobs:
dep_spec_key = _spec_deps_key(dep_job)
dep_spec_key = _spec_ci_label(dep_job)
rebuild = rebuild_decisions[dep_spec_key].rebuild

if not prune_dag or rebuild:
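For context on the staging loop above: the reworked code builds stages from the plain nodes/edges objects returned by _extract_dag, where edges maps a node id to the ids it depends on. A minimal standalone sketch of that level-by-level staging idea, using the labels from the example above (hypothetical helper name, not part of this diff):

from collections import defaultdict
from typing import Dict, List, Set

def stage_levels(nodes: Set[str], edges: Dict[str, Set[str]]) -> List[Set[str]]:
    """Group node ids into stages so each stage depends only on earlier stages."""
    stages: List[Set[str]] = []
    unstaged = set(nodes)
    while unstaged:
        # a node is ready when none of its dependencies are still unstaged
        blocked = {n for n, deps in edges.items() if deps & unstaged}
        ready = unstaged - blocked
        if not ready:
            ready = set(unstaged)  # cycle guard: flush the remainder instead of looping
        stages.append(ready)
        unstaged -= ready
    return stages

edges = defaultdict(set, {"readline/ip6aiun": {"ncurses/y43rifz", "pkgconf/eg355zb"},
                          "ncurses/y43rifz": {"pkgconf/eg355zb"}})
print(stage_levels({"readline/ip6aiun", "ncurses/y43rifz", "pkgconf/eg355zb"}, edges))
# [{'pkgconf/eg355zb'}, {'ncurses/y43rifz'}, {'readline/ip6aiun'}]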
@@ -334,8 +334,7 @@ def display_specs(specs, args=None, **kwargs):

variants (bool): Show variants with specs
indent (int): indent each line this much
groups (bool): display specs grouped by arch/compiler (default True)
decorators (dict): dictionary mappng specs to decorators
header_callback (typing.Callable): called at start of arch/compiler groups
decorator (typing.Callable): function to call to decorate specs
all_headers (bool): show headers even when arch/compiler aren't defined
output (typing.IO): A file object to write to. Default is ``sys.stdout``

@@ -384,15 +383,13 @@ def get_arg(name, default=None):

vfmt = "{variants}" if variants else ""
format_string = nfmt + "{@version}" + ffmt + vfmt

transform = {"package": decorator, "fullpackage": decorator}

def fmt(s, depth=0):
"""Formatter function for all output specs"""
string = ""
if hashes:
string += gray_hash(s, hlen) + " "
string += depth * " "
string += s.cformat(format_string, transform=transform)
string += decorator(s, s.cformat(format_string))
return string

def format_list(specs):

@@ -451,7 +448,7 @@ def filter_loaded_specs(specs):

return [x for x in specs if x.dag_hash() in hashes]


def print_how_many_pkgs(specs, pkg_type=""):
def print_how_many_pkgs(specs, pkg_type="", suffix=""):
"""Given a list of specs, this will print a message about how many
specs are in that list.

@@ -462,7 +459,7 @@ def print_how_many_pkgs(specs, pkg_type=""):

category, e.g. if pkg_type is "installed" then the message
would be "3 installed packages"
"""
tty.msg("%s" % llnl.string.plural(len(specs), pkg_type + " package"))
tty.msg("%s" % llnl.string.plural(len(specs), pkg_type + " package") + suffix)


def spack_is_git_repo():
@@ -84,7 +84,7 @@ def externals(parser, args):

return

pkgs = args.name or spack.repo.PATH.all_package_names()
reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
reports = spack.audit.run_group(args.subcommand, pkgs=pkgs, debug_log=tty.debug)
_process_reports(reports)

@@ -133,6 +133,11 @@ def setup_parser(subparser: argparse.ArgumentParser):

help="when pushing to an OCI registry, tag an image containing all root specs and their "
"runtime dependencies",
)
push.add_argument(
"--private",
action="store_true",
help="for a private mirror, include non-redistributable packages",
)
arguments.add_common_arguments(push, ["specs", "jobs"])
push.set_defaults(func=push_fn)

@@ -367,6 +372,25 @@ def _make_pool() -> MaybePool:

return NoPool()


def _skip_no_redistribute_for_public(specs):
remaining_specs = list()
removed_specs = list()
for spec in specs:
if spec.package.redistribute_binary:
remaining_specs.append(spec)
else:
removed_specs.append(spec)
if removed_specs:
colified_output = tty.colify.colified(list(s.name for s in removed_specs), indent=4)
tty.debug(
"The following specs will not be added to the binary cache"
" because they cannot be redistributed:\n"
f"{colified_output}\n"
"You can use `--private` to include them."
)
return remaining_specs


def push_fn(args):
"""create a binary package and push it to a mirror"""
if args.spec_file:

@@ -417,6 +441,8 @@ def push_fn(args):

root="package" in args.things_to_install,
dependencies="dependencies" in args.things_to_install,
)
if not args.private:
specs = _skip_no_redistribute_for_public(specs)

# When pushing multiple specs, print the url once ahead of time, as well as how
# many specs are being pushed.
@@ -563,12 +563,13 @@ def add_concretizer_args(subparser):

help="reuse installed packages/buildcaches when possible",
)
subgroup.add_argument(
"--fresh-roots",
"--reuse-deps",
action=ConfigSetAction,
dest="concretizer:reuse",
const="dependencies",
default=None,
help="reuse installed dependencies only",
help="concretize with fresh roots and reused dependencies",
)
subgroup.add_argument(
"--deprecated",
@@ -10,13 +10,13 @@

import sys
import tempfile
from pathlib import Path
from typing import Optional
from typing import List, Optional

import llnl.string as string
import llnl.util.filesystem as fs
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
from llnl.util.tty.color import colorize
from llnl.util.tty.color import cescape, colorize

import spack.cmd
import spack.cmd.common

@@ -61,14 +61,7 @@

#
def env_create_setup_parser(subparser):
"""create a new environment"""
subparser.add_argument(
"env_name",
metavar="env",
help=(
"name of managed environment or directory of the anonymous env "
"(when using --dir/-d) to activate"
),
)
subparser.add_argument("env_name", metavar="env", help="name or directory of environment")
subparser.add_argument(
"-d", "--dir", action="store_true", help="create an environment in a specific directory"
)

@@ -94,6 +87,9 @@ def env_create_setup_parser(subparser):

default=None,
help="either a lockfile (must end with '.json' or '.lock') or a manifest file",
)
subparser.add_argument(
"--include-concrete", action="append", help="name of old environment to copy specs from"
)


def env_create(args):

@@ -111,19 +107,32 @@ def env_create(args):

# the environment should not include a view.
with_view = None

include_concrete = None
if hasattr(args, "include_concrete"):
include_concrete = args.include_concrete

env = _env_create(
args.env_name,
init_file=args.envfile,
dir=args.dir,
dir=args.dir or os.path.sep in args.env_name or args.env_name in (".", ".."),
with_view=with_view,
keep_relative=args.keep_relative,
include_concrete=include_concrete,
)

# Generate views, only really useful for environments created from spack.lock files.
env.regenerate_views()


def _env_create(name_or_path, *, init_file=None, dir=False, with_view=None, keep_relative=False):
def _env_create(
name_or_path: str,
*,
init_file: Optional[str] = None,
dir: bool = False,
with_view: Optional[str] = None,
keep_relative: bool = False,
include_concrete: Optional[List[str]] = None,
):
"""Create a new environment, with an optional yaml description.

Arguments:

@@ -135,22 +144,31 @@ def _env_create(name_or_path, *, init_file=None, dir=False, with_view=None, keep

keep_relative (bool): if True, develop paths are copied verbatim into
the new environment file, otherwise they may be made absolute if the
new environment is in a different location
include_concrete (list): list of the included concrete environments
"""
if not dir:
env = ev.create(
name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
name_or_path,
init_file=init_file,
with_view=with_view,
keep_relative=keep_relative,
include_concrete=include_concrete,
)
tty.msg("Created environment '%s' in %s" % (name_or_path, env.path))
tty.msg("You can activate this environment with:")
tty.msg(" spack env activate %s" % (name_or_path))
return env

env = ev.create_in_dir(
name_or_path, init_file=init_file, with_view=with_view, keep_relative=keep_relative
)
tty.msg("Created environment in %s" % env.path)
tty.msg("You can activate this environment with:")
tty.msg(" spack env activate %s" % env.path)
tty.msg(
colorize(
f"Created environment @c{{{cescape(name_or_path)}}} in: @c{{{cescape(env.path)}}}"
)
)
else:
env = ev.create_in_dir(
name_or_path,
init_file=init_file,
with_view=with_view,
keep_relative=keep_relative,
include_concrete=include_concrete,
)
tty.msg(colorize(f"Created independent environment in: @c{{{cescape(env.path)}}}"))
tty.msg(f"Activate with: {colorize(f'@c{{spack env activate {cescape(name_or_path)}}}')}")
return env


@@ -436,6 +454,12 @@ def env_remove_setup_parser(subparser):

"""remove an existing environment"""
subparser.add_argument("rm_env", metavar="env", nargs="+", help="environment(s) to remove")
arguments.add_common_arguments(subparser, ["yes_to_all"])
subparser.add_argument(
"-f",
"--force",
action="store_true",
help="remove the environment even if it is included in another environment",
)


def env_remove(args):

@@ -445,13 +469,35 @@ def env_remove(args):

and manifests embedded in repositories should be removed manually.
"""
read_envs = []
valid_envs = []
bad_envs = []
for env_name in args.rm_env:
invalid_envs = []

for env_name in ev.all_environment_names():
try:
env = ev.read(env_name)
read_envs.append(env)
valid_envs.append(env_name)

if env_name in args.rm_env:
read_envs.append(env)
except (spack.config.ConfigFormatError, ev.SpackEnvironmentConfigError):
bad_envs.append(env_name)
invalid_envs.append(env_name)

if env_name in args.rm_env:
bad_envs.append(env_name)

# Check if env is linked to another before trying to remove
for name in valid_envs:
# don't check if environment is included to itself
if name == env_name:
continue
environ = ev.Environment(ev.root(name))
if ev.root(env_name) in environ.included_concrete_envs:
msg = f'Environment "{env_name}" is being used by environment "{name}"'
if args.force:
tty.warn(msg)
else:
tty.die(msg)

if not args.yes_to_all:
environments = string.plural(len(args.rm_env), "environment", show_n=False)
@@ -14,6 +14,7 @@

import spack.cmd as cmd
import spack.environment as ev
import spack.repo
import spack.store
from spack.cmd.common import arguments
from spack.database import InstallStatuses

@@ -69,6 +70,12 @@ def setup_parser(subparser):

arguments.add_common_arguments(subparser, ["long", "very_long", "tags", "namespaces"])

subparser.add_argument(
"-r",
"--only-roots",
action="store_true",
help="don't show full list of installed specs in an environment",
)
subparser.add_argument(
"-c",
"--show-concretized",

@@ -189,26 +196,22 @@ def query_arguments(args):

return q_args


def setup_env(env):
def make_env_decorator(env):
"""Create a function for decorating specs when in an environment."""

def strip_build(seq):
return set(s.copy(deps=("link", "run")) for s in seq)

added = set(strip_build(env.added_specs()))
roots = set(strip_build(env.roots()))
removed = set(strip_build(env.removed_specs()))
roots = set(env.roots())
removed = set(env.removed_specs())

def decorator(spec, fmt):
# add +/-/* to show added/removed/root specs
if any(spec.dag_hash() == r.dag_hash() for r in roots):
return color.colorize("@*{%s}" % fmt)
return color.colorize(f"@*{{{fmt}}}")
elif spec in removed:
return color.colorize("@K{%s}" % fmt)
return color.colorize(f"@K{{{fmt}}}")
else:
return "%s" % fmt
return fmt

return decorator, added, roots, removed
return decorator


def display_env(env, args, decorator, results):

@@ -223,10 +226,54 @@ def display_env(env, args, decorator, results):

"""
tty.msg("In environment %s" % env.name)

if not env.user_specs:
tty.msg("No root specs")
else:
tty.msg("Root specs")
num_roots = len(env.user_specs) or "No"
tty.msg(f"{num_roots} root specs")

concrete_specs = {
root: concrete_root
for root, concrete_root in zip(env.concretized_user_specs, env.concrete_roots())
}

def root_decorator(spec, string):
"""Decorate root specs with their install status if needed"""
concrete = concrete_specs.get(spec)
if concrete:
status = color.colorize(concrete.install_status().value)
hash = concrete.dag_hash()
else:
status = color.colorize(spack.spec.InstallStatus.absent.value)
hash = "-" * 32

# TODO: status has two extra spaces on the end of it, but fixing this and other spec
# TODO: space format idiosyncrasies is complicated. Fix this eventually
status = status[:-2]

if args.long or args.very_long:
hash = color.colorize(f"@K{{{hash[: 7 if args.long else None]}}}")
return f"{status} {hash} {string}"
else:
return f"{status} {string}"

with spack.store.STORE.db.read_transaction():
cmd.display_specs(
env.user_specs,
args,
# these are overrides of CLI args
paths=False,
long=False,
very_long=False,
# these enforce details in the root specs to show what the user asked for
namespaces=True,
show_flags=True,
show_full_compiler=True,
decorator=root_decorator,
variants=True,
)

print()

if env.included_concrete_envs:
tty.msg("Included specs")

# Root specs cannot be displayed with prefixes, since those are not
# set for abstract specs. Same for hashes

@@ -236,10 +283,10 @@ def display_env(env, args, decorator, results):

# Roots are displayed with variants, etc. so that we can see
# specifically what the user asked for.
cmd.display_specs(
env.user_specs,
env.included_user_specs,
root_args,
decorator=lambda s, f: color.colorize("@*{%s}" % f),
namespaces=True,
namespace=True,
show_flags=True,
show_full_compiler=True,
variants=True,

@@ -254,7 +301,7 @@ def display_env(env, args, decorator, results):

# Display a header for the installed packages section IF there are installed
# packages. If there aren't any, we'll just end up printing "0 installed packages"
# later.
if results:
if results and not args.only_roots:
tty.msg("Installed packages")


@@ -263,9 +310,10 @@ def find(parser, args):

results = args.specs(**q_args)

env = ev.active_environment()
decorator = lambda s, f: f
if env:
decorator, _, roots, _ = setup_env(env)
if not env and args.only_roots:
tty.die("-r / --only-roots requires an active environment")

decorator = make_env_decorator(env) if env else lambda s, f: f

# use groups by default except with format.
if args.groups is None:

@@ -292,9 +340,12 @@ def find(parser, args):

if env:
display_env(env, args, decorator, results)

cmd.display_specs(results, args, decorator=decorator, all_headers=True)
count_suffix = " (not shown)"
if not args.only_roots:
cmd.display_specs(results, args, decorator=decorator, all_headers=True)
count_suffix = ""

# print number of installed packages last (as the list may be long)
if sys.stdout.isatty() and args.groups:
pkg_type = "loaded" if args.loaded else "installed"
spack.cmd.print_how_many_pkgs(results, pkg_type)
spack.cmd.print_how_many_pkgs(results, pkg_type, suffix=count_suffix)
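The make_env_decorator rework above returns just a closure instead of the old (decorator, added, roots, removed) tuple. A rough standalone sketch of the same closure pattern, with plain string markers standing in for the real color.colorize calls (illustrative only, not Spack code):

from typing import Callable, Set

def make_decorator(roots: Set[str], removed: Set[str]) -> Callable[[str, str], str]:
    """Return a function that marks formatted spec strings as root or removed."""
    def decorator(name: str, fmt: str) -> str:
        if name in roots:
            return f"*{fmt}"  # the real code uses color.colorize(f"@*{{{fmt}}}")
        if name in removed:
            return f"-{fmt}"  # the real code uses color.colorize(f"@K{{{fmt}}}")
        return fmt
    return decorator

deco = make_decorator(roots={"hdf5"}, removed={"zlib"})
print(deco("hdf5", "hdf5@1.14"))    # *hdf5@1.14
print(deco("zlib", "zlib@1.3"))     # -zlib@1.3
print(deco("cmake", "cmake@3.27"))  # cmake@3.27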
@@ -263,8 +263,8 @@ def _fmt_name_and_default(variant):

return color.colorize(f"@c{{{variant.name}}} @C{{[{_fmt_value(variant.default)}]}}")


def _fmt_when(when, indent):
return color.colorize(f"{indent * ' '}@B{{when}} {color.cescape(when)}")
def _fmt_when(when: "spack.spec.Spec", indent: int):
return color.colorize(f"{indent * ' '}@B{{when}} {color.cescape(str(when))}")


def _fmt_variant_description(variant, width, indent):

@@ -441,7 +441,7 @@ def get_url(version):

return "No URL"

url = get_url(preferred) if pkg.has_code else ""
line = version(" {0}".format(pad(preferred))) + color.cescape(url)
line = version(" {0}".format(pad(preferred))) + color.cescape(str(url))
color.cwrite(line)

print()

@@ -464,7 +464,7 @@ def get_url(version):

continue

for v, url in vers:
line = version(" {0}".format(pad(v))) + color.cescape(url)
line = version(" {0}".format(pad(v))) + color.cescape(str(url))
color.cprint(line)


@@ -475,10 +475,7 @@ def print_virtuals(pkg, args):

color.cprint(section_title("Virtual Packages: "))
if pkg.provided:
for when, specs in reversed(sorted(pkg.provided.items())):
line = " %s provides %s" % (
when.colorized(),
", ".join(s.colorized() for s in specs),
)
line = " %s provides %s" % (when.cformat(), ", ".join(s.cformat() for s in specs))
print(line)

else:

@@ -497,7 +494,9 @@ def print_licenses(pkg, args):

pad = padder(pkg.licenses, 4)
for when_spec in pkg.licenses:
license_identifier = pkg.licenses[when_spec]
line = license(" {0}".format(pad(license_identifier))) + color.cescape(when_spec)
line = license(" {0}".format(pad(license_identifier))) + color.cescape(
str(when_spec)
)
color.cprint(line)
@@ -71,6 +71,11 @@ def setup_parser(subparser):
|
||||
help="the number of versions to fetch for each spec, choose 'all' to"
|
||||
" retrieve all versions of each package",
|
||||
)
|
||||
create_parser.add_argument(
|
||||
"--private",
|
||||
action="store_true",
|
||||
help="for a private mirror, include non-redistributable packages",
|
||||
)
|
||||
arguments.add_common_arguments(create_parser, ["specs"])
|
||||
arguments.add_concretizer_args(create_parser)
|
||||
|
||||
@@ -108,6 +113,11 @@ def setup_parser(subparser):
|
||||
"and source use `--type binary --type source` (default)"
|
||||
),
|
||||
)
|
||||
add_parser.add_argument(
|
||||
"--autopush",
|
||||
action="store_true",
|
||||
help=("set mirror to push automatically after installation"),
|
||||
)
|
||||
add_parser_signed = add_parser.add_mutually_exclusive_group(required=False)
|
||||
add_parser_signed.add_argument(
|
||||
"--unsigned",
|
||||
@@ -175,6 +185,21 @@ def setup_parser(subparser):
|
||||
),
|
||||
)
|
||||
set_parser.add_argument("--url", help="url of mirror directory from 'spack mirror create'")
|
||||
set_parser_autopush = set_parser.add_mutually_exclusive_group(required=False)
|
||||
set_parser_autopush.add_argument(
|
||||
"--autopush",
|
||||
help="set mirror to push automatically after installation",
|
||||
action="store_true",
|
||||
default=None,
|
||||
dest="autopush",
|
||||
)
|
||||
set_parser_autopush.add_argument(
|
||||
"--no-autopush",
|
||||
help="set mirror to not push automatically after installation",
|
||||
action="store_false",
|
||||
default=None,
|
||||
dest="autopush",
|
||||
)
|
||||
set_parser_unsigned = set_parser.add_mutually_exclusive_group(required=False)
|
||||
set_parser_unsigned.add_argument(
|
||||
"--unsigned",
|
||||
@@ -218,6 +243,7 @@ def mirror_add(args):
|
||||
or args.type
|
||||
or args.oci_username
|
||||
or args.oci_password
|
||||
or args.autopush
|
||||
or args.signed is not None
|
||||
):
|
||||
connection = {"url": args.url}
|
||||
@@ -234,6 +260,8 @@ def mirror_add(args):
|
||||
if args.type:
|
||||
connection["binary"] = "binary" in args.type
|
||||
connection["source"] = "source" in args.type
|
||||
if args.autopush:
|
||||
connection["autopush"] = args.autopush
|
||||
if args.signed is not None:
|
||||
connection["signed"] = args.signed
|
||||
mirror = spack.mirror.Mirror(connection, name=args.name)
|
||||
@@ -270,6 +298,8 @@ def _configure_mirror(args):
|
||||
changes["access_pair"] = [args.oci_username, args.oci_password]
|
||||
if getattr(args, "signed", None) is not None:
|
||||
changes["signed"] = args.signed
|
||||
if getattr(args, "autopush", None) is not None:
|
||||
changes["autopush"] = args.autopush
|
||||
|
||||
# argparse cannot distinguish between --binary and --no-binary when same dest :(
|
||||
# notice that set-url does not have these args, so getattr
|
||||
@@ -334,7 +364,6 @@ def concrete_specs_from_user(args):
|
||||
specs = filter_externals(specs)
|
||||
specs = list(set(specs))
|
||||
specs.sort(key=lambda s: (s.name, s.version))
|
||||
specs, _ = lang.stable_partition(specs, predicate_fn=not_excluded_fn(args))
|
||||
return specs
|
||||
|
||||
|
||||
@@ -379,36 +408,50 @@ def concrete_specs_from_cli_or_file(args):
|
||||
return specs
|
||||
|
||||
|
||||
def not_excluded_fn(args):
|
||||
"""Return a predicate that evaluate to True if a spec was not explicitly
|
||||
excluded by the user.
|
||||
"""
|
||||
exclude_specs = []
|
||||
if args.exclude_file:
|
||||
exclude_specs.extend(specs_from_text_file(args.exclude_file, concretize=False))
|
||||
if args.exclude_specs:
|
||||
exclude_specs.extend(spack.cmd.parse_specs(str(args.exclude_specs).split()))
|
||||
class IncludeFilter:
|
||||
def __init__(self, args):
|
||||
self.exclude_specs = []
|
||||
if args.exclude_file:
|
||||
self.exclude_specs.extend(specs_from_text_file(args.exclude_file, concretize=False))
|
||||
if args.exclude_specs:
|
||||
self.exclude_specs.extend(spack.cmd.parse_specs(str(args.exclude_specs).split()))
|
||||
self.private = args.private
|
||||
|
||||
def not_excluded(x):
|
||||
return not any(x.satisfies(y) for y in exclude_specs)
|
||||
def __call__(self, x):
|
||||
return all([self._not_license_excluded(x), self._not_cmdline_excluded(x)])
|
||||
|
||||
return not_excluded
|
||||
def _not_license_excluded(self, x):
|
||||
"""True if the spec is for a private mirror, or as long as the
|
||||
package does not explicitly forbid redistributing source."""
|
||||
if self.private:
|
||||
return True
|
||||
elif x.package_class.redistribute_source(x):
|
||||
return True
|
||||
else:
|
||||
tty.debug(
|
||||
"Skip adding {0} to mirror: the package.py file"
|
||||
" indicates that a public mirror should not contain"
|
||||
" it.".format(x.name)
|
||||
)
|
||||
return False
|
||||
|
||||
def _not_cmdline_excluded(self, x):
|
||||
"""True if a spec was not explicitly excluded by the user."""
|
||||
return not any(x.satisfies(y) for y in self.exclude_specs)
|
||||
|
||||
|
||||
def concrete_specs_from_environment(selection_fn):
|
||||
def concrete_specs_from_environment():
|
||||
env = ev.active_environment()
|
||||
assert env, "an active environment is required"
|
||||
mirror_specs = env.all_specs()
|
||||
mirror_specs = filter_externals(mirror_specs)
|
||||
mirror_specs, _ = lang.stable_partition(mirror_specs, predicate_fn=selection_fn)
|
||||
return mirror_specs
|
||||
|
||||
|
||||
def all_specs_with_all_versions(selection_fn):
|
||||
def all_specs_with_all_versions():
|
||||
specs = [spack.spec.Spec(n) for n in spack.repo.all_package_names()]
|
||||
mirror_specs = spack.mirror.get_all_versions(specs)
|
||||
mirror_specs.sort(key=lambda s: (s.name, s.version))
|
||||
mirror_specs, _ = lang.stable_partition(mirror_specs, predicate_fn=selection_fn)
|
||||
return mirror_specs
|
||||
|
||||
|
||||
@@ -429,12 +472,6 @@ def versions_per_spec(args):
|
||||
return num_versions
|
||||
|
||||
|
||||
def create_mirror_for_individual_specs(mirror_specs, path, skip_unstable_versions):
|
||||
present, mirrored, error = spack.mirror.create(path, mirror_specs, skip_unstable_versions)
|
||||
tty.msg("Summary for mirror in {}".format(path))
|
||||
process_mirror_stats(present, mirrored, error)
|
||||
|
||||
|
||||
def process_mirror_stats(present, mirrored, error):
|
||||
p, m, e = len(present), len(mirrored), len(error)
|
||||
tty.msg(
|
||||
@@ -480,30 +517,28 @@ def mirror_create(args):
|
||||
# When no directory is provided, the source dir is used
|
||||
path = args.directory or spack.caches.fetch_cache_location()
|
||||
|
||||
mirror_specs, mirror_fn = _specs_and_action(args)
|
||||
mirror_fn(mirror_specs, path=path, skip_unstable_versions=args.skip_unstable_versions)
|
||||
|
||||
|
||||
def _specs_and_action(args):
|
||||
include_fn = IncludeFilter(args)
|
||||
|
||||
if args.all and not ev.active_environment():
|
||||
create_mirror_for_all_specs(
|
||||
path=path,
|
||||
skip_unstable_versions=args.skip_unstable_versions,
|
||||
selection_fn=not_excluded_fn(args),
|
||||
)
|
||||
return
|
||||
mirror_specs = all_specs_with_all_versions()
|
||||
mirror_fn = create_mirror_for_all_specs
|
||||
elif args.all and ev.active_environment():
|
||||
mirror_specs = concrete_specs_from_environment()
|
||||
mirror_fn = create_mirror_for_individual_specs
|
||||
else:
|
||||
mirror_specs = concrete_specs_from_user(args)
|
||||
mirror_fn = create_mirror_for_individual_specs
|
||||
|
||||
if args.all and ev.active_environment():
|
||||
create_mirror_for_all_specs_inside_environment(
|
||||
path=path,
|
||||
skip_unstable_versions=args.skip_unstable_versions,
|
||||
selection_fn=not_excluded_fn(args),
|
||||
)
|
||||
return
|
||||
|
||||
mirror_specs = concrete_specs_from_user(args)
|
||||
create_mirror_for_individual_specs(
|
||||
mirror_specs, path=path, skip_unstable_versions=args.skip_unstable_versions
|
||||
)
|
||||
mirror_specs, _ = lang.stable_partition(mirror_specs, predicate_fn=include_fn)
|
||||
return mirror_specs, mirror_fn
|
||||
|
||||
|
||||
def create_mirror_for_all_specs(path, skip_unstable_versions, selection_fn):
|
||||
mirror_specs = all_specs_with_all_versions(selection_fn=selection_fn)
|
||||
def create_mirror_for_all_specs(mirror_specs, path, skip_unstable_versions):
|
||||
mirror_cache, mirror_stats = spack.mirror.mirror_cache_and_stats(
|
||||
path, skip_unstable_versions=skip_unstable_versions
|
||||
)
|
||||
@@ -515,11 +550,10 @@ def create_mirror_for_all_specs(path, skip_unstable_versions, selection_fn):
|
||||
process_mirror_stats(*mirror_stats.stats())
|
||||
|
||||
|
||||
def create_mirror_for_all_specs_inside_environment(path, skip_unstable_versions, selection_fn):
|
||||
mirror_specs = concrete_specs_from_environment(selection_fn=selection_fn)
|
||||
create_mirror_for_individual_specs(
|
||||
mirror_specs, path=path, skip_unstable_versions=skip_unstable_versions
|
||||
)
|
||||
def create_mirror_for_individual_specs(mirror_specs, path, skip_unstable_versions):
|
||||
present, mirrored, error = spack.mirror.create(path, mirror_specs, skip_unstable_versions)
|
||||
tty.msg("Summary for mirror in {}".format(path))
|
||||
process_mirror_stats(present, mirrored, error)
|
||||
|
||||
|
||||
def mirror_destroy(args):
|
||||
|
@@ -23,7 +23,7 @@


# tutorial configuration parameters
tutorial_branch = "releases/v0.21"
tutorial_branch = "releases/v0.22"
tutorial_mirror = "file:///mirror"
tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")

@@ -214,8 +214,6 @@ def unit_test(parser, args, unknown_args):

# Ensure clingo is available before switching to the
# mock configuration used by unit tests
# Note: skip on windows here because for the moment,
# clingo is wholly unsupported from bootstrap
with spack.bootstrap.ensure_bootstrap_configuration():
spack.bootstrap.ensure_core_dependencies()
if pytest is None:
@@ -20,8 +20,10 @@
|
||||
|
||||
import spack.compilers
|
||||
import spack.error
|
||||
import spack.schema.environment
|
||||
import spack.spec
|
||||
import spack.util.executable
|
||||
import spack.util.libc
|
||||
import spack.util.module_cmd
|
||||
import spack.version
|
||||
from spack.util.environment import filter_system_paths
|
||||
@@ -107,7 +109,6 @@ def _parse_link_paths(string):
|
||||
"""
|
||||
lib_search_paths = False
|
||||
raw_link_dirs = []
|
||||
tty.debug("parsing implicit link info")
|
||||
for line in string.splitlines():
|
||||
if lib_search_paths:
|
||||
if line.startswith("\t"):
|
||||
@@ -122,7 +123,7 @@ def _parse_link_paths(string):
|
||||
continue
|
||||
if _LINKER_LINE_IGNORE.match(line):
|
||||
continue
|
||||
tty.debug("linker line: %s" % line)
|
||||
tty.debug(f"implicit link dirs: link line: {line}")
|
||||
|
||||
next_arg = False
|
||||
for arg in line.split():
|
||||
@@ -138,15 +139,12 @@ def _parse_link_paths(string):
|
||||
link_dir_arg = _LINK_DIR_ARG.match(arg)
|
||||
if link_dir_arg:
|
||||
link_dir = link_dir_arg.group("dir")
|
||||
tty.debug("linkdir: %s" % link_dir)
|
||||
raw_link_dirs.append(link_dir)
|
||||
|
||||
link_dir_arg = _LIBPATH_ARG.match(arg)
|
||||
if link_dir_arg:
|
||||
link_dir = link_dir_arg.group("dir")
|
||||
tty.debug("libpath: %s", link_dir)
|
||||
raw_link_dirs.append(link_dir)
|
||||
tty.debug("found raw link dirs: %s" % ", ".join(raw_link_dirs))
|
||||
|
||||
implicit_link_dirs = list()
|
||||
visited = set()
|
||||
@@ -156,7 +154,7 @@ def _parse_link_paths(string):
|
||||
implicit_link_dirs.append(normalized_path)
|
||||
visited.add(normalized_path)
|
||||
|
||||
tty.debug("found link dirs: %s" % ", ".join(implicit_link_dirs))
|
||||
tty.debug(f"implicit link dirs: result: {', '.join(implicit_link_dirs)}")
|
||||
return implicit_link_dirs
|
||||
|
||||
|
||||
@@ -417,17 +415,35 @@ def real_version(self):
|
||||
self._real_version = self.version
|
||||
return self._real_version
|
||||
|
||||
def implicit_rpaths(self):
|
||||
def implicit_rpaths(self) -> List[str]:
|
||||
if self.enable_implicit_rpaths is False:
|
||||
return []
|
||||
|
||||
# Put CXX first since it has the most linking issues
|
||||
# And because it has flags that affect linking
|
||||
link_dirs = self._get_compiler_link_paths()
|
||||
output = self.compiler_verbose_output
|
||||
|
||||
if not output:
|
||||
return []
|
||||
|
||||
link_dirs = _parse_non_system_link_dirs(output)
|
||||
|
||||
all_required_libs = list(self.required_libs) + Compiler._all_compiler_rpath_libraries
|
||||
return list(paths_containing_libs(link_dirs, all_required_libs))
|
||||
|
||||
@property
|
||||
def default_libc(self) -> Optional["spack.spec.Spec"]:
|
||||
"""Determine libc targeted by the compiler from link line"""
|
||||
output = self.compiler_verbose_output
|
||||
|
||||
if not output:
|
||||
return None
|
||||
|
||||
dynamic_linker = spack.util.libc.parse_dynamic_linker(output)
|
||||
|
||||
if not dynamic_linker:
|
||||
return None
|
||||
|
||||
return spack.util.libc.libc_from_dynamic_linker(dynamic_linker)
|
||||
|
||||
@property
|
||||
def required_libs(self):
|
||||
"""For executables created with this compiler, the compiler libraries
|
||||
@@ -436,17 +452,17 @@ def required_libs(self):
|
||||
# By default every compiler returns the empty list
|
||||
return []
|
||||
|
||||
def _get_compiler_link_paths(self):
|
||||
@property
|
||||
def compiler_verbose_output(self) -> Optional[str]:
|
||||
"""Verbose output from compiling a dummy C source file. Output is cached."""
|
||||
if not hasattr(self, "_compile_c_source_output"):
|
||||
self._compile_c_source_output = self._compile_dummy_c_source()
|
||||
return self._compile_c_source_output
|
||||
|
||||
def _compile_dummy_c_source(self) -> Optional[str]:
|
||||
cc = self.cc if self.cc else self.cxx
|
||||
if not cc or not self.verbose_flag:
|
||||
# Cannot determine implicit link paths without a compiler / verbose flag
|
||||
return []
|
||||
|
||||
# What flag types apply to first_compiler, in what order
|
||||
if cc == self.cc:
|
||||
flags = ["cflags", "cppflags", "ldflags"]
|
||||
else:
|
||||
flags = ["cxxflags", "cppflags", "ldflags"]
|
||||
return None
|
||||
|
||||
try:
|
||||
tmpdir = tempfile.mkdtemp(prefix="spack-implicit-link-info")
|
||||
@@ -458,20 +474,19 @@ def _get_compiler_link_paths(self):
|
||||
"int main(int argc, char* argv[]) { (void)argc; (void)argv; return 0; }\n"
|
||||
)
|
||||
cc_exe = spack.util.executable.Executable(cc)
|
||||
for flag_type in flags:
|
||||
for flag_type in ["cflags" if cc == self.cc else "cxxflags", "cppflags", "ldflags"]:
|
||||
cc_exe.add_default_arg(*self.flags.get(flag_type, []))
|
||||
|
||||
with self.compiler_environment():
|
||||
output = cc_exe(self.verbose_flag, fin, "-o", fout, output=str, error=str)
|
||||
return _parse_non_system_link_dirs(output)
|
||||
return cc_exe(self.verbose_flag, fin, "-o", fout, output=str, error=str)
|
||||
except spack.util.executable.ProcessError as pe:
|
||||
tty.debug("ProcessError: Command exited with non-zero status: " + pe.long_message)
|
||||
return []
|
||||
return None
|
||||
finally:
|
||||
shutil.rmtree(tmpdir, ignore_errors=True)
|
||||
|
||||
@property
|
||||
def verbose_flag(self):
|
||||
def verbose_flag(self) -> Optional[str]:
|
||||
"""
|
||||
This property should be overridden in the compiler subclass if a
|
||||
verbose flag is available.
|
||||
@@ -669,8 +684,8 @@ def __str__(self):
|
||||
|
||||
@contextlib.contextmanager
|
||||
def compiler_environment(self):
|
||||
# yield immediately if no modules
|
||||
if not self.modules:
|
||||
# Avoid modifying os.environ if possible.
|
||||
if not self.modules and not self.environment:
|
||||
yield
|
||||
return
|
||||
|
||||
@@ -687,13 +702,9 @@ def compiler_environment(self):
|
||||
spack.util.module_cmd.load_module(module)
|
||||
|
||||
# apply other compiler environment changes
|
||||
env = spack.util.environment.EnvironmentModifications()
|
||||
env.extend(spack.schema.environment.parse(self.environment))
|
||||
env.apply_modifications()
|
||||
spack.schema.environment.parse(self.environment).apply_modifications()
|
||||
|
||||
yield
|
||||
except BaseException:
|
||||
raise
|
||||
finally:
|
||||
# Restore environment regardless of whether inner code succeeded
|
||||
os.environ.clear()
|
||||
|
@@ -10,6 +10,7 @@
|
||||
import itertools
|
||||
import multiprocessing.pool
|
||||
import os
|
||||
import warnings
|
||||
from typing import Dict, List, Optional, Tuple
|
||||
|
||||
import archspec.cpu
|
||||
@@ -109,27 +110,33 @@ def _to_dict(compiler):
|
||||
return {"compiler": d}
|
||||
|
||||
|
||||
def get_compiler_config(scope=None, init_config=False):
|
||||
def get_compiler_config(
|
||||
configuration: "spack.config.Configuration",
|
||||
*,
|
||||
scope: Optional[str] = None,
|
||||
init_config: bool = False,
|
||||
) -> List[Dict]:
|
||||
"""Return the compiler configuration for the specified architecture."""
|
||||
|
||||
config = spack.config.CONFIG.get("compilers", scope=scope) or []
|
||||
config = configuration.get("compilers", scope=scope) or []
|
||||
if config or not init_config:
|
||||
return config
|
||||
|
||||
merged_config = spack.config.CONFIG.get("compilers")
|
||||
merged_config = configuration.get("compilers")
|
||||
if merged_config:
|
||||
# Config is empty for this scope
|
||||
# Do not init config because there is a non-empty scope
|
||||
return config
|
||||
|
||||
_init_compiler_config(scope=scope)
|
||||
config = spack.config.CONFIG.get("compilers", scope=scope)
|
||||
_init_compiler_config(configuration, scope=scope)
|
||||
config = configuration.get("compilers", scope=scope)
|
||||
return config
|
||||
|
||||
|
||||
def get_compiler_config_from_packages(scope=None):
|
||||
def get_compiler_config_from_packages(
|
||||
configuration: "spack.config.Configuration", *, scope: Optional[str] = None
|
||||
) -> List[Dict]:
|
||||
"""Return the compiler configuration from packages.yaml"""
|
||||
config = spack.config.get("packages", scope=scope)
|
||||
config = configuration.get("packages", scope=scope)
|
||||
if not config:
|
||||
return []
|
||||
|
||||
@@ -157,33 +164,56 @@ def _compiler_config_from_package_config(config):
|
||||
|
||||
|
||||
def _compiler_config_from_external(config):
|
||||
extra_attributes_key = "extra_attributes"
|
||||
compilers_key = "compilers"
|
||||
c_key, cxx_key, fortran_key = "c", "cxx", "fortran"
|
||||
|
||||
# Allow `@x.y.z` instead of `@=x.y.z`
|
||||
spec = spack.spec.parse_with_version_concrete(config["spec"])
|
||||
# use str(spec.versions) to allow `@x.y.z` instead of `@=x.y.z`
|
||||
|
||||
compiler_spec = spack.spec.CompilerSpec(
|
||||
package_name_to_compiler_name.get(spec.name, spec.name), spec.version
|
||||
)
|
||||
|
||||
extra_attributes = config.get("extra_attributes", {})
|
||||
prefix = config.get("prefix", None)
|
||||
err_header = f"The external spec '{spec}' cannot be used as a compiler"
|
||||
|
||||
compiler_class = class_for_compiler_name(compiler_spec.name)
|
||||
paths = extra_attributes.get("paths", {})
|
||||
compiler_langs = ["cc", "cxx", "fc", "f77"]
|
||||
for lang in compiler_langs:
|
||||
if paths.setdefault(lang, None):
|
||||
continue
|
||||
|
||||
if not prefix:
|
||||
continue
|
||||
|
||||
# Check for files that satisfy the naming scheme for this compiler
|
||||
bindir = os.path.join(prefix, "bin")
|
||||
for f, regex in itertools.product(os.listdir(bindir), compiler_class.search_regexps(lang)):
|
||||
if regex.match(f):
|
||||
paths[lang] = os.path.join(bindir, f)
|
||||
|
||||
if all(v is None for v in paths.values()):
|
||||
# If extra_attributes is not there I might not want to use this entry as a compiler,
|
||||
# therefore just leave a debug message, but don't be loud with a warning.
|
||||
if extra_attributes_key not in config:
|
||||
tty.debug(f"[{__file__}] {err_header}: missing the '{extra_attributes_key}' key")
|
||||
return None
|
||||
extra_attributes = config[extra_attributes_key]
|
||||
|
||||
# If I have 'extra_attributes' warn if 'compilers' is missing, or we don't have a C compiler
|
||||
if compilers_key not in extra_attributes:
|
||||
warnings.warn(
|
||||
f"{err_header}: missing the '{compilers_key}' key under '{extra_attributes_key}'"
|
||||
)
|
||||
return None
|
||||
attribute_compilers = extra_attributes[compilers_key]
|
||||
|
||||
if c_key not in attribute_compilers:
|
||||
warnings.warn(
|
||||
f"{err_header}: missing the C compiler path under "
|
||||
f"'{extra_attributes_key}:{compilers_key}'"
|
||||
)
|
||||
return None
|
||||
c_compiler = attribute_compilers[c_key]
|
||||
|
||||
# C++ and Fortran compilers are not mandatory, so let's just leave a debug trace
|
||||
if cxx_key not in attribute_compilers:
|
||||
tty.debug(f"[{__file__}] The external spec {spec} does not have a C++ compiler")
|
||||
|
||||
if fortran_key not in attribute_compilers:
|
||||
tty.debug(f"[{__file__}] The external spec {spec} does not have a Fortran compiler")
|
||||
|
||||
# compilers format has cc/fc/f77, externals format has "c/fortran"
|
||||
paths = {
|
||||
"cc": c_compiler,
|
||||
"cxx": attribute_compilers.get(cxx_key, None),
|
||||
"fc": attribute_compilers.get(fortran_key, None),
|
||||
"f77": attribute_compilers.get(fortran_key, None),
|
||||
}
|
||||
|
||||
if not spec.architecture:
|
||||
host_platform = spack.platforms.host()
|
||||
@@ -216,13 +246,15 @@ def _compiler_config_from_external(config):
|
||||
return compiler_entry
|
||||
|
||||
|
||||
def _init_compiler_config(*, scope):
|
||||
def _init_compiler_config(
|
||||
configuration: "spack.config.Configuration", *, scope: Optional[str]
|
||||
) -> None:
|
||||
"""Compiler search used when Spack has no compilers."""
|
||||
compilers = find_compilers()
|
||||
compilers_dict = []
|
||||
for compiler in compilers:
|
||||
compilers_dict.append(_to_dict(compiler))
|
||||
spack.config.set("compilers", compilers_dict, scope=scope)
|
||||
configuration.set("compilers", compilers_dict, scope=scope)
|
||||
|
||||
|
||||
def compiler_config_files():
|
||||
@@ -233,7 +265,7 @@ def compiler_config_files():
|
||||
compiler_config = config.get("compilers", scope=name)
|
||||
if compiler_config:
|
||||
config_files.append(config.get_config_filename(name, "compilers"))
|
||||
compiler_config_from_packages = get_compiler_config_from_packages(scope=name)
|
||||
compiler_config_from_packages = get_compiler_config_from_packages(config, scope=name)
|
||||
if compiler_config_from_packages:
|
||||
config_files.append(config.get_config_filename(name, "packages"))
|
||||
return config_files
|
||||
@@ -246,7 +278,9 @@ def add_compilers_to_config(compilers, scope=None):
|
||||
compilers: a list of Compiler objects.
|
||||
scope: configuration scope to modify.
|
||||
"""
|
||||
compiler_config = get_compiler_config(scope, init_config=False)
|
||||
compiler_config = get_compiler_config(
|
||||
configuration=spack.config.CONFIG, scope=scope, init_config=False
|
||||
)
|
||||
for compiler in compilers:
|
||||
if not compiler.cc:
|
||||
tty.debug(f"{compiler.spec} does not have a C compiler")
|
||||
@@ -295,7 +329,9 @@ def _remove_compiler_from_scope(compiler_spec, scope):
|
||||
True if one or more compiler entries were actually removed, False otherwise
|
||||
"""
|
||||
assert scope is not None, "a specific scope is needed when calling this function"
|
||||
compiler_config = get_compiler_config(scope, init_config=False)
|
||||
compiler_config = get_compiler_config(
|
||||
configuration=spack.config.CONFIG, scope=scope, init_config=False
|
||||
)
|
||||
filtered_compiler_config = [
|
||||
compiler_entry
|
||||
for compiler_entry in compiler_config
|
||||
@@ -310,21 +346,28 @@ def _remove_compiler_from_scope(compiler_spec, scope):
|
||||
# We need to preserve the YAML type for comments, hence we are copying the
|
||||
# items in the list that has just been retrieved
|
||||
compiler_config[:] = filtered_compiler_config
|
||||
spack.config.set("compilers", compiler_config, scope=scope)
|
||||
spack.config.CONFIG.set("compilers", compiler_config, scope=scope)
|
||||
return True
|
||||
|
||||
|
||||
def all_compilers_config(scope=None, init_config=True):
|
||||
def all_compilers_config(
|
||||
configuration: "spack.config.Configuration",
|
||||
*,
|
||||
scope: Optional[str] = None,
|
||||
init_config: bool = True,
|
||||
) -> List["spack.compiler.Compiler"]:
|
||||
"""Return a set of specs for all the compiler versions currently
|
||||
available to build with. These are instances of CompilerSpec.
|
||||
"""
|
||||
from_packages_yaml = get_compiler_config_from_packages(scope)
|
||||
from_packages_yaml = get_compiler_config_from_packages(configuration, scope=scope)
|
||||
if from_packages_yaml:
|
||||
init_config = False
|
||||
from_compilers_yaml = get_compiler_config(scope, init_config)
|
||||
from_compilers_yaml = get_compiler_config(configuration, scope=scope, init_config=init_config)
|
||||
|
||||
result = from_compilers_yaml + from_packages_yaml
|
||||
key = lambda c: _compiler_from_config_entry(c["compiler"])
|
||||
# Dedupe entries by the compiler they represent
|
||||
# If the entry is invalid, treat it as unique for deduplication
|
||||
key = lambda c: _compiler_from_config_entry(c["compiler"] or id(c))
|
||||
return list(llnl.util.lang.dedupe(result, key=key))
|
||||
|
||||
|
||||
@@ -332,7 +375,7 @@ def all_compiler_specs(scope=None, init_config=True):
|
||||
# Return compiler specs from the merged config.
|
||||
return [
|
||||
spack.spec.parse_with_version_concrete(s["compiler"]["spec"], compiler=True)
|
||||
for s in all_compilers_config(scope, init_config)
|
||||
for s in all_compilers_config(spack.config.CONFIG, scope=scope, init_config=init_config)
|
||||
]
|
||||
|
||||
|
||||
@@ -492,11 +535,20 @@ def find_specs_by_arch(compiler_spec, arch_spec, scope=None, init_config=True):
|
||||
|
||||
|
||||
def all_compilers(scope=None, init_config=True):
|
||||
config = all_compilers_config(scope, init_config=init_config)
|
||||
compilers = list()
|
||||
for items in config:
|
||||
return all_compilers_from(
|
||||
configuration=spack.config.CONFIG, scope=scope, init_config=init_config
|
||||
)
|
||||
|
||||
|
||||
def all_compilers_from(configuration, scope=None, init_config=True):
|
||||
compilers = []
|
||||
for items in all_compilers_config(
|
||||
configuration=configuration, scope=scope, init_config=init_config
|
||||
):
|
||||
items = items["compiler"]
|
||||
compilers.append(_compiler_from_config_entry(items))
|
||||
compiler = _compiler_from_config_entry(items) # can be None in error case
|
||||
if compiler:
|
||||
compilers.append(compiler)
|
||||
return compilers
|
||||
|
||||
|
||||
@@ -507,7 +559,7 @@ def compilers_for_spec(
|
||||
"""This gets all compilers that satisfy the supplied CompilerSpec.
|
||||
Returns an empty list if none are found.
|
||||
"""
|
||||
config = all_compilers_config(scope, init_config)
|
||||
config = all_compilers_config(spack.config.CONFIG, scope=scope, init_config=init_config)
|
||||
|
||||
matches = set(find(compiler_spec, scope, init_config))
|
||||
compilers = []
|
||||
@@ -517,7 +569,7 @@ def compilers_for_spec(
|
||||
|
||||
|
||||
def compilers_for_arch(arch_spec, scope=None):
|
||||
config = all_compilers_config(scope)
|
||||
config = all_compilers_config(spack.config.CONFIG, scope=scope)
|
||||
return list(get_compilers(config, arch_spec=arch_spec))
|
||||
|
||||
|
||||
@@ -603,7 +655,10 @@ def _compiler_from_config_entry(items):
|
||||
compiler = _compiler_cache.get(config_id, None)
|
||||
|
||||
if compiler is None:
|
||||
compiler = compiler_from_dict(items)
|
||||
try:
|
||||
compiler = compiler_from_dict(items)
|
||||
except UnknownCompilerError as e:
|
||||
warnings.warn(e.message)
|
||||
_compiler_cache[config_id] = compiler
|
||||
|
||||
return compiler
|
||||
@@ -656,7 +711,9 @@ def get_compilers(config, cspec=None, arch_spec=None):
|
||||
raise ValueError(msg)
|
||||
continue
|
||||
|
||||
compilers.append(_compiler_from_config_entry(items))
|
||||
compiler = _compiler_from_config_entry(items)
|
||||
if compiler:
|
||||
compilers.append(compiler)
|
||||
|
||||
return compilers
|
||||
|
||||
@@ -933,10 +990,11 @@ def _default_make_compilers(cmp_id, paths):
|
||||
make_mixed_toolchain(flat_compilers)
|
||||
|
||||
# Finally, create the compiler list
|
||||
compilers = []
|
||||
compilers: List["spack.compiler.Compiler"] = []
|
||||
for compiler_id, _, compiler in flat_compilers:
|
||||
make_compilers = getattr(compiler_id.os, "make_compilers", _default_make_compilers)
|
||||
compilers.extend(make_compilers(compiler_id, compiler))
|
||||
candidates = make_compilers(compiler_id, compiler)
|
||||
compilers.extend(x for x in candidates if x.cc is not None)
|
||||
|
||||
return compilers
|
||||
|
||||
|
@@ -38,10 +38,10 @@ class Clang(Compiler):

cxx_names = ["clang++"]

# Subclasses use possible names of Fortran 77 compiler
f77_names = ["flang"]
f77_names = ["flang-new", "flang"]

# Subclasses use possible names of Fortran 90 compiler
fc_names = ["flang"]
fc_names = ["flang-new", "flang"]

version_argument = "--version"

@@ -171,10 +171,11 @@ def extract_version_from_output(cls, output):

match = re.search(
# Normal clang compiler versions are left as-is
r"clang version ([^ )\n]+)-svn[~.\w\d-]*|"
r"(?:clang|flang-new) version ([^ )\n]+)-svn[~.\w\d-]*|"
# Don't include hyphenated patch numbers in the version
# (see https://github.com/spack/spack/pull/14365 for details)
r"clang version ([^ )\n]+?)-[~.\w\d-]*|" r"clang version ([^ )\n]+)",
r"(?:clang|flang-new) version ([^ )\n]+?)-[~.\w\d-]*|"
r"(?:clang|flang-new) version ([^ )\n]+)",
output,
)
if match:
@@ -8,7 +8,7 @@
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
from typing import Dict, List, Set
|
||||
from typing import Dict, List
|
||||
|
||||
import archspec.cpu
|
||||
|
||||
@@ -20,15 +20,7 @@
|
||||
from spack.error import SpackError
|
||||
from spack.version import Version, VersionRange
|
||||
|
||||
avail_fc_version: Set[str] = set()
|
||||
fc_path: Dict[str, str] = dict()
|
||||
|
||||
fortran_mapping = {
|
||||
"2021.3.0": "19.29.30133",
|
||||
"2021.2.1": "19.28.29913",
|
||||
"2021.2.0": "19.28.29334",
|
||||
"2021.1.0": "19.28.29333",
|
||||
}
|
||||
FC_PATH: Dict[str, str] = dict()
|
||||
|
||||
|
||||
class CmdCall:
|
||||
@@ -115,15 +107,13 @@ def command_str(self):
|
||||
return f"{script} {self.arch} {self.sdk_ver} {self.vcvars_ver}"
|
||||
|
||||
|
||||
def get_valid_fortran_pth(comp_ver):
|
||||
cl_ver = str(comp_ver)
|
||||
def get_valid_fortran_pth():
|
||||
"""Assign maximum available fortran compiler version"""
|
||||
# TODO (johnwparent): validate compatibility w/ try compiler
|
||||
# functionality when added
|
||||
sort_fn = lambda fc_ver: Version(fc_ver)
|
||||
sort_fc_ver = sorted(list(avail_fc_version), key=sort_fn)
|
||||
for ver in sort_fc_ver:
|
||||
if ver in fortran_mapping:
|
||||
if Version(cl_ver) <= Version(fortran_mapping[ver]):
|
||||
return fc_path[ver]
|
||||
return None
|
||||
sort_fc_ver = sorted(list(FC_PATH.keys()), key=sort_fn)
|
||||
return FC_PATH[sort_fc_ver[-1]] if sort_fc_ver else None
|
||||
|
||||
|
||||
class Msvc(Compiler):
|
||||
@@ -167,11 +157,9 @@ def __init__(self, *args, **kwargs):
|
||||
# This positional argument "paths" is later parsed and process by the base class
|
||||
# via the call to `super` later in this method
|
||||
paths = args[3]
|
||||
# This positional argument "cspec" is also parsed and handled by the base class
|
||||
# constructor
|
||||
cspec = args[0]
|
||||
new_pth = [pth if pth else get_valid_fortran_pth(cspec.version) for pth in paths]
|
||||
paths[:] = new_pth
|
||||
latest_fc = get_valid_fortran_pth()
|
||||
new_pth = [pth if pth else latest_fc for pth in paths[2:]]
|
||||
paths[2:] = new_pth
|
||||
# Initialize, deferring to base class but then adding the vcvarsallfile
|
||||
# file based on compiler executable path.
|
||||
super().__init__(*args, **kwargs)
|
||||
@@ -183,7 +171,7 @@ def __init__(self, *args, **kwargs):
|
||||
# and stores their path, but their respective VCVARS
|
||||
# file must be invoked before useage.
|
||||
env_cmds = []
|
||||
compiler_root = os.path.join(self.cc, "../../../../../../..")
|
||||
compiler_root = os.path.join(os.path.dirname(self.cc), "../../../../../..")
|
||||
vcvars_script_path = os.path.join(compiler_root, "Auxiliary", "Build", "vcvars64.bat")
|
||||
# get current platform architecture and format for vcvars argument
|
||||
arch = spack.platforms.real_host().default.lower()
|
||||
@@ -198,11 +186,34 @@ def __init__(self, *args, **kwargs):
|
||||
# paths[2] refers to the fc path and is a generic check
|
||||
# for a fortran compiler
|
||||
if paths[2]:
|
||||
|
||||
def get_oneapi_root(pth: str):
|
||||
"""From within a prefix known to be a oneAPI path
|
||||
determine the oneAPI root path from arbitrary point
|
||||
under root
|
||||
|
||||
Args:
|
||||
pth: path prefixed within oneAPI root
|
||||
"""
|
||||
if not pth:
|
||||
return ""
|
||||
while os.path.basename(pth) and os.path.basename(pth) != "oneAPI":
|
||||
pth = os.path.dirname(pth)
|
||||
return pth
|
||||
|
||||
# If this found, it sets all the vars
|
||||
oneapi_root = os.path.join(self.cc, "../../..")
|
||||
oneapi_root = get_oneapi_root(self.fc)
|
||||
if not oneapi_root:
|
||||
raise RuntimeError(f"Non-oneAPI Fortran compiler {self.fc} assigned to MSVC")
|
||||
oneapi_root_setvars = os.path.join(oneapi_root, "setvars.bat")
|
||||
# some oneAPI exes return a version more precise than their
|
||||
# install paths specify, so we determine path from
|
||||
# the install path rather than the fc executable itself
|
||||
numver = r"\d+\.\d+(?:\.\d+)?"
|
||||
pattern = f"((?:{numver})|(?:latest))"
|
||||
version_from_path = re.search(pattern, self.fc).group(1)
|
||||
oneapi_version_setvars = os.path.join(
|
||||
oneapi_root, "compiler", str(self.ifx_version), "env", "vars.bat"
|
||||
oneapi_root, "compiler", version_from_path, "env", "vars.bat"
|
||||
)
|
||||
# order matters here, the specific version env must be invoked first,
|
||||
# otherwise it will be ignored if the root setvars sets up the oneapi
|
||||
@@ -314,23 +325,19 @@ def setup_custom_environment(self, pkg, env):
|
||||
|
||||
@classmethod
|
||||
def fc_version(cls, fc):
|
||||
# We're using intel for the Fortran compilers, which exist if
|
||||
# ONEAPI_ROOT is a meaningful variable
|
||||
if not sys.platform == "win32":
|
||||
return "unknown"
|
||||
fc_ver = cls.default_version(fc)
|
||||
avail_fc_version.add(fc_ver)
|
||||
fc_path[fc_ver] = fc
|
||||
if os.getenv("ONEAPI_ROOT"):
|
||||
try:
|
||||
sps = spack.operating_systems.windows_os.WindowsOs().compiler_search_paths
|
||||
except AttributeError:
|
||||
raise SpackError("Windows compiler search paths not established")
|
||||
clp = spack.util.executable.which_string("cl", path=sps)
|
||||
ver = cls.default_version(clp)
|
||||
else:
|
||||
ver = fc_ver
|
||||
return ver
|
||||
FC_PATH[fc_ver] = fc
|
||||
try:
|
||||
sps = spack.operating_systems.windows_os.WindowsOs().compiler_search_paths
|
||||
except AttributeError:
|
||||
raise SpackError(
|
||||
"Windows compiler search paths not established, "
|
||||
"please report this behavior to github.com/spack/spack"
|
||||
)
|
||||
clp = spack.util.executable.which_string("cl", path=sps)
|
||||
return cls.default_version(clp) if clp else fc_ver
|
||||
|
||||
@classmethod
|
||||
def f77_version(cls, f77):
|
||||
|
@@ -64,7 +64,7 @@ def verbose_flag(self):

#
# This way, we at least enable the implicit rpath detection, which is
# based on compilation of a C file (see method
# spack.compiler._get_compiler_link_paths): in the case of a mixed
# spack.compiler._compile_dummy_c_source): in the case of a mixed
# NAG/GCC toolchain, the flag will be passed to g++ (e.g.
# 'g++ -Wl,-v ./main.c'), otherwise, the flag will be passed to nagfor
# (e.g. 'nagfor -Wl,-v ./main.c' - note that nagfor recognizes '.c'
@@ -74,6 +74,10 @@ class Concretizer:

#: during concretization. Used for testing and for mirror creation
check_for_compiler_existence = None

#: Packages that the old concretizer cannot deal with correctly, and cannot build anyway.
#: Those will not be considered as providers for virtuals.
non_buildable_packages = {"glibc", "musl"}

def __init__(self, abstract_spec=None):
if Concretizer.check_for_compiler_existence is None:
Concretizer.check_for_compiler_existence = not spack.config.get(

@@ -113,7 +117,11 @@ def _valid_virtuals_and_externals(self, spec):

pref_key = lambda spec: 0 # no-op pref key

if spec.virtual:
candidates = spack.repo.PATH.providers_for(spec)
candidates = [
s
for s in spack.repo.PATH.providers_for(spec)
if s.name not in self.non_buildable_packages
]
if not candidates:
raise spack.error.UnsatisfiableProviderSpecError(candidates[0], spec)

@@ -1562,8 +1562,9 @@ def ensure_latest_format_fn(section: str) -> Callable[[YamlConfigDict], bool]:
def use_configuration(
*scopes_or_paths: Union[ConfigScope, str]
) -> Generator[Configuration, None, None]:
"""Use the configuration scopes passed as arguments within the
context manager.
"""Use the configuration scopes passed as arguments within the context manager.

This function invalidates caches, and is therefore very slow.

Args:
*scopes_or_paths: scope objects or paths to be used
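A minimal usage sketch of this context manager; the extra scope path below is hypothetical:

import spack.config

# Slow: entering the context manager invalidates configuration caches.
with spack.config.use_configuration("/path/to/extra/config") as config:
    environments_root = config.get("config:environments_root")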
|
||||
|
@@ -12,26 +12,26 @@
|
||||
},
|
||||
"os_package_manager": "yum_amazon"
|
||||
},
|
||||
"fedora:38": {
|
||||
"fedora:40": {
|
||||
"bootstrap": {
|
||||
"template": "container/fedora_38.dockerfile",
|
||||
"image": "docker.io/fedora:38"
|
||||
"template": "container/fedora.dockerfile",
|
||||
"image": "docker.io/fedora:40"
|
||||
},
|
||||
"os_package_manager": "dnf",
|
||||
"build": "spack/fedora38",
|
||||
"build": "spack/fedora40",
|
||||
"final": {
|
||||
"image": "docker.io/fedora:38"
|
||||
"image": "docker.io/fedora:40"
|
||||
}
|
||||
},
|
||||
"fedora:37": {
|
||||
"fedora:39": {
|
||||
"bootstrap": {
|
||||
"template": "container/fedora_37.dockerfile",
|
||||
"image": "docker.io/fedora:37"
|
||||
"template": "container/fedora.dockerfile",
|
||||
"image": "docker.io/fedora:39"
|
||||
},
|
||||
"os_package_manager": "dnf",
|
||||
"build": "spack/fedora37",
|
||||
"build": "spack/fedora39",
|
||||
"final": {
|
||||
"image": "docker.io/fedora:37"
|
||||
"image": "docker.io/fedora:39"
|
||||
}
|
||||
},
|
||||
"rockylinux:9": {
|
||||
@@ -116,6 +116,13 @@
|
||||
},
|
||||
"os_package_manager": "apt"
|
||||
},
|
||||
"ubuntu:24.04": {
|
||||
"bootstrap": {
|
||||
"template": "container/ubuntu_2404.dockerfile"
|
||||
},
|
||||
"os_package_manager": "apt",
|
||||
"build": "spack/ubuntu-noble"
|
||||
},
|
||||
"ubuntu:22.04": {
|
||||
"bootstrap": {
|
||||
"template": "container/ubuntu_2204.dockerfile"
|
||||
@@ -129,13 +136,6 @@
|
||||
},
|
||||
"build": "spack/ubuntu-focal",
|
||||
"os_package_manager": "apt"
|
||||
},
|
||||
"ubuntu:18.04": {
|
||||
"bootstrap": {
|
||||
"template": "container/ubuntu_1804.dockerfile"
|
||||
},
|
||||
"os_package_manager": "apt",
|
||||
"build": "spack/ubuntu-bionic"
|
||||
}
|
||||
},
|
||||
"os_package_managers": {
|
||||
|
@@ -25,6 +25,7 @@
|
||||
import socket
|
||||
import sys
|
||||
import time
|
||||
from json import JSONDecoder
|
||||
from typing import (
|
||||
Any,
|
||||
Callable,
|
||||
@@ -818,7 +819,8 @@ def _read_from_file(self, filename):
|
||||
"""
|
||||
try:
|
||||
with open(filename, "r") as f:
|
||||
fdata = sjson.load(f)
|
||||
# In the future we may use a stream of JSON objects, hence `raw_decode` for compat.
|
||||
fdata, _ = JSONDecoder().raw_decode(f.read())
|
||||
except Exception as e:
|
||||
raise CorruptDatabaseError("error parsing database:", str(e)) from e
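The move from sjson.load to raw_decode leans on standard-library behavior: raw_decode parses the first JSON document and reports where parsing stopped, so trailing bytes (a future stream of objects) do not raise. A small standalone check:

from json import JSONDecoder

text = '{"database": {"version": "7"}}\n{"next": "object"}'
obj, end = JSONDecoder().raw_decode(text)
# obj is the first document only; end is the offset where parsing stopped (30 here)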
|
||||
|
||||
@@ -833,27 +835,24 @@ def check(cond, msg):
|
||||
|
||||
# High-level file checks
|
||||
db = fdata["database"]
|
||||
check("installs" in db, "no 'installs' in JSON DB.")
|
||||
check("version" in db, "no 'version' in JSON DB.")
|
||||
|
||||
installs = db["installs"]
|
||||
|
||||
# TODO: better version checking semantics.
|
||||
version = vn.Version(db["version"])
|
||||
if version > _DB_VERSION:
|
||||
raise InvalidDatabaseVersionError(self, _DB_VERSION, version)
|
||||
elif version < _DB_VERSION:
|
||||
if not any(old == version and new == _DB_VERSION for old, new in _SKIP_REINDEX):
|
||||
tty.warn(
|
||||
"Spack database version changed from %s to %s. Upgrading."
|
||||
% (version, _DB_VERSION)
|
||||
)
|
||||
elif version < _DB_VERSION and not any(
|
||||
old == version and new == _DB_VERSION for old, new in _SKIP_REINDEX
|
||||
):
|
||||
tty.warn(f"Spack database version changed from {version} to {_DB_VERSION}. Upgrading.")
|
||||
|
||||
self.reindex(spack.store.STORE.layout)
|
||||
installs = dict(
|
||||
(k, v.to_dict(include_fields=self._record_fields))
|
||||
for k, v in self._data.items()
|
||||
)
|
||||
self.reindex(spack.store.STORE.layout)
|
||||
installs = dict(
|
||||
(k, v.to_dict(include_fields=self._record_fields)) for k, v in self._data.items()
|
||||
)
|
||||
else:
|
||||
check("installs" in db, "no 'installs' in JSON DB.")
|
||||
installs = db["installs"]
|
||||
|
||||
spec_reader = reader(version)
|
||||
|
||||
|
@@ -83,26 +83,15 @@ def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
|
||||
return path_to_dict(search_paths)
|
||||
|
||||
|
||||
def get_elf_compat(path):
|
||||
"""For ELF files, get a triplet (EI_CLASS, EI_DATA, e_machine) and see if
|
||||
it is host-compatible."""
|
||||
# On ELF platforms supporting, we try to be a bit smarter when it comes to shared
|
||||
# libraries, by dropping those that are not host compatible.
|
||||
with open(path, "rb") as f:
|
||||
elf = elf_utils.parse_elf(f, only_header=True)
|
||||
return (elf.is_64_bit, elf.is_little_endian, elf.elf_hdr.e_machine)
|
||||
|
||||
|
||||
def accept_elf(path, host_compat):
|
||||
"""Accept an ELF file if the header matches the given compat triplet,
|
||||
obtained with :py:func:`get_elf_compat`. In case it's not an ELF (e.g.
|
||||
static library, or some arbitrary file, fall back to is_readable_file)."""
|
||||
"""Accept an ELF file if the header matches the given compat triplet. In case it's not an ELF
|
||||
(e.g. static library, or some arbitrary file, fall back to is_readable_file)."""
|
||||
# Fast path: assume libraries at least have .so in their basename.
|
||||
# Note: don't replace with splitext, because of libsmth.so.1.2.3 file names.
|
||||
if ".so" not in os.path.basename(path):
|
||||
return llnl.util.filesystem.is_readable_file(path)
|
||||
try:
|
||||
return host_compat == get_elf_compat(path)
|
||||
return host_compat == elf_utils.get_elf_compat(path)
|
||||
except (OSError, elf_utils.ElfParsingError):
|
||||
return llnl.util.filesystem.is_readable_file(path)
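Roughly, the host/candidate comparison above reduces to matching a (class, endianness, machine) triple; reusing the helpers from this hunk, with a hypothetical library path:

import sys

host_compat = elf_utils.get_elf_compat(sys.executable)   # e.g. (True, True, 62) on x86_64 Linux
ok = accept_elf("/usr/lib/libexample.so.1", host_compat)  # non-ELF files fall back to is_readable_file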
|
||||
|
||||
@@ -155,7 +144,7 @@ def libraries_in_ld_and_system_library_path(
|
||||
search_paths = list(llnl.util.lang.dedupe(search_paths, key=file_identifier))
|
||||
|
||||
try:
|
||||
host_compat = get_elf_compat(sys.executable)
|
||||
host_compat = elf_utils.get_elf_compat(sys.executable)
|
||||
accept = lambda path: accept_elf(path, host_compat)
|
||||
except (OSError, elf_utils.ElfParsingError):
|
||||
accept = llnl.util.filesystem.is_readable_file
|
||||
|
@@ -11,6 +11,7 @@
|
||||
|
||||
from llnl.util import filesystem
|
||||
|
||||
import spack.platforms
|
||||
import spack.repo
|
||||
import spack.spec
|
||||
from spack.util import spack_yaml
|
||||
@@ -32,6 +33,8 @@ class ExpectedTestResult(NamedTuple):
|
||||
|
||||
#: Spec to be detected
|
||||
spec: str
|
||||
#: Attributes expected in the external spec
|
||||
extra_attributes: Dict[str, str]
|
||||
|
||||
|
||||
class DetectionTest(NamedTuple):
|
||||
@@ -100,7 +103,10 @@ def _create_executable_scripts(self, mock_executables: MockExecutables) -> List[
|
||||
|
||||
@property
|
||||
def expected_specs(self) -> List[spack.spec.Spec]:
|
||||
return [spack.spec.Spec(r.spec) for r in self.test.results]
|
||||
return [
|
||||
spack.spec.Spec.from_detection(item.spec, extra_attributes=item.extra_attributes)
|
||||
for item in self.test.results
|
||||
]
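A hypothetical example of what one expected result can now carry (spec and attributes invented for illustration):

expected = spack.spec.Spec.from_detection(
    "llvm@15.0.7+clang", extra_attributes={"compilers": {"c": "/usr/bin/clang"}}
)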
|
||||
|
||||
|
||||
def detection_tests(pkg_name: str, repository: spack.repo.RepoPath) -> List[Runner]:
|
||||
@@ -117,9 +123,13 @@ def detection_tests(pkg_name: str, repository: spack.repo.RepoPath) -> List[Runn
|
||||
"""
|
||||
result = []
|
||||
detection_tests_content = read_detection_tests(pkg_name, repository)
|
||||
current_platform = str(spack.platforms.host())
|
||||
|
||||
tests_by_path = detection_tests_content.get("paths", [])
|
||||
for single_test_data in tests_by_path:
|
||||
if current_platform not in single_test_data.get("platforms", [current_platform]):
|
||||
continue
|
||||
|
||||
mock_executables = []
|
||||
for layout in single_test_data["layout"]:
|
||||
mock_executables.append(
|
||||
@@ -127,7 +137,11 @@ def detection_tests(pkg_name: str, repository: spack.repo.RepoPath) -> List[Runn
|
||||
)
|
||||
expected_results = []
|
||||
for assertion in single_test_data["results"]:
|
||||
expected_results.append(ExpectedTestResult(spec=assertion["spec"]))
|
||||
expected_results.append(
|
||||
ExpectedTestResult(
|
||||
spec=assertion["spec"], extra_attributes=assertion.get("extra_attributes", {})
|
||||
)
|
||||
)
|
||||
|
||||
current_test = DetectionTest(
|
||||
pkg_name=pkg_name, layout=mock_executables, results=expected_results
|
||||
|
@@ -27,6 +27,7 @@ class OpenMpi(Package):
|
||||
* ``variant``
|
||||
* ``version``
|
||||
* ``requires``
|
||||
* ``redistribute``
|
||||
|
||||
"""
|
||||
import collections
|
||||
@@ -63,6 +64,7 @@ class OpenMpi(Package):
|
||||
__all__ = [
|
||||
"DirectiveError",
|
||||
"DirectiveMeta",
|
||||
"DisableRedistribute",
|
||||
"version",
|
||||
"conflicts",
|
||||
"depends_on",
|
||||
@@ -75,6 +77,7 @@ class OpenMpi(Package):
|
||||
"resource",
|
||||
"build_system",
|
||||
"requires",
|
||||
"redistribute",
|
||||
]
|
||||
|
||||
#: These are variant names used by Spack internally; packages can't use them
|
||||
@@ -598,9 +601,68 @@ def _execute_depends_on(pkg: "spack.package_base.PackageBase"):
|
||||
return _execute_depends_on
|
||||
|
||||
|
||||
#: Store whether a given Spec source/binary should not be redistributed.
|
||||
class DisableRedistribute:
|
||||
def __init__(self, source, binary):
|
||||
self.source = source
|
||||
self.binary = binary
|
||||
|
||||
|
||||
@directive("disable_redistribute")
|
||||
def redistribute(source=None, binary=None, when: WhenType = None):
|
||||
"""Can be used inside a Package definition to declare that
|
||||
the package source and/or compiled binaries should not be
|
||||
redistributed.
|
||||
|
||||
By default, Packages allow source/binary distribution (i.e. in
|
||||
mirrors). Because of this, and because overlapping enable/
|
||||
disable specs are not allowed, this directive only allows users
|
||||
to explicitly disable redistribution for specs.
|
||||
"""
|
||||
|
||||
return lambda pkg: _execute_redistribute(pkg, source, binary, when)
|
||||
|
||||
|
||||
def _execute_redistribute(
|
||||
pkg: "spack.package_base.PackageBase", source=None, binary=None, when: WhenType = None
|
||||
):
|
||||
if source is None and binary is None:
|
||||
return
|
||||
elif (source is True) or (binary is True):
|
||||
raise DirectiveError(
|
||||
"Source/binary distribution are true by default, they can only "
|
||||
"be explicitly disabled."
|
||||
)
|
||||
|
||||
if source is None:
|
||||
source = True
|
||||
if binary is None:
|
||||
binary = True
|
||||
|
||||
when_spec = _make_when_spec(when)
|
||||
if not when_spec:
|
||||
return
|
||||
if source is False:
|
||||
max_constraint = spack.spec.Spec(f"{pkg.name}@{when_spec.versions}")
|
||||
if not max_constraint.satisfies(when_spec):
|
||||
raise DirectiveError("Source distribution can only be disabled for versions")
|
||||
|
||||
if when_spec in pkg.disable_redistribute:
|
||||
disable = pkg.disable_redistribute[when_spec]
|
||||
if not source:
|
||||
disable.source = True
|
||||
if not binary:
|
||||
disable.binary = True
|
||||
else:
|
||||
pkg.disable_redistribute[when_spec] = DisableRedistribute(
|
||||
source=not source, binary=not binary
|
||||
)
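A hypothetical package showing how the new directive reads in practice (package name and constraints invented):

class ExamplePackage(Package):
    """Sketch only: binaries may never be redistributed, sources only before 2.0."""

    redistribute(binary=False)
    redistribute(source=False, when="@2.0:")  # source restrictions must be version-only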
|
||||
|
||||
|
||||
@directive(("extendees", "dependencies"))
|
||||
def extends(spec, when=None, type=("build", "run"), patches=None):
|
||||
"""Same as depends_on, but also adds this package to the extendee list.
|
||||
In case of Python, also adds a dependency on python-venv.
|
||||
|
||||
keyword arguments can be passed to extends() so that extension
|
||||
packages can pass parameters to the extendee's extension
|
||||
@@ -616,6 +678,11 @@ def _execute_extends(pkg):
|
||||
_depends_on(pkg, spec, when=when, type=type, patches=patches)
|
||||
spec_obj = spack.spec.Spec(spec)
|
||||
|
||||
# When extending python, also add a dependency on python-venv. This is done so that
|
||||
# Spack environment views are Python virtual environments.
|
||||
if spec_obj.name == "python" and not pkg.name == "python-venv":
|
||||
_depends_on(pkg, "python-venv", when=when, type=("build", "run"))
|
||||
|
||||
# TODO: the values of the extendees dictionary are not used. Remove in next refactor.
|
||||
pkg.extendees[spec_obj.name] = (spec_obj, None)
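In practice this means an extension like the hypothetical package below picks up python-venv automatically:

class PyExample(Package):
    extends("python")  # per the hunk above, also adds a build/run dependency on python-venv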
|
||||
|
||||
|
@@ -34,6 +34,9 @@

   * ``spec``: a string representation of the abstract spec that was concretized

4. ``concrete_specs``: a dictionary containing the specs in the environment.
5. ``include_concrete`` (dictionary): an optional dictionary that includes the roots
   and concrete specs from the included environments, keyed by the path to that
   environment

Compatibility
-------------

@@ -50,26 +53,37 @@
     - ``v2``
     - ``v3``
     - ``v4``
     - ``v5``
   * - ``v0.12:0.14``
     - ✅
     -
     -
     -
     -
   * - ``v0.15:0.16``
     - ✅
     - ✅
     -
     -
     -
   * - ``v0.17``
     - ✅
     - ✅
     - ✅
     -
     -
   * - ``v0.18:``
     - ✅
     - ✅
     - ✅
     - ✅
     -
   * - ``v0.22:``
     - ✅
     - ✅
     - ✅
     - ✅
     - ✅

Version 1
---------
@@ -334,6 +348,118 @@
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Version 5
---------

Version 5 doesn't change the top-level lockfile format, but it adds an optional
``include_concrete`` dictionary. That dictionary holds the ``roots`` and
``concrete_specs`` of the included environments, keyed by the path to each
environment. Because the entry is optional, ``include_concrete`` is omitted from
the lockfile when the environment does not include any other environments.

.. code-block:: json
|
||||
{
|
||||
"_meta": {
|
||||
"file-type": "spack-lockfile",
|
||||
"lockfile-version": 5,
|
||||
"specfile-version": 3
|
||||
},
|
||||
"roots": [
|
||||
{
|
||||
"hash": "<dag_hash 1>",
|
||||
"spec": "<abstract spec 1>"
|
||||
},
|
||||
{
|
||||
"hash": "<dag_hash 2>",
|
||||
"spec": "<abstract spec 2>"
|
||||
}
|
||||
],
|
||||
"concrete_specs": {
|
||||
"<dag_hash 1>": {
|
||||
"... <spec dict attributes> ...": { },
|
||||
"dependencies": [
|
||||
{
|
||||
"name": "depname_1",
|
||||
"hash": "<dag_hash for depname_1>",
|
||||
"type": ["build", "link"]
|
||||
},
|
||||
{
|
||||
"name": "depname_2",
|
||||
"hash": "<dag_hash for depname_2>",
|
||||
"type": ["build", "link"]
|
||||
}
|
||||
],
|
||||
"hash": "<dag_hash 1>",
|
||||
},
|
||||
"<daghash 2>": {
|
||||
"... <spec dict attributes> ...": { },
|
||||
"dependencies": [
|
||||
{
|
||||
"name": "depname_3",
|
||||
"hash": "<dag_hash for depname_3>",
|
||||
"type": ["build", "link"]
|
||||
},
|
||||
{
|
||||
"name": "depname_4",
|
||||
"hash": "<dag_hash for depname_4>",
|
||||
"type": ["build", "link"]
|
||||
}
|
||||
],
|
||||
"hash": "<dag_hash 2>"
|
||||
}
|
||||
}
|
||||
"include_concrete": {
|
||||
"<path to environment>": {
|
||||
"roots": [
|
||||
{
|
||||
"hash": "<dag_hash 1>",
|
||||
"spec": "<abstract spec 1>"
|
||||
},
|
||||
{
|
||||
"hash": "<dag_hash 2>",
|
||||
"spec": "<abstract spec 2>"
|
||||
}
|
||||
],
|
||||
"concrete_specs": {
|
||||
"<dag_hash 1>": {
|
||||
"... <spec dict attributes> ...": { },
|
||||
"dependencies": [
|
||||
{
|
||||
"name": "depname_1",
|
||||
"hash": "<dag_hash for depname_1>",
|
||||
"type": ["build", "link"]
|
||||
},
|
||||
{
|
||||
"name": "depname_2",
|
||||
"hash": "<dag_hash for depname_2>",
|
||||
"type": ["build", "link"]
|
||||
}
|
||||
],
|
||||
"hash": "<dag_hash 1>",
|
||||
},
|
||||
"<daghash 2>": {
|
||||
"... <spec dict attributes> ...": { },
|
||||
"dependencies": [
|
||||
{
|
||||
"name": "depname_3",
|
||||
"hash": "<dag_hash for depname_3>",
|
||||
"type": ["build", "link"]
|
||||
},
|
||||
{
|
||||
"name": "depname_4",
|
||||
"hash": "<dag_hash for depname_4>",
|
||||
"type": ["build", "link"]
|
||||
}
|
||||
],
|
||||
"hash": "<dag_hash 2>"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
from .environment import (
|
||||
|
@@ -16,7 +16,7 @@
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
import warnings
|
||||
from typing import Dict, Iterable, List, Optional, Set, Tuple, Union
|
||||
from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Union
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.tty as tty
|
||||
@@ -106,17 +106,16 @@ def environment_name(path: Union[str, pathlib.Path]) -> str:
|
||||
return path_str
|
||||
|
||||
|
||||
def check_disallowed_env_config_mods(scopes):
|
||||
def ensure_no_disallowed_env_config_mods(scopes: List[spack.config.ConfigScope]) -> None:
|
||||
for scope in scopes:
|
||||
with spack.config.use_configuration(scope):
|
||||
if spack.config.get("config:environments_root"):
|
||||
raise SpackEnvironmentError(
|
||||
"Spack environments are prohibited from modifying 'config:environments_root' "
|
||||
"because it can make the definition of the environment ill-posed. Please "
|
||||
"remove from your environment and place it in a permanent scope such as "
|
||||
"defaults, system, site, etc."
|
||||
)
|
||||
return scopes
|
||||
config = scope.get_section("config")
|
||||
if config and "environments_root" in config["config"]:
|
||||
raise SpackEnvironmentError(
|
||||
"Spack environments are prohibited from modifying 'config:environments_root' "
|
||||
"because it can make the definition of the environment ill-posed. Please "
|
||||
"remove from your environment and place it in a permanent scope such as "
|
||||
"defaults, system, site, etc."
|
||||
)
|
||||
|
||||
|
||||
def default_manifest_yaml():
|
||||
@@ -160,6 +159,8 @@ def default_manifest_yaml():
|
||||
default_view_name = "default"
|
||||
# Default behavior to link all packages into views (vs. only root packages)
|
||||
default_view_link = "all"
|
||||
# The name for any included concrete specs
|
||||
included_concrete_name = "include_concrete"
|
||||
|
||||
|
||||
def installed_specs():
|
||||
@@ -294,6 +295,7 @@ def create(
|
||||
init_file: Optional[Union[str, pathlib.Path]] = None,
|
||||
with_view: Optional[Union[str, pathlib.Path, bool]] = None,
|
||||
keep_relative: bool = False,
|
||||
include_concrete: Optional[List[str]] = None,
|
||||
) -> "Environment":
|
||||
"""Create a managed environment in Spack and returns it.
|
||||
|
||||
@@ -310,10 +312,15 @@ def create(
|
||||
string, it specifies the path to the view
|
||||
keep_relative: if True, develop paths are copied verbatim into the new environment file,
|
||||
otherwise they are made absolute
|
||||
include_concrete: list of concrete environment names/paths to be included
|
||||
"""
|
||||
environment_dir = environment_dir_from_name(name, exists_ok=False)
|
||||
return create_in_dir(
|
||||
environment_dir, init_file=init_file, with_view=with_view, keep_relative=keep_relative
|
||||
environment_dir,
|
||||
init_file=init_file,
|
||||
with_view=with_view,
|
||||
keep_relative=keep_relative,
|
||||
include_concrete=include_concrete,
|
||||
)
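A hedged usage sketch of the new parameter, assuming create() is re-exported at the package level as the environment module's import block suggests (environment name and path invented):

import spack.environment as ev

env = ev.create("combined", include_concrete=["/path/to/already/concretized/env"])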
|
||||
|
||||
|
||||
@@ -322,6 +329,7 @@ def create_in_dir(
|
||||
init_file: Optional[Union[str, pathlib.Path]] = None,
|
||||
with_view: Optional[Union[str, pathlib.Path, bool]] = None,
|
||||
keep_relative: bool = False,
|
||||
include_concrete: Optional[List[str]] = None,
|
||||
) -> "Environment":
|
||||
"""Create an environment in the directory passed as input and returns it.
|
||||
|
||||
@@ -335,6 +343,7 @@ def create_in_dir(
|
||||
string, it specifies the path to the view
|
||||
keep_relative: if True, develop paths are copied verbatim into the new environment file,
|
||||
otherwise they are made absolute
|
||||
include_concrete: concrete environment names/paths to be included
|
||||
"""
|
||||
initialize_environment_dir(root, envfile=init_file)
|
||||
|
||||
@@ -347,6 +356,12 @@ def create_in_dir(
|
||||
if with_view is not None:
|
||||
manifest.set_default_view(with_view)
|
||||
|
||||
if include_concrete is not None:
|
||||
set_included_envs_to_env_paths(include_concrete)
|
||||
validate_included_envs_exists(include_concrete)
|
||||
validate_included_envs_concrete(include_concrete)
|
||||
manifest.set_include_concrete(include_concrete)
|
||||
|
||||
manifest.flush()
|
||||
|
||||
except (spack.config.ConfigFormatError, SpackEnvironmentConfigError) as e:
|
||||
@@ -420,6 +435,67 @@ def ensure_env_root_path_exists():
|
||||
fs.mkdirp(env_root_path())
|
||||
|
||||
|
||||
def set_included_envs_to_env_paths(include_concrete: List[str]) -> None:
    """Replace any included environments given by name with the path to that environment.

    Args:
        include_concrete: list of environment names or paths to environments"""
|
||||
|
||||
for i, env_name in enumerate(include_concrete):
|
||||
if is_env_dir(env_name):
|
||||
include_concrete[i] = env_name
|
||||
elif exists(env_name):
|
||||
include_concrete[i] = root(env_name)
|
||||
|
||||
|
||||
def validate_included_envs_exists(include_concrete: List[str]) -> None:
|
||||
"""Checks that all of the included environments exist
|
||||
|
||||
Args:
|
||||
include_concrete: list of already existing concrete environments to include
|
||||
|
||||
Raises:
|
||||
SpackEnvironmentError: if any of the included environments do not exist
|
||||
"""
|
||||
|
||||
missing_envs = set()
|
||||
|
||||
for i, env_name in enumerate(include_concrete):
|
||||
if not is_env_dir(env_name):
|
||||
missing_envs.add(env_name)
|
||||
|
||||
if missing_envs:
|
||||
msg = "The following environment(s) are missing: {0}".format(", ".join(missing_envs))
|
||||
raise SpackEnvironmentError(msg)
|
||||
|
||||
|
||||
def validate_included_envs_concrete(include_concrete: List[str]) -> None:
|
||||
"""Checks that all of the included environments are concrete
|
||||
|
||||
Args:
|
||||
include_concrete: list of already existing concrete environments to include
|
||||
|
||||
Raises:
|
||||
SpackEnvironmentError: if any of the included environments are not concrete
|
||||
"""
|
||||
|
||||
non_concrete_envs = set()
|
||||
|
||||
for env_path in include_concrete:
|
||||
if not os.path.exists(Environment(env_path).lock_path):
|
||||
non_concrete_envs.add(Environment(env_path).name)
|
||||
|
||||
if non_concrete_envs:
|
||||
msg = "The following environment(s) are not concrete: {0}\n" "Please run:".format(
|
||||
", ".join(non_concrete_envs)
|
||||
)
|
||||
for env in non_concrete_envs:
|
||||
msg += f"\n\t`spack -e {env} concretize`"
|
||||
|
||||
raise SpackEnvironmentError(msg)
|
||||
|
||||
|
||||
def all_environment_names():
|
||||
"""List the names of environments that currently exist."""
|
||||
# just return empty if the env path does not exist. A read-only
|
||||
@@ -822,6 +898,18 @@ def __init__(self, manifest_dir: Union[str, pathlib.Path]) -> None:
|
||||
self.specs_by_hash: Dict[str, Spec] = {}
|
||||
#: Repository for this environment (memoized)
|
||||
self._repo = None
|
||||
|
||||
#: Environment paths for concrete (lockfile) included environments
|
||||
self.included_concrete_envs: List[str] = []
|
||||
#: First-level included concretized spec data from/to the lockfile.
|
||||
self.included_concrete_spec_data: Dict[str, Dict[str, List[str]]] = {}
|
||||
#: User specs from included environments from the last concretization
|
||||
self.included_concretized_user_specs: Dict[str, List[Spec]] = {}
|
||||
#: Roots from included environments with the last concretization, in order
|
||||
self.included_concretized_order: Dict[str, List[str]] = {}
|
||||
#: Concretized specs by hash from the included environments
|
||||
self.included_specs_by_hash: Dict[str, Dict[str, Spec]] = {}
|
||||
|
||||
#: Previously active environment
|
||||
self._previous_active = None
|
||||
self._dev_specs = None
|
||||
@@ -859,7 +947,7 @@ def _read(self):
|
||||
|
||||
if os.path.exists(self.lock_path):
|
||||
with open(self.lock_path) as f:
|
||||
read_lock_version = self._read_lockfile(f)
|
||||
read_lock_version = self._read_lockfile(f)["_meta"]["lockfile-version"]
|
||||
|
||||
if read_lock_version == 1:
|
||||
tty.debug(f"Storing backup of {self.lock_path} at {self._lock_backup_v1_path}")
|
||||
@@ -927,6 +1015,20 @@ def add_view(name, values):
|
||||
if self.views == dict():
|
||||
self.views[default_view_name] = ViewDescriptor(self.path, self.view_path_default)
|
||||
|
||||
def _process_concrete_includes(self):
|
||||
"""Extract and load into memory included concrete spec data."""
|
||||
self.included_concrete_envs = self.manifest[TOP_LEVEL_KEY].get(included_concrete_name, [])
|
||||
|
||||
if self.included_concrete_envs:
|
||||
if os.path.exists(self.lock_path):
|
||||
with open(self.lock_path) as f:
|
||||
data = self._read_lockfile(f)
|
||||
|
||||
if included_concrete_name in data:
|
||||
self.included_concrete_spec_data = data[included_concrete_name]
|
||||
else:
|
||||
self.include_concrete_envs()
|
||||
|
||||
def _construct_state_from_manifest(self):
|
||||
"""Set up user specs and views from the manifest file."""
|
||||
self.spec_lists = collections.OrderedDict()
|
||||
@@ -943,6 +1045,31 @@ def _construct_state_from_manifest(self):
|
||||
self.spec_lists[user_speclist_name] = user_specs
|
||||
|
||||
self._process_view(spack.config.get("view", True))
|
||||
self._process_concrete_includes()
|
||||
|
||||
def all_concretized_user_specs(self) -> List[Spec]:
|
||||
"""Returns all of the concretized user specs of the environment and
|
||||
its included environment(s)."""
|
||||
concretized_user_specs = self.concretized_user_specs[:]
|
||||
for included_specs in self.included_concretized_user_specs.values():
|
||||
for included in included_specs:
|
||||
# Don't duplicate included spec(s)
|
||||
if included not in concretized_user_specs:
|
||||
concretized_user_specs.append(included)
|
||||
|
||||
return concretized_user_specs
|
||||
|
||||
def all_concretized_orders(self) -> List[str]:
|
||||
"""Returns all of the concretized order of the environment and
|
||||
its included environment(s)."""
|
||||
concretized_order = self.concretized_order[:]
|
||||
for included_concretized_order in self.included_concretized_order.values():
|
||||
for included in included_concretized_order:
|
||||
# Don't duplicate included spec(s)
|
||||
if included not in concretized_order:
|
||||
concretized_order.append(included)
|
||||
|
||||
return concretized_order
|
||||
|
||||
@property
|
||||
def user_specs(self):
|
||||
@@ -967,6 +1094,26 @@ def _read_dev_specs(self):
|
||||
dev_specs[name] = local_entry
|
||||
return dev_specs
|
||||
|
||||
@property
|
||||
def included_user_specs(self) -> SpecList:
|
||||
"""Included concrete user (or root) specs from last concretization."""
|
||||
spec_list = SpecList()
|
||||
|
||||
if not self.included_concrete_envs:
|
||||
return spec_list
|
||||
|
||||
def add_root_specs(included_concrete_specs):
|
||||
# add specs from the include *and* any nested includes it may have
|
||||
for env, info in included_concrete_specs.items():
|
||||
for root_list in info["roots"]:
|
||||
spec_list.add(root_list["spec"])
|
||||
|
||||
if "include_concrete" in info:
|
||||
add_root_specs(info["include_concrete"])
|
||||
|
||||
add_root_specs(self.included_concrete_spec_data)
|
||||
return spec_list
|
||||
|
||||
def clear(self, re_read=False):
|
||||
"""Clear the contents of the environment
|
||||
|
||||
@@ -978,9 +1125,15 @@ def clear(self, re_read=False):
|
||||
self.spec_lists[user_speclist_name] = SpecList()
|
||||
|
||||
self._dev_specs = {}
|
||||
self.concretized_user_specs = [] # user specs from last concretize
|
||||
self.concretized_order = [] # roots of last concretize, in order
|
||||
self.concretized_user_specs = [] # user specs from last concretize
|
||||
self.specs_by_hash = {} # concretized specs by hash
|
||||
|
||||
self.included_concrete_spec_data = {} # concretized specs from lockfile of included envs
|
||||
self.included_concretized_order = {} # root specs of the included envs, keyed by env path
|
||||
self.included_concretized_user_specs = {} # user specs from last concretize's included env
|
||||
self.included_specs_by_hash = {} # concretized specs by hash from the included envs
|
||||
|
||||
self.invalidate_repository_cache()
|
||||
self._previous_active = None # previously active environment
|
||||
if not re_read:
|
||||
@@ -1034,6 +1187,55 @@ def scope_name(self):
|
||||
"""Name of the config scope of this environment's manifest file."""
|
||||
return self.manifest.scope_name
|
||||
|
||||
def include_concrete_envs(self):
|
||||
"""Copy and save the included envs' specs internally"""
|
||||
|
||||
lockfile_meta = None
|
||||
root_hash_seen = set()
|
||||
concrete_hash_seen = set()
|
||||
self.included_concrete_spec_data = {}
|
||||
|
||||
for env_path in self.included_concrete_envs:
|
||||
# Check that environment exists
|
||||
if not is_env_dir(env_path):
|
||||
raise SpackEnvironmentError(f"Unable to find env at {env_path}")
|
||||
|
||||
env = Environment(env_path)
|
||||
|
||||
with open(env.lock_path) as f:
|
||||
lockfile_as_dict = env._read_lockfile(f)
|
||||
|
||||
# Lockfile_meta must match each env and use at least format version 5
|
||||
if lockfile_meta is None:
|
||||
lockfile_meta = lockfile_as_dict["_meta"]
|
||||
elif lockfile_meta != lockfile_as_dict["_meta"]:
|
||||
raise SpackEnvironmentError("All lockfile _meta values must match")
|
||||
elif lockfile_meta["lockfile-version"] < 5:
|
||||
raise SpackEnvironmentError("The lockfile format must be at version 5 or higher")
|
||||
|
||||
# Copy unique root specs from env
|
||||
self.included_concrete_spec_data[env_path] = {"roots": []}
|
||||
for root_dict in lockfile_as_dict["roots"]:
|
||||
if root_dict["hash"] not in root_hash_seen:
|
||||
self.included_concrete_spec_data[env_path]["roots"].append(root_dict)
|
||||
root_hash_seen.add(root_dict["hash"])
|
||||
|
||||
# Copy unique concrete specs from env
|
||||
for concrete_spec in lockfile_as_dict["concrete_specs"]:
|
||||
if concrete_spec not in concrete_hash_seen:
|
||||
self.included_concrete_spec_data[env_path].update(
|
||||
{"concrete_specs": lockfile_as_dict["concrete_specs"]}
|
||||
)
|
||||
concrete_hash_seen.add(concrete_spec)
|
||||
|
||||
if "include_concrete" in lockfile_as_dict.keys():
|
||||
self.included_concrete_spec_data[env_path]["include_concrete"] = lockfile_as_dict[
|
||||
"include_concrete"
|
||||
]
|
||||
|
||||
self._read_lockfile_dict(self._to_lockfile_dict())
|
||||
self.write()
|
||||
|
||||
def destroy(self):
|
||||
"""Remove this environment from Spack entirely."""
|
||||
shutil.rmtree(self.path)
|
||||
@@ -1233,6 +1435,10 @@ def concretize(self, force=False, tests=False):
|
||||
for spec in set(self.concretized_user_specs) - set(self.user_specs):
|
||||
self.deconcretize(spec, concrete=False)
|
||||
|
||||
# If a combined env, check updated spec is in the linked envs
|
||||
if self.included_concrete_envs:
|
||||
self.include_concrete_envs()
|
||||
|
||||
# Pick the right concretization strategy
|
||||
if self.unify == "when_possible":
|
||||
return self._concretize_together_where_possible(tests=tests)
|
||||
@@ -1416,7 +1622,7 @@ def _concretize_separately(self, tests=False):
|
||||
# Ensure we don't try to bootstrap clingo in parallel
|
||||
if spack.config.get("config:concretizer", "clingo") == "clingo":
|
||||
with spack.bootstrap.ensure_bootstrap_configuration():
|
||||
spack.bootstrap.ensure_core_dependencies()
|
||||
spack.bootstrap.ensure_clingo_importable_or_raise()
|
||||
|
||||
# Ensure all the indexes have been built or updated, since
|
||||
# otherwise the processes in the pool may timeout on waiting
|
||||
@@ -1427,7 +1633,7 @@ def _concretize_separately(self, tests=False):
|
||||
|
||||
# Ensure we have compilers in compilers.yaml to avoid that
|
||||
# processes try to write the config file in parallel
|
||||
_ = spack.compilers.get_compiler_config(init_config=True)
|
||||
_ = spack.compilers.get_compiler_config(spack.config.CONFIG, init_config=True)
|
||||
|
||||
# Early return if there is nothing to do
|
||||
if len(args) == 0:
|
||||
@@ -1705,8 +1911,14 @@ def _partition_roots_by_install_status(self):
|
||||
of per spec."""
|
||||
installed, uninstalled = [], []
|
||||
with spack.store.STORE.db.read_transaction():
|
||||
for concretized_hash in self.concretized_order:
|
||||
spec = self.specs_by_hash[concretized_hash]
|
||||
for concretized_hash in self.all_concretized_orders():
|
||||
if concretized_hash in self.specs_by_hash:
|
||||
spec = self.specs_by_hash[concretized_hash]
|
||||
else:
|
||||
for env_path in self.included_specs_by_hash.keys():
|
||||
if concretized_hash in self.included_specs_by_hash[env_path]:
|
||||
spec = self.included_specs_by_hash[env_path][concretized_hash]
|
||||
break
|
||||
if not spec.installed or (
|
||||
spec.satisfies("dev_path=*") or spec.satisfies("^dev_path=*")
|
||||
):
|
||||
@@ -1786,8 +1998,14 @@ def added_specs(self):
|
||||
|
||||
def concretized_specs(self):
|
||||
"""Tuples of (user spec, concrete spec) for all concrete specs."""
|
||||
for s, h in zip(self.concretized_user_specs, self.concretized_order):
|
||||
yield (s, self.specs_by_hash[h])
|
||||
for s, h in zip(self.all_concretized_user_specs(), self.all_concretized_orders()):
|
||||
if h in self.specs_by_hash:
|
||||
yield (s, self.specs_by_hash[h])
|
||||
else:
|
||||
for env_path in self.included_specs_by_hash.keys():
|
||||
if h in self.included_specs_by_hash[env_path]:
|
||||
yield (s, self.included_specs_by_hash[env_path][h])
|
||||
break
|
||||
|
||||
def concrete_roots(self):
|
||||
"""Same as concretized_specs, except it returns the list of concrete
|
||||
@@ -1916,8 +2134,7 @@ def _get_environment_specs(self, recurse_dependencies=True):
|
||||
If these specs appear under different user_specs, only one copy
|
||||
is added to the list returned.
|
||||
"""
|
||||
specs = [self.specs_by_hash[h] for h in self.concretized_order]
|
||||
|
||||
specs = [self.specs_by_hash[h] for h in self.all_concretized_orders()]
|
||||
if recurse_dependencies:
|
||||
specs.extend(
|
||||
traverse.traverse_nodes(
|
||||
@@ -1962,31 +2179,76 @@ def _to_lockfile_dict(self):
|
||||
"concrete_specs": concrete_specs,
|
||||
}
|
||||
|
||||
if self.included_concrete_envs:
|
||||
data[included_concrete_name] = self.included_concrete_spec_data
|
||||
|
||||
return data
|
||||
|
||||
def _read_lockfile(self, file_or_json):
|
||||
"""Read a lockfile from a file or from a raw string."""
|
||||
lockfile_dict = sjson.load(file_or_json)
|
||||
self._read_lockfile_dict(lockfile_dict)
|
||||
return lockfile_dict["_meta"]["lockfile-version"]
|
||||
return lockfile_dict
|
||||
|
||||
def set_included_concretized_user_specs(
|
||||
self,
|
||||
env_name: str,
|
||||
env_info: Dict[str, Dict[str, Any]],
|
||||
included_json_specs_by_hash: Dict[str, Dict[str, Any]],
|
||||
) -> Dict[str, Dict[str, Any]]:
|
||||
"""Sets all of the concretized user specs from included environments
|
||||
to include those from nested included environments.
|
||||
|
||||
Args:
|
||||
env_name: the name (technically the path) of the included environment
|
||||
env_info: included concrete environment data
|
||||
included_json_specs_by_hash: concrete spec data keyed by hash
|
||||
|
||||
Returns: updated specs_by_hash
|
||||
"""
|
||||
self.included_concretized_order[env_name] = []
|
||||
self.included_concretized_user_specs[env_name] = []
|
||||
|
||||
def add_specs(name, info, specs_by_hash):
|
||||
# Add specs from the environment as well as any of its nested
|
||||
# environments.
|
||||
for root_info in info["roots"]:
|
||||
self.included_concretized_order[name].append(root_info["hash"])
|
||||
self.included_concretized_user_specs[name].append(Spec(root_info["spec"]))
|
||||
if "concrete_specs" in info:
|
||||
specs_by_hash.update(info["concrete_specs"])
|
||||
|
||||
if included_concrete_name in info:
|
||||
for included_name, included_info in info[included_concrete_name].items():
|
||||
if included_name not in self.included_concretized_order:
|
||||
self.included_concretized_order[included_name] = []
|
||||
self.included_concretized_user_specs[included_name] = []
|
||||
add_specs(included_name, included_info, specs_by_hash)
|
||||
|
||||
add_specs(env_name, env_info, included_json_specs_by_hash)
|
||||
return included_json_specs_by_hash
|
||||
|
||||
def _read_lockfile_dict(self, d):
|
||||
"""Read a lockfile dictionary into this environment."""
|
||||
self.specs_by_hash = {}
|
||||
self.included_specs_by_hash = {}
|
||||
self.included_concretized_user_specs = {}
|
||||
self.included_concretized_order = {}
|
||||
|
||||
roots = d["roots"]
|
||||
self.concretized_user_specs = [Spec(r["spec"]) for r in roots]
|
||||
self.concretized_order = [r["hash"] for r in roots]
|
||||
json_specs_by_hash = d["concrete_specs"]
|
||||
included_json_specs_by_hash = {}
|
||||
|
||||
# Track specs by their lockfile key. Currently spack uses the finest
|
||||
# grained hash as the lockfile key, while older formats used the build
|
||||
# hash or a previous incarnation of the DAG hash (one that did not
|
||||
# include build deps or package hash).
|
||||
specs_by_hash = {}
|
||||
if included_concrete_name in d:
|
||||
for env_name, env_info in d[included_concrete_name].items():
|
||||
included_json_specs_by_hash.update(
|
||||
self.set_included_concretized_user_specs(
|
||||
env_name, env_info, included_json_specs_by_hash
|
||||
)
|
||||
)
|
||||
|
||||
# Track specs by their DAG hash, allows handling DAG hash collisions
|
||||
first_seen = {}
|
||||
current_lockfile_format = d["_meta"]["lockfile-version"]
|
||||
try:
|
||||
reader = READER_CLS[current_lockfile_format]
|
||||
@@ -1999,6 +2261,39 @@ def _read_lockfile_dict(self, d):
|
||||
msg += " You need to use a newer Spack version."
|
||||
raise SpackEnvironmentError(msg)
|
||||
|
||||
first_seen, self.concretized_order = self.filter_specs(
|
||||
reader, json_specs_by_hash, self.concretized_order
|
||||
)
|
||||
|
||||
for spec_dag_hash in self.concretized_order:
|
||||
self.specs_by_hash[spec_dag_hash] = first_seen[spec_dag_hash]
|
||||
|
||||
if any(self.included_concretized_order.values()):
|
||||
first_seen = {}
|
||||
|
||||
for env_name, concretized_order in self.included_concretized_order.items():
|
||||
filtered_spec, self.included_concretized_order[env_name] = self.filter_specs(
|
||||
reader, included_json_specs_by_hash, concretized_order
|
||||
)
|
||||
first_seen.update(filtered_spec)
|
||||
|
||||
for env_path, spec_hashes in self.included_concretized_order.items():
|
||||
self.included_specs_by_hash[env_path] = {}
|
||||
for spec_dag_hash in spec_hashes:
|
||||
self.included_specs_by_hash[env_path].update(
|
||||
{spec_dag_hash: first_seen[spec_dag_hash]}
|
||||
)
|
||||
|
||||
def filter_specs(self, reader, json_specs_by_hash, order_concretized):
|
||||
# Track specs by their lockfile key. Currently spack uses the finest
|
||||
# grained hash as the lockfile key, while older formats used the build
|
||||
# hash or a previous incarnation of the DAG hash (one that did not
|
||||
# include build deps or package hash).
|
||||
specs_by_hash = {}
|
||||
|
||||
# Track specs by their DAG hash, allows handling DAG hash collisions
|
||||
first_seen = {}
|
||||
|
||||
# First pass: Put each spec in the map ignoring dependencies
|
||||
for lockfile_key, node_dict in json_specs_by_hash.items():
|
||||
spec = reader.from_node_dict(node_dict)
|
||||
@@ -2021,7 +2316,8 @@ def _read_lockfile_dict(self, d):
|
||||
# keep. This is only required as long as we support older lockfile
|
||||
# formats where the mapping from DAG hash to lockfile key is possibly
|
||||
# one-to-many.
|
||||
for lockfile_key in self.concretized_order:
|
||||
|
||||
for lockfile_key in order_concretized:
|
||||
for s in specs_by_hash[lockfile_key].traverse():
|
||||
if s.dag_hash() not in first_seen:
|
||||
first_seen[s.dag_hash()] = s
|
||||
@@ -2029,12 +2325,10 @@ def _read_lockfile_dict(self, d):
|
||||
# Now make sure concretized_order and our internal specs dict
|
||||
# contains the keys used by modern spack (i.e. the dag_hash
|
||||
# that includes build deps and package hash).
|
||||
self.concretized_order = [
|
||||
specs_by_hash[h_key].dag_hash() for h_key in self.concretized_order
|
||||
]
|
||||
|
||||
for spec_dag_hash in self.concretized_order:
|
||||
self.specs_by_hash[spec_dag_hash] = first_seen[spec_dag_hash]
|
||||
order_concretized = [specs_by_hash[h_key].dag_hash() for h_key in order_concretized]
|
||||
|
||||
return first_seen, order_concretized
|
||||
|
||||
def write(self, regenerate: bool = True) -> None:
|
||||
"""Writes an in-memory environment to its location on disk.
|
||||
@@ -2047,7 +2341,7 @@ def write(self, regenerate: bool = True) -> None:
|
||||
regenerate: regenerate views and run post-write hooks as well as writing if True.
|
||||
"""
|
||||
self.manifest_uptodate_or_warn()
|
||||
if self.specs_by_hash:
|
||||
if self.specs_by_hash or self.included_concrete_envs:
|
||||
self.ensure_env_directory_exists(dot_env=True)
|
||||
self.update_environment_repository()
|
||||
self.manifest.flush()
|
||||
@@ -2463,6 +2757,10 @@ def __init__(self, manifest_dir: Union[pathlib.Path, str]) -> None:
|
||||
self.scope_name = f"env:{environment_name(self.manifest_dir)}"
|
||||
self.config_stage_dir = os.path.join(env_subdir_path(manifest_dir), "config")
|
||||
|
||||
#: Configuration scopes associated with this environment. Note that these are not
|
||||
#: invalidated by a re-read of the manifest file.
|
||||
self._config_scopes: Optional[List[spack.config.ConfigScope]] = None
|
||||
|
||||
if not self.manifest_file.exists():
|
||||
msg = f"cannot find '{manifest_name}' in {self.manifest_dir}"
|
||||
raise SpackEnvironmentError(msg)
|
||||
@@ -2542,6 +2840,19 @@ def override_user_spec(self, user_spec: str, idx: int) -> None:
|
||||
raise SpackEnvironmentError(msg) from e
|
||||
self.changed = True
|
||||
|
||||
def set_include_concrete(self, include_concrete: List[str]) -> None:
|
||||
"""Sets the included concrete environments in the manifest to the value(s) passed as input.
|
||||
|
||||
Args:
|
||||
include_concrete: list of already existing concrete environments to include
|
||||
"""
|
||||
self.pristine_configuration[included_concrete_name] = []
|
||||
|
||||
for env_path in include_concrete:
|
||||
self.pristine_configuration[included_concrete_name].append(env_path)
|
||||
|
||||
self.changed = True
|
||||
|
||||
def add_definition(self, user_spec: str, list_name: str) -> None:
|
||||
"""Appends a user spec to the first active definition matching the name passed as argument.
|
||||
|
||||
@@ -2808,16 +3119,19 @@ def included_config_scopes(self) -> List[spack.config.ConfigScope]:
|
||||
|
||||
@property
|
||||
def env_config_scopes(self) -> List[spack.config.ConfigScope]:
|
||||
"""A list of all configuration scopes for the environment manifest.
|
||||
|
||||
Returns: All configuration scopes associated with the environment
|
||||
"""
|
||||
config_name = self.scope_name
|
||||
env_scope = spack.config.SingleFileScope(
|
||||
config_name, str(self.manifest_file), spack.schema.env.schema, [TOP_LEVEL_KEY]
|
||||
)
|
||||
|
||||
return check_disallowed_env_config_mods(self.included_config_scopes + [env_scope])
|
||||
"""A list of all configuration scopes for the environment manifest. On the first call this
|
||||
instantiates all the scopes, on subsequent calls it returns the cached list."""
|
||||
if self._config_scopes is not None:
|
||||
return self._config_scopes
|
||||
scopes: List[spack.config.ConfigScope] = [
|
||||
*self.included_config_scopes,
|
||||
spack.config.SingleFileScope(
|
||||
self.scope_name, str(self.manifest_file), spack.schema.env.schema, [TOP_LEVEL_KEY]
|
||||
),
|
||||
]
|
||||
ensure_no_disallowed_env_config_mods(scopes)
|
||||
self._config_scopes = scopes
|
||||
return scopes
|
||||
|
||||
def prepare_config_scope(self) -> None:
|
||||
"""Add the manifest's scopes to the global configuration search path."""
|
||||
|
@@ -662,9 +662,6 @@ def add_specs(self, *specs: spack.spec.Spec) -> None:
|
||||
return
|
||||
|
||||
# Drop externals
|
||||
for s in specs:
|
||||
if s.external:
|
||||
tty.warn("Skipping external package: " + s.short_spec)
|
||||
specs = [s for s in specs if not s.external]
|
||||
|
||||
self._sanity_check_view_projection(specs)
|
||||
|
31
lib/spack/spack/hooks/autopush.py
Normal file
@@ -0,0 +1,31 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import llnl.util.tty as tty

import spack.binary_distribution as bindist
import spack.mirror


def post_install(spec, explicit):
    # Push package to all buildcaches with autopush==True

    # Do nothing if spec is an external package
    if spec.external:
        return

    # Do nothing if package was not installed from source
    pkg = spec.package
    if pkg.installed_from_binary_cache:
        return

    # Push the package to all autopush mirrors
    for mirror in spack.mirror.MirrorCollection(binary=True, autopush=True).values():
        bindist.push_or_raise(
            spec,
            mirror.push_url,
            bindist.PushOptions(force=True, regenerate_index=False, unsigned=not mirror.signed),
        )
        tty.msg(f"{spec.name}: Pushed to build cache: '{mirror.name}'")
8
lib/spack/spack/hooks/windows_runtime_linkage.py
Normal file
@@ -0,0 +1,8 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)


def post_install(spec, explicit=None):
    spec.package.windows_establish_runtime_linkage()
@@ -489,6 +489,9 @@ def _process_binary_cache_tarball(
|
||||
with timer.measure("install"), spack.util.path.filter_padding():
|
||||
binary_distribution.extract_tarball(pkg.spec, download_result, force=False, timer=timer)
|
||||
|
||||
if hasattr(pkg, "_post_buildcache_install_hook"):
|
||||
pkg._post_buildcache_install_hook()
|
||||
|
||||
pkg.installed_from_binary_cache = True
|
||||
spack.store.STORE.db.add(pkg.spec, spack.store.STORE.layout, explicit=explicit)
|
||||
return True
|
||||
@@ -976,7 +979,11 @@ def __init__(
|
||||
# a dependency of the build task. Here we add it to self.dependencies
|
||||
compiler_spec = self.pkg.spec.compiler
|
||||
arch_spec = self.pkg.spec.architecture
|
||||
if not spack.compilers.compilers_for_spec(compiler_spec, arch_spec=arch_spec):
|
||||
strict = spack.concretize.Concretizer().check_for_compiler_existence
|
||||
if (
|
||||
not spack.compilers.compilers_for_spec(compiler_spec, arch_spec=arch_spec)
|
||||
and not strict
|
||||
):
|
||||
# The compiler is in the queue, identify it as dependency
|
||||
dep = spack.compilers.pkg_spec_for_compiler(compiler_spec)
|
||||
dep.constrain(f"platform={str(arch_spec.platform)}")
|
||||
@@ -1691,10 +1698,6 @@ def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
|
||||
spack.package_base.PackageBase._verbose = spack.build_environment.start_build_process(
|
||||
pkg, build_process, install_args
|
||||
)
|
||||
# Currently this is how RPATH-like behavior is achieved on Windows, after install
|
||||
# establish runtime linkage via Windows Runtime link object
|
||||
# Note: this is a no-op on non Windows platforms
|
||||
pkg.windows_establish_runtime_linkage()
|
||||
# Note: PARENT of the build process adds the new package to
|
||||
# the database, so that we don't need to re-read from file.
|
||||
spack.store.STORE.db.add(pkg.spec, spack.store.STORE.layout, explicit=explicit)
|
||||
|
@@ -427,7 +427,7 @@ def make_argument_parser(**kwargs):
|
||||
parser.add_argument(
|
||||
"--color",
|
||||
action="store",
|
||||
default=os.environ.get("SPACK_COLOR", "auto"),
|
||||
default=None,
|
||||
choices=("always", "never", "auto"),
|
||||
help="when to colorize output (default: auto)",
|
||||
)
|
||||
@@ -622,7 +622,8 @@ def setup_main_options(args):
|
||||
# with color
|
||||
color.try_enable_terminal_color_on_windows()
|
||||
# when to use color (takes always, auto, or never)
|
||||
color.set_color_when(args.color)
|
||||
if args.color is not None:
|
||||
color.set_color_when(args.color)
|
||||
|
||||
|
||||
def allows_unknown_args(command):
|
||||
|
@@ -137,6 +137,12 @@ def source(self):
|
||||
def signed(self) -> bool:
|
||||
return isinstance(self._data, str) or self._data.get("signed", True)
|
||||
|
||||
@property
|
||||
def autopush(self) -> bool:
|
||||
if isinstance(self._data, str):
|
||||
return False
|
||||
return self._data.get("autopush", False)
|
||||
|
||||
@property
|
||||
def fetch_url(self):
|
||||
"""Get the valid, canonicalized fetch URL"""
|
||||
@@ -150,7 +156,7 @@ def push_url(self):
|
||||
def _update_connection_dict(self, current_data: dict, new_data: dict, top_level: bool):
|
||||
keys = ["url", "access_pair", "access_token", "profile", "endpoint_url"]
|
||||
if top_level:
|
||||
keys += ["binary", "source", "signed"]
|
||||
keys += ["binary", "source", "signed", "autopush"]
|
||||
changed = False
|
||||
for key in keys:
|
||||
if key in new_data and current_data.get(key) != new_data[key]:
|
||||
@@ -286,6 +292,7 @@ def __init__(
|
||||
scope=None,
|
||||
binary: Optional[bool] = None,
|
||||
source: Optional[bool] = None,
|
||||
autopush: Optional[bool] = None,
|
||||
):
|
||||
"""Initialize a mirror collection.
|
||||
|
||||
@@ -297,21 +304,27 @@ def __init__(
|
||||
If None, do not filter on binary mirrors.
|
||||
source: If True, only include source mirrors.
|
||||
If False, omit source mirrors.
|
||||
If None, do not filter on source mirrors."""
|
||||
self._mirrors = {
|
||||
name: Mirror(data=mirror, name=name)
|
||||
for name, mirror in (
|
||||
mirrors.items()
|
||||
if mirrors is not None
|
||||
else spack.config.get("mirrors", scope=scope).items()
|
||||
)
|
||||
}
|
||||
If None, do not filter on source mirrors.
|
||||
autopush: If True, only include mirrors that have autopush enabled.
|
||||
If False, omit mirrors that have autopush enabled.
|
||||
If None, do not filter on autopush."""
|
||||
mirrors_data = (
|
||||
mirrors.items()
|
||||
if mirrors is not None
|
||||
else spack.config.get("mirrors", scope=scope).items()
|
||||
)
|
||||
mirrors = (Mirror(data=mirror, name=name) for name, mirror in mirrors_data)
|
||||
|
||||
if source is not None:
|
||||
self._mirrors = {k: v for k, v in self._mirrors.items() if v.source == source}
|
||||
def _filter(m: Mirror):
|
||||
if source is not None and m.source != source:
|
||||
return False
|
||||
if binary is not None and m.binary != binary:
|
||||
return False
|
||||
if autopush is not None and m.autopush != autopush:
|
||||
return False
|
||||
return True
|
||||
|
||||
if binary is not None:
|
||||
self._mirrors = {k: v for k, v in self._mirrors.items() if v.binary == binary}
|
||||
self._mirrors = {m.name: m for m in mirrors if _filter(m)}
|
||||
|
||||
def __eq__(self, other):
|
||||
return self._mirrors == other._mirrors
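The new filter enables queries like the one the autopush hook performs; a small illustrative loop:

import spack.mirror

# List binary mirrors that will receive automatic pushes after installs.
for mirror in spack.mirror.MirrorCollection(binary=True, autopush=True).values():
    print(f"{mirror.name}: {mirror.push_url}")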
|
||||
|
@@ -83,6 +83,17 @@ def configuration(module_set_name):
|
||||
)
|
||||
|
||||
|
||||
_FORMAT_STRING_RE = re.compile(r"({[^}]*})")
|
||||
|
||||
|
||||
def _format_env_var_name(spec, var_name_fmt):
|
||||
"""Format the variable name, but uppercase any formatted fields."""
|
||||
fmt_parts = _FORMAT_STRING_RE.split(var_name_fmt)
|
||||
return "".join(
|
||||
spec.format(part).upper() if _FORMAT_STRING_RE.match(part) else part for part in fmt_parts
|
||||
)
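To make the uppercasing rule concrete, a standalone illustration (spec and template invented):

import re

_FORMAT_STRING_RE = re.compile(r"({[^}]*})")
print(_FORMAT_STRING_RE.split("{name}_{version}_root"))
# -> ['', '{name}', '_', '{version}', '_root']
# Each "{...}" piece is rendered via spec.format(...) and uppercased, literal text is
# kept as-is, so a spec like zlib@1.3 yields "ZLIB_1.3_root".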
|
||||
|
||||
|
||||
def _check_tokens_are_valid(format_string, message):
|
||||
"""Checks that the tokens used in the format string are valid in
|
||||
the context of module file and environment variable naming.
|
||||
@@ -737,20 +748,12 @@ def environment_modifications(self):
|
||||
exclude = self.conf.exclude_env_vars
|
||||
|
||||
# We may have tokens to substitute in environment commands
|
||||
|
||||
# Prepare a suitable transformation dictionary for the names
|
||||
# of the environment variables. This means turn the valid
|
||||
# tokens uppercase.
|
||||
transform = {}
|
||||
for token in _valid_tokens:
|
||||
transform[token] = lambda s, string: str.upper(string)
|
||||
|
||||
for x in env:
|
||||
# Ensure all the tokens are valid in this context
|
||||
msg = "some tokens cannot be expanded in an environment variable name"
|
||||
|
||||
_check_tokens_are_valid(x.name, message=msg)
|
||||
# Transform them
|
||||
x.name = self.spec.format(x.name, transform=transform)
|
||||
x.name = _format_env_var_name(self.spec, x.name)
|
||||
if self.modification_needs_formatting(x):
|
||||
try:
|
||||
# Not every command has a value
|
||||
|
@@ -398,7 +398,7 @@ def create_opener():
|
||||
opener = urllib.request.OpenerDirector()
|
||||
for handler in [
|
||||
urllib.request.UnknownHandler(),
|
||||
urllib.request.HTTPSHandler(),
|
||||
urllib.request.HTTPSHandler(context=spack.util.web.ssl_create_default_context()),
|
||||
spack.util.web.SpackHTTPDefaultErrorHandler(),
|
||||
urllib.request.HTTPRedirectHandler(),
|
||||
urllib.request.HTTPErrorProcessor(),
|
||||
@@ -418,18 +418,27 @@ def ensure_status(request: urllib.request.Request, response: HTTPResponse, statu
|
||||
)
|
||||
|
||||
|
||||
def default_retry(f, retries: int = 3, sleep=None):
|
||||
def default_retry(f, retries: int = 5, sleep=None):
|
||||
sleep = sleep or time.sleep
|
||||
|
||||
def wrapper(*args, **kwargs):
|
||||
for i in range(retries):
|
||||
try:
|
||||
return f(*args, **kwargs)
|
||||
except urllib.error.HTTPError as e:
|
||||
except (urllib.error.URLError, TimeoutError) as e:
|
||||
# Retry on internal server errors, and rate limit errors
|
||||
# Potentially this could take into account the Retry-After header
|
||||
# if registries support it
|
||||
if i + 1 != retries and (500 <= e.code < 600 or e.code == 429):
|
||||
if i + 1 != retries and (
|
||||
(
|
||||
isinstance(e, urllib.error.HTTPError)
|
||||
and (500 <= e.code < 600 or e.code == 429)
|
||||
)
|
||||
or (
|
||||
isinstance(e, urllib.error.URLError) and isinstance(e.reason, TimeoutError)
|
||||
)
|
||||
or isinstance(e, TimeoutError)
|
||||
):
|
||||
# Exponential backoff
|
||||
sleep(2**i)
|
||||
continue
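For orientation: with retries=5 the waits between attempts are 1, 2, 4 and 8 seconds. Wrapping a callable is unchanged; the wrapped target below is hypothetical:

import urllib.request

open_with_retries = default_retry(urllib.request.urlopen)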
|
||||
|
@@ -73,17 +73,24 @@ def vs_install_paths(self):
|
||||
def msvc_paths(self):
|
||||
return [os.path.join(path, "VC", "Tools", "MSVC") for path in self.vs_install_paths]
|
||||
|
||||
@property
|
||||
def oneapi_root(self):
|
||||
root = os.environ.get("ONEAPI_ROOT", "") or os.path.join(
|
||||
os.environ.get("ProgramFiles(x86)", ""), "Intel", "oneAPI"
|
||||
)
|
||||
if os.path.exists(root):
|
||||
return root
|
||||
|
||||
@property
|
||||
def compiler_search_paths(self):
|
||||
# First Strategy: Find MSVC directories using vswhere
|
||||
_compiler_search_paths = []
|
||||
for p in self.msvc_paths:
|
||||
_compiler_search_paths.extend(glob.glob(os.path.join(p, "*", "bin", "Hostx64", "x64")))
|
||||
if os.getenv("ONEAPI_ROOT"):
|
||||
oneapi_root = self.oneapi_root
|
||||
if oneapi_root:
|
||||
_compiler_search_paths.extend(
|
||||
glob.glob(
|
||||
os.path.join(str(os.getenv("ONEAPI_ROOT")), "compiler", "*", "windows", "bin")
|
||||
)
|
||||
glob.glob(os.path.join(oneapi_root, "compiler", "**", "bin"), recursive=True)
|
||||
)
|
||||
|
||||
# Second strategy: Find MSVC via the registry
|
||||
|
@@ -39,6 +39,7 @@
|
||||
)
|
||||
from spack.build_systems.cargo import CargoPackage
|
||||
from spack.build_systems.cmake import CMakePackage, generator
|
||||
from spack.build_systems.compiler import CompilerPackage
|
||||
from spack.build_systems.cuda import CudaPackage
|
||||
from spack.build_systems.generic import Package
|
||||
from spack.build_systems.gnu import GNUMirrorPackage
|
||||
|
@@ -468,7 +468,41 @@ def _names(when_indexed_dictionary):
     return sorted(all_names)


-class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
+class RedistributionMixin:
+    """Logic for determining whether a Package is source/binary
+    redistributable.
+    """
+
+    #: Store whether a given Spec source/binary should not be
+    #: redistributed.
+    disable_redistribute: Dict["spack.spec.Spec", "spack.directives.DisableRedistribute"]
+
+    # Source redistribution must be determined before concretization
+    # (because source mirrors work with un-concretized Specs).
+    @classmethod
+    def redistribute_source(cls, spec):
+        """Whether it should be possible to add the source of this
+        package to a Spack mirror.
+        """
+        for when_spec, disable_redistribute in cls.disable_redistribute.items():
+            if disable_redistribute.source and spec.satisfies(when_spec):
+                return False
+
+        return True
+
+    @property
+    def redistribute_binary(self):
+        """Whether it should be possible to create a binary out of an
+        installed instance of this package.
+        """
+        for when_spec, disable_redistribute in self.__class__.disable_redistribute.items():
+            if disable_redistribute.binary and self.spec.satisfies(when_spec):
+                return False
+
+        return True
+
+
+class PackageBase(WindowsRPath, PackageViewMixin, RedistributionMixin, metaclass=PackageMeta):
     """This is the superclass for all spack packages.

     ***The Package class***
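A short usage sketch of the two hooks the mixin adds. Only the classmethod and property shown in the hunk are assumed to exist; the helper redistributable below is hypothetical, not a Spack function.

def redistributable(pkg, spec):
    """Return (source_ok, binary_ok) for a package object and a spec."""
    # Classmethod: usable on un-concretized specs, e.g. when deciding whether
    # a source tarball may be added to a mirror.
    source_ok = type(pkg).redistribute_source(spec)
    # Property: evaluated against the installed instance, e.g. before pushing
    # a binary to a build cache.
    binary_ok = pkg.redistribute_binary
    return source_ok, binary_ok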
@@ -1206,7 +1240,7 @@ def install_test_root(self):
         """Return the install test root directory."""
         tty.warn(
             "The 'pkg.install_test_root' property is deprecated with removal "
-            "expected v0.22. Use 'install_test_root(pkg)' instead."
+            "expected v0.23. Use 'install_test_root(pkg)' instead."
         )
         return install_test_root(self)

@@ -1864,7 +1898,7 @@ def cache_extra_test_sources(self, srcs):
         """
         msg = (
             "'pkg.cache_extra_test_sources(srcs) is deprecated with removal "
-            "expected in v0.22. Use 'cache_extra_test_sources(pkg, srcs)' "
+            "expected in v0.23. Use 'cache_extra_test_sources(pkg, srcs)' "
             "instead."
         )
         warnings.warn(msg)
@@ -2521,7 +2555,12 @@ class PackageStillNeededError(InstallError):
     """Raised when package is still needed by another on uninstall."""

     def __init__(self, spec, dependents):
-        super().__init__("Cannot uninstall %s" % spec)
+        spec_fmt = spack.spec.DEFAULT_FORMAT + " /{hash:7}"
+        dep_fmt = "{name}{@versions} /{hash:7}"
+        super().__init__(
+            f"Cannot uninstall {spec.format(spec_fmt)}, "
+            f"needed by {[dep.format(dep_fmt) for dep in dependents]}"
+        )
         self.spec = spec
         self.dependents = dependents

@@ -726,14 +726,14 @@ def first_repo(self):
         """Get the first repo in precedence order."""
         return self.repos[0] if self.repos else None

+    @llnl.util.lang.memoized
+    def _all_package_names_set(self, include_virtuals):
+        return {name for repo in self.repos for name in repo.all_package_names(include_virtuals)}
+
     @llnl.util.lang.memoized
     def _all_package_names(self, include_virtuals):
         """Return all unique package names in all repositories."""
-        all_pkgs = set()
-        for repo in self.repos:
-            for name in repo.all_package_names(include_virtuals):
-                all_pkgs.add(name)
-        return sorted(all_pkgs, key=lambda n: n.lower())
+        return sorted(self._all_package_names_set(include_virtuals), key=lambda n: n.lower())

     def all_package_names(self, include_virtuals=False):
         return self._all_package_names(include_virtuals)
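This refactor caches the unordered name set once and derives the sorted list from it, so later membership checks (like the providers_for filter in the next hunk) do not re-walk every repository. A standalone sketch of the same caching pattern, using functools.lru_cache in place of Spack's llnl.util.lang.memoized; the RepoIndex class is illustrative, not Spack code.

import functools


class RepoIndex:
    """Toy stand-in: `repos` is a list of objects exposing all_package_names()."""

    def __init__(self, repos):
        self.repos = repos

    @functools.lru_cache(maxsize=None)  # plays the role of llnl.util.lang.memoized
    def _names_set(self, include_virtuals: bool) -> frozenset:
        return frozenset(
            name for repo in self.repos for name in repo.all_package_names(include_virtuals)
        )

    def all_package_names(self, include_virtuals: bool = False):
        # Fast membership checks reuse the cached set; the sorted list is only
        # built when a caller actually needs ordered output.
        return sorted(self._names_set(include_virtuals), key=str.lower)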
@@ -794,7 +794,11 @@ def patch_index(self):

     @autospec
     def providers_for(self, vpkg_spec):
-        providers = self.provider_index.providers_for(vpkg_spec)
+        providers = [
+            spec
+            for spec in self.provider_index.providers_for(vpkg_spec)
+            if spec.name in self._all_package_names_set(include_virtuals=False)
+        ]
         if not providers:
             raise UnknownPackageError(vpkg_spec.fullname)
         return providers
@@ -27,7 +27,7 @@
 from spack.error import SpackError
 from spack.util.crypto import checksum
 from spack.util.log_parse import parse_log_events
-from spack.util.web import urllib_ssl_cert_handler
+from spack.util.web import ssl_create_default_context

 from .base import Reporter
 from .extract import extract_test_parts
@@ -428,7 +428,7 @@ def upload(self, filename):
         # Compute md5 checksum for the contents of this file.
         md5sum = checksum(hashlib.md5, filename, block_size=8192)

-        opener = build_opener(HTTPSHandler(context=urllib_ssl_cert_handler()))
+        opener = build_opener(HTTPSHandler(context=ssl_create_default_context()))
         with open(filename, "rb") as f:
             params_dict = {
                 "build": self.buildname,
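Both call sites now build their opener from a helper named ssl_create_default_context. The diff does not show that helper's body; a minimal sketch of the general idea it presumably wraps, with the function name and cafile parameter as assumptions rather than Spack's actual signature.

import ssl
from typing import Optional


def make_verified_context(cafile: Optional[str] = None) -> ssl.SSLContext:
    # ssl.create_default_context enables certificate and hostname verification
    # by default; pass cafile to trust a site-specific CA bundle instead of the
    # system store.
    return ssl.create_default_context(cafile=cafile)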
@@ -9,13 +9,40 @@
 """
 from typing import Any, Dict

+LIST_OF_SPECS = {"type": "array", "items": {"type": "string"}}
+
 properties: Dict[str, Any] = {
     "concretizer": {
         "type": "object",
         "additionalProperties": False,
         "properties": {
             "reuse": {
-                "oneOf": [{"type": "boolean"}, {"type": "string", "enum": ["dependencies"]}]
+                "oneOf": [
+                    {"type": "boolean"},
+                    {"type": "string", "enum": ["dependencies"]},
+                    {
+                        "type": "object",
+                        "properties": {
+                            "roots": {"type": "boolean"},
+                            "include": LIST_OF_SPECS,
+                            "exclude": LIST_OF_SPECS,
+                            "from": {
+                                "type": "array",
+                                "items": {
+                                    "type": "object",
+                                    "properties": {
+                                        "type": {
+                                            "type": "string",
+                                            "enum": ["local", "buildcache", "external"],
+                                        },
+                                        "include": LIST_OF_SPECS,
+                                        "exclude": LIST_OF_SPECS,
+                                    },
+                                },
+                            },
+                        },
+                    },
+                ]
             },
             "enable_node_namespace": {"type": "boolean"},
             "targets": {
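With the expanded schema, concretizer configuration can express reuse as an object rather than only a boolean or "dependencies". A hypothetical configuration the new schema would accept, written as the equivalent Python dict; the spec strings are made-up examples, not recommendations.

reuse_config = {
    "concretizer": {
        "reuse": {
            "roots": True,
            "include": ["gcc@12:"],  # only reuse specs matching these
            "exclude": ["%clang"],  # never reuse specs matching these
            "from": [
                {"type": "local"},  # already-installed specs
                {"type": "buildcache", "exclude": ["mpich"]},  # specs from binary mirrors
            ],
        }
    }
}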
@@ -35,6 +35,7 @@
             {
                 "include": {"type": "array", "default": [], "items": {"type": "string"}},
                 "specs": spec_list_schema,
+                "include_concrete": {"type": "array", "default": [], "items": {"type": "string"}},
             },
         ),
     }
@@ -46,6 +46,7 @@
             "signed": {"type": "boolean"},
             "fetch": fetch_and_push,
             "push": fetch_and_push,
+            "autopush": {"type": "boolean"},
             **connection,  # type: ignore
         },
     }
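The new autopush key lets a mirror entry be flagged for automatic binary pushes. A hypothetical mirrors configuration entry shown as the equivalent Python dict; the mirror name and URL are invented, and the exact push behavior is not specified by this hunk.

mirrors_config = {
    "mirrors": {
        "shared-cache": {
            "url": "oci://ghcr.io/example/spack-buildcache",
            "signed": False,
            "autopush": True,  # newly accepted by the schema above
        }
    }
}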
@@ -141,7 +141,7 @@
         "deprecatedProperties": {
             "properties": ["version"],
             "message": "setting version preferences in the 'all' section of packages.yaml "
-            "is deprecated and will be removed in v0.22\n\n\tThese preferences "
+            "is deprecated and will be removed in v0.23\n\n\tThese preferences "
             "will be ignored by Spack. You can set them only in package-specific sections "
             "of the same file.\n",
             "error": False,
@@ -197,7 +197,7 @@
             "properties": ["target", "compiler", "providers"],
             "message": "setting 'compiler:', 'target:' or 'provider:' preferences in "
             "a package-specific section of packages.yaml is deprecated, and will be "
-            "removed in v0.22.\n\n\tThese preferences will be ignored by Spack, and "
+            "removed in v0.23.\n\n\tThese preferences will be ignored by Spack, and "
             "can be set only in the 'all' section of the same file. "
             "You can run:\n\n\t\t$ spack audit configs\n\n\tto get better diagnostics, "
             "including files:lines where the deprecated attributes are used.\n\n"
File diff suppressed because it is too large
Some files were not shown because too many files have changed in this diff.