Compare commits
332 Commits
SHA1:

9ee32b060b, 37c63aa22d, 5b3942a489, a9c879d53e, f42f59c84b, 313b7d4cdb, bd41863797, b0dba4ff5a, 4ff43d7fa9, c1df1c7ee5,
9ac6ecd5ba, 20ddb85020, 2ced87297d, aa00c3fe1f, 0158fc46aa, 8ac826cca8, 1b829a4a28, e2ed1c2308, 94b828add1, fd7dcf3a3f,
e3bb0d77bc, 25761b13e5, ae48faa83a, e15a3b0717, 2c8afc5443, 99479b7e77, 5d0b5ed73c, 151af13be2, 93ea3f51e7, a3abc1c492,
401484ddf4, fc4e76e6fe, 0853f42723, 19ca69d0d8, 036794725f, e5a2c9aee3, 5364b88777, 7d1b6324e1, 3d0263755e, 54ad5dca45,
ee206952c9, 4ccef372e8, ac6e534806, 5983f72439, 6e10fac7ae, ee6ea5155c, 48258e8ddc, 429b0375ed, c6925ab83f, 00d78dfa0c,
e072a91572, b7eb0308d4, c98ee6d8ac, b343ebb64e, e178d2c75d, 9b64560ae6, ca226f3506, 8569e04fea, 32213d5e6b, 4891f3dbc9,
2b5959c3dd, 353db6752a, bf24b8e82c, f2d830cd4c, 070bfa1ed7, c79b6207e8, 38d77570b4, d8885b28fa, abd3487570, 0d760a5fd8,
dde91ae181, 590dbf67f3, d199738f31, f55f829437, 295f3ff915, a0ad02c247, a21d314ba7, a4ad8c8174, aa3ee3fa2a, a8584d5eb4,
26f7b2c066, 3a715c3e07, 963519d2b2, 34efcb686c, 5016084213, 5a04e84097, ec34e88d79, 31fa12ebd3, ecf414ed07, 119bec391e,
d5c0ace993, d6bbd8f758, f74d51bf6e, 821ebee53c, 9dada76d34, e9cc1b36bc, fd2c040981, 33cd7d6033, 9c255381b1, fd6c419682,
9d1d808f94, 7a0ef93332, bf48b7662e, d14333cc79, 084361124e, a1f4cc8b73, b20800e765, 01b1e24074, 8029279dad, 5f4e12d8f2,
a8728e700b, f8adf2b70f, d0ef2d9e00, d4bd3e298a, 40268634b6, b0e8451d83, 868a52387b, 3fe89115c2, 412024cf21, 91b20ed7d0,
0caacc6e21, 651126e64c, e15a530f32, 0f84623914, 90afa5c5ef, 024620bd7b, 9bec8e2f4b, 18dd465532, a2431ec00c, 78abe968a0,
38e9043b9e, a0599e5e27, 1cd6f4e28f, d2298e8e99, e3806aeac5, 38309ced33, 2f21201bf8, 95a0f1924d, 52969dfa78, ee588e4bbe,
461f1d186b, 03b864f986, bff4fa2761, ad3fd4e7e9, a574a995f8, 0002861daf, a65216f0a0, 7604869198, d409126c27, 2b0d985714,
eedec51566, 016954fcff, 0f17672ddb, f82de718cd, 4f6836c878, 2806ed2751, 92b0cb5e22, f32b5e572a, e35c5ec104, 60be77f761,
69b7c32b5d, e2c6914dfe, 87926e40a9, 324d733bf9, 07bf35d54b, 72196ee4a1, 738e41d8d2, f3321bdbcf, 9c6f0392d5, 297848c207,
e9c2a53d83, 77b6923906, 8235aa1804, d09c5a4bd4, 916755e22a, 3676381357, de9f92c588, 6ba7aa325b, c0cbbcfa0a, f2dc4ed6d3,
38bf1772a0, 3460602fb9, a6ce7735e6, 4b11266e03, 436ff3c818, fa35d8f8ec, 6f8a3674af, 39b7276a33, d67afc7191, 8823c57b72,
c8466c4cd4, f5ff63e68d, 11f52ce2f6, 63895b39f0, 64220779d4, 774346038e, 03dbc3035c, ad78ed741c, 599d32d1c2, e5c7fe87aa,
cc6ab75063, fe00c13afa, d610ff6cb1, 54f947fc2a, a5aa784d69, 3bd58f3b49, cac0beaecf, 406ccc2fe3, 40cd8e6ad8, 682e4bf4d4,
56b2979966, d518aaa4c9, 8486a80651, 28341ef0a9, f89a2ada4c, cf804c4ea8, a45d09abcd, cd3068dc0b, de9aa3bcc6, db7ab9826d,
9f69d9b286, d352b71df0, 4cb4634c74, 594554935d, 8b56470650, ba4fd64caa, 07ec8a9ba3, 64ba324b4a, 2aab567782, d4e29c32f0,
30e5639995, fa4c09d04e, f0a458862f, 2938680878, a8132e5c94, 9875a0e807, cb4d3a9fc2, 7d79648cb5, e84e5fa9bf, f25cbb0fe4,
f3257cea90, d037e658a4, a14acd97bd, 199cce879f, 7d66063bd9, 47c6fb750a, 8c3ac352b7, d6ac16ca16, 75e37c6db5, 3f8dcfc6ed,
07d4915e82, 77ff574d94, 5783f950cf, 1c76c88f2c, 50b56ee1ce, be521c441e, 61ffb87757, 950b4c5847, ac078f262d, fd62f0f3a8,
ca977ea9e1, 0d2c624bcb, 765b6b7150, a91f96292c, 18487a45ed, 29485e2125, 7674ea0b7d, 693376ea97, 88bf2a8bcf, 03e9ca0a76,
18399d0bd1, 3aabff77d7, aa86342814, 170a276f18, 313524dc6d, 5aae6e25a5, b58a52b6ce, 32760e2885, 125feb125c, 8677063142,
f015b18230, aa9e610fa6, 7d62045c30, 5b03173b99, 36fcdb8cfa, 7d5b17fbf2, d6e3292955, 60f54df964, 487df807cc, cacdf84964,
e2293c758f, f5a275adf5, 615ced32cd, bc04d963e5, 11051ce5c7, 631bddc52e, b5f40aa7fb, 57e0798af2, 0161b662f7, aa55b19680,
8cfffd88fa, 2f8dcb8097, 5b70fa8cc8, b4025e89ed, 8db74e1b2f, 1fcfbadba7, 13ec35873f, f96b6eac2b, 933a1a5cd9, b2b9914efc,
9ce9596981, fc30fe1f6b, 25a4b98359, 05c34b7312, b22842af56, 0bef028692, 935facd069, 87e5255bbc, b42f0d793d, ccca0d3354,
9699bbc7b9, c7e251de9f, d788b15529, 8e7489bc17, d234df62d7, 4a5922a0ec, 5bd184aaaf, 464c3b96fa, 60544a4e84, a664d98f37,
0e3d7efb0f, a8cd0b99f3
.github/workflows/ci.yaml — 11 changed lines

@@ -9,6 +9,7 @@ on:
     branches:
       - develop
       - releases/**
+  merge_group:
 
 concurrency:
   group: ci-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}

@@ -25,13 +26,17 @@ jobs:
       packages: ${{ steps.filter.outputs.packages }}
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-        if: ${{ github.event_name == 'push' }}
+        if: ${{ github.event_name == 'push' || github.event_name == 'merge_group' }}
         with:
           fetch-depth: 0
       # For pull requests it's not necessary to checkout the code
       - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36
         id: filter
         with:
+          # For merge group events, compare against the target branch (main)
+          base: ${{ github.event_name == 'merge_group' && github.event.merge_group.base_ref || '' }}
+          # For merge group events, use the merge group head ref
+          ref: ${{ github.event_name == 'merge_group' && github.event.merge_group.head_sha || github.ref }}
          # See https://github.com/dorny/paths-filter/issues/56 for the syntax used below
          # Don't run if we only modified packages in the
          # built-in repository or documentation

@@ -76,10 +81,11 @@ jobs:
 
   prechecks:
     needs: [ changes ]
-    uses: ./.github/workflows/valid-style.yml
+    uses: ./.github/workflows/prechecks.yml
     secrets: inherit
     with:
       with_coverage: ${{ needs.changes.outputs.core }}
+      with_packages: ${{ needs.changes.outputs.packages }}
 
   import-check:
     needs: [ changes ]

@@ -101,6 +107,7 @@ jobs:
 
   coverage:
     needs: [ unit-tests, prechecks ]
+    if: ${{ needs.changes.outputs.core }}
     uses: ./.github/workflows/coverage.yml
     secrets: inherit
 
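The new `base`/`ref` expressions fall back to the action's defaults for anything other than a merge-queue run. A minimal Python sketch of the same selection logic, purely illustrative (the function and event field names are assumptions, not part of the workflow):

```python
from typing import Dict, Optional


def paths_filter_inputs(
    event_name: str, merge_group: Optional[Dict[str, str]], github_ref: str
) -> Dict[str, str]:
    """Mimic the base/ref expressions passed to dorny/paths-filter above."""
    if event_name == "merge_group" and merge_group:
        # Merge-queue run: diff the queue's temporary head against the target branch.
        return {"base": merge_group["base_ref"], "ref": merge_group["head_sha"]}
    # Any other event: empty base lets the action pick its default; ref is the triggering ref.
    return {"base": "", "ref": github_ref}


print(paths_filter_inputs("merge_group", {"base_ref": "main", "head_sha": "abc123"}, "refs/heads/x"))
print(paths_filter_inputs("pull_request", None, "refs/pull/123/merge"))
```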
@@ -1,4 +1,4 @@
-name: style
+name: prechecks
 
 on:
   workflow_call:

@@ -6,6 +6,9 @@ on:
       with_coverage:
         required: true
         type: string
+      with_packages:
+        required: true
+        type: string
 
 concurrency:
   group: style-${{github.ref}}-${{github.event.pull_request.number || github.run_number}}

@@ -30,6 +33,7 @@ jobs:
         run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
       - name: vermin (Repositories)
         run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos
+
   # Run style checks on the files that have been changed
   style:
     runs-on: ubuntu-latest

@@ -53,12 +57,25 @@ jobs:
       - name: Run style tests
        run: |
          share/spack/qa/run-style-tests
+
   audit:
     uses: ./.github/workflows/audit.yaml
     secrets: inherit
     with:
       with_coverage: ${{ inputs.with_coverage }}
       python_version: '3.13'
+
+  verify-checksums:
+    if: ${{ inputs.with_packages == 'true' }}
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29
+        with:
+          fetch-depth: 2
+      - name: Verify Added Checksums
+        run: |
+          bin/spack ci verify-versions HEAD^1 HEAD
+
   # Check that spack can bootstrap the development environment on Python 3.6 - RHEL8
   bootstrap-dev-rhel8:
     runs-on: ubuntu-latest
@@ -1,7 +1,7 @@
 black==25.1.0
 clingo==5.7.1
 flake8==7.1.2
-isort==6.0.0
+isort==6.0.1
 mypy==1.15.0
-types-six==1.17.0.20241205
+types-six==1.17.0.20250304
 vermin==1.6.0
.gitignore — 1 changed line

@@ -201,7 +201,6 @@ tramp
 
 # Org-mode
 .org-id-locations
-*_archive
 
 # flymake-mode
 *_flymake.*
@@ -19,7 +19,7 @@ config:
   install_tree:
     root: $spack/opt/spack
     projections:
-      all: "{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}"
+      all: "{architecture.platform}-{architecture.target}/{name}-{version}-{hash}"
   # install_tree can include an optional padded length (int or boolean)
   # default is False (do not pad)
   # if padded_length is True, Spack will pad as close to the system max path
@@ -15,12 +15,11 @@
 # -------------------------------------------------------------------------
 packages:
   all:
-    compiler:
-      - apple-clang
-      - clang
-      - gcc
     providers:
+      c: [apple-clang, llvm, gcc]
+      cxx: [apple-clang, llvm, gcc]
       elf: [libelf]
+      fortran: [gcc]
       fuse: [macfuse]
       gl: [apple-gl]
       glu: [apple-glu]

@@ -50,3 +49,12 @@ packages:
   # although the version number used here isn't critical
   - spec: apple-libuuid@1353.100.2
     prefix: /Library/Developer/CommandLineTools/SDKs/MacOSX.sdk
+  c:
+    prefer:
+      - apple-clang
+  cxx:
+    prefer:
+      - apple-clang
+  fortran:
+    prefer:
+      - gcc
@@ -15,19 +15,18 @@
 # -------------------------------------------------------------------------
 packages:
   all:
-    compiler: [gcc, clang, oneapi, xl, nag, fj, aocc]
     providers:
       awk: [gawk]
       armci: [armcimpi]
       blas: [openblas, amdblis]
-      c: [gcc]
+      c: [gcc, llvm, intel-oneapi-compilers]
-      cxx: [gcc]
+      cxx: [gcc, llvm, intel-oneapi-compilers]
       D: [ldc]
       daal: [intel-oneapi-daal]
       elf: [elfutils]
       fftw-api: [fftw, amdfftw]
       flame: [libflame, amdlibflame]
-      fortran: [gcc]
+      fortran: [gcc, llvm, intel-oneapi-compilers]
       fortran-rt: [gcc-runtime, intel-oneapi-runtime]
       fuse: [libfuse]
       gl: [glx, osmesa]
@@ -15,8 +15,8 @@
 # -------------------------------------------------------------------------
 packages:
   all:
-    compiler:
-      - msvc
     providers:
+      c : [msvc]
+      cxx: [msvc]
       mpi: [msmpi]
       gl: [wgl]
@@ -125,6 +125,8 @@ are stored in ``$spack/var/spack/cache``. These are stored indefinitely
 by default. Can be purged with :ref:`spack clean --downloads
 <cmd-spack-clean>`.
 
+.. _Misc Cache:
+
 --------------------
 ``misc_cache``
 --------------------

@@ -334,3 +336,52 @@ create a new alias called ``inst`` that will always call ``install -v``:
 
    aliases:
      inst: install -v
+
+-------------------------------
+``concretization_cache:enable``
+-------------------------------
+
+When set to ``true``, Spack will utilize a cache of solver outputs from
+successful concretization runs. When enabled, Spack will check the concretization
+cache prior to running the solver. If a previous request to solve a given
+problem is present in the cache, Spack will load the concrete specs and other
+solver data from the cache rather than running the solver. Specs not previously
+concretized will be added to the cache on a successful solve. The cache additionally
+holds solver statistics, so commands like ``spack solve`` will still return information
+about the run that produced a given solver result.
+
+This cache is a subcache of the :ref:`Misc Cache` and as such will be cleaned when the Misc
+Cache is cleaned.
+
+When ``false`` or omitted, all concretization requests will be performed from scratch.
+
+----------------------------
+``concretization_cache:url``
+----------------------------
+
+Path to the location where Spack will root the concretization cache. Currently this only supports
+paths on the local filesystem.
+
+Default location is under the :ref:`Misc Cache` at: ``$misc_cache/concretization``
+
+------------------------------------
+``concretization_cache:entry_limit``
+------------------------------------
+
+Sets a limit on the number of concretization results that Spack will cache. The limit is evaluated
+after each concretization run; if Spack has stored more results than the limit allows, the
+oldest concretization results are pruned until 10% of the limit has been removed.
+
+Setting this value to 0 disables the automatic pruning. It is expected users will be
+responsible for maintaining this cache.
+
+-----------------------------------
+``concretization_cache:size_limit``
+-----------------------------------
+
+Sets a limit on the size of the concretization cache in bytes. The limit is evaluated
+after each concretization run; if Spack has stored more results than the limit allows, the
+oldest concretization results are pruned until 10% of the limit has been removed.
+
+Setting this value to 0 disables the automatic pruning. It is expected users will be
+responsible for maintaining this cache.
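The ``entry_limit``/``size_limit`` text above describes a prune-oldest-first policy evaluated after each run. A rough Python sketch of that behavior, assuming an oldest-first mapping of cache keys (names are hypothetical, not Spack's implementation):

```python
from collections import OrderedDict


def prune_cache(entries: "OrderedDict[str, int]", entry_limit: int) -> None:
    """Drop oldest entries until 10% of the limit has been removed.

    ``entries`` maps cache keys to entry sizes and is ordered oldest-first.
    An ``entry_limit`` of 0 disables pruning, as the documentation states.
    """
    if entry_limit == 0 or len(entries) <= entry_limit:
        return
    # Prune down to 90% of the configured limit so the cache has headroom again.
    target = entry_limit - max(1, entry_limit // 10)
    while len(entries) > target:
        entries.popitem(last=False)  # oldest entry first
```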
@@ -14,6 +14,7 @@ case you want to skip directly to specific docs:
 * :ref:`compilers.yaml <compiler-config>`
 * :ref:`concretizer.yaml <concretizer-options>`
 * :ref:`config.yaml <config-yaml>`
+* :ref:`include.yaml <include-yaml>`
 * :ref:`mirrors.yaml <mirrors>`
 * :ref:`modules.yaml <modules>`
 * :ref:`packages.yaml <packages-config>`
@@ -457,6 +457,13 @@ developed package in the environment are concretized to match the
 version (and other constraints) passed as the spec argument to the
 ``spack develop`` command.
 
+When working deep in the graph it is often desirable to have multiple specs marked
+as ``develop`` so you don't have to restage and/or do full rebuilds each time you
+call ``spack install``. The ``--recursive`` flag can be used in these scenarios
+to ensure that all the dependents of the initial spec you provide are also marked
+as develop specs. The ``--recursive`` flag requires a pre-concretized environment
+so the graph can be traversed from the supplied spec all the way to the root specs.
+
 For packages with ``git`` attributes, git branches, tags, and commits can
 also be used as valid concrete versions (see :ref:`version-specifier`).
 This means that for a package ``foo``, ``spack develop foo@git.main`` will clone
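A sketch of the graph walk implied by ``--recursive``: starting from the supplied spec, every dependent up to the environment roots gets marked as a develop spec. The ``dependents()`` call here is a stand-in for Spack's traversal API, not the actual implementation:

```python
def collect_recursive_develop(start, roots):
    """Return the start spec plus all of its dependents up to the root specs."""
    marked, stack = set(), [start]
    while stack:
        spec = stack.pop()
        if spec in marked:
            continue
        marked.add(spec)
        if spec not in roots:
            stack.extend(spec.dependents())  # walk upward toward the environment roots
    return marked
```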
@@ -670,24 +677,45 @@ This configuration sets the default compiler for all packages to
 Included configurations
 ^^^^^^^^^^^^^^^^^^^^^^^
 
-Spack environments allow an ``include`` heading in their yaml
-schema. This heading pulls in external configuration files and applies
-them to the environment.
+Spack environments allow an ``include`` heading in their yaml schema.
+This heading pulls in external configuration files and applies them to
+the environment.
 
 .. code-block:: yaml
 
    spack:
     include:
-    - relative/path/to/config.yaml
+    - environment/relative/path/to/config.yaml
     - https://github.com/path/to/raw/config/compilers.yaml
     - /absolute/path/to/packages.yaml
+    - path: /path/to/$os/$target/environment
+      optional: true
+    - path: /path/to/os-specific/config-dir
+      when: os == "ventura"
+
+Included configuration files are required *unless* they are explicitly optional
+or the entry's condition evaluates to ``false``. Optional includes are specified
+with the ``optional`` clause and conditional with the ``when`` clause. (See
+:ref:`include-yaml` for more information on optional and conditional entries.)
+
+Files are listed using paths to individual files or directories containing them.
+Path entries may be absolute or relative to the environment or specified as
+URLs. URLs to individual files need to link to the **raw** form of the file's
+contents (e.g., `GitHub
+<https://docs.github.com/en/repositories/working-with-files/using-files/viewing-and-understanding-files#viewing-or-copying-the-raw-file-content>`_
+or `GitLab
+<https://docs.gitlab.com/ee/api/repository_files.html#get-raw-file-from-repository>`_).
+Only the ``file``, ``ftp``, ``http`` and ``https`` protocols (or schemes) are
+supported. Spack-specific, environment and user path variables can be used.
+(See :ref:`config-file-variables` for more information.)
+
+.. warning::
+
+   Recursive includes are not currently processed in a breadth-first manner
+   so the value of a configuration option that is altered by multiple included
+   files may not be what you expect. This will be addressed in a future
+   update.
 
-Environments can include files or URLs. File paths can be relative or
-absolute. URLs include the path to the text for individual files or
-can be the path to a directory containing configuration files.
-Spack supports ``file``, ``http``, ``https`` and ``ftp`` protocols (or
-schemes). Spack-specific, environment and user path variables may be
-used in these paths. See :ref:`config-file-variables` for more information.
 
 ^^^^^^^^^^^^^^^^^^^^^^^^
 Configuration precedence
@@ -30,7 +30,7 @@ than always choosing the latest versions or default variants.
 
 .. note::
 
-   As a rule of thumb: requirements + constraints > reuse > preferences > defaults.
+   As a rule of thumb: requirements + constraints > strong preferences > reuse > preferences > defaults.
 
 The following set of criteria (from lowest to highest precedence) explain
 common cases where concretization output may seem surprising at first.

@@ -56,7 +56,19 @@ common cases where concretization output may seem surprising at first.
      concretizer:
        reuse: dependencies # other options are 'true' and 'false'
 
-3. :ref:`Package requirements <package-requirements>` configured in ``packages.yaml``,
+3. :ref:`Strong preferences <package-strong-preferences>` configured in ``packages.yaml``
+   are higher priority than reuse, and can be used to strongly prefer a specific version
+   or variant, without erroring out if it's not possible. Strong preferences are specified
+   as follows:
+
+   .. code-block:: yaml
+
+      packages:
+        foo:
+          prefer:
+          - "@1.1: ~mpi"
+
+4. :ref:`Package requirements <package-requirements>` configured in ``packages.yaml``,
    and constraints from the command line as well as ``package.py`` files override all
    of the above. Requirements are specified as follows:

@@ -66,6 +78,8 @@ common cases where concretization output may seem surprising at first.
        foo:
          require:
          - "@1.2: +mpi"
+        conflicts:
+        - "@1.4"
 
 Requirements and constraints restrict the set of possible solutions, while reuse
 behavior and preferences influence what an optimal solution looks like.
lib/spack/docs/include_yaml.rst — new file, 51 lines

@@ -0,0 +1,51 @@
+.. Copyright Spack Project Developers. See COPYRIGHT file for details.
+
+   SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+.. _include-yaml:
+
+===============================
+Include Settings (include.yaml)
+===============================
+
+Spack allows you to include configuration files through ``include.yaml``.
+Using the ``include:`` heading results in pulling in external configuration
+information to be used by any Spack command.
+
+Included configuration files are required *unless* they are explicitly optional
+or the entry's condition evaluates to ``false``. Optional includes are specified
+with the ``optional`` clause and conditional with the ``when`` clause. For
+example,
+
+.. code-block:: yaml
+
+   include:
+   - /path/to/a/required/config.yaml
+   - path: /path/to/$os/$target/config
+     optional: true
+   - path: /path/to/os-specific/config-dir
+     when: os == "ventura"
+
+shows all three. The first entry, ``/path/to/a/required/config.yaml``,
+indicates that the included ``config.yaml`` file is required (so must exist).
+Use of ``optional: true`` for ``/path/to/$os/$target/config`` means
+the path is only included if it exists. The condition ``os == "ventura"``
+in the ``when`` clause for ``/path/to/os-specific/config-dir`` means the
+path is only included when the operating system (``os``) is ``ventura``.
+
+The same conditions and variables in `Spec List References
+<https://spack.readthedocs.io/en/latest/environments.html#spec-list-references>`_
+can be used for conditional activation in the ``when`` clauses.
+
+Included files can be specified by path or by their parent directory.
+Paths may be absolute, relative (to the configuration file including the path),
+or specified as URLs. Only the ``file``, ``ftp``, ``http`` and ``https`` protocols (or
+schemes) are supported. Spack-specific, environment and user path variables
+can be used. (See :ref:`config-file-variables` for more information.)
+
+.. warning::
+
+   Recursive includes are not currently processed in a breadth-first manner
+   so the value of a configuration option that is altered by multiple included
+   files may not be what you expect. This will be addressed in a future
+   update.
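A small sketch of how ``optional`` and ``when`` entries like the ones above could be filtered before any files are read. The helper name and the use of ``eval`` for the condition are illustrative only; a real implementation would use a proper condition parser:

```python
import os


def active_includes(entries, variables):
    """Yield include paths, honoring `optional` and `when` clauses."""
    for entry in entries:
        if isinstance(entry, str):  # plain string entries are required includes
            yield entry
            continue
        # Evaluate the condition against variables such as {"os": "ventura"}.
        if not eval(entry.get("when", "True"), {}, variables):
            continue
        path = entry["path"]
        if entry.get("optional") and not os.path.exists(path):
            continue  # optional entries are silently skipped when missing
        yield path
```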
@@ -71,6 +71,7 @@ or refer to the full manual below.
 
    configuration
    config_yaml
+   include_yaml
    packages_yaml
    build_settings
    environments
@@ -486,6 +486,8 @@ present. For instance with a configuration like:
 
 you will use ``mvapich2~cuda %gcc`` as an ``mpi`` provider.
 
+.. _package-strong-preferences:
+
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 Conflicts and strong preferences
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|||||||
@@ -330,7 +330,7 @@ that ``--tests`` is passed to ``spack ci rebuild`` as part of the
|
|||||||
- spack --version
|
- spack --version
|
||||||
- cd ${SPACK_CONCRETE_ENV_DIR}
|
- cd ${SPACK_CONCRETE_ENV_DIR}
|
||||||
- spack env activate --without-view .
|
- spack env activate --without-view .
|
||||||
- spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
|
- spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture.platform}-{architecture.target}/{name}-{version}-{hash}'"
|
||||||
- mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data
|
- mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data
|
||||||
- if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi
|
- if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg; fi
|
||||||
- if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi
|
- if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg; fi
|
||||||
|
|||||||
@@ -1,13 +1,13 @@
-sphinx==8.2.1
+sphinx==8.2.3
 sphinxcontrib-programoutput==0.18
 sphinx_design==0.6.1
 sphinx-rtd-theme==3.0.2
-python-levenshtein==0.26.1
+python-levenshtein==0.27.1
 docutils==0.21.2
 pygments==2.19.1
 urllib3==2.3.0
-pytest==8.3.4
+pytest==8.3.5
-isort==6.0.0
+isort==6.0.1
 black==25.1.0
 flake8==7.1.2
 mypy==1.11.1
Deleted compiler wrapper shims under lib/spack/env/ (each file was a one-line relative link to the generic wrapper; hunk ``@@ -1 +0,0 @@`` in every case). The removed line, i.e. the former link target, is listed per group:

- ``../cc``: aocc/clang, arm/armclang, arm/armclang++, arm/armflang, case-insensitive/CC, cce/cc, cce/craycc, cce/crayftn, cce/ftn, clang/clang, clang/clang++, clang/flang, clang/gfortran, fj/fcc, fj/frt, gcc/g++, gcc/gcc, gcc/gfortran, intel/icc, intel/icpc, intel/ifort, nag/nagfor, nvhpc/nvc, nvhpc/nvc++, nvhpc/nvfortran, oneapi/dpcpp, oneapi/icpx, oneapi/icx, oneapi/ifx, pgi/pgc++, pgi/pgcc, pgi/pgfortran, rocmcc/amdclang, xl/xlc, xl/xlc++, xl/xlf, xl/xlf90, xl_r/xlc++_r, xl_r/xlc_r, xl_r/xlf90_r, xl_r/xlf_r
- ``../cpp``: aocc/clang++, rocmcc/amdclang++
- ``../fc``: aocc/flang, rocmcc/amdflang
- ``../../cc``: cce/case-insensitive/CC, cce/case-insensitive/crayCC, fj/case-insensitive/FCC
- ``cc``: c++, c89, c99, cpp, f77, f90, f95, fc, ftn, ld, ld.gold, ld.lld
@@ -7,6 +7,7 @@
 import fnmatch
 import glob
 import hashlib
+import io
 import itertools
 import numbers
 import os

@@ -20,6 +21,7 @@
 from contextlib import contextmanager
 from itertools import accumulate
 from typing import (
+    IO,
     Callable,
     Deque,
     Dict,
@@ -2454,26 +2456,69 @@ class WindowsSimulatedRPath:
     and vis versa.
     """
 
-    def __init__(self, package, link_install_prefix=True):
+    def __init__(
+        self,
+        package,
+        base_modification_prefix: Optional[Union[str, pathlib.Path]] = None,
+        link_install_prefix: bool = True,
+    ):
         """
         Args:
             package (spack.package_base.PackageBase): Package requiring links
+            base_modification_prefix (str|pathlib.Path): Path representation indicating
+                the root directory in which to establish the simulated rpath, ie where the
+                symlinks that comprise the "rpath" behavior will be installed.
+
+                Note: This is a mutually exclusive option with `link_install_prefix` using
+                both is an error.
+
+                Default: None
             link_install_prefix (bool): Link against package's own install or stage root.
                 Packages that run their own executables during build and require rpaths to
-                the build directory during build time require this option. Default: install
+                the build directory during build time require this option.
+
+                Default: install
                 root
+
+                Note: This is a mutually exclusive option with `base_modification_prefix`, using
+                both is an error.
         """
         self.pkg = package
-        self._addl_rpaths = set()
+        self._addl_rpaths: set[str] = set()
+        if link_install_prefix and base_modification_prefix:
+            raise RuntimeError(
+                "Invalid combination of arguments given to WindowsSimulated RPath.\n"
+                "Select either `link_install_prefix` to create an install prefix rpath"
+                " or specify a `base_modification_prefix` for any other link type. "
+                "Specifying both arguments is invalid."
+            )
+        if not (link_install_prefix or base_modification_prefix):
+            raise RuntimeError(
+                "Insufficient arguments given to WindowsSimulatedRpath.\n"
+                "WindowsSimulatedRPath requires one of link_install_prefix"
+                " or base_modification_prefix to be specified."
+                " Neither was provided."
+            )
+
         self.link_install_prefix = link_install_prefix
-        self._additional_library_dependents = set()
+        if base_modification_prefix:
+            self.base_modification_prefix = pathlib.Path(base_modification_prefix)
+        else:
+            self.base_modification_prefix = pathlib.Path(self.pkg.prefix)
+        self._additional_library_dependents: set[pathlib.Path] = set()
+        if not self.link_install_prefix:
+            tty.debug(f"Generating rpath for non install context: {base_modification_prefix}")
 
     @property
     def library_dependents(self):
         """
         Set of directories where package binaries/libraries are located.
         """
-        return set([pathlib.Path(self.pkg.prefix.bin)]) | self._additional_library_dependents
+        base_pths = set()
+        if self.link_install_prefix:
+            base_pths.add(pathlib.Path(self.pkg.prefix.bin))
+        base_pths |= self._additional_library_dependents
+        return base_pths
 
     def add_library_dependent(self, *dest):
         """

@@ -2489,6 +2534,12 @@ def add_library_dependent(self, *dest):
                 new_pth = pathlib.Path(pth).parent
             else:
                 new_pth = pathlib.Path(pth)
+            path_is_in_prefix = new_pth.is_relative_to(self.base_modification_prefix)
+            if not path_is_in_prefix:
+                raise RuntimeError(
+                    f"Attempting to generate rpath symlink out of rpath context:\
+                        {str(self.base_modification_prefix)}"
+                )
             self._additional_library_dependents.add(new_pth)
 
     @property
@@ -2577,6 +2628,33 @@ def establish_link(self):
                 self._link(library, lib_dir)
 
 
+def make_package_test_rpath(pkg, test_dir: Union[str, pathlib.Path]):
+    """Establishes a temp Windows simulated rpath for the pkg in the testing directory
+    so an executable can test the libraries/executables with proper access
+    to dependent dlls
+
+    Note: this is a no-op on all other platforms besides Windows
+
+    Args:
+        pkg (spack.package_base.PackageBase): the package for which the rpath should be computed
+        test_dir: the testing directory in which we should construct an rpath
+    """
+    # link_install_prefix as false ensures we're not linking into the install prefix
+    mini_rpath = WindowsSimulatedRPath(pkg, link_install_prefix=False)
+    # add the testing directory as a location to install rpath symlinks
+    mini_rpath.add_library_dependent(test_dir)
+
+    # check for whether build_directory is available, if not
+    # assume the stage root is the build dir
+    build_dir_attr = getattr(pkg, "build_directory", None)
+    build_directory = build_dir_attr if build_dir_attr else pkg.stage.path
+    # add the build dir & build dir bin
+    mini_rpath.add_rpath(os.path.join(build_directory, "bin"))
+    mini_rpath.add_rpath(os.path.join(build_directory))
+    # construct rpath
+    mini_rpath.establish_link()
+
+
 @system_path_filter
 @memoized
 def can_access_dir(path):
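One way a build-time smoke test might call the new helper on Windows — a sketch only; the package object, directory layout, and executable name are placeholders, and `make_package_test_rpath` is the function added above:

```python
import pathlib
import subprocess
import sys


def run_windows_smoke_test(pkg, exe_name: str) -> None:
    """Stage a test directory, mirror dependent DLL links into it, then run the test binary."""
    if sys.platform != "win32":
        return  # the helper is a no-op off Windows, so there is nothing to set up
    test_dir = pathlib.Path(pkg.stage.path) / "smoke-test"  # hypothetical test staging dir
    test_dir.mkdir(exist_ok=True)
    # Creates symlinks in test_dir pointing at the build tree's binaries/DLLs.
    make_package_test_rpath(pkg, test_dir)
    subprocess.run([str(test_dir / exe_name)], check=True)
```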
@@ -2805,6 +2883,20 @@ def keep_modification_time(*filenames):
         os.utime(f, (os.path.getatime(f), mtime))
 
 
+@contextmanager
+def temporary_file_position(stream):
+    orig_pos = stream.tell()
+    yield
+    stream.seek(orig_pos)
+
+
+@contextmanager
+def current_file_position(stream: IO[str], loc: int, relative_to=io.SEEK_CUR):
+    with temporary_file_position(stream):
+        stream.seek(loc, relative_to)
+        yield
+
+
 @contextmanager
 def temporary_dir(
     suffix: Optional[str] = None, prefix: Optional[str] = None, dir: Optional[str] = None
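Example use of the two new context managers: peek at another offset of an open stream and have the original position restored automatically when the block exits. This is a minimal sketch that assumes only the functions added above:

```python
import io

log = io.StringIO("line one\nline two\nline three\n")
log.read(9)  # consume "line one\n"

with current_file_position(log, 0, io.SEEK_SET):
    # Inside the block we are temporarily back at the start of the stream.
    assert log.readline() == "line one\n"

# After the block the original position (just past "line one\n") is restored.
assert log.readline() == "line two\n"
```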
@@ -11,6 +11,7 @@
 import re
 import sys
 import traceback
+import types
 import typing
 import warnings
 from datetime import datetime, timedelta

@@ -72,7 +73,7 @@ def index_by(objects, *funcs):
     if isinstance(f, str):
         f = lambda x: getattr(x, funcs[0])
     elif isinstance(f, tuple):
-        f = lambda x: tuple(getattr(x, p) for p in funcs[0])
+        f = lambda x: tuple(getattr(x, p, None) for p in funcs[0])
 
     result = {}
     for o in objects:
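The added ``None`` default means tuple keys no longer raise when an object lacks one of the named attributes. A standalone illustration of the resulting grouping behavior (not the Spack function itself):

```python
from collections import namedtuple

Pkg = namedtuple("Pkg", "name")
objects = [Pkg("zlib"), Pkg("cmake")]

# Same key construction as the lambda above; "missing_attr" does not exist on Pkg.
key = lambda x: tuple(getattr(x, p, None) for p in ("name", "missing_attr"))

grouped = {}
for o in objects:
    grouped.setdefault(key(o), []).append(o)

# Missing attributes now map to None instead of raising AttributeError.
assert ("zlib", None) in grouped and ("cmake", None) in grouped
```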
@@ -707,14 +708,24 @@ def __init__(self, wrapped_object):
 
 
 class Singleton:
-    """Simple wrapper for lazily initialized singleton objects."""
+    """Wrapper for lazily initialized singleton objects."""
 
-    def __init__(self, factory):
+    def __init__(self, factory: Callable[[], object]):
         """Create a new singleton to be inited with the factory function.
 
+        Most factories will simply create the object to be initialized and
+        return it.
+
+        In some cases, e.g. when bootstrapping some global state, the singleton
+        may need to be initialized incrementally. If the factory returns a generator
+        instead of a regular object, the singleton will assign each result yielded by
+        the generator to the singleton instance. This allows methods called by
+        the factory in later stages to refer back to the singleton.
+
         Args:
-            factory (function): function taking no arguments that
-                creates the singleton instance.
+            factory (function): function taking no arguments that creates the
+                singleton instance.
+
         """
         self.factory = factory
         self._instance = None

@@ -722,7 +733,16 @@ def __init__(self, factory):
     @property
     def instance(self):
         if self._instance is None:
-            self._instance = self.factory()
+            instance = self.factory()
+
+            if isinstance(instance, types.GeneratorType):
+                # if it's a generator, assign every value
+                for value in instance:
+                    self._instance = value
+            else:
+                # if not, just assign the result like a normal singleton
+                self._instance = instance
+
         return self._instance
 
     def __getattr__(self, name):
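A sketch of the incremental-initialization pattern the new docstring describes: the factory yields a partially constructed object first, so code running during the later stage could already resolve the singleton; the last yielded value is what the singleton keeps. The `Config` class and factory are illustrative, not Spack code:

```python
class Config(dict):
    pass


def config_factory():
    cfg = Config()  # stage 1: create the bare object
    yield cfg       # the Singleton now resolves to `cfg`
    # stage 2: later initialization may consult the (already assigned) singleton
    cfg["scopes"] = ["defaults", "site", "user"]
    yield cfg       # final value assigned to the singleton


CONFIG = Singleton(config_factory)
assert CONFIG.instance["scopes"] == ["defaults", "site", "user"]
```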
@@ -996,11 +1016,8 @@ def _receive_forwarded(self, context: str, exc: Exception, tb: List[str]):
     def grouped_message(self, with_tracebacks: bool = True) -> str:
         """Print out an error message coalescing all the forwarded errors."""
         each_exception_message = [
-            "{0} raised {1}: {2}{3}".format(
-                context,
-                exc.__class__.__name__,
-                exc,
-                "\n{0}".format("".join(tb)) if with_tracebacks else "",
+            "\n\t{0} raised {1}: {2}\n{3}".format(
+                context, exc.__class__.__name__, exc, f"\n{''.join(tb)}" if with_tracebacks else ""
             )
             for context, exc, tb in self.exceptions
         ]
@@ -13,6 +13,18 @@
 __version__ = "1.0.0.dev0"
 spack_version = __version__
 
+#: The current Package API version implemented by this version of Spack. The Package API defines
+#: the Python interface for packages as well as the layout of package repositories. The minor
+#: version is incremented when the package API is extended in a backwards-compatible way. The major
+#: version is incremented upon breaking changes. This version is changed independently from the
+#: Spack version.
+package_api_version = (1, 0)
+
+#: The minimum Package API version that this version of Spack is compatible with. This should
+#: always be a tuple of the form ``(major, 0)``, since compatibility with vX.Y implies
+#: compatibility with vX.0.
+min_package_api_version = (1, 0)
+
 
 def __try_int(v):
     try:

@@ -79,4 +91,6 @@ def get_short_version() -> str:
     "get_version",
     "get_spack_commit",
     "get_short_version",
+    "package_api_version",
+    "min_package_api_version",
 ]
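A sketch of the compatibility rule stated in those comments: a repository requiring API ``(major, minor)`` is usable when its major version falls in Spack's supported range and, for the current major, its minor does not exceed what Spack implements. The helper name and exact check are assumptions, not Spack's implementation:

```python
def supports_package_api(required: tuple, implemented=(1, 0), minimum=(1, 0)) -> bool:
    """True if a repo's required (major, minor) API fits this Spack's supported range."""
    req_major, req_minor = required
    # Major versions must fall inside [minimum, implemented]; minor versions are
    # backwards compatible, so anything up to the implemented minor is acceptable.
    if not (minimum[0] <= req_major <= implemented[0]):
        return False
    return req_major < implemented[0] or req_minor <= implemented[1]


assert supports_package_api((1, 0))
assert not supports_package_api((2, 0))
```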
lib/spack/spack/aliases.py — new file, 20 lines

@@ -0,0 +1,20 @@
+# Copyright Spack Project Developers. See COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+"""Alias names to convert legacy compilers to builtin packages and vice-versa"""
+
+BUILTIN_TO_LEGACY_COMPILER = {
+    "llvm": "clang",
+    "intel-oneapi-compilers": "oneapi",
+    "llvm-amdgpu": "rocmcc",
+    "intel-oneapi-compiler-classic": "intel",
+    "acfl": "arm",
+}
+
+LEGACY_COMPILER_TO_BUILTIN = {
+    "clang": "llvm",
+    "oneapi": "intel-oneapi-compilers",
+    "rocmcc": "llvm-amdgpu",
+    "intel": "intel-oneapi-compiler-classic",
+    "arm": "acfl",
+}
@@ -110,6 +110,13 @@ def __init__(self, root):
         self._write_transaction_impl = llnl.util.lang.nullcontext
         self._read_transaction_impl = llnl.util.lang.nullcontext
 
+    def _handle_old_db_versions_read(self, check, db, *, reindex: bool):
+        if not self.is_readable():
+            raise spack_db.DatabaseNotReadableError(
+                f"cannot read buildcache v{self.db_version} at {self.root}"
+            )
+        return self._handle_current_version_read(check, db)
+
 
 class FetchCacheError(Exception):
     """Error thrown when fetching the cache failed, usually a composite error list."""

@@ -242,7 +249,7 @@ def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
             self._index_file_cache.init_entry(cache_key)
             cache_path = self._index_file_cache.cache_path(cache_key)
             with self._index_file_cache.read_transaction(cache_key):
-                db._read_from_file(cache_path)
+                db._read_from_file(pathlib.Path(cache_path))
         except spack_db.InvalidDatabaseVersionError as e:
             tty.warn(
                 f"you need a newer Spack version to read the buildcache index for the "
@@ -234,14 +234,6 @@ def _root_spec(spec_str: str) -> str:
     # Add a compiler and platform requirement to the root spec.
     platform = str(spack.platforms.host())
 
-    if platform == "darwin":
-        spec_str += " %apple-clang"
-    elif platform == "windows":
-        spec_str += " %msvc"
-    elif platform == "linux":
-        spec_str += " %gcc"
-    elif platform == "freebsd":
-        spec_str += " %clang"
     spec_str += f" platform={platform}"
     target = archspec.cpu.host().family
     spec_str += f" target={target}"
@@ -15,11 +15,13 @@
 
 import archspec.cpu
 
-import spack.compiler
-import spack.compilers
+import spack.compilers.config
+import spack.compilers.libraries
+import spack.config
 import spack.platforms
 import spack.spec
 import spack.traverse
+import spack.version
 
 from .config import spec_for_current_python
 
@@ -38,7 +40,7 @@ def __init__(self, configuration):
         self.external_cmake, self.external_bison = self._externals_from_yaml(configuration)
 
-    def _valid_compiler_or_raise(self) -> "spack.compiler.Compiler":
+    def _valid_compiler_or_raise(self):
         if str(self.host_platform) == "linux":
             compiler_name = "gcc"
         elif str(self.host_platform) == "darwin":
@@ -46,17 +48,30 @@ def _valid_compiler_or_raise(self) -> "spack.compiler.Compiler":
         elif str(self.host_platform) == "windows":
             compiler_name = "msvc"
         elif str(self.host_platform) == "freebsd":
-            compiler_name = "clang"
+            compiler_name = "llvm"
         else:
             raise RuntimeError(f"Cannot bootstrap clingo from sources on {self.host_platform}")
-        candidates = spack.compilers.compilers_for_spec(
-            compiler_name, arch_spec=self.host_architecture
-        )
+        candidates = [
+            x
+            for x in spack.compilers.config.CompilerFactory.from_packages_yaml(spack.config.CONFIG)
+            if x.name == compiler_name
+        ]
         if not candidates:
             raise RuntimeError(
                 f"Cannot find any version of {compiler_name} to bootstrap clingo from sources"
             )
-        candidates.sort(key=lambda x: x.spec.version, reverse=True)
+        candidates.sort(key=lambda x: x.version, reverse=True)
+        best = candidates[0]
+        # Get compilers for bootstrapping from the 'builtin' repository
+        best.namespace = "builtin"
+        # If the compiler does not support C++ 14, fail with a legible error message
+        try:
+            _ = best.package.standard_flag(language="cxx", standard="14")
+        except RuntimeError as e:
+            raise RuntimeError(
+                "cannot find a compiler supporting C++ 14 [needed to bootstrap clingo]"
+            ) from e
         return candidates[0]
 
     def _externals_from_yaml(
@@ -75,9 +90,6 @@ def _externals_from_yaml(
             if not s.satisfies(requirements[pkg_name]):
                 continue
 
-            if not s.intersects(f"%{self.host_compiler.spec}"):
-                continue
-
             if not s.intersects(f"arch={self.host_architecture}"):
                 continue
 
@@ -110,11 +122,14 @@ def concretize(self) -> "spack.spec.Spec":
         # Tweak it to conform to the host architecture
         for node in s.traverse():
             node.architecture.os = str(self.host_os)
-            node.compiler = self.host_compiler.spec
             node.architecture = self.host_architecture
 
             if node.name == "gcc-runtime":
-                node.versions = self.host_compiler.spec.versions
+                node.versions = self.host_compiler.versions
+
+        # Can't use re2c@3.1 with Python 3.6
+        if self.host_python.satisfies("@3.6"):
+            s["re2c"].versions.versions = [spack.version.from_string("=2.2")]
 
         for edge in spack.traverse.traverse_edges([s], cover="edges"):
             if edge.spec.name == "python":
@@ -126,6 +141,9 @@ def concretize(self) -> "spack.spec.Spec":
             if edge.spec.name == "cmake" and self.external_cmake:
                 edge.spec = self.external_cmake
 
+            if edge.spec.name == self.host_compiler.name:
+                edge.spec = self.host_compiler
+
             if "libc" in edge.virtuals:
                 edge.spec = self.host_libc
 
@@ -141,12 +159,12 @@ def python_external_spec(self) -> "spack.spec.Spec":
         return self._external_spec(result)
 
     def libc_external_spec(self) -> "spack.spec.Spec":
-        result = self.host_compiler.default_libc
+        detector = spack.compilers.libraries.CompilerPropertyDetector(self.host_compiler)
+        result = detector.default_libc()
         return self._external_spec(result)
 
     def _external_spec(self, initial_spec) -> "spack.spec.Spec":
         initial_spec.namespace = "builtin"
-        initial_spec.compiler = self.host_compiler.spec
         initial_spec.architecture = self.host_architecture
         for flag_type in spack.spec.FlagMap.valid_compiler_flags():
             initial_spec.compiler_flags[flag_type] = []
@@ -10,7 +10,7 @@
 
 from llnl.util import tty
 
-import spack.compilers
+import spack.compilers.config
 import spack.config
 import spack.environment
 import spack.modules
@@ -142,8 +142,8 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
 
 def _add_compilers_if_missing() -> None:
     arch = spack.spec.ArchSpec.default_arch()
-    if not spack.compilers.compilers_for_arch(arch):
-        spack.compilers.find_compilers()
+    if not spack.compilers.config.compilers_for_arch(arch):
+        spack.compilers.config.find_compilers()
 
 
 @contextlib.contextmanager
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -36,7 +36,6 @@
 import multiprocessing
 import os
 import re
-import stat
 import sys
 import traceback
 import types
@@ -71,7 +70,7 @@
 import spack.build_systems.meson
 import spack.build_systems.python
 import spack.builder
-import spack.compilers
+import spack.compilers.libraries
 import spack.config
 import spack.deptypes as dt
 import spack.error
@@ -85,7 +84,6 @@
 import spack.store
 import spack.subprocess_context
 import spack.util.executable
-import spack.util.libc
 from spack import traverse
 from spack.context import Context
 from spack.error import InstallError, NoHeadersError, NoLibrariesError
@@ -93,6 +91,8 @@
 from spack.util.environment import (
     SYSTEM_DIR_CASE_ENTRY,
     EnvironmentModifications,
+    ModificationList,
+    PrependPath,
     env_flag,
     filter_system_paths,
     get_path,
@@ -113,7 +113,7 @@
 # set_wrapper_variables and used to pass parameters to
 # Spack's compiler wrappers.
 #
-SPACK_ENV_PATH = "SPACK_ENV_PATH"
+SPACK_COMPILER_WRAPPER_PATH = "SPACK_COMPILER_WRAPPER_PATH"
 SPACK_MANAGED_DIRS = "SPACK_MANAGED_DIRS"
 SPACK_INCLUDE_DIRS = "SPACK_INCLUDE_DIRS"
 SPACK_LINK_DIRS = "SPACK_LINK_DIRS"
@@ -390,62 +390,10 @@ def _add_werror_handling(keep_werror, env):
     env.set("SPACK_COMPILER_FLAGS_REPLACE", " ".join(["|".join(item) for item in replace_flags]))
 
 
-def set_compiler_environment_variables(pkg, env):
+def set_wrapper_environment_variables_for_flags(pkg, env):
     assert pkg.spec.concrete
-    compiler = pkg.compiler
     spec = pkg.spec
-
-    # Make sure the executables for this compiler exist
-    compiler.verify_executables()
-
-    # Set compiler variables used by CMake and autotools
-    assert all(key in compiler.link_paths for key in ("cc", "cxx", "f77", "fc"))
-
-    # Populate an object with the list of environment modifications
-    # and return it
-    # TODO : add additional kwargs for better diagnostics, like requestor,
-    # ttyout, ttyerr, etc.
-    link_dir = spack.paths.build_env_path
-
-    # Set SPACK compiler variables so that our wrapper knows what to
-    # call. If there is no compiler configured then use a default
-    # wrapper which will emit an error if it is used.
-    if compiler.cc:
-        env.set("SPACK_CC", compiler.cc)
-        env.set("CC", os.path.join(link_dir, compiler.link_paths["cc"]))
-    else:
-        env.set("CC", os.path.join(link_dir, "cc"))
-    if compiler.cxx:
-        env.set("SPACK_CXX", compiler.cxx)
-        env.set("CXX", os.path.join(link_dir, compiler.link_paths["cxx"]))
-    else:
-        env.set("CC", os.path.join(link_dir, "c++"))
-    if compiler.f77:
-        env.set("SPACK_F77", compiler.f77)
-        env.set("F77", os.path.join(link_dir, compiler.link_paths["f77"]))
-    else:
-        env.set("F77", os.path.join(link_dir, "f77"))
-    if compiler.fc:
-        env.set("SPACK_FC", compiler.fc)
-        env.set("FC", os.path.join(link_dir, compiler.link_paths["fc"]))
-    else:
-        env.set("FC", os.path.join(link_dir, "fc"))
-
-    # Set SPACK compiler rpath flags so that our wrapper knows what to use
-    env.set("SPACK_CC_RPATH_ARG", compiler.cc_rpath_arg)
-    env.set("SPACK_CXX_RPATH_ARG", compiler.cxx_rpath_arg)
-    env.set("SPACK_F77_RPATH_ARG", compiler.f77_rpath_arg)
-    env.set("SPACK_FC_RPATH_ARG", compiler.fc_rpath_arg)
-    env.set("SPACK_LINKER_ARG", compiler.linker_arg)
-
-    # Check whether we want to force RPATH or RUNPATH
-    if spack.config.get("config:shared_linking:type") == "rpath":
-        env.set("SPACK_DTAGS_TO_STRIP", compiler.enable_new_dtags)
-        env.set("SPACK_DTAGS_TO_ADD", compiler.disable_new_dtags)
-    else:
-        env.set("SPACK_DTAGS_TO_STRIP", compiler.disable_new_dtags)
-        env.set("SPACK_DTAGS_TO_ADD", compiler.enable_new_dtags)
-
     if pkg.keep_werror is not None:
         keep_werror = pkg.keep_werror
     else:
@@ -453,10 +401,6 @@ def set_compiler_environment_variables(pkg, env):
 
     _add_werror_handling(keep_werror, env)
 
-    # Set the target parameters that the compiler will add
-    isa_arg = optimization_flags(compiler, spec.target)
-    env.set("SPACK_TARGET_ARGS", isa_arg)
-
     # Trap spack-tracked compiler flags as appropriate.
     # env_flags are easy to accidentally override.
     inject_flags = {}
@@ -489,75 +433,23 @@ def set_compiler_environment_variables(pkg, env):
             # implicit variables
             env.set(flag.upper(), " ".join(f for f in env_flags[flag]))
     pkg.flags_to_build_system_args(build_system_flags)
 
-    env.set("SPACK_COMPILER_SPEC", str(spec.compiler))
-
     env.set("SPACK_SYSTEM_DIRS", SYSTEM_DIR_CASE_ENTRY)
-
-    compiler.setup_custom_environment(pkg, env)
-
     return env
 
 
 def optimization_flags(compiler, target):
-    if spack.compilers.is_mixed_toolchain(compiler):
-        msg = (
-            "microarchitecture specific optimizations are not "
-            "supported yet on mixed compiler toolchains [check"
-            f" {compiler.name}@{compiler.version} for further details]"
-        )
-        tty.debug(msg)
-        return ""
-
     # Try to check if the current compiler comes with a version number or
     # has an unexpected suffix. If so, treat it as a compiler with a
     # custom spec.
-    compiler_version = compiler.version
-    version_number, suffix = archspec.cpu.version_components(compiler.version)
-    if not version_number or suffix:
-        try:
-            compiler_version = compiler.real_version
-        except spack.util.executable.ProcessError as e:
-            # log this and just return compiler.version instead
-            tty.debug(str(e))
-
+    version_number, _ = archspec.cpu.version_components(compiler.version.dotted_numeric_string)
     try:
-        result = target.optimization_flags(compiler.name, compiler_version.dotted_numeric_string)
+        result = target.optimization_flags(compiler.name, version_number)
     except (ValueError, archspec.cpu.UnsupportedMicroarchitecture):
         result = ""
 
     return result
 
 
-class FilterDefaultDynamicLinkerSearchPaths:
-    """Remove rpaths to directories that are default search paths of the dynamic linker."""
-
-    def __init__(self, dynamic_linker: Optional[str]) -> None:
-        # Identify directories by (inode, device) tuple, which handles symlinks too.
-        self.default_path_identifiers: Set[Tuple[int, int]] = set()
-        if not dynamic_linker:
-            return
-        for path in spack.util.libc.default_search_paths_from_dynamic_linker(dynamic_linker):
-            try:
-                s = os.stat(path)
-                if stat.S_ISDIR(s.st_mode):
-                    self.default_path_identifiers.add((s.st_ino, s.st_dev))
-            except OSError:
-                continue
-
-    def is_dynamic_loader_default_path(self, p: str) -> bool:
-        try:
-            s = os.stat(p)
-            return (s.st_ino, s.st_dev) in self.default_path_identifiers
-        except OSError:
-            return False
-
-    def __call__(self, dirs: List[str]) -> List[str]:
-        if not self.default_path_identifiers:
-            return dirs
-        return [p for p in dirs if not self.is_dynamic_loader_default_path(p)]
-
-
 def set_wrapper_variables(pkg, env):
     """Set environment variables used by the Spack compiler wrapper (which have the prefix
     `SPACK_`) and also add the compiler wrappers to PATH.
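As a rough illustration of the simplified optimization_flags path above, archspec can be asked directly for microarchitecture flags given a compiler name and version; the compiler name and version below are arbitrary examples, not values taken from the diff.

import archspec.cpu

target = archspec.cpu.host()
version_number, _ = archspec.cpu.version_components("13.2.0")
try:
    flags = target.optimization_flags("gcc", version_number)
except (ValueError, archspec.cpu.UnsupportedMicroarchitecture):
    # Unknown compiler/version combinations fall back to no extra flags
    flags = ""
print(flags)  # e.g. "-march=... -mtune=..." depending on the host CPU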
@@ -566,39 +458,8 @@ def set_wrapper_variables(pkg, env):
     this function computes these options in a manner that is intended to match the DAG traversal
     order in `SetupContext`. TODO: this is not the case yet, we're using post order, SetupContext
     is using topo order."""
-    # Set environment variables if specified for
-    # the given compiler
-    compiler = pkg.compiler
-    env.extend(spack.schema.environment.parse(compiler.environment))
-
-    if compiler.extra_rpaths:
-        extra_rpaths = ":".join(compiler.extra_rpaths)
-        env.set("SPACK_COMPILER_EXTRA_RPATHS", extra_rpaths)
-
-    # Add spack build environment path with compiler wrappers first in
-    # the path. We add the compiler wrapper path, which includes default
-    # wrappers (cc, c++, f77, f90), AND a subdirectory containing
-    # compiler-specific symlinks. The latter ensures that builds that
-    # are sensitive to the *name* of the compiler see the right name when
-    # we're building with the wrappers.
-    #
-    # Conflicts on case-insensitive systems (like "CC" and "cc") are
-    # handled by putting one in the <build_env_path>/case-insensitive
-    # directory. Add that to the path too.
-    env_paths = []
-    compiler_specific = os.path.join(
-        spack.paths.build_env_path, os.path.dirname(pkg.compiler.link_paths["cc"])
-    )
-    for item in [spack.paths.build_env_path, compiler_specific]:
-        env_paths.append(item)
-        ci = os.path.join(item, "case-insensitive")
-        if os.path.isdir(ci):
-            env_paths.append(ci)
-
-    tty.debug("Adding compiler bin/ paths: " + " ".join(env_paths))
-    for item in env_paths:
-        env.prepend_path("PATH", item)
-    env.set_path(SPACK_ENV_PATH, env_paths)
-
+    # Set compiler flags injected from the spec
+    set_wrapper_environment_variables_for_flags(pkg, env)
 
     # Working directory for the spack command itself, for debug logs.
     if spack.config.get("config:debug"):
@@ -664,22 +525,15 @@ def set_wrapper_variables(pkg, env):
             lib_path = os.path.join(pkg.prefix, libdir)
             rpath_dirs.insert(0, lib_path)
 
-    filter_default_dynamic_linker_search_paths = FilterDefaultDynamicLinkerSearchPaths(
-        pkg.compiler.default_dynamic_linker
-    )
-
     # TODO: filter_system_paths is again wrong (and probably unnecessary due to the is_system_path
     # branch above). link_dirs should be filtered with entries from _parse_link_paths.
     link_dirs = list(dedupe(filter_system_paths(link_dirs)))
     include_dirs = list(dedupe(filter_system_paths(include_dirs)))
     rpath_dirs = list(dedupe(filter_system_paths(rpath_dirs)))
-    rpath_dirs = filter_default_dynamic_linker_search_paths(rpath_dirs)
 
-    # TODO: implicit_rpaths is prefiltered by is_system_path, that should be removed in favor of
-    # just this filter.
-    implicit_rpaths = filter_default_dynamic_linker_search_paths(pkg.compiler.implicit_rpaths())
-    if implicit_rpaths:
-        env.set("SPACK_COMPILER_IMPLICIT_RPATHS", ":".join(implicit_rpaths))
+    default_dynamic_linker_filter = spack.compilers.libraries.dynamic_linker_filter_for(pkg.spec)
+    if default_dynamic_linker_filter:
+        rpath_dirs = default_dynamic_linker_filter(rpath_dirs)
 
     # Spack managed directories include the stage, store and upstream stores. We extend this with
     # their real paths to make it more robust (e.g. /tmp vs /private/tmp on macOS).
@@ -731,26 +585,6 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
         # Don't use which for this; we want to find it in the current dir.
         module.configure = Executable("./configure")
 
-    # Put spack compiler paths in module scope. (Some packages use it
-    # in setup_run_environment etc, so don't put it context == build)
-    link_dir = spack.paths.build_env_path
-    pkg_compiler = None
-    try:
-        pkg_compiler = pkg.compiler
-    except spack.compilers.NoCompilerForSpecError as e:
-        tty.debug(f"cannot set 'spack_cc': {str(e)}")
-
-    if pkg_compiler is not None:
-        module.spack_cc = os.path.join(link_dir, pkg_compiler.link_paths["cc"])
-        module.spack_cxx = os.path.join(link_dir, pkg_compiler.link_paths["cxx"])
-        module.spack_f77 = os.path.join(link_dir, pkg_compiler.link_paths["f77"])
-        module.spack_fc = os.path.join(link_dir, pkg_compiler.link_paths["fc"])
-    else:
-        module.spack_cc = None
-        module.spack_cxx = None
-        module.spack_f77 = None
-        module.spack_fc = None
-
     # Useful directories within the prefix are encapsulated in
     # a Prefix object.
     module.prefix = pkg.prefix
@@ -881,21 +715,6 @@ def get_rpath_deps(pkg: spack.package_base.PackageBase) -> List[spack.spec.Spec]:
     return _get_rpath_deps_from_spec(pkg.spec, pkg.transitive_rpaths)
 
 
-def load_external_modules(pkg):
-    """Traverse a package's spec DAG and load any external modules.
-
-    Traverse a package's dependencies and load any external modules
-    associated with them.
-
-    Args:
-        pkg (spack.package_base.PackageBase): package to load deps for
-    """
-    for dep in list(pkg.spec.traverse()):
-        external_modules = dep.external_modules or []
-        for external_module in external_modules:
-            load_module(external_module)
-
-
 def setup_package(pkg, dirty, context: Context = Context.BUILD):
     """Execute all environment setup routines."""
     if context not in (Context.BUILD, Context.TEST):
@@ -916,7 +735,6 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
         context == Context.TEST and pkg.test_requires_compiler
     )
     if need_compiler:
-        set_compiler_environment_variables(pkg, env_mods)
         set_wrapper_variables(pkg, env_mods)
 
     # Platform specific setup goes before package specific setup. This is for setting
@@ -928,6 +746,26 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
     env_mods.extend(setup_context.get_env_modifications())
     tty.debug("setup_package: collected all modifications from dependencies")
 
+    tty.debug("setup_package: adding compiler wrappers paths")
+    env_by_name = env_mods.group_by_name()
+    for x in env_by_name["SPACK_COMPILER_WRAPPER_PATH"]:
+        assert isinstance(
+            x, PrependPath
+        ), "unexpected setting used for SPACK_COMPILER_WRAPPER_PATH"
+        env_mods.prepend_path("PATH", x.value)
+
+    # Check whether we want to force RPATH or RUNPATH
+    enable_var_name, disable_var_name = "SPACK_ENABLE_NEW_DTAGS", "SPACK_DISABLE_NEW_DTAGS"
+    if enable_var_name in env_by_name and disable_var_name in env_by_name:
+        enable_new_dtags = _extract_dtags_arg(env_by_name, var_name=enable_var_name)
+        disable_new_dtags = _extract_dtags_arg(env_by_name, var_name=disable_var_name)
+        if spack.config.CONFIG.get("config:shared_linking:type") == "rpath":
+            env_mods.set("SPACK_DTAGS_TO_STRIP", enable_new_dtags)
+            env_mods.set("SPACK_DTAGS_TO_ADD", disable_new_dtags)
+        else:
+            env_mods.set("SPACK_DTAGS_TO_STRIP", disable_new_dtags)
+            env_mods.set("SPACK_DTAGS_TO_ADD", enable_new_dtags)
+
     if context == Context.TEST:
         env_mods.prepend_path("PATH", ".")
     elif context == Context.BUILD and not dirty and not env_mods.is_unset("CPATH"):
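A minimal stand-in (plain lists and dictionaries, not Spack's EnvironmentModifications API) for the grouping-and-prepend idea used in the hunk above: collect every value recorded under one variable name, then push each onto a PATH-like list so the wrapper directories win over system compilers. The directory names are made up for the example.

from collections import defaultdict

env_by_name = defaultdict(list)
env_by_name["SPACK_COMPILER_WRAPPER_PATH"].extend(
    ["/opt/wrappers/gcc", "/opt/wrappers/case-insensitive"]
)

path = ["/usr/bin"]
for value in env_by_name["SPACK_COMPILER_WRAPPER_PATH"]:
    # Prepend so the wrapper directories are searched first
    path.insert(0, value)

print(":".join(path))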
@@ -941,12 +779,7 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
 
     # Load modules on an already clean environment, just before applying Spack's
     # own environment modifications. This ensures Spack controls CC/CXX/... variables.
-    if need_compiler:
-        tty.debug("setup_package: loading compiler modules")
-        for mod in pkg.compiler.modules:
-            load_module(mod)
-
-    load_external_modules(pkg)
+    load_external_modules(setup_context)
 
     # Make sure nothing's strange about the Spack environment.
     validate(env_mods, tty.warn)
@@ -957,6 +790,14 @@ def setup_package(pkg, dirty, context: Context = Context.BUILD):
     return env_base
 
 
+def _extract_dtags_arg(env_by_name: Dict[str, ModificationList], *, var_name: str) -> str:
+    try:
+        enable_new_dtags = env_by_name[var_name][0].value  # type: ignore[union-attr]
+    except (KeyError, IndexError, AttributeError):
+        enable_new_dtags = ""
+    return enable_new_dtags
+
+
 class EnvironmentVisitor:
     def __init__(self, *roots: spack.spec.Spec, context: Context):
         # For the roots (well, marked specs) we follow different edges
@@ -1234,9 +1075,20 @@ def _make_runnable(self, dep: spack.spec.Spec, env: EnvironmentModifications):
         if os.path.isdir(bin_dir):
             env.prepend_path("PATH", bin_dir)
 
-        for cp_dir in spack.build_systems.cmake.get_cmake_prefix_path(dep.package):
-            env.append_path("CMAKE_PREFIX_PATH", cp_dir)
-        env.prune_duplicate_paths("CMAKE_PREFIX_PATH")
+
+def load_external_modules(context: SetupContext) -> None:
+    """Traverse a package's spec DAG and load any external modules.
+
+    Traverse a package's dependencies and load any external modules
+    associated with them.
+
+    Args:
+        context: A populated SetupContext object
+    """
+    for spec, _ in context.external:
+        external_modules = spec.external_modules or []
+        for external_module in external_modules:
+            load_module(external_module)
 
 
 def _setup_pkg_and_run(
@@ -11,6 +11,7 @@
 
 import spack.build_environment
 import spack.builder
+import spack.compilers.libraries
 import spack.error
 import spack.package_base
 import spack.phase_callbacks
@@ -398,33 +399,44 @@ def _do_patch_libtool(self) -> None:
             markers[tag] = "LIBTOOL TAG CONFIG: {0}".format(tag.upper())
 
         # Replace empty linker flag prefixes:
-        if self.pkg.compiler.name == "nag":
+        if self.spec.satisfies("%nag"):
             # Nag is mixed with gcc and g++, which are recognized correctly.
             # Therefore, we change only Fortran values:
+            nag_pkg = self.spec["fortran"].package
             for tag in ["fc", "f77"]:
                 marker = markers[tag]
                 x.filter(
                     regex='^wl=""$',
-                    repl='wl="{0}"'.format(self.pkg.compiler.linker_arg),
-                    start_at="# ### BEGIN {0}".format(marker),
-                    stop_at="# ### END {0}".format(marker),
+                    repl=f'wl="{nag_pkg.linker_arg}"',
+                    start_at=f"# ### BEGIN {marker}",
+                    stop_at=f"# ### END {marker}",
                 )
         else:
-            x.filter(regex='^wl=""$', repl='wl="{0}"'.format(self.pkg.compiler.linker_arg))
+            compiler_spec = spack.compilers.libraries.compiler_spec(self.spec)
+            if compiler_spec:
+                x.filter(regex='^wl=""$', repl='wl="{0}"'.format(compiler_spec.package.linker_arg))
 
         # Replace empty PIC flag values:
-        for cc, marker in markers.items():
+        for compiler, marker in markers.items():
+            if compiler == "cc":
+                language = "c"
+            elif compiler == "cxx":
+                language = "cxx"
+            else:
+                language = "fortran"
+
+            if language not in self.spec:
+                continue
+
             x.filter(
                 regex='^pic_flag=""$',
-                repl='pic_flag="{0}"'.format(
-                    getattr(self.pkg.compiler, "{0}_pic_flag".format(cc))
-                ),
-                start_at="# ### BEGIN {0}".format(marker),
-                stop_at="# ### END {0}".format(marker),
+                repl=f'pic_flag="{self.spec[language].package.pic_flag}"',
+                start_at=f"# ### BEGIN {marker}",
+                stop_at=f"# ### END {marker}",
             )
 
         # Other compiler-specific patches:
-        if self.pkg.compiler.name == "fj":
+        if self.spec.satisfies("%fj"):
            x.filter(regex="-nostdlib", repl="", string=True)
            rehead = r"/\S*/"
            for o in [
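The libtool patching above amounts to anchored substitutions on the generated libtool script; a rough, self-contained rendition of the two substitutions (ignoring the start/stop marker bounds, and with hypothetical flag values) could look like this.

import re

libtool_text = 'wl=""\npic_flag=""\n'
linker_arg = "-Wl,"   # assumed flag prefix for passing arguments to the linker
pic_flag = "-fPIC"    # assumed PIC flag

patched = re.sub(r'^wl=""$', f'wl="{linker_arg}"', libtool_text, flags=re.MULTILINE)
patched = re.sub(r'^pic_flag=""$', f'pic_flag="{pic_flag}"', patched, flags=re.MULTILINE)
print(patched)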
@@ -437,7 +449,7 @@ def _do_patch_libtool(self) -> None:
                 r"crtendS\.o",
             ]:
                 x.filter(regex=(rehead + o), repl="")
-        elif self.pkg.compiler.name == "nag":
+        elif self.spec.satisfies("%nag"):
             for tag in ["fc", "f77"]:
                 marker = markers[tag]
                 start_at = "# ### BEGIN {0}".format(marker)
@@ -70,12 +70,8 @@ class CachedCMakeBuilder(CMakeBuilder):
 
     @property
     def cache_name(self):
-        return "{0}-{1}-{2}@{3}.cmake".format(
-            self.pkg.name,
-            self.pkg.spec.architecture,
-            self.pkg.spec.compiler.name,
-            self.pkg.spec.compiler.version,
-        )
+        compiler_str = f"{self.spec['c'].name}-{self.spec['c'].version}"
+        return f"{self.pkg.name}-{self.spec.architecture.platform}-{compiler_str}.cmake"
 
     @property
     def cache_path(self):
@@ -118,7 +114,9 @@ def initconfig_compiler_entries(self):
         # Fortran compiler is optional
         if "FC" in os.environ:
             spack_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", os.environ["FC"])
-            system_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", self.pkg.compiler.fc)
+            system_fc_entry = cmake_cache_path(
+                "CMAKE_Fortran_COMPILER", self.spec["fortran"].package.fortran
+            )
         else:
             spack_fc_entry = "# No Fortran compiler defined in spec"
             system_fc_entry = "# No Fortran compiler defined in spec"
@@ -134,8 +132,8 @@ def initconfig_compiler_entries(self):
             " " + cmake_cache_path("CMAKE_CXX_COMPILER", os.environ["CXX"]),
             " " + spack_fc_entry,
             "else()\n",
-            " " + cmake_cache_path("CMAKE_C_COMPILER", self.pkg.compiler.cc),
-            " " + cmake_cache_path("CMAKE_CXX_COMPILER", self.pkg.compiler.cxx),
+            " " + cmake_cache_path("CMAKE_C_COMPILER", self.spec["c"].package.cc),
+            " " + cmake_cache_path("CMAKE_CXX_COMPILER", self.spec["cxx"].package.cxx),
             " " + system_fc_entry,
             "endif()\n",
         ]
@@ -278,6 +276,10 @@ def initconfig_hardware_entries(self):
             entries.append("# ROCm")
             entries.append("#------------------{0}\n".format("-" * 30))
 
+            if spec.satisfies("^blt@0.7:"):
+                rocm_root = os.path.dirname(spec["llvm-amdgpu"].prefix)
+                entries.append(cmake_cache_path("ROCM_PATH", rocm_root))
+            else:
             # Explicitly setting HIP_ROOT_DIR may be a patch that is no longer necessary
             entries.append(cmake_cache_path("HIP_ROOT_DIR", "{0}".format(spec["hip"].prefix)))
             llvm_bin = spec["llvm-amdgpu"].prefix.bin
@@ -287,8 +289,11 @@ def initconfig_hardware_entries(self):
             if os.path.basename(os.path.normpath(llvm_prefix)) != "llvm":
                 llvm_bin = os.path.join(llvm_prefix, "llvm/bin/")
             entries.append(
-                cmake_cache_filepath("CMAKE_HIP_COMPILER", os.path.join(llvm_bin, "clang++"))
+                cmake_cache_filepath(
+                    "CMAKE_HIP_COMPILER", os.path.join(llvm_bin, "amdclang++")
+                )
             )
 
             archs = self.spec.variants["amdgpu_target"].value
             if archs[0] != "none":
                 arch_str = ";".join(archs)
@@ -6,12 +6,13 @@
 import pathlib
 import re
 import sys
-from typing import Dict, List, Sequence, Tuple, Union
+from typing import Dict, List, Optional, Sequence, Tuple, Union
 
 import llnl.util.tty as tty
-from llnl.util.lang import classproperty
+from llnl.util.lang import classproperty, memoized
 
-import spack.compiler
+import spack
+import spack.compilers.error
 import spack.package_base
 import spack.util.executable
 
@@ -43,6 +44,9 @@ class CompilerPackage(spack.package_base.PackageBase):
     #: Static definition of languages supported by this class
     compiler_languages: Sequence[str] = ["c", "cxx", "fortran"]
 
+    #: Relative path to compiler wrappers
+    compiler_wrapper_link_paths: Dict[str, str] = {}
+
     def __init__(self, spec: "spack.spec.Spec"):
         super().__init__(spec)
         msg = f"Supported languages for {spec} are not a subset of possible supported languages"
@@ -77,14 +81,14 @@ def executables(cls) -> Sequence[str]:
         ]
 
     @classmethod
-    def determine_version(cls, exe: Path):
+    def determine_version(cls, exe: Path) -> str:
         version_argument = cls.compiler_version_argument
         if isinstance(version_argument, str):
             version_argument = (version_argument,)
 
         for va in version_argument:
             try:
-                output = spack.compiler.get_compiler_version_output(exe, va)
+                output = compiler_output(exe, version_argument=va)
                 match = re.search(cls.compiler_version_regex, output)
                 if match:
                     return ".".join(match.groups())
@@ -95,10 +99,11 @@ def determine_version(cls, exe: Path):
                     f"[{__file__}] Cannot detect a valid version for the executable "
                     f"{str(exe)}, for package '{cls.name}': {e}"
                 )
+        return ""
 
     @classmethod
     def compiler_bindir(cls, prefix: Path) -> Path:
-        """Overridable method for the location of the compiler bindir within the preifx"""
+        """Overridable method for the location of the compiler bindir within the prefix"""
         return os.path.join(prefix, "bin")
 
     @classmethod
@@ -142,3 +147,109 @@ def determine_compiler_paths(cls, exes: Sequence[Path]) -> Dict[str, Path]:
     def determine_variants(cls, exes: Sequence[Path], version_str: str) -> Tuple:
         # path determination is separated so it can be reused in subclasses
         return "", {"compilers": cls.determine_compiler_paths(exes=exes)}
+
+    #: Returns the argument needed to set the RPATH, or None if it does not exist
+    rpath_arg: Optional[str] = "-Wl,-rpath,"
+    #: Flag that needs to be used to pass an argument to the linker
+    linker_arg: str = "-Wl,"
+    #: Flag used to produce Position Independent Code
+    pic_flag: str = "-fPIC"
+    #: Flag used to get verbose output
+    verbose_flags: str = "-v"
+    #: Flag to activate OpenMP support
+    openmp_flag: str = "-fopenmp"
+
+    implicit_rpath_libs: List[str] = []
+
+    def standard_flag(self, *, language: str, standard: str) -> str:
+        """Returns the flag used to enforce a given standard for a language"""
+        if language not in self.supported_languages:
+            raise spack.compilers.error.UnsupportedCompilerFlag(
+                f"{self.spec} does not provide the '{language}' language"
+            )
+        try:
+            return self._standard_flag(language=language, standard=standard)
+        except (KeyError, RuntimeError) as e:
+            raise spack.compilers.error.UnsupportedCompilerFlag(
+                f"{self.spec} does not provide the '{language}' standard {standard}"
+            ) from e
+
+    def _standard_flag(self, *, language: str, standard: str) -> str:
+        raise NotImplementedError("Must be implemented by derived classes")
+
+    def archspec_name(self) -> str:
+        """Name that archspec uses to refer to this compiler"""
+        return self.spec.name
+
+    @property
+    def cc(self) -> Optional[str]:
+        assert self.spec.concrete, "cannot retrieve C compiler, spec is not concrete"
+        if self.spec.external:
+            return self.spec.extra_attributes["compilers"].get("c", None)
+        return self._cc_path()
+
+    def _cc_path(self) -> Optional[str]:
+        """Returns the path to the C compiler, if the package was installed by Spack"""
+        return None
+
+    @property
+    def cxx(self) -> Optional[str]:
+        assert self.spec.concrete, "cannot retrieve C++ compiler, spec is not concrete"
+        if self.spec.external:
+            return self.spec.extra_attributes["compilers"].get("cxx", None)
+        return self._cxx_path()
+
+    def _cxx_path(self) -> Optional[str]:
+        """Returns the path to the C++ compiler, if the package was installed by Spack"""
+        return None
+
+    @property
+    def fortran(self):
+        assert self.spec.concrete, "cannot retrieve Fortran compiler, spec is not concrete"
+        if self.spec.external:
+            return self.spec.extra_attributes["compilers"].get("fortran", None)
+        return self._fortran_path()
+
+    def _fortran_path(self) -> Optional[str]:
+        """Returns the path to the Fortran compiler, if the package was installed by Spack"""
+        return None
+
+
+@memoized
+def _compiler_output(
+    compiler_path: Path, *, version_argument: str, ignore_errors: Tuple[int, ...] = ()
+) -> str:
+    """Returns the output from the compiler invoked with the given version argument.
+
+    Args:
+        compiler_path: path of the compiler to be invoked
+        version_argument: the argument used to extract version information
+    """
+    compiler = spack.util.executable.Executable(compiler_path)
+    if not version_argument:
+        return compiler(
+            output=str, error=str, ignore_errors=ignore_errors, timeout=120, fail_on_error=True
+        )
+    return compiler(
+        version_argument,
+        output=str,
+        error=str,
+        ignore_errors=ignore_errors,
+        timeout=120,
+        fail_on_error=True,
+    )
+
+
+def compiler_output(
+    compiler_path: Path, *, version_argument: str, ignore_errors: Tuple[int, ...] = ()
+) -> str:
+    """Wrapper for _get_compiler_version_output()."""
+    # This ensures that we memoize compiler output by *absolute path*,
+    # not just executable name. If we don't do this, and the path changes
+    # (e.g., during testing), we can get incorrect results.
+    if not os.path.isabs(compiler_path):
+        compiler_path = spack.util.executable.which_string(str(compiler_path), required=True)
+
+    return _compiler_output(
+        compiler_path, version_argument=version_argument, ignore_errors=ignore_errors
+    )
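A self-contained toy sketch of the standard_flag/_standard_flag split added above: the public method validates and wraps errors, the private hook supplies the actual flag table. The class name, the supported languages, and the flag mapping here are hypothetical and simplified compared to the real compiler packages.

from typing import Dict


class ToyCompiler:
    supported_languages = ("c", "cxx")
    _cxx_standards: Dict[str, str] = {"14": "-std=c++14", "17": "-std=c++17"}

    def standard_flag(self, *, language: str, standard: str) -> str:
        if language not in self.supported_languages:
            raise RuntimeError(f"'{language}' is not supported")
        try:
            return self._standard_flag(language=language, standard=standard)
        except KeyError as e:
            raise RuntimeError(f"no flag for {language} standard {standard}") from e

    def _standard_flag(self, *, language: str, standard: str) -> str:
        if language == "cxx":
            return self._cxx_standards[standard]
        return ""


print(ToyCompiler().standard_flag(language="cxx", standard="14"))  # -std=c++14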
@@ -76,7 +76,7 @@ def toolchain_version(self):
         Override this method to select a specific version of the toolchain or change
         selection heuristics.
         Default is whatever version of msvc has been selected by concretization"""
-        return "v" + self.pkg.compiler.platform_toolset_ver
+        return "v" + self.spec["msvc"].package.platform_toolset_ver
 
     @property
     def std_msbuild_args(self):
@@ -278,10 +278,6 @@ def update_external_dependencies(self, extendee_spec=None):
         if not python.architecture.target:
             python.architecture.target = archspec.cpu.host().family.name
 
-        # Ensure compiler information is present
-        if not python.compiler:
-            python.compiler = self.spec.compiler
-
         python.external_path = self.spec.external_path
         python._mark_concrete()
         self.spec.add_dependency_edge(python, depflag=dt.BUILD | dt.LINK | dt.RUN, virtuals=())
@@ -6,6 +6,7 @@
 import codecs
 import json
 import os
+import pathlib
 import re
 import shutil
 import stat
@@ -13,7 +14,7 @@
 import tempfile
 import zipfile
 from collections import namedtuple
-from typing import Callable, Dict, List, Set
+from typing import Callable, Dict, List, Set, Union
 from urllib.request import Request
 
 import llnl.path
@@ -23,8 +24,6 @@
 
 import spack
 import spack.binary_distribution as bindist
-import spack.builder
-import spack.concretize
 import spack.config as cfg
 import spack.environment as ev
 import spack.error
@@ -33,6 +32,7 @@
 import spack.paths
 import spack.repo
 import spack.spec
+import spack.store
 import spack.util.git
 import spack.util.gpg as gpg_util
 import spack.util.spack_yaml as syaml
@@ -41,6 +41,7 @@
 from spack import traverse
 from spack.error import SpackError
 from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
+from spack.version import GitVersion, StandardVersion
 
 from .common import (
     IS_WINDOWS,
@@ -79,6 +80,45 @@ def get_change_revisions():
     return None, None
 
 
+def get_added_versions(
+    checksums_version_dict: Dict[str, Union[StandardVersion, GitVersion]],
+    path: str,
+    from_ref: str = "HEAD~1",
+    to_ref: str = "HEAD",
+) -> List[Union[StandardVersion, GitVersion]]:
+    """Get a list of the versions added between `from_ref` and `to_ref`.
+
+    Args:
+        checksums_version_dict (Dict): all package versions keyed by known checksums.
+        path (str): path to the package.py
+        from_ref (str): oldest git ref, defaults to `HEAD~1`
+        to_ref (str): newer git ref, defaults to `HEAD`
+
+    Returns: list of versions added between refs
+    """
+    git_exe = spack.util.git.git(required=True)
+
+    # Gather git diff
+    diff_lines = git_exe("diff", from_ref, to_ref, "--", path, output=str).split("\n")
+
+    # Store added and removed versions
+    # Removed versions are tracked here to determine when versions are moved in a file
+    # and show up as both added and removed in a git diff.
+    added_checksums = set()
+    removed_checksums = set()
+
+    # Scrape diff for modified versions and prune added versions if they show up
+    # as also removed (which means they've actually just moved in the file and
+    # we shouldn't need to rechecksum them)
+    for checksum in checksums_version_dict.keys():
+        for line in diff_lines:
+            if checksum in line:
+                if line.startswith("+"):
+                    added_checksums.add(checksum)
+                if line.startswith("-"):
+                    removed_checksums.add(checksum)
+
+    return [checksums_version_dict[c] for c in added_checksums - removed_checksums]
+
+
 def get_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
     """Given an environment manifest path and two revisions to compare, return
     whether or not the stack was changed. Returns True if the environment
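A toy illustration of the pruning logic in get_added_versions above: a checksum that appears on both '+' and '-' lines merely moved inside the file, so only genuinely new checksums survive. The checksums and versions below are made up for the example.

checksums = {"abc123": "1.2.0", "def456": "1.1.0"}
diff_lines = [
    '+    version("1.2.0", sha256="abc123")',
    '+    version("1.1.0", sha256="def456")',
    '-    version("1.1.0", sha256="def456")',
]

added, removed = set(), set()
for checksum in checksums:
    for line in diff_lines:
        if checksum in line:
            if line.startswith("+"):
                added.add(checksum)
            if line.startswith("-"):
                removed.add(checksum)

print([checksums[c] for c in added - removed])  # ['1.2.0']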
@@ -224,7 +264,7 @@ def rebuild_filter(s: spack.spec.Spec) -> RebuildDecision:
 
 def _format_pruning_message(spec: spack.spec.Spec, prune: bool, reasons: List[str]) -> str:
     reason_msg = ", ".join(reasons)
-    spec_fmt = "{name}{@version}{%compiler}{/hash:7}"
+    spec_fmt = "{name}{@version}{/hash:7}{%compiler}"
 
     if not prune:
         status = colorize("@*g{[x]} ")
@@ -380,7 +420,6 @@ def generate_pipeline(env: ev.Environment, args) -> None:
         args: (spack.main.SpackArgumentParser): Parsed arguments from the command
             line.
     """
-    with spack.concretize.disable_compiler_existence_check():
     with env.write_transaction():
         env.concretize()
         env.write()
@@ -581,22 +620,25 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) -> None:
     tty.debug(f"job spec: {job_spec}")
 
     try:
-        pkg_cls = spack.repo.PATH.get_pkg_class(job_spec.name)
-        job_pkg = pkg_cls(job_spec)
-        tty.debug(f"job package: {job_pkg}")
-    except AssertionError:
-        msg = f"Cannot copy stage logs: job spec ({job_spec}) must be concrete"
-        tty.error(msg)
+        package_metadata_root = pathlib.Path(spack.store.STORE.layout.metadata_path(job_spec))
+    except spack.error.SpackError as e:
+        tty.error(f"Cannot copy logs: {str(e)}")
         return
 
-    stage_dir = job_pkg.stage.path
-    tty.debug(f"stage dir: {stage_dir}")
-    for file in [
-        job_pkg.log_path,
-        job_pkg.env_mods_path,
-        *spack.builder.create(job_pkg).archive_files,
-    ]:
-        copy_files_to_artifacts(file, job_log_dir)
+    # Get the package's archived files
+    archive_files = []
+    archive_root = package_metadata_root / "archived-files"
+    if archive_root.is_dir():
+        archive_files = [f for f in archive_root.rglob("*") if f.is_file()]
+    else:
+        msg = "Cannot copy package archived files: archived-files must be a directory"
+        tty.warn(msg)
+
+    build_log_zipped = package_metadata_root / "spack-build-out.txt.gz"
+    build_env_mods = package_metadata_root / "spack-build-env.txt"
+
+    for f in [build_log_zipped, build_env_mods, *archive_files]:
+        copy_files_to_artifacts(str(f), job_log_dir)
 
 
 def copy_test_logs_to_artifacts(test_stage, job_test_dir):
@@ -209,10 +209,8 @@ def build_name(self, spec: Optional[spack.spec.Spec] = None) -> Optional[str]:
 
         Returns: (str) given spec's CDash build name."""
         if spec:
-            build_name = (
-                f"{spec.name}@{spec.version}%{spec.compiler} "
-                f"hash={spec.dag_hash()} arch={spec.architecture} ({self.build_group})"
-            )
+            spec_str = spec.format("{name}{@version}{%compiler} hash={hash} arch={architecture}")
+            build_name = f"{spec_str} ({self.build_group})"
             tty.debug(f"Generated CDash build name ({build_name}) from the {spec.name}")
             return build_name
 
Some files were not shown because too many files have changed in this diff