Compare commits: revert-363 ... bugfix/env

285 commits
Commits (SHA1):

5d0bb647a3, bfb72c7fd6, 08a5a59b21, cebd482300, 84a1fc415d, 394f2a58d3, b2310f9e64, 2c7d7388da,
1f7b1f5ee1, 6054daf1e0, 15bbd0724d, 5771500255, 4d535ed50f, 219c49db04, 4006ce28aa, cb9ae0fa77,
c35e59a6c4, c32aeea1a4, 6de3786c36, 3869761216, 04209599ef, 9d05e65578, 9d7b79d8e1, 65ee864232,
4703c3bcb8, a7b2196eab, 4ace1e660a, 12eff8daad, 017d66eb79, 9e11d0e489, 74ad92e16b, 3e2d03984e,
bd8dc1919b, 6922e8f282, a70f307f7e, c338d2fb02, b32edd3a72, 44e15da92c, 685dd7272a, 0b9694575f,
6e490f2239, 0ce548a850, d19475c3b4, 3d1320c834, bb735fb896, 767046f8da, 8f800aca72, 65556eb53e,
a080cf0193, 3e1c6b27a4, f28219cdda, 4206478f5e, eebfb1cf07, 8235e1f38a, a1703fa437, 4b3cc800ff,
5cf7c60d74, 674c22f815, 5a4890cef8, 66a9a9caa8, c3a41c742e, 664c12c7be, 44306d3492, 354d498971,
d2fd68071e, 1a2510d031, 78f5b2a2c6, d0cd340628, 6a2f80e2c6, ed0c6cd0f3, 6aa4c29119, dc1399386c,
6fca0f8018, 88cc949841, c81d0a9e97, e4794274d6, 2b112dd02c, 5da231969e, c3b0806f6c, 77d55ebbd1,
e9a1d0a157, 5560017ebe, 87da3a07bb, 712b30c9c5, dc194ec14c, 5c0004bbc1, 3d923fd5b8, a8c400bae0,
7a77ecbdb6, 91636f0e9d, f91968cf6f, 3d149a7db2, cfea2d1010, 0860139c83, 182ef042ce, 9d68100891,
ebffc53b93, 4c3edac454, 5e33f6bbc5, a19f13f57a, 93cad90413, 7e4927b892, 88548ba76f, 2117e37a0b,
ac62817ba0, 08cf82b977, 4242989f22, b524351e10, b9bde03df5, 5c3bc36fde, 2a2c9cdf02, 8b98564840,
d79c8179fc, 1175831203, 210b2e8caa, a8e2961010, a6a364d3b8, 46bbce1922, 14465e61ae, 4064191fbc,
7bb64b526f, 4f42092f4f, 3b5b9e8474, aabd76cb74, fbde853360, 951f691d1b, 27978fd355, 349b5d982b,
c2c56c1ca1, 7e06b5bc88, 17cec3b101, b0e7b8c794, 98ac3acc92, 7d66c3b5d1, ae9a65ae56, 996442ea9b,
bd20b7b8b7, 08426ec492, f863859bde, 07559d778e, 33833a4f32, e544bb6271, e1a104e3a2, f5624f096c,
d82fc158ca, 709c5c4844, e356390575, 906a8c56af, 6c5d3299fe, fefa4b8cc4, 2ca471745c, 3756d5761b,
0c05f2bc21, 054cbe84de, a185343493, 16404034dc, 516a023173, f8064cd744, 781c87840d, b9a3254b66,
20d2e6a9fd, b6390e335c, 3e09f04241, 67c4ada08b, 701e46464d, dba57ff113, 7579eaf75a, aa99063065,
18c21d0c32, 6b03c9f285, 7ffe2fadfe, a3a9b48ed7, e0fb737e8e, 5e70943d1b, a48abfee75, af86759116,
6a868ec9c5, c6ab42a86a, a771bfadd1, d76a8b7de7, 9a93f223d6, b8735fd1e4, eb80f4d9af, 9160e78729,
0d829b632f, c88b30b426, d862edcce0, 3b497359b7, 4c599980da, 7266cc9b92, cc4a528274, efbfe38f63,
5072e48dab, c5b3fc6929, deca4ce107, e612436e26, fd19759783, 3265215f1d, 428e5726c1, e0570c819c,
ea60220a84, 84d67190a6, 80a34ae9cc, 7beb57cb05, 055c30acfb, 3cd61b9b83, 6e8f449882, 51b0023638,
f02c374181, ff3245382e, ebc492f1e8, 5b8f005962, b11febbbc9, 5837d4c587, e5ea7b6e32, c69a1af5c7,
b40f9f72ed, ab3fd38eda, 5b9b8902ac, 8671f32b14, 24f41ad050, 732153c9e2, 227d19ef02, 7600422183,
c2b4f5bf45, 56d98c3f0a, 37fadd9b2f, 72318ba364, 32416eedd8, 417a5e4c3e, 85f1eb4534, 39100c5336,
9f59d4f199, 334f704d36, eb7b18e827, 9a4b710f4e, d7a75b4fae, c20feda19c, e2a170f8a2, 6777f2d9e9,
a7175979cd, c4923fe3b3, ae504ce2fe, e4edcf6104, 693eea499c, a451f55340, 15f7b72557, 6bf33b2b7f,
dac62c8cf8, 30b8b0e6f5, 8741d2a7ed, 4ec1d860fc, df614169bd, 1029590672, c7e2346d8b, 93631d7512,
54e5dc3eb5, 0a9eea593b, 17e50f519a, d15fe6a345, 3504866185, d0aee3aa30, bbf7ff348a, c6ec5a71a7,
56358c5901, 06d8196dfd, 6a119b911c, 5e1cfeaad0, d199c1a7cf, 795ee106f0, 00f4021e6a, d367fded81,
56c086ea17, 4dcca72e89, 8326ef0772, 27456f53aa, 326442b169, ef2b31f7d1, f2abf90bfc, 906151075d,
d0d5526110, 729b8113cc, c59bebbff9, 118d8e4f57, 2d9c913eb1
.github/ISSUE_TEMPLATE/feature_request.yml (vendored, 6 changes)

@@ -1,4 +1,4 @@
 name: "\U0001F38A Feature request"
 description: Suggest adding a feature that is not yet in Spack
 labels: [feature]
 body:
@@ -29,13 +29,11 @@ body:
     attributes:
       label: General information
       options:
         - label: I have run `spack --version` and reported the version of Spack
           required: true
         - label: I have searched the issues of this repo and believe this is not a duplicate
           required: true
   - type: markdown
     attributes:
       value: |
         If you want to ask a question about the tool (how to use it, what it can currently do, etc.), try the `#general` channel on [our Slack](https://slack.spack.io/) first. We have a welcoming community and chances are you'll get your reply faster and without opening an issue.

         Other than that, thanks for taking the time to contribute to Spack!
.github/workflows/audit.yaml (vendored, 2 changes)

@@ -19,7 +19,7 @@ jobs:
   package-audits:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2
+      - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2
       - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
         with:
           python-version: ${{inputs.python_version}}
.github/workflows/bootstrap.yml (vendored, 22 changes)

@@ -24,7 +24,7 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
           cmake bison bison-devel libstdc++-static
       - name: Checkout
-        uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
+        uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -62,7 +62,7 @@ jobs:
           make patch unzip xz-utils python3 python3-dev tree \
           cmake bison
       - name: Checkout
-        uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
+        uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -99,7 +99,7 @@ jobs:
           bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
           make patch unzip xz-utils python3 python3-dev tree
       - name: Checkout
-        uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
+        uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -133,7 +133,7 @@ jobs:
           make patch unzip which xz python3 python3-devel tree \
           cmake bison
       - name: Checkout
-        uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
+        uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
         with:
           fetch-depth: 0
       - name: Setup repo
@@ -158,7 +158,7 @@ jobs:
         run: |
           brew install cmake bison@2.7 tree
       - name: Checkout
-        uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
+        uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
       - name: Bootstrap clingo
         run: |
           source share/spack/setup-env.sh
@@ -179,7 +179,7 @@ jobs:
         run: |
           brew install tree
       - name: Checkout
-        uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
+        uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
       - name: Bootstrap clingo
         run: |
           set -ex
@@ -204,7 +204,7 @@ jobs:
     runs-on: ubuntu-20.04
     steps:
       - name: Checkout
-        uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
+        uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
         with:
           fetch-depth: 0
       - name: Setup repo
@@ -247,7 +247,7 @@ jobs:
           bzip2 curl file g++ gcc patchelf gfortran git gzip \
           make patch unzip xz-utils python3 python3-dev tree
       - name: Checkout
-        uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
+        uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -283,7 +283,7 @@ jobs:
           make patch unzip xz-utils python3 python3-dev tree \
           gawk
       - name: Checkout
-        uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
+        uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
         with:
           fetch-depth: 0
       - name: Setup non-root user
@@ -316,7 +316,7 @@ jobs:
           # Remove GnuPG since we want to bootstrap it
           sudo rm -rf /usr/local/bin/gpg
       - name: Checkout
-        uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
+        uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
       - name: Bootstrap GnuPG
         run: |
           source share/spack/setup-env.sh
@@ -333,7 +333,7 @@ jobs:
           # Remove GnuPG since we want to bootstrap it
           sudo rm -rf /usr/local/bin/gpg
       - name: Checkout
-        uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
+        uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
       - name: Bootstrap GnuPG
         run: |
           source share/spack/setup-env.sh
.github/workflows/build-containers.yml (vendored, 2 changes)

@@ -50,7 +50,7 @@ jobs:
     if: github.repository == 'spack/spack'
     steps:
       - name: Checkout
-        uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2
+        uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2

       - name: Set Container Tag Normal (Nightly)
         run: |
.github/workflows/ci.yaml (vendored, 2 changes)

@@ -35,7 +35,7 @@ jobs:
       core: ${{ steps.filter.outputs.core }}
       packages: ${{ steps.filter.outputs.packages }}
     steps:
-      - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2
+      - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2
        if: ${{ github.event_name == 'push' }}
        with:
          fetch-depth: 0
.github/workflows/unit_tests.yaml (vendored, 10 changes)

@@ -47,7 +47,7 @@ jobs:
             on_develop: false

     steps:
-      - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2
+      - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2
        with:
          fetch-depth: 0
       - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
@@ -94,7 +94,7 @@ jobs:
   shell:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2
+      - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2
        with:
          fetch-depth: 0
       - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
@@ -133,7 +133,7 @@ jobs:
           dnf install -y \
               bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
               make patch tcl unzip which xz
-      - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2
+      - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2
       - name: Setup repo and non-root user
         run: |
           git --version
@@ -151,7 +151,7 @@ jobs:
   clingo-cffi:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2
+      - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2
        with:
          fetch-depth: 0
       - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
@@ -185,7 +185,7 @@ jobs:
       matrix:
         python-version: ["3.10"]
     steps:
-      - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2
+      - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2
        with:
          fetch-depth: 0
       - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
.github/workflows/valid-style.yml (vendored, 29 changes)

@@ -18,7 +18,7 @@ jobs:
   validate:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2
+      - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2
       - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
         with:
           python-version: '3.11'
@@ -35,7 +35,7 @@ jobs:
   style:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # @v2
+      - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2
        with:
          fetch-depth: 0
       - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # @v2
@@ -58,3 +58,28 @@ jobs:
     with:
       with_coverage: ${{ inputs.with_coverage }}
       python_version: '3.11'
+  # Check that spack can bootstrap the development environment on Python 3.6 - RHEL8
+  bootstrap-dev-rhel8:
+    runs-on: ubuntu-latest
+    container: registry.access.redhat.com/ubi8/ubi
+    steps:
+      - name: Install dependencies
+        run: |
+          dnf install -y \
+              bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
+              make patch tcl unzip which xz
+      - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # @v2
+      - name: Setup repo and non-root user
+        run: |
+          git --version
+          git fetch --unshallow
+          . .github/workflows/setup_git.sh
+          useradd spack-test
+          chown -R spack-test .
+      - name: Bootstrap Spack development environment
+        shell: runuser -u spack-test -- bash {0}
+        run: |
+          source share/spack/setup-env.sh
+          spack -d bootstrap now --dev
+          spack style -t black
+          spack unit-test -V
.github/workflows/windows_python.yml (vendored, 8 changes)

@@ -15,7 +15,7 @@ jobs:
   unit-tests:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
+      - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
        with:
          fetch-depth: 0
       - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
@@ -39,7 +39,7 @@ jobs:
   unit-tests-cmd:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
+      - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
        with:
          fetch-depth: 0
       - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
@@ -63,7 +63,7 @@ jobs:
   build-abseil:
     runs-on: windows-latest
     steps:
-      - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
+      - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
        with:
          fetch-depth: 0
       - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
@@ -87,7 +87,7 @@ jobs:
   #         git config --global core.symlinks false
   #       shell:
   #         powershell
-  #       - uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f
+  #       - uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
   #         with:
   #           fetch-depth: 0
   #       - uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435
bin/spack.bat (109 changes)

@@ -50,24 +50,69 @@ setlocal enabledelayedexpansion
 :: flags will always start with '-', e.g. --help or -V
 :: subcommands will never start with '-'
 :: everything after the subcommand is an arg
-for %%x in (%*) do (
-    set t="%%~x"
+:: we cannot allow batch "for" loop to directly process CL args
+:: a number of batch reserved characters are commonly passed to
+:: spack and allowing batch's "for" method to process the raw inputs
+:: results in a large number of formatting issues
+:: instead, treat the entire CLI as one string
+:: and split by space manually
+:: capture cl args in variable named cl_args
+set cl_args=%*
+:process_cl_args
+rem tokens=1* returns the first processed token produced
+rem by tokenizing the input string cl_args on spaces into
+rem the named variable %%g
+rem While this make look like a for loop, it only
+rem executes a single time for each of the cl args
+rem the actual iterative loop is performed by the
+rem goto process_cl_args stanza
+rem we are simply leveraging the "for" method's string
+rem tokenization
+for /f "tokens=1*" %%g in ("%cl_args%") do (
+    set t=%%~g
+    rem remainder of string is composed into %%h
+    rem these are the cl args yet to be processed
+    rem assign cl_args var to only the args to be processed
+    rem effectively discarding the current arg %%g
+    rem this will be nul when we have no further tokens to process
+    set cl_args=%%h
+    rem process the first space delineated cl arg
+    rem of this iteration
     if "!t:~0,1!" == "-" (
         if defined _sp_subcommand (
-            :: We already have a subcommand, processing args now
-            set "_sp_args=!_sp_args! !t!"
+            rem We already have a subcommand, processing args now
+            if not defined _sp_args (
+                set "_sp_args=!t!"
+            ) else (
+                set "_sp_args=!_sp_args! !t!"
+            )
         ) else (
-            set "_sp_flags=!_sp_flags! !t!"
-            shift
+            if not defined _sp_flags (
+                set "_sp_flags=!t!"
+                shift
+            ) else (
+                set "_sp_flags=!_sp_flags! !t!"
+                shift
+            )
         )
     ) else if not defined _sp_subcommand (
         set "_sp_subcommand=!t!"
        shift
     ) else (
-        set "_sp_args=!_sp_args! !t!"
-        shift
+        if not defined _sp_args (
+            set "_sp_args=!t!"
+            shift
+        ) else (
+            set "_sp_args=!_sp_args! !t!"
+            shift
+        )
     )
 )
+rem if this is not nil, we have more tokens to process
+rem start above process again with remaining unprocessed cl args
+if defined cl_args goto :process_cl_args

 :: --help, -h and -V flags don't require further output parsing.
 :: If we encounter, execute and exit
@@ -95,31 +140,21 @@ if not defined _sp_subcommand (

 :: pass parsed variables outside of local scope. Need to do
 :: this because delayedexpansion can only be set by setlocal
-echo %_sp_flags%>flags
-echo %_sp_args%>args
-echo %_sp_subcommand%>subcmd
-endlocal
-set /p _sp_subcommand=<subcmd
-set /p _sp_flags=<flags
-set /p _sp_args=<args
-if "%_sp_subcommand%"=="ECHO is off." (set "_sp_subcommand=")
-if "%_sp_subcommand%"=="ECHO is on." (set "_sp_subcommand=")
-if "%_sp_flags%"=="ECHO is off." (set "_sp_flags=")
-if "%_sp_flags%"=="ECHO is on." (set "_sp_flags=")
-if "%_sp_args%"=="ECHO is off." (set "_sp_args=")
-if "%_sp_args%"=="ECHO is on." (set "_sp_args=")
-del subcmd
-del flags
-del args
+endlocal & (
+    set "_sp_flags=%_sp_flags%"
+    set "_sp_args=%_sp_args%"
+    set "_sp_subcommand=%_sp_subcommand%"
+)

 :: Filter out some commands. For any others, just run the command.
-if %_sp_subcommand% == "cd" (
+if "%_sp_subcommand%" == "cd" (
     goto :case_cd
-) else if %_sp_subcommand% == "env" (
+) else if "%_sp_subcommand%" == "env" (
     goto :case_env
-) else if %_sp_subcommand% == "load" (
+) else if "%_sp_subcommand%" == "load" (
     goto :case_load
-) else if %_sp_subcommand% == "unload" (
+) else if "%_sp_subcommand%" == "unload" (
     goto :case_load
 ) else (
     goto :default_case
@@ -154,20 +189,20 @@ goto :end_switch
 if NOT defined _sp_args (
     goto :default_case
 )
-set args_no_quote=%_sp_args:"=%
-if NOT "%args_no_quote%"=="%args_no_quote:--help=%" (
+if NOT "%_sp_args%"=="%_sp_args:--help=%" (
     goto :default_case
-) else if NOT "%args_no_quote%"=="%args_no_quote: -h=%" (
+) else if NOT "%_sp_args%"=="%_sp_args: -h=%" (
     goto :default_case
-) else if NOT "%args_no_quote%"=="%args_no_quote:--bat=%" (
+) else if NOT "%_sp_args%"=="%_sp_args:--bat=%" (
     goto :default_case
-) else if NOT "%args_no_quote%"=="%args_no_quote:deactivate=%" (
+) else if NOT "%_sp_args%"=="%_sp_args:deactivate=%" (
     for /f "tokens=* USEBACKQ" %%I in (
-        `call python %spack% %_sp_flags% env deactivate --bat %args_no_quote:deactivate=%`
+        `call python %spack% %_sp_flags% env deactivate --bat %_sp_args:deactivate=%`
     ) do %%I
-) else if NOT "%args_no_quote%"=="%args_no_quote:activate=%" (
+) else if NOT "%_sp_args%"=="%_sp_args:activate=%" (
     for /f "tokens=* USEBACKQ" %%I in (
-        `python %spack% %_sp_flags% env activate --bat %args_no_quote:activate=%`
+        `python %spack% %_sp_flags% env activate --bat %_sp_args:activate=%`
     ) do %%I
 ) else (
     goto :default_case
@@ -188,7 +223,7 @@ if defined _sp_args (
     for /f "tokens=* USEBACKQ" %%I in (
         `python "%spack%" %_sp_flags% %_sp_subcommand% --bat %_sp_args%`) do %%I
 )

 goto :end_switch

 :case_unload
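The partition rule the batch code implements is easier to see outside of batch syntax. Below is a minimal Python sketch of the same rule (a hypothetical helper for illustration, not part of Spack): tokens starting with `-` before the first bare word are Spack flags, the first bare word is the subcommand, and everything after it, flags included, is an argument.

```python
# Sketch of the flag/subcommand/args partition spack.bat performs.
# Hypothetical helper for illustration only; not part of Spack.
def partition(argv):
    flags, subcommand, args = [], None, []
    for token in argv:
        if subcommand is not None:
            args.append(token)       # everything after the subcommand is an arg
        elif token.startswith("-"):
            flags.append(token)      # flags always start with '-'
        else:
            subcommand = token       # first bare word is the subcommand
    return flags, subcommand, args

print(partition(["-d", "env", "activate", "--bat", "myenv"]))
# (['-d'], 'env', ['activate', '--bat', 'myenv'])
```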
@@ -3,3 +3,4 @@ config:
   concretizer: clingo
   build_stage::
   - '$spack/.staging'
   stage_name: '{name}-{version}-{hash:7}'
@@ -19,3 +19,4 @@ packages:
     - msvc
     providers:
       mpi: [msmpi]
+      gl: [wgl]
@@ -444,6 +444,120 @@ attribute:
The minimum version of Singularity required to build a SIF (Singularity Image Format)
image from the recipes generated by Spack is ``3.5.3``.

------------------------------
Extending the Jinja2 Templates
------------------------------

The Dockerfile and the Singularity definition file that Spack can generate are based on
a few Jinja2 templates that are rendered according to the environment being containerized.
Even though Spack allows a great deal of customization by just setting appropriate values for
the configuration options, sometimes that is not enough.

In those cases, a user can directly extend the template that Spack uses to render the image
to e.g. set additional environment variables or perform specific operations either before or
after a given stage of the build. Let's consider as an example the following structure:

.. code-block:: console

   $ tree /opt/environment
   /opt/environment
   ├── data
   │   └── data.csv
   ├── spack.yaml
   └── templates
       └── container
           └── CustomDockerfile

containing both the custom template extension and the environment manifest file. To use a custom
template, the environment must register the directory containing it, and declare its use under the
``container`` configuration:

.. code-block:: yaml
   :emphasize-lines: 7-8,12

   spack:
     specs:
     - hdf5~mpi
     concretizer:
       unify: true
     config:
       template_dirs:
       - /opt/environment/templates
     container:
       format: docker
       depfile: true
       template: container/CustomDockerfile

The template extension can override two blocks, named ``build_stage`` and ``final_stage``, similarly to
the example below:

.. code-block::
   :emphasize-lines: 3,8

   {% extends "container/Dockerfile" %}
   {% block build_stage %}
   RUN echo "Start building"
   {{ super() }}
   {% endblock %}
   {% block final_stage %}
   {{ super() }}
   COPY data /share/myapp/data
   {% endblock %}
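Under the hood this is plain Jinja2 template inheritance. A minimal, self-contained sketch (using an in-memory loader as a stand-in for Spack's real ``container/Dockerfile`` template) shows how ``{% extends %}`` and ``{{ super() }}`` compose the parent and child blocks:

.. code-block:: python

   # Self-contained sketch of Jinja2 block overriding; the parent template
   # here is a stand-in, not Spack's real container/Dockerfile.
   import jinja2

   loader = jinja2.DictLoader(
       {
           "container/Dockerfile": (
               "FROM base\n{% block build_stage %}RUN spack install{% endblock %}"
           ),
           "container/CustomDockerfile": (
               '{% extends "container/Dockerfile" %}'
               '{% block build_stage %}RUN echo "Start building"\n'
               "{{ super() }}{% endblock %}"
           ),
       }
   )
   env = jinja2.Environment(loader=loader)
   print(env.get_template("container/CustomDockerfile").render())
   # FROM base
   # RUN echo "Start building"
   # RUN spack install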
The recipe that gets generated contains the two extra instructions that we added in our template extension:

.. code-block:: Dockerfile
   :emphasize-lines: 4,43

   # Build stage with Spack pre-installed and ready to be used
   FROM spack/ubuntu-jammy:latest as builder

   RUN echo "Start building"

   # What we want to install and how we want to install it
   # is specified in a manifest file (spack.yaml)
   RUN mkdir /opt/spack-environment \
       && (echo "spack:" \
       &&   echo "  specs:" \
       &&   echo "  - hdf5~mpi" \
       &&   echo "  concretizer:" \
       &&   echo "    unify: true" \
       &&   echo "  config:" \
       &&   echo "    template_dirs:" \
       &&   echo "    - /tmp/environment/templates" \
       &&   echo "    install_tree: /opt/software" \
       &&   echo "  view: /opt/view") > /opt/spack-environment/spack.yaml

   # Install the software, remove unnecessary deps
   RUN cd /opt/spack-environment && spack env activate . && spack concretize && spack env depfile -o Makefile && make -j $(nproc) && spack gc -y

   # Strip all the binaries
   RUN find -L /opt/view/* -type f -exec readlink -f '{}' \; | \
       xargs file -i | \
       grep 'charset=binary' | \
       grep 'x-executable\|x-archive\|x-sharedlib' | \
       awk -F: '{print $1}' | xargs strip -s

   # Modifications to the environment that are necessary to run
   RUN cd /opt/spack-environment && \
       spack env activate --sh -d . >> /etc/profile.d/z10_spack_environment.sh

   # Bare OS image to run the installed executables
   FROM ubuntu:22.04

   COPY --from=builder /opt/spack-environment /opt/spack-environment
   COPY --from=builder /opt/software /opt/software
   COPY --from=builder /opt/._view /opt/._view
   COPY --from=builder /opt/view /opt/view
   COPY --from=builder /etc/profile.d/z10_spack_environment.sh /etc/profile.d/z10_spack_environment.sh

   COPY data /share/myapp/data

   ENTRYPOINT ["/bin/bash", "--rcfile", "/etc/profile", "-l", "-c", "$*", "--" ]
   CMD [ "/bin/bash" ]


.. _container_config_options:

-----------------------

@@ -464,6 +578,10 @@ to customize the generation of container recipes:
     - The format of the recipe
     - ``docker`` or ``singularity``
     - Yes
+  * - ``depfile``
+    - Whether to use a depfile for installation, or not
+    - True or False (default)
+    - No
   * - ``images:os``
     - Operating system used as a base for the image
     - See :ref:`containers-supported-os`
@@ -512,14 +630,6 @@ to customize the generation of container recipes:
     - System packages needed at run-time
     - Valid packages for the current OS
     - No
-  * - ``extra_instructions:build``
-    - Extra instructions (e.g. `RUN`, `COPY`, etc.) at the end of the ``build`` stage
-    - Anything understood by the current ``format``
-    - No
-  * - ``extra_instructions:final``
-    - Extra instructions (e.g. `RUN`, `COPY`, etc.) at the end of the ``final`` stage
-    - Anything understood by the current ``format``
-    - No
   * - ``labels``
     - Labels to tag the image
     - Pairs of key-value strings
@@ -632,18 +632,18 @@ Here's an example of what bootstrapping some compilers might look like:
       exclude:
       - '%gcc@7.3.0 os=centos7'
       - '%gcc@5.5.0 os=ubuntu18.04'
-  gitlab-ci:
+  ci:
     bootstrap:
     - name: compiler-pkgs
       compiler-agnostic: true
-    mappings:
-      # mappings similar to the example higher up in this description
+    pipeline-gen:
+    # similar to the example higher up in this description
     ...

 The example above adds a list to the ``definitions`` called ``compiler-pkgs``
 (you can add any number of these), which lists compiler packages that should
 be staged ahead of the full matrix of release specs (in this example, only
-readline). Then within the ``gitlab-ci`` section, note the addition of a
+readline). Then within the ``ci`` section, note the addition of a
 ``bootstrap`` section, which can contain a list of items, each referring to
 a list in the ``definitions`` section. These items can either
 be a dictionary or a string. If you supply a dictionary, it must have a name
@@ -709,7 +709,7 @@ be reported.

 Take a look at the
 `schema <https://github.com/spack/spack/blob/develop/lib/spack/spack/schema/ci.py>`_
-for the gitlab-ci section of the spack environment file, to see precisely what
+for the ci section of the spack environment file, to see precisely what
 syntax is allowed there.

 .. _reserved_tags:
lib/spack/env/cc (vendored, 111 changes)

@@ -430,21 +430,37 @@ other_args_list=""
 # Global state for keeping track of -Wl,-rpath -Wl,/path
 wl_expect_rpath=no

 # Same, but for -Xlinker -rpath -Xlinker /path
 xlinker_expect_rpath=no

 parse_Wl() {
     # drop -Wl
     shift
     while [ $# -ne 0 ]; do
         if [ "$wl_expect_rpath" = yes ]; then
-            rp="$1"
+            if system_dir "$1"; then
+                append system_rpath_dirs_list "$1"
+            else
+                append rpath_dirs_list "$1"
+            fi
             wl_expect_rpath=no
         else
-            rp=""
             case "$1" in
                 -rpath=*)
-                    rp="${1#-rpath=}"
+                    arg="${1#-rpath=}"
+                    if system_dir "$arg"; then
+                        append system_rpath_dirs_list "$arg"
+                    else
+                        append rpath_dirs_list "$arg"
+                    fi
                     ;;
                 --rpath=*)
-                    rp="${1#--rpath=}"
+                    arg="${1#--rpath=}"
+                    if system_dir "$arg"; then
+                        append system_rpath_dirs_list "$arg"
+                    else
+                        append rpath_dirs_list "$arg"
+                    fi
                     ;;
                 -rpath|--rpath)
                     wl_expect_rpath=yes
@@ -456,17 +472,8 @@ parse_Wl() {
                     ;;
             esac
         fi
-        if [ -n "$rp" ]; then
-            if system_dir "$rp"; then
-                append system_rpath_dirs_list "$rp"
-            else
-                append rpath_dirs_list "$rp"
-            fi
-        fi
         shift
     done
-    # By lack of local variables, always set this to empty string.
-    rp=""
 }

@@ -573,38 +580,72 @@ while [ $# -ne 0 ]; do
             unset IFS
             ;;
         -Xlinker)
-            if [ "$2" = "-rpath" ]; then
-                if [ "$3" != "-Xlinker" ]; then
-                    die "-Xlinker,-rpath was not followed by -Xlinker,*"
+            shift
+            if [ $# -eq 0 ]; then
+                # -Xlinker without value: let the compiler error about it.
+                append other_args_list -Xlinker
+                xlinker_expect_rpath=no
+                break
+            elif [ "$xlinker_expect_rpath" = yes ]; then
+                # Register the path of -Xlinker -rpath <other args> -Xlinker <path>
+                if system_dir "$1"; then
+                    append system_rpath_dirs_list "$1"
+                else
+                    append rpath_dirs_list "$1"
                 fi
-                shift 3;
-                rp="$1"
-            elif [ "$2" = "$dtags_to_strip" ]; then
-                shift # We want to remove explicitly this flag
+                xlinker_expect_rpath=no
             else
-                append other_args_list "$1"
+                case "$1" in
+                    -rpath=*)
+                        arg="${1#-rpath=}"
+                        if system_dir "$arg"; then
+                            append system_rpath_dirs_list "$arg"
+                        else
+                            append rpath_dirs_list "$arg"
+                        fi
+                        ;;
+                    --rpath=*)
+                        arg="${1#--rpath=}"
+                        if system_dir "$arg"; then
+                            append system_rpath_dirs_list "$arg"
+                        else
+                            append rpath_dirs_list "$arg"
+                        fi
+                        ;;
+                    -rpath|--rpath)
+                        xlinker_expect_rpath=yes
+                        ;;
+                    "$dtags_to_strip")
+                        ;;
+                    *)
+                        append other_args_list -Xlinker
+                        append other_args_list "$1"
+                        ;;
+                esac
             fi
             ;;
+        "$dtags_to_strip")
+            ;;
         *)
-            if [ "$1" = "$dtags_to_strip" ]; then
-                : # We want to remove explicitly this flag
-            else
-                append other_args_list "$1"
-            fi
+            append other_args_list "$1"
             ;;
     esac
-    # test rpaths against system directories in one place.
-    if [ -n "$rp" ]; then
-        if system_dir "$rp"; then
-            append system_rpath_dirs_list "$rp"
-        else
-            append rpath_dirs_list "$rp"
-        fi
-    fi
     shift
 done

+# We found `-Xlinker -rpath` but no matching value `-Xlinker /path`. Just append
+# `-Xlinker -rpath` again and let the compiler or linker handle the error during arg
+# parsing.
+if [ "$xlinker_expect_rpath" = yes ]; then
+    append other_args_list -Xlinker
+    append other_args_list -rpath
+fi
+
+# Same, but for -Wl flags.
+if [ "$wl_expect_rpath" = yes ]; then
+    append other_args_list -Wl,-rpath
+fi

 #
 # Add flags from Spack's cppflags, cflags, cxxflags, fcflags, fflags, and
 # ldflags. We stick to the order that gmake puts the flags in by default.
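The rpath bookkeeping is the same in both the `-Wl` and `-Xlinker` branches above. As a rough model (a Python stand-in for illustration, not the wrapper itself), this is the state machine applied to each token, with `system_dir` standing in for the wrapper's shell function:

```python
# Rough Python model of the rpath state machine in the cc wrapper above;
# system_dir() is a stand-in for the wrapper's shell function.
def system_dir(path):
    return path in ("/lib", "/lib64", "/usr/lib", "/usr/lib64")

def collect_rpaths(tokens):
    rpaths, system_rpaths, expect_rpath = [], [], False
    for tok in tokens:
        if expect_rpath:                      # value following a bare -rpath
            (system_rpaths if system_dir(tok) else rpaths).append(tok)
            expect_rpath = False
        elif tok in ("-rpath", "--rpath"):
            expect_rpath = True
        elif tok.startswith(("-rpath=", "--rpath=")):
            arg = tok.split("=", 1)[1]
            (system_rpaths if system_dir(arg) else rpaths).append(arg)
    return rpaths, system_rpaths

print(collect_rpaths(["-rpath", "/opt/lib", "--rpath=/usr/lib"]))
# (['/opt/lib'], ['/usr/lib'])
```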
lib/spack/external/__init__.py (vendored, 2 changes)

@@ -18,7 +18,7 @@

 * Homepage: https://pypi.python.org/pypi/archspec
 * Usage: Labeling, comparison and detection of microarchitectures
-* Version: 0.2.0 (commit e44bad9c7b6defac73696f64078b2fe634719b62)
+* Version: 0.2.0-dev (commit f3667f95030c6573842fb5f6df0d647285597509)

 astunparse
 ----------------
lib/spack/external/archspec/cli.py (vendored, 60 changes)

@@ -6,19 +6,61 @@
 archspec command line interface
 """

-import click
+import argparse
+import typing

 import archspec
 import archspec.cpu


-@click.group(name="archspec")
-@click.version_option(version=archspec.__version__)
-def main():
-    """archspec command line interface"""
+def _make_parser() -> argparse.ArgumentParser:
+    parser = argparse.ArgumentParser(
+        "archspec",
+        description="archspec command line interface",
+        add_help=False,
+    )
+    parser.add_argument(
+        "--version",
+        "-V",
+        help="Show the version and exit.",
+        action="version",
+        version=f"archspec, version {archspec.__version__}",
+    )
+    parser.add_argument("--help", "-h", help="Show the help and exit.", action="help")
+
+    subcommands = parser.add_subparsers(
+        title="command",
+        metavar="COMMAND",
+        dest="command",
+    )
+
+    cpu_command = subcommands.add_parser(
+        "cpu",
+        help="archspec command line interface for CPU",
+        description="archspec command line interface for CPU",
+    )
+    cpu_command.set_defaults(run=cpu)
+
+    return parser


-@main.command()
-def cpu():
-    """archspec command line interface for CPU"""
-    click.echo(archspec.cpu.host())
+def cpu() -> int:
+    """Run the `archspec cpu` subcommand."""
+    print(archspec.cpu.host())
+    return 0
+
+
+def main(argv: typing.Optional[typing.List[str]] = None) -> int:
+    """Run the `archspec` command line interface."""
+    parser = _make_parser()
+
+    try:
+        args = parser.parse_args(argv)
+    except SystemExit as err:
+        return err.code
+
+    if args.command is None:
+        parser.print_help()
+        return 0
+
+    return args.run()
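With the rewrite, the CLI can also be driven programmatically by passing an explicit `argv`. A hedged usage sketch (assuming the module is importable as `archspec.cli`, as it is from Spack's vendored copy):

```python
# Usage sketch for the argparse-based entry point above.
import sys

from archspec.cli import main

# Equivalent to running `archspec cpu`: prints the host microarchitecture
# name and exits with the subcommand's return code.
sys.exit(main(["cpu"]))
```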
@@ -2782,6 +2782,10 @@
       {
         "versions": "13.0:",
         "flags" : "-mcpu=apple-m1"
       },
+      {
+        "versions": "16.0:",
+        "flags" : "-mcpu=apple-m2"
+      }
     ],
     "apple-clang": [
@@ -2790,8 +2794,12 @@
         "flags" : "-march=armv8.5-a"
       },
       {
-        "versions": "13.0:",
-        "flags" : "-mcpu=vortex"
+        "versions": "13.0:14.0.2",
+        "flags" : "-mcpu=apple-m1"
+      },
+      {
+        "versions": "14.0.2:",
+        "flags" : "-mcpu=apple-m2"
       }
     ]
   }
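These JSON entries are what archspec consults when asked for a target's optimization flags. A hedged sketch of querying them (the target and compiler names here are examples, not taken from the diff):

```python
# Sketch: look up the flags encoded in the JSON entries above.
import archspec.cpu

target = archspec.cpu.TARGETS["m1"]  # Apple M1 microarchitecture
# Per the change above, apple-clang 13.0 now reports -mcpu=apple-m1
# instead of -mcpu=vortex.
print(target.optimization_flags("apple-clang", "13.0"))
```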
@@ -5,18 +5,20 @@
 import collections
 import collections.abc
 import errno
+import fnmatch
 import glob
 import hashlib
 import itertools
 import numbers
 import os
+import posixpath
 import re
 import shutil
 import stat
 import sys
 import tempfile
 from contextlib import contextmanager
-from typing import Callable, List, Match, Optional, Tuple, Union
+from typing import Callable, Iterable, List, Match, Optional, Tuple, Union

 from llnl.util import tty
 from llnl.util.lang import dedupe, memoized
@@ -1671,6 +1673,38 @@ def fix_darwin_install_name(path):
             break


+def find_first(root: str, files: Union[Iterable[str], str], bfs_depth: int = 2) -> Optional[str]:
+    """Find the first file matching a pattern.
+
+    The following
+
+    .. code-block:: console
+
+       $ find /usr -name 'abc*' -o -name 'def*' -quit
+
+    is equivalent to:
+
+    >>> find_first("/usr", ["abc*", "def*"])
+
+    Any glob pattern supported by fnmatch can be used.
+
+    The search order of this method is breadth-first over directories,
+    until depth bfs_depth, after which depth-first search is used.
+
+    Parameters:
+        root (str): The root directory to start searching from
+        files (str or Iterable): File pattern(s) to search for
+        bfs_depth (int): (advanced) parameter that specifies at which
+            depth to switch to depth-first search.
+
+    Returns:
+        str or None: The matching file or None when no file is found.
+    """
+    if isinstance(files, str):
+        files = [files]
+    return FindFirstFile(root, *files, bfs_depth=bfs_depth).find()
+
+
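A hedged usage sketch of `find_first` (it must be run with Spack's `lib/spack` on `sys.path`; the searched directory and patterns are examples):

```python
# Usage sketch for find_first: breadth-first over directories up to
# bfs_depth, then depth-first; patterns are fnmatch globs.
from llnl.util.filesystem import find_first

config = find_first("/opt/environment", ["spack.yaml", "spack.lock"], bfs_depth=2)
print(config)  # e.g. /opt/environment/spack.yaml, or None when nothing matches
```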
 def find(root, files, recursive=True):
     """Search for ``files`` starting from the ``root`` directory.

@@ -2720,3 +2754,105 @@ def filesummary(path, print_bytes=16) -> Tuple[int, bytes]:
         return size, short_contents
     except OSError:
         return 0, b""
+
+
+class FindFirstFile:
+    """Uses hybrid iterative deepening to locate the first matching
+    file. Up to depth ``bfs_depth`` it uses iterative deepening, which
+    mimics breadth-first with the same memory footprint as depth-first
+    search, after which it switches to ordinary depth-first search using
+    ``os.walk``."""
+
+    def __init__(self, root: str, *file_patterns: str, bfs_depth: int = 2):
+        """Create a search object for ``root`` and the given file patterns.
+
+        Args:
+            root (str): directory in which to recursively search
+            file_patterns (str): glob file patterns understood by fnmatch
+            bfs_depth (int): until this depth breadth-first traversal is used,
+                when no match is found, the mode is switched to depth-first search.
+        """
+        self.root = root
+        self.bfs_depth = bfs_depth
+        self.match: Callable
+
+        # normcase is trivial on posix
+        regex = re.compile("|".join(fnmatch.translate(os.path.normcase(p)) for p in file_patterns))
+
+        # On case sensitive filesystems match against normcase'd paths.
+        if os.path is posixpath:
+            self.match = regex.match
+        else:
+            self.match = lambda p: regex.match(os.path.normcase(p))
+
+    def find(self) -> Optional[str]:
+        """Run the file search
+
+        Returns:
+            str or None: path of the matching file
+        """
+        self.file = None
+
+        # First do iterative deepening (i.e. bfs through limited depth dfs)
+        for i in range(self.bfs_depth + 1):
+            if self._find_at_depth(self.root, i):
+                return self.file
+
+        # Then fall back to depth-first search
+        return self._find_dfs()
+
+    def _find_at_depth(self, path, max_depth, depth=0) -> bool:
+        """Returns True when done. Notice search can be done
+        either because a file was found, or because it recursed
+        through all directories."""
+        try:
+            entries = os.scandir(path)
+        except OSError:
+            return True
+
+        done = True
+
+        with entries:
+            # At max depth we look for matching files.
+            if depth == max_depth:
+                for f in entries:
+                    # Exit on match
+                    if self.match(f.name):
+                        self.file = os.path.join(path, f.name)
+                        return True
+
+                    # is_dir should not require a stat call, so it's a good optimization.
+                    if self._is_dir(f):
+                        done = False
+                return done
+
+            # At lower depth only recurse into subdirs
+            for f in entries:
+                if not self._is_dir(f):
+                    continue
+
+                # If any subdir is not fully traversed, we're not done yet.
+                if not self._find_at_depth(os.path.join(path, f.name), max_depth, depth + 1):
+                    done = False
+
+                # Early exit when we've found something.
+                if self.file:
+                    return True
+
+        return done
+
+    def _is_dir(self, f: os.DirEntry) -> bool:
+        """Returns True when f is dir we can enter (and not a symlink)."""
+        try:
+            return f.is_dir(follow_symlinks=False)
+        except OSError:
+            return False
+
+    def _find_dfs(self) -> Optional[str]:
+        """Returns match or None"""
+        for dirpath, _, filenames in os.walk(self.root):
+            for file in filenames:
+                if self.match(file):
+                    return os.path.join(dirpath, file)
+        return None
@@ -24,6 +24,7 @@
 import warnings
 from contextlib import closing, contextmanager
 from gzip import GzipFile
+from typing import Union
 from urllib.error import HTTPError, URLError

 import ruamel.yaml as yaml
@@ -502,7 +503,9 @@ def _binary_index():


 #: Singleton binary_index instance
-binary_index = llnl.util.lang.Singleton(_binary_index)
+binary_index: Union[BinaryCacheIndex, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(
+    _binary_index
+)


 class NoOverwriteException(spack.error.SpackError):
@@ -1796,7 +1799,15 @@ def is_backup_file(file):
     relocate.relocate_text(text_names, prefix_to_prefix_text)

     # relocate the install prefixes in binary files including dependencies
-    relocate.relocate_text_bin(files_to_relocate, prefix_to_prefix_bin)
+    changed_files = relocate.relocate_text_bin(files_to_relocate, prefix_to_prefix_bin)
+
+    # Add ad-hoc signatures to patched macho files when on macOS.
+    if "macho" in platform.binary_formats and sys.platform == "darwin":
+        codesign = which("codesign")
+        if not codesign:
+            return
+        for binary in changed_files:
+            codesign("-fs-", binary)

     # If we are installing back to the same location
     # relocate the sbang location if the spack directory changed
@@ -2015,7 +2026,7 @@ def install_root_node(spec, allow_root, unsigned=False, force=False, sha256=None
     with spack.util.path.filter_padding():
         tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
         extract_tarball(spec, download_result, allow_root, unsigned, force)
-        spack.hooks.post_install(spec)
+        spack.hooks.post_install(spec, False)
         spack.store.db.add(spec, spack.store.layout)
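The new macOS branch exists because rewriting prefixes inside a signed Mach-O binary invalidates its code signature; re-signing ad hoc (`codesign -fs-`) makes the relocated binary runnable again. A standalone sketch of that step (the binary path is hypothetical):

```python
# Sketch of the ad-hoc re-signing step added above, isolated from the
# relocation code; the binary path is a hypothetical example.
import sys

from spack.util.executable import which

if sys.platform == "darwin":
    codesign = which("codesign")
    if codesign:
        for binary in ["/opt/software/bin/example"]:  # e.g. relocate_text_bin output
            codesign("-fs-", binary)  # force an ad-hoc signature
```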
@@ -9,6 +9,7 @@
 import sys
 import sysconfig
 import warnings
+from typing import Dict, Optional, Sequence, Union

 import archspec.cpu

@@ -21,8 +22,10 @@
 from .config import spec_for_current_python

+QueryInfo = Dict[str, "spack.spec.Spec"]

-def _python_import(module):
+
+def _python_import(module: str) -> bool:
     try:
         __import__(module)
     except ImportError:
@@ -30,7 +33,9 @@ def _python_import(module):
     return True


-def _try_import_from_store(module, query_spec, query_info=None):
+def _try_import_from_store(
+    module: str, query_spec: Union[str, "spack.spec.Spec"], query_info: Optional[QueryInfo] = None
+) -> bool:
     """Return True if the module can be imported from an already
     installed spec, False otherwise.

@@ -52,7 +57,7 @@ def _try_import_from_store(module, query_spec, query_info=None):
         module_paths = [
             os.path.join(candidate_spec.prefix, pkg.purelib),
             os.path.join(candidate_spec.prefix, pkg.platlib),
-        ]  # type: list[str]
+        ]
         path_before = list(sys.path)

         # NOTE: try module_paths first and last, last allows an existing version in path
@@ -89,7 +94,7 @@ def _try_import_from_store(module, query_spec, query_info=None):
     return False


-def _fix_ext_suffix(candidate_spec):
+def _fix_ext_suffix(candidate_spec: "spack.spec.Spec"):
     """Fix the external suffixes of Python extensions on the fly for
     platforms that may need it

@@ -157,7 +162,11 @@ def _fix_ext_suffix(candidate_spec):
                 os.symlink(abs_path, link_name)


-def _executables_in_store(executables, query_spec, query_info=None):
+def _executables_in_store(
+    executables: Sequence[str],
+    query_spec: Union["spack.spec.Spec", str],
+    query_info: Optional[QueryInfo] = None,
+) -> bool:
     """Return True if at least one of the executables can be retrieved from
     a spec in store, False otherwise.

@@ -193,7 +202,7 @@ def _executables_in_store(executables, query_spec, query_info=None):
     return False


-def _root_spec(spec_str):
+def _root_spec(spec_str: str) -> str:
     """Add a proper compiler and target to a spec used during bootstrapping.

     Args:
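The probe `_try_import_from_store` performs boils down to: put the candidate spec's `purelib`/`platlib` directories on `sys.path` (first and last, per the NOTE above) and attempt the import. A simplified standalone sketch:

```python
# Simplified model of the import probe; module_paths would come from a
# candidate spec's purelib/platlib directories in the real code.
import sys

def can_import_from(module: str, module_paths: list) -> bool:
    path_before = list(sys.path)
    # try module_paths first and last; last allows an existing version in path
    sys.path = module_paths + path_before + module_paths
    try:
        __import__(module)
        return True
    except ImportError:
        return False
    finally:
        sys.path = path_before
```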
@@ -7,6 +7,7 @@
 import contextlib
 import os.path
 import sys
+from typing import Any, Dict, Generator, MutableSequence, Sequence

 from llnl.util import tty

@@ -24,12 +25,12 @@
 _REF_COUNT = 0


-def is_bootstrapping():
+def is_bootstrapping() -> bool:
     """Return True if we are in a bootstrapping context, False otherwise."""
     return _REF_COUNT > 0


-def spec_for_current_python():
+def spec_for_current_python() -> str:
     """For bootstrapping purposes we are just interested in the Python
     minor version (all patches are ABI compatible with the same minor).

@@ -41,14 +42,14 @@ def spec_for_current_python():
     return f"python@{version_str}"


-def root_path():
+def root_path() -> str:
     """Root of all the bootstrap related folders"""
     return spack.util.path.canonicalize_path(
         spack.config.get("bootstrap:root", spack.paths.default_user_bootstrap_path)
     )


-def store_path():
+def store_path() -> str:
     """Path to the store used for bootstrapped software"""
     enabled = spack.config.get("bootstrap:enable", True)
     if not enabled:
@@ -59,7 +60,7 @@ def store_path():


 @contextlib.contextmanager
-def spack_python_interpreter():
+def spack_python_interpreter() -> Generator:
     """Override the current configuration to set the interpreter under
     which Spack is currently running as the only Python external spec
     available.
@@ -76,18 +77,18 @@ def spack_python_interpreter():
     yield


-def _store_path():
+def _store_path() -> str:
     bootstrap_root_path = root_path()
     return spack.util.path.canonicalize_path(os.path.join(bootstrap_root_path, "store"))


-def _config_path():
+def _config_path() -> str:
     bootstrap_root_path = root_path()
     return spack.util.path.canonicalize_path(os.path.join(bootstrap_root_path, "config"))


 @contextlib.contextmanager
-def ensure_bootstrap_configuration():
+def ensure_bootstrap_configuration() -> Generator:
     """Swap the current configuration for the one used to bootstrap Spack.

     The context manager is reference counted to ensure we don't swap multiple
@@ -107,7 +108,7 @@ def ensure_bootstrap_configuration():
         _REF_COUNT -= 1


-def _read_and_sanitize_configuration():
+def _read_and_sanitize_configuration() -> Dict[str, Any]:
     """Read the user configuration that needs to be reused for bootstrapping
     and remove the entries that should not be copied over.
     """
@@ -120,9 +121,11 @@ def _read_and_sanitize_configuration():
     return user_configuration


-def _bootstrap_config_scopes():
+def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
     tty.debug("[BOOTSTRAP CONFIG SCOPE] name=_builtin")
-    config_scopes = [spack.config.InternalConfigScope("_builtin", spack.config.config_defaults)]
+    config_scopes: MutableSequence["spack.config.ConfigScope"] = [
+        spack.config.InternalConfigScope("_builtin", spack.config.config_defaults)
+    ]
     configuration_paths = (spack.config.configuration_defaults_path, ("bootstrap", _config_path()))
     for name, path in configuration_paths:
         platform = spack.platforms.host().name
@@ -137,7 +140,7 @@ def _bootstrap_config_scopes():
     return config_scopes


-def _add_compilers_if_missing():
+def _add_compilers_if_missing() -> None:
     arch = spack.spec.ArchSpec.frontend_arch()
     if not spack.compilers.compilers_for_arch(arch):
         new_compilers = spack.compilers.find_new_compilers()
@@ -146,7 +149,7 @@ def _add_compilers_if_missing():


 @contextlib.contextmanager
-def _ensure_bootstrap_configuration():
+def _ensure_bootstrap_configuration() -> Generator:
     bootstrap_store_path = store_path()
     user_configuration = _read_and_sanitize_configuration()
     with spack.environment.no_active_environment():
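The reference counting noted in the docstring keeps nested `with ensure_bootstrap_configuration():` blocks cheap: only the outermost entry swaps the configuration. A minimal sketch of the pattern (the swap itself is elided):

```python
# Minimal sketch of a reference-counted context manager, mirroring the
# _REF_COUNT logic above; the actual configuration swap is elided.
import contextlib

_REF_COUNT = 0

@contextlib.contextmanager
def ensure_configured():
    global _REF_COUNT
    outermost = _REF_COUNT == 0
    _REF_COUNT += 1
    try:
        if outermost:
            pass  # expensive setup: swap the configuration in
        yield
    finally:
        _REF_COUNT -= 1
        if outermost:
            pass  # teardown: swap the original configuration back
```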
@@ -29,7 +29,7 @@
|
||||
import os.path
|
||||
import sys
|
||||
import uuid
|
||||
from typing import Callable, List, Optional
|
||||
from typing import Any, Callable, Dict, List, Optional, Tuple
|
||||
|
||||
from llnl.util import tty
|
||||
from llnl.util.lang import GroupedExceptionHandler
|
||||
@@ -66,6 +66,9 @@
|
||||
_bootstrap_methods = {}
|
||||
|
||||
|
||||
ConfigDictionary = Dict[str, Any]
|
||||
|
||||
|
||||
def bootstrapper(bootstrapper_type: str):
|
||||
"""Decorator to register classes implementing bootstrapping
|
||||
methods.
|
||||
@@ -86,7 +89,7 @@ class Bootstrapper:
|
||||
|
||||
config_scope_name = ""
|
||||
|
||||
def __init__(self, conf):
|
||||
def __init__(self, conf: ConfigDictionary) -> None:
|
||||
self.conf = conf
|
||||
self.name = conf["name"]
|
||||
self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])
|
||||
@@ -100,7 +103,7 @@ def __init__(self, conf):
|
||||
self.url = url
|
||||
|
||||
@property
|
||||
def mirror_scope(self):
|
||||
def mirror_scope(self) -> spack.config.InternalConfigScope:
|
||||
"""Mirror scope to be pushed onto the bootstrapping configuration when using
|
||||
this bootstrapper.
|
||||
"""
|
||||
@@ -121,7 +124,7 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
|
||||
"""
|
||||
return False
|
||||
|
||||
def try_search_path(self, executables: List[str], abstract_spec_str: str) -> bool:
|
||||
def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bool:
|
||||
"""Try to search some executables in the prefix of specs satisfying the abstract
|
||||
spec passed as argument.
|
||||
|
||||
@@ -139,13 +142,15 @@ def try_search_path(self, executables: List[str], abstract_spec_str: str) -> boo
|
||||
class BuildcacheBootstrapper(Bootstrapper):
|
||||
"""Install the software needed during bootstrapping from a buildcache."""
|
||||
|
||||
def __init__(self, conf):
|
||||
def __init__(self, conf) -> None:
|
||||
super().__init__(conf)
|
||||
self.last_search = None
|
||||
self.last_search: Optional[ConfigDictionary] = None
|
||||
self.config_scope_name = f"bootstrap_buildcache-{uuid.uuid4()}"
|
||||
|
||||
@staticmethod
|
||||
def _spec_and_platform(abstract_spec_str):
|
||||
def _spec_and_platform(
|
||||
abstract_spec_str: str,
|
||||
) -> Tuple[spack.spec.Spec, spack.platforms.Platform]:
|
||||
"""Return the spec object and platform we need to use when
|
||||
querying the buildcache.
|
||||
|
||||
@@ -158,7 +163,7 @@ def _spec_and_platform(abstract_spec_str):
|
||||
bincache_platform = spack.platforms.real_host()
|
||||
return abstract_spec, bincache_platform
|
||||
|
||||
def _read_metadata(self, package_name):
|
||||
def _read_metadata(self, package_name: str) -> Any:
|
||||
"""Return metadata about the given package."""
|
||||
json_filename = f"{package_name}.json"
|
||||
json_dir = self.metadata_dir
|
||||
@@ -167,7 +172,13 @@ def _read_metadata(self, package_name):
             data = json.load(stream)
         return data

-    def _install_by_hash(self, pkg_hash, pkg_sha256, index, bincache_platform):
+    def _install_by_hash(
+        self,
+        pkg_hash: str,
+        pkg_sha256: str,
+        index: List[spack.spec.Spec],
+        bincache_platform: spack.platforms.Platform,
+    ) -> None:
         index_spec = next(x for x in index if x.dag_hash() == pkg_hash)
         # Reconstruct the compiler that we need to use for bootstrapping
         compiler_entry = {
@@ -192,7 +203,13 @@ def _install_by_hash(self, pkg_hash, pkg_sha256, index, bincache_platform):
             match, allow_root=True, unsigned=True, force=True, sha256=pkg_sha256
         )

-    def _install_and_test(self, abstract_spec, bincache_platform, bincache_data, test_fn):
+    def _install_and_test(
+        self,
+        abstract_spec: spack.spec.Spec,
+        bincache_platform: spack.platforms.Platform,
+        bincache_data,
+        test_fn,
+    ) -> bool:
         # Ensure we see only the buildcache being used to bootstrap
         with spack.config.override(self.mirror_scope):
             # This index is currently needed to get the compiler used to build some
@@ -217,13 +234,14 @@ def _install_and_test(self, abstract_spec, bincache_platform, bincache_data, tes
                 for _, pkg_hash, pkg_sha256 in item["binaries"]:
                     self._install_by_hash(pkg_hash, pkg_sha256, index, bincache_platform)

-                info = {}
+                info: ConfigDictionary = {}
                 if test_fn(query_spec=abstract_spec, query_info=info):
                     self.last_search = info
                     return True
         return False

-    def try_import(self, module, abstract_spec_str):
+    def try_import(self, module: str, abstract_spec_str: str) -> bool:
+        info: ConfigDictionary
         test_fn, info = functools.partial(_try_import_from_store, module), {}
         if test_fn(query_spec=abstract_spec_str, query_info=info):
             return True
@@ -235,7 +253,8 @@ def try_import(self, module, abstract_spec_str):
         data = self._read_metadata(module)
         return self._install_and_test(abstract_spec, bincache_platform, data, test_fn)

-    def try_search_path(self, executables, abstract_spec_str):
+    def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bool:
+        info: ConfigDictionary
         test_fn, info = functools.partial(_executables_in_store, executables), {}
         if test_fn(query_spec=abstract_spec_str, query_info=info):
             self.last_search = info
@@ -251,13 +270,13 @@ def try_search_path(self, executables, abstract_spec_str):
 class SourceBootstrapper(Bootstrapper):
     """Install the software needed during bootstrapping from sources."""

-    def __init__(self, conf):
+    def __init__(self, conf) -> None:
         super().__init__(conf)
-        self.last_search = None
+        self.last_search: Optional[ConfigDictionary] = None
         self.config_scope_name = f"bootstrap_source-{uuid.uuid4()}"

-    def try_import(self, module, abstract_spec_str):
-        info = {}
+    def try_import(self, module: str, abstract_spec_str: str) -> bool:
+        info: ConfigDictionary = {}
         if _try_import_from_store(module, abstract_spec_str, query_info=info):
             self.last_search = info
             return True
@@ -293,8 +312,8 @@ def try_import(self, module, abstract_spec_str):
             return True
         return False

-    def try_search_path(self, executables, abstract_spec_str):
-        info = {}
+    def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bool:
+        info: ConfigDictionary = {}
         if _executables_in_store(executables, abstract_spec_str, query_info=info):
             self.last_search = info
             return True
@@ -323,13 +342,13 @@ def try_search_path(self, executables, abstract_spec_str):
         return False


-def create_bootstrapper(conf):
+def create_bootstrapper(conf: ConfigDictionary):
     """Return a bootstrap object built according to the configuration argument"""
     btype = conf["type"]
     return _bootstrap_methods[btype](conf)


-def source_is_enabled_or_raise(conf):
+def source_is_enabled_or_raise(conf: ConfigDictionary):
     """Raise ValueError if the source is not enabled for bootstrapping"""
     trusted, name = spack.config.get("bootstrap:trusted"), conf["name"]
     if not trusted.get(name, False):
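The `create_bootstrapper` factory above pairs with the `bootstrapper` decorator and the `_bootstrap_methods` dictionary shown earlier: each `Bootstrapper` subclass registers itself under a type key, and the factory instantiates whichever class the configuration names. A minimal, self-contained sketch of the same registry pattern (all names below are illustrative, not Spack's):

    from typing import Any, Callable, Dict, Type

    ConfigDictionary = Dict[str, Any]
    _methods: Dict[str, Type] = {}  # registry: "type" key -> class

    def register(type_name: str) -> Callable[[Type], Type]:
        """Class decorator that records the class under the given key."""
        def wrapper(cls: Type) -> Type:
            _methods[type_name] = cls
            return cls
        return wrapper

    @register("buildcache")
    class Buildcache:
        def __init__(self, conf: ConfigDictionary) -> None:
            self.name = conf["name"]

    def create(conf: ConfigDictionary):
        """Factory: look the class up by the "type" key and instantiate it."""
        return _methods[conf["type"]](conf)

    obj = create({"type": "buildcache", "name": "github-actions"})
    assert isinstance(obj, Buildcache)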
@@ -454,7 +473,7 @@ def ensure_executables_in_path_or_raise(
         raise RuntimeError(msg)


-def _add_externals_if_missing():
+def _add_externals_if_missing() -> None:
     search_list = [
         # clingo
         spack.repo.path.get_pkg_class("cmake"),
@@ -468,41 +487,41 @@ def _add_externals_if_missing():
     spack.detection.update_configuration(detected_packages, scope="bootstrap")


-def clingo_root_spec():
+def clingo_root_spec() -> str:
     """Return the root spec used to bootstrap clingo"""
     return _root_spec("clingo-bootstrap@spack+python")


-def ensure_clingo_importable_or_raise():
+def ensure_clingo_importable_or_raise() -> None:
     """Ensure that the clingo module is available for import."""
     ensure_module_importable_or_raise(module="clingo", abstract_spec=clingo_root_spec())


-def gnupg_root_spec():
+def gnupg_root_spec() -> str:
     """Return the root spec used to bootstrap GnuPG"""
     return _root_spec("gnupg@2.3:")


-def ensure_gpg_in_path_or_raise():
+def ensure_gpg_in_path_or_raise() -> None:
     """Ensure gpg or gpg2 are in the PATH or raise."""
     return ensure_executables_in_path_or_raise(
         executables=["gpg2", "gpg"], abstract_spec=gnupg_root_spec()
     )


-def patchelf_root_spec():
+def patchelf_root_spec() -> str:
     """Return the root spec used to bootstrap patchelf"""
     # 0.13.1 is the last version not to require C++17.
     return _root_spec("patchelf@0.13.1:")


-def verify_patchelf(patchelf):
+def verify_patchelf(patchelf: "spack.util.executable.Executable") -> bool:
     """Older patchelf versions can produce broken binaries, so we
     verify the version here.

     Arguments:
-        patchelf (spack.util.executable.Executable): patchelf executable
+        patchelf: patchelf executable
     """
     out = patchelf("--version", output=str, error=os.devnull, fail_on_error=False).strip()
     if patchelf.returncode != 0:
@@ -517,7 +536,7 @@ def verify_patchelf(patchelf):
     return version >= spack.version.Version("0.13.1")


-def ensure_patchelf_in_path_or_raise():
+def ensure_patchelf_in_path_or_raise() -> None:
     """Ensure patchelf is in the PATH or raise."""
     # The old concretizer is not smart and we're doing its job: if the latest patchelf
     # does not concretize because the compiler doesn't support C++17, we try to
@@ -534,7 +553,7 @@ def ensure_patchelf_in_path_or_raise():
     )


-def ensure_core_dependencies():
+def ensure_core_dependencies() -> None:
     """Ensure the presence of all the core dependencies."""
     if sys.platform.lower() == "linux":
         ensure_patchelf_in_path_or_raise()
@@ -543,7 +562,7 @@ def ensure_core_dependencies():
     ensure_clingo_importable_or_raise()


-def all_core_root_specs():
+def all_core_root_specs() -> List[str]:
     """Return a list of all the core root specs that may be used to bootstrap Spack"""
     return [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec()]
@@ -9,6 +9,7 @@
 import pathlib
 import sys
 import warnings
+from typing import List

 import archspec.cpu
@@ -27,7 +28,7 @@ class BootstrapEnvironment(spack.environment.Environment):
     """Environment to install dependencies of Spack for a given interpreter and architecture"""

     @classmethod
-    def spack_dev_requirements(cls):
+    def spack_dev_requirements(cls) -> List[str]:
         """Spack development requirements"""
         return [
             isort_root_spec(),
@@ -38,7 +39,7 @@ def spack_dev_requirements(cls):
         ]

     @classmethod
-    def environment_root(cls):
+    def environment_root(cls) -> pathlib.Path:
         """Environment root directory"""
         bootstrap_root_path = root_path()
         python_part = spec_for_current_python().replace("@", "")
@@ -52,12 +53,12 @@ def environment_root(cls):
         )

     @classmethod
-    def view_root(cls):
+    def view_root(cls) -> pathlib.Path:
         """Location of the view"""
         return cls.environment_root().joinpath("view")

     @classmethod
-    def pythonpaths(cls):
+    def pythonpaths(cls) -> List[str]:
         """Paths to be added to sys.path or PYTHONPATH"""
         python_dir_part = f"python{'.'.join(str(x) for x in sys.version_info[:2])}"
         glob_expr = str(cls.view_root().joinpath("**", python_dir_part, "**"))
@@ -68,21 +69,21 @@ def pythonpaths(cls):
         return result

     @classmethod
-    def bin_dirs(cls):
+    def bin_dirs(cls) -> List[pathlib.Path]:
         """Paths to be added to PATH"""
         return [cls.view_root().joinpath("bin")]

     @classmethod
-    def spack_yaml(cls):
+    def spack_yaml(cls) -> pathlib.Path:
         """Environment spack.yaml file"""
         return cls.environment_root().joinpath("spack.yaml")

-    def __init__(self):
+    def __init__(self) -> None:
         if not self.spack_yaml().exists():
             self._write_spack_yaml_file()
         super().__init__(self.environment_root())

-    def update_installations(self):
+    def update_installations(self) -> None:
         """Update the installations of this environment.

         The update is done using a depfile on Linux and macOS, and using the ``install_all``
@@ -103,7 +104,7 @@ def update_installations(self):
             self._install_with_depfile()
         self.write(regenerate=True)

-    def update_syspath_and_environ(self):
+    def update_syspath_and_environ(self) -> None:
         """Update ``sys.path`` and the PATH, PYTHONPATH environment variables to point to
         the environment view.
         """
@@ -119,7 +120,7 @@ def update_syspath_and_environ(self):
             + [str(x) for x in self.pythonpaths()]
         )

-    def _install_with_depfile(self):
+    def _install_with_depfile(self) -> None:
         spackcmd = spack.util.executable.which("spack")
         spackcmd(
             "-e",
@@ -141,7 +142,7 @@ def _install_with_depfile(self):
             **kwargs,
         )

-    def _write_spack_yaml_file(self):
+    def _write_spack_yaml_file(self) -> None:
         tty.msg(
             "[BOOTSTRAPPING] Spack has missing dependencies, creating a bootstrapping environment"
         )
@@ -159,32 +160,32 @@ def _write_spack_yaml_file(self):
         self.spack_yaml().write_text(template.render(context), encoding="utf-8")


-def isort_root_spec():
+def isort_root_spec() -> str:
     """Return the root spec used to bootstrap isort"""
     return _root_spec("py-isort@4.3.5:")


-def mypy_root_spec():
+def mypy_root_spec() -> str:
     """Return the root spec used to bootstrap mypy"""
     return _root_spec("py-mypy@0.900:")


-def black_root_spec():
+def black_root_spec() -> str:
     """Return the root spec used to bootstrap black"""
     return _root_spec("py-black@:23.1.0")


-def flake8_root_spec():
+def flake8_root_spec() -> str:
     """Return the root spec used to bootstrap flake8"""
     return _root_spec("py-flake8")


-def pytest_root_spec():
+def pytest_root_spec() -> str:
     """Return the root spec used to bootstrap pytest"""
     return _root_spec("py-pytest")


-def ensure_environment_dependencies():
+def ensure_environment_dependencies() -> None:
     """Ensure Spack dependencies from the bootstrap environment are installed and ready to use"""
     with BootstrapEnvironment() as env:
         env.update_installations()
@@ -4,6 +4,7 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """Query the status of bootstrapping on this machine"""
 import platform
+from typing import List, Optional, Sequence, Tuple, Union

 import spack.util.executable
@@ -19,8 +20,12 @@
     pytest_root_spec,
 )

+ExecutablesType = Union[str, Sequence[str]]
+RequiredResponseType = Tuple[bool, Optional[str]]
+SpecLike = Union["spack.spec.Spec", str]
+

-def _required_system_executable(exes, msg):
+def _required_system_executable(exes: ExecutablesType, msg: str) -> RequiredResponseType:
     """Search for an executable in the system path only."""
     if isinstance(exes, str):
         exes = (exes,)
@@ -29,7 +34,9 @@ def _required_system_executable(exes, msg):
     return False, msg


-def _required_executable(exes, query_spec, msg):
+def _required_executable(
+    exes: ExecutablesType, query_spec: SpecLike, msg: str
+) -> RequiredResponseType:
     """Search for an executable in the system path or in the bootstrap store."""
     if isinstance(exes, str):
         exes = (exes,)
@@ -38,7 +45,7 @@ def _required_executable(exes, query_spec, msg):
     return False, msg


-def _required_python_module(module, query_spec, msg):
+def _required_python_module(module: str, query_spec: SpecLike, msg: str) -> RequiredResponseType:
     """Check if a Python module is available in the current interpreter or
     if it can be loaded from the bootstrap store
     """
@@ -47,7 +54,7 @@ def _required_python_module(module, query_spec, msg):
     return False, msg


-def _missing(name, purpose, system_only=True):
+def _missing(name: str, purpose: str, system_only: bool = True) -> str:
     """Message to be printed if an executable is not found"""
     msg = '[{2}] MISSING "{0}": {1}'
     if not system_only:
@@ -55,7 +62,7 @@ def _missing(name, purpose, system_only=True):
     return msg.format(name, purpose, "@*y{{-}}")


-def _core_requirements():
+def _core_requirements() -> List[RequiredResponseType]:
     _core_system_exes = {
         "make": _missing("make", "required to build software from sources"),
         "patch": _missing("patch", "required to patch source code before building"),
@@ -80,7 +87,7 @@ def _core_requirements():
     return result


-def _buildcache_requirements():
+def _buildcache_requirements() -> List[RequiredResponseType]:
     _buildcache_exes = {
         "file": _missing("file", "required to analyze files for buildcaches"),
         ("gpg2", "gpg"): _missing("gpg2", "required to sign/verify buildcaches", False),
@@ -103,7 +110,7 @@ def _buildcache_requirements():
     return result


-def _optional_requirements():
+def _optional_requirements() -> List[RequiredResponseType]:
     _optional_exes = {
         "zstd": _missing("zstd", "required to compress/decompress code archives"),
         "svn": _missing("svn", "required to manage subversion repositories"),
@@ -114,7 +121,7 @@ def _optional_requirements():
     return result


-def _development_requirements():
+def _development_requirements() -> List[RequiredResponseType]:
     # Ensure we trigger environment modifications if we have an environment
     if BootstrapEnvironment.spack_yaml().exists():
         with BootstrapEnvironment() as env:
@@ -139,7 +146,7 @@ def _development_requirements():
     ]


-def status_message(section):
+def status_message(section) -> Tuple[str, bool]:
     """Return a status message to be printed to screen that refers to the
     section passed as argument and a bool which is True if there are missing
     dependencies.
@@ -161,7 +168,7 @@ def status_message(section):
     with ensure_bootstrap_configuration():
         missing_software = False
         for found, err_msg in required_software():
-            if not found:
+            if not found and err_msg:
                 missing_software = True
                 msg += "\n " + err_msg
     msg += "\n"
@@ -5,6 +5,7 @@

 """Caches used by Spack to store data"""
 import os
+from typing import Union

 import llnl.util.lang
 from llnl.util.filesystem import mkdirp
@@ -34,7 +35,9 @@ def _misc_cache():


 #: Spack's cache for small data
-misc_cache = llnl.util.lang.Singleton(_misc_cache)
+misc_cache: Union[
+    spack.util.file_cache.FileCache, llnl.util.lang.Singleton
+] = llnl.util.lang.Singleton(_misc_cache)


 def fetch_cache_location():
@@ -88,4 +91,6 @@ def symlink(self, mirror_ref):


 #: Spack's local cache for downloaded source archives
-fetch_cache = llnl.util.lang.Singleton(_fetch_cache)
+fetch_cache: Union[
+    spack.fetch_strategy.FsCache, llnl.util.lang.Singleton
+] = llnl.util.lang.Singleton(_fetch_cache)
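Both annotations above follow the same recipe: a module-level global is created lazily through `llnl.util.lang.Singleton`, so its static type is a `Union` of the proxy and the value it eventually wraps. A rough sketch of why the `Union` is needed, using a hand-rolled lazy proxy (illustrative, not the llnl implementation):

    from typing import Any, Callable, Union

    class Singleton:
        """Tiny lazy proxy: builds the wrapped object on first attribute access."""
        def __init__(self, factory: Callable[[], Any]) -> None:
            self._factory = factory
            self._instance = None

        def __getattr__(self, name: str) -> Any:
            if self._instance is None:
                self._instance = self._factory()
            return getattr(self._instance, name)

    class FileCache:
        def lookup(self, key: str) -> str:
            return f"cached:{key}"

    # The Union annotation tells a type checker the global may be either
    # the proxy or the real cache, depending on when it is inspected.
    misc_cache: Union[FileCache, Singleton] = Singleton(FileCache)
    print(misc_cache.lookup("x"))  # the FileCache is built on first use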
@@ -756,10 +756,20 @@ def generate_gitlab_ci_yaml(
     ci_config = cfg.get("ci")

     if not ci_config:
-        tty.die('Environment yaml does not have "ci" section')
+        tty.warn("Environment does not have a `ci` configuration")
+        gitlabci_config = yaml_root.get("gitlab-ci")
+        if not gitlabci_config:
+            tty.die("Environment yaml does not have `gitlab-ci` config section. Cannot recover.")
+
+        tty.warn(
+            "The `gitlab-ci` configuration is deprecated in favor of `ci`.\n",
+            "To update run \n\t$ spack env update /path/to/ci/spack.yaml",
+        )
+        translate_deprecated_config(gitlabci_config)
+        ci_config = gitlabci_config

     # Default target is gitlab...and only target is gitlab
-    if "target" in ci_config and ci_config["target"] != "gitlab":
+    if not ci_config.get("target", "gitlab") == "gitlab":
         tty.die('Spack CI module only generates target "gitlab"')

     cdash_config = cfg.get("cdash")
@@ -938,6 +948,10 @@ def generate_gitlab_ci_yaml(
         env_includes.extend(include_scopes)
         env_yaml_root["spack"]["include"] = env_includes

+        if "gitlab-ci" in env_yaml_root["spack"] and "ci" not in env_yaml_root["spack"]:
+            env_yaml_root["spack"]["ci"] = env_yaml_root["spack"].pop("gitlab-ci")
+        translate_deprecated_config(env_yaml_root["spack"]["ci"])
+
         with open(os.path.join(concrete_env_dir, "spack.yaml"), "w") as fd:
             fd.write(syaml.dump_config(env_yaml_root, default_flow_style=False))
@@ -1445,6 +1459,9 @@ def main_script_replacements(cmd):
     if spack_stack_name:
         output_object["variables"]["SPACK_CI_STACK_NAME"] = spack_stack_name

+    # Ensure the child pipeline always runs
+    output_object["workflow"] = {"rules": [{"when": "always"}]}
+
     if spack_buildcache_copy:
         # Write out the file describing specs that should be copied
         copy_specs_dir = os.path.join(pipeline_artifacts_dir, "specs_to_copy")
@@ -2471,3 +2488,66 @@ def report_skipped(self, spec, directory_name, reason):
         )
         reporter = CDash(configuration=configuration)
         reporter.test_skipped_report(directory_name, spec, reason)
+
+
+def translate_deprecated_config(config):
+    # Remove all deprecated keys from config
+    mappings = config.pop("mappings", [])
+    match_behavior = config.pop("match_behavior", "first")
+
+    build_job = {}
+    if "image" in config:
+        build_job["image"] = config.pop("image")
+    if "tags" in config:
+        build_job["tags"] = config.pop("tags")
+    if "variables" in config:
+        build_job["variables"] = config.pop("variables")
+    if "before_script" in config:
+        build_job["before_script"] = config.pop("before_script")
+    if "script" in config:
+        build_job["script"] = config.pop("script")
+    if "after_script" in config:
+        build_job["after_script"] = config.pop("after_script")
+
+    signing_job = None
+    if "signing-job-attributes" in config:
+        signing_job = {"signing-job": config.pop("signing-job-attributes")}
+
+    service_job_attributes = None
+    if "service-job-attributes" in config:
+        service_job_attributes = config.pop("service-job-attributes")
+
+    # If this config already has pipeline-gen, do nothing more
+    if "pipeline-gen" in config:
+        return True if mappings or build_job or signing_job or service_job_attributes else False
+
+    config["target"] = "gitlab"
+
+    config["pipeline-gen"] = []
+    pipeline_gen = config["pipeline-gen"]
+
+    # Build Job
+    submapping = []
+    for section in mappings:
+        submapping_section = {"match": section["match"]}
+        if "runner-attributes" in section:
+            submapping_section["build-job"] = section["runner-attributes"]
+        if "remove-attributes" in section:
+            submapping_section["build-job-remove"] = section["remove-attributes"]
+        submapping.append(submapping_section)
+    pipeline_gen.append({"submapping": submapping, "match_behavior": match_behavior})
+
+    if build_job:
+        pipeline_gen.append({"build-job": build_job})
+
+    # Signing Job
+    if signing_job:
+        pipeline_gen.append(signing_job)
+
+    # Service Jobs
+    if service_job_attributes:
+        pipeline_gen.append({"reindex-job": service_job_attributes})
+        pipeline_gen.append({"noop-job": service_job_attributes})
+        pipeline_gen.append({"cleanup-job": service_job_attributes})
+
+    return True
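`translate_deprecated_config` mutates a legacy `gitlab-ci` style dictionary in place and returns whether anything was rewritten. A hypothetical round trip through the function as defined above (the input values are made up for illustration):

    legacy = {
        "mappings": [
            {
                "match": ["os=ubuntu22.04"],
                "runner-attributes": {"tags": ["spack-k8s"]},
            }
        ],
        "image": "ghcr.io/spack/ubuntu:22.04",
    }

    changed = translate_deprecated_config(legacy)
    assert changed is True
    assert legacy["target"] == "gitlab"
    # "mappings" became a submapping entry; top-level job keys moved to a build-job.
    assert legacy["pipeline-gen"] == [
        {
            "submapping": [
                {"match": ["os=ubuntu22.04"], "build-job": {"tags": ["spack-k8s"]}}
            ],
            "match_behavior": "first",
        },
        {"build-job": {"image": "ghcr.io/spack/ubuntu:22.04"}},
    ]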
@@ -7,6 +7,7 @@
 import collections
 import os
 import shutil
+from typing import List

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
@@ -244,30 +245,35 @@ def config_remove(args):
     spack.config.set(path, existing, scope)


-def _can_update_config_file(scope_dir, cfg_file):
-    dir_ok = fs.can_write_to_dir(scope_dir)
-    cfg_ok = fs.can_access(cfg_file)
-    return dir_ok and cfg_ok
+def _can_update_config_file(scope: spack.config.ConfigScope, cfg_file):
+    if isinstance(scope, spack.config.SingleFileScope):
+        return fs.can_access(cfg_file)
+    return fs.can_write_to_dir(scope.path) and fs.can_access(cfg_file)


 def config_update(args):
     # Read the configuration files
     spack.config.config.get_config(args.section, scope=args.scope)
-    updates = spack.config.config.format_updates[args.section]
+    updates: List[spack.config.ConfigScope] = list(
+        filter(
+            lambda s: not isinstance(
+                s, (spack.config.InternalConfigScope, spack.config.ImmutableConfigScope)
+            ),
+            spack.config.config.format_updates[args.section],
+        )
+    )

     cannot_overwrite, skip_system_scope = [], False
     for scope in updates:
         cfg_file = spack.config.config.get_config_filename(scope.name, args.section)
-        scope_dir = scope.path
-        can_be_updated = _can_update_config_file(scope_dir, cfg_file)
+        can_be_updated = _can_update_config_file(scope, cfg_file)
         if not can_be_updated:
             if scope.name == "system":
                 skip_system_scope = True
-                msg = (
+                tty.warn(
                     'Not enough permissions to write to "system" scope. '
-                    "Skipping update at that location [cfg={0}]"
+                    f"Skipping update at that location [cfg={cfg_file}]"
                 )
-                tty.warn(msg.format(cfg_file))
                 continue
             cannot_overwrite.append((scope, cfg_file))
@@ -315,18 +321,14 @@ def config_update(args):
     # Get a function to update the format
     update_fn = spack.config.ensure_latest_format_fn(args.section)
     for scope in updates:
         cfg_file = spack.config.config.get_config_filename(scope.name, args.section)
-        with open(cfg_file) as f:
-            data = syaml.load_config(f) or {}
-            data = data.pop(args.section, {})
+        data = scope.get_section(args.section).pop(args.section)
         update_fn(data)

         # Make a backup copy and rewrite the file
         bkp_file = cfg_file + ".bkp"
         shutil.copy(cfg_file, bkp_file)
         spack.config.config.update_config(args.section, data, scope=scope.name, force=True)
-        msg = 'File "{0}" updated [backup={1}]'
-        tty.msg(msg.format(cfg_file, bkp_file))
+        tty.msg(f'File "{cfg_file}" updated [backup={bkp_file}]')


 def _can_revert_update(scope_dir, cfg_file, bkp_file):
@@ -340,6 +340,10 @@ def install(parser, args):
     if args.deprecated:
         spack.config.set("config:deprecated", True, scope="command_line")

+    if args.log_file and not args.log_format:
+        msg = "the '--log-format' must be specified when using '--log-file'"
+        tty.die(msg)
+
     arguments.sanitize_reporter_options(args)

     def reporter_factory(specs):
@@ -21,6 +21,7 @@
 import tempfile
 from contextlib import contextmanager
 from itertools import chain
+from typing import Union

 import archspec.cpu
@@ -43,7 +44,9 @@
 from spack.version import Version, VersionList, VersionRange, ver

 #: implements rudimentary logic for ABI compatibility
-_abi = llnl.util.lang.Singleton(lambda: spack.abi.ABI())
+_abi: Union[spack.abi.ABI, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(
+    lambda: spack.abi.ABI()
+)


 @functools.total_ordering
@@ -36,9 +36,10 @@
 import re
 import sys
 from contextlib import contextmanager
-from typing import Dict, List, Optional
+from typing import Dict, List, Optional, Union

 import ruamel.yaml as yaml
+from ruamel.yaml.comments import Comment
 from ruamel.yaml.error import MarkedYAMLError

 import llnl.util.lang
@@ -543,16 +544,14 @@ def update_config(
         scope = self._validate_scope(scope)  # get ConfigScope object

         # manually preserve comments
-        need_comment_copy = section in scope.sections and scope.sections[section] is not None
+        need_comment_copy = section in scope.sections and scope.sections[section]
         if need_comment_copy:
-            comments = getattr(
-                scope.sections[section][section], yaml.comments.Comment.attrib, None
-            )
+            comments = getattr(scope.sections[section][section], Comment.attrib, None)

         # read only the requested section's data.
         scope.sections[section] = syaml.syaml_dict({section: update_data})
         if need_comment_copy and comments:
-            setattr(scope.sections[section][section], yaml.comments.Comment.attrib, comments)
+            setattr(scope.sections[section][section], Comment.attrib, comments)

         scope._write_section(section)
@@ -838,7 +837,7 @@ def _config():


 #: This is the singleton configuration instance for Spack.
-config = llnl.util.lang.Singleton(_config)
+config: Union[Configuration, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(_config)


 def add_from_file(filename, scope=None):
@@ -39,10 +39,10 @@ def validate(configuration_file):
     # Ensure we have a "container" attribute with sensible defaults set
     env_dict = ev.config_dict(config)
     env_dict.setdefault(
-        "container", {"format": "docker", "images": {"os": "ubuntu:18.04", "spack": "develop"}}
+        "container", {"format": "docker", "images": {"os": "ubuntu:22.04", "spack": "develop"}}
     )
     env_dict["container"].setdefault("format", "docker")
-    env_dict["container"].setdefault("images", {"os": "ubuntu:18.04", "spack": "develop"})
+    env_dict["container"].setdefault("images", {"os": "ubuntu:22.04", "spack": "develop"})

     # Remove attributes that are not needed / allowed in the
     # container recipe
@@ -7,6 +7,7 @@
 """
 import collections
 import copy
+from typing import Optional

 import spack.environment as ev
 import spack.schema.env
@@ -131,6 +132,9 @@ class PathContext(tengine.Context):
     directly via PATH.
     """

+    # Must be set by derived classes
+    template_name: Optional[str] = None
+
     def __init__(self, config, last_phase):
         self.config = ev.config_dict(config)
         self.container_config = self.config["container"]
@@ -146,6 +150,10 @@ def __init__(self, config, last_phase):
         # Record the last phase
         self.last_phase = last_phase

+    @tengine.context_property
+    def depfile(self):
+        return self.container_config.get("depfile", False)
+
     @tengine.context_property
     def run(self):
         """Information related to the run image."""
@@ -280,7 +288,8 @@ def render_phase(self):
     def __call__(self):
         """Returns the recipe as a string"""
         env = tengine.make_environment()
-        t = env.get_template(self.template_name)
+        template_name = self.container_config.get("template", self.template_name)
+        t = env.get_template(template_name)
         return t.render(**self.to_dict())
@@ -13,6 +13,7 @@
 import time
 import urllib.parse
 import urllib.request
 import warnings
+from typing import List, Optional

 import ruamel.yaml as yaml
@@ -697,6 +698,8 @@ def __init__(self, path, init_file=None, with_view=None, keep_relative=False):
         # This attribute will be set properly from configuration
         # during concretization
         self.unify = None
+        self.new_specs = []
+        self.new_installs = []
         self.clear()

         if init_file:
@@ -1748,31 +1751,8 @@ def install_all(self, **install_args):
         self.install_specs(None, **install_args)

     def install_specs(self, specs=None, **install_args):
-        tty.debug("Assessing installation status of environment packages")
-        # If "spack install" is invoked repeatedly for a large environment
-        # where all specs are already installed, the operation can take
-        # a large amount of time due to repeatedly acquiring and releasing
-        # locks. As a small optimization, drop already installed root specs.
-        installed_roots, uninstalled_roots = self._partition_roots_by_install_status()
-        if specs:
-            specs_to_install = [s for s in specs if s not in installed_roots]
-            specs_dropped = [s for s in specs if s in installed_roots]
-        else:
-            specs_to_install = uninstalled_roots
-            specs_dropped = installed_roots
-
-        # We need to repeat the work of the installer thanks to the above optimization:
-        # Already installed root specs should be marked explicitly installed in the
-        # database.
-        if specs_dropped:
-            with spack.store.db.write_transaction():  # do all in one transaction
-                for spec in specs_dropped:
-                    spack.store.db.update_explicit(spec, True)
-
-        if not specs_to_install:
-            tty.msg("All of the packages are already installed")
-        else:
-            tty.debug("Processing {0} uninstalled specs".format(len(specs_to_install)))
+        specs_to_install = specs or [concrete for _, concrete in self.concretized_specs()]
+        tty.debug("Processing {0} specs".format(len(specs_to_install)))

         specs_to_overwrite = self._get_overwrite_specs()
         tty.debug("{0} specs need to be overwritten".format(len(specs_to_overwrite)))
@@ -2077,18 +2057,73 @@ def _read_lockfile_dict(self, d):
         for spec_dag_hash in self.concretized_order:
             self.specs_by_hash[spec_dag_hash] = first_seen[spec_dag_hash]

-    def write(self, regenerate=True):
+    def write(self, regenerate: bool = True) -> None:
         """Writes an in-memory environment to its location on disk.

         Write out package files for each newly concretized spec. Also
         regenerate any views associated with the environment and run post-write
         hooks, if regenerate is True.

-        Arguments:
-            regenerate (bool): regenerate views and run post-write hooks as
-                well as writing if True.
+        Args:
+            regenerate: regenerate views and run post-write hooks as well as writing if True.
         """
-        # Warn that environments are not in the latest format.
-        if not is_latest_format(self.manifest_path):
+        self.manifest_uptodate_or_warn()
+        if self.specs_by_hash:
+            self.ensure_env_directory_exists(dot_env=True)
+            self.update_environment_repository()
+            self.update_manifest()
+            # Write the lock file last. This is useful for Makefiles
+            # with `spack.lock: spack.yaml` rules, where the target
+            # should be newer than the prerequisite to avoid
+            # redundant re-concretization.
+            self.update_lockfile()
+        else:
+            self.ensure_env_directory_exists(dot_env=False)
+            with fs.safe_remove(self.lock_path):
+                self.update_manifest()
+
+        if regenerate:
+            self.regenerate_views()
+            spack.hooks.post_env_write(self)
+
+        self._reset_new_specs_and_installs()
+
+    def _reset_new_specs_and_installs(self) -> None:
+        self.new_specs = []
+        self.new_installs = []
+
+    def update_lockfile(self) -> None:
+        with fs.write_tmp_and_move(self.lock_path) as f:
+            sjson.dump(self._to_lockfile_dict(), stream=f)
+
+    def ensure_env_directory_exists(self, dot_env: bool = False) -> None:
+        """Ensure that the root directory of the environment exists
+
+        Args:
+            dot_env: if True also ensures that the <root>/.env directory exists
+        """
+        fs.mkdirp(self.path)
+        if dot_env:
+            fs.mkdirp(self.env_subdir_path)
+
+    def update_environment_repository(self) -> None:
+        """Updates the repository associated with the environment."""
+        for spec in spack.traverse.traverse_nodes(self.new_specs):
+            if not spec.concrete:
+                raise ValueError("specs passed to environment.write() must be concrete!")
+
+            self._add_to_environment_repository(spec)
+
+    def _add_to_environment_repository(self, spec_node: Spec) -> None:
+        """Add the root node of the spec to the environment repository"""
+        repository_dir = os.path.join(self.repos_path, spec_node.namespace)
+        repository = spack.repo.create_or_construct(repository_dir, spec_node.namespace)
+        pkg_dir = repository.dirname_for_package_name(spec_node.name)
+        fs.mkdirp(pkg_dir)
+        spack.repo.path.dump_provenance(spec_node, pkg_dir)
+
+    def manifest_uptodate_or_warn(self):
+        """Emits a warning if the manifest file is not up-to-date."""
+        if not is_latest_format(self.manifest_path):
             ver = ".".join(str(s) for s in spack.spack_version_info[:2])
             msg = (
@@ -2098,61 +2133,14 @@ def write(self, regenerate=True):
                 "Note that versions of Spack older than {} may not be able to "
                 "use the updated configuration."
             )
-            tty.warn(msg.format(self.name, self.name, ver))
+            warnings.warn(msg.format(self.name, self.name, ver))

-        # ensure path in var/spack/environments
-        fs.mkdirp(self.path)
-
-        yaml_dict = config_dict(self.yaml)
-        raw_yaml_dict = config_dict(self.raw_yaml)
-
-        if self.specs_by_hash:
-            # ensure the prefix/.env directory exists
-            fs.mkdirp(self.env_subdir_path)
-
-            for spec in spack.traverse.traverse_nodes(self.new_specs):
-                if not spec.concrete:
-                    raise ValueError("specs passed to environment.write() " "must be concrete!")
-
-                root = os.path.join(self.repos_path, spec.namespace)
-                repo = spack.repo.create_or_construct(root, spec.namespace)
-                pkg_dir = repo.dirname_for_package_name(spec.name)
-
-                fs.mkdirp(pkg_dir)
-                spack.repo.path.dump_provenance(spec, pkg_dir)
-
-            self._update_and_write_manifest(raw_yaml_dict, yaml_dict)
-
-            # Write the lock file last. This is useful for Makefiles
-            # with `spack.lock: spack.yaml` rules, where the target
-            # should be newer than the prerequisite to avoid
-            # redundant re-concretization.
-            with fs.write_tmp_and_move(self.lock_path) as f:
-                sjson.dump(self._to_lockfile_dict(), stream=f)
-        else:
-            with fs.safe_remove(self.lock_path):
-                self._update_and_write_manifest(raw_yaml_dict, yaml_dict)
-
-        # TODO: rethink where this needs to happen along with
-        # writing. For some of the commands (like install, which write
-        # concrete specs AND regen) this might as well be a separate
-        # call. But, having it here makes the views consistent witht the
-        # concretized environment for most operations. Which is the
-        # special case?
-        if regenerate:
-            self.regenerate_views()
-
-            # Run post_env_hooks
-            spack.hooks.post_env_write(self)
-
-            # new specs and new installs reset at write time
-            self.new_specs = []
-            self.new_installs = []
-
-    def _update_and_write_manifest(self, raw_yaml_dict, yaml_dict):
+    def update_manifest(self):
         """Update YAML manifest for this environment based on changes to
         spec lists and views and write it.
         """
+        yaml_dict = config_dict(self.yaml)
+        raw_yaml_dict = config_dict(self.raw_yaml)
         # invalidate _repo cache
         self._repo = None
         # put any changes in the definitions in the YAML
@@ -2252,12 +2240,19 @@ def __exit__(self, exc_type, exc_val, exc_tb):
         activate(self._previous_active)


-def yaml_equivalent(first, second):
+def yaml_equivalent(first, second) -> bool:
     """Returns whether two spack yaml items are equivalent, including overrides"""
+    # YAML has timestamps and dates, but we don't use them yet in schemas
     if isinstance(first, dict):
         return isinstance(second, dict) and _equiv_dict(first, second)
     elif isinstance(first, list):
         return isinstance(second, list) and _equiv_list(first, second)
+    elif isinstance(first, bool):
+        return isinstance(second, bool) and first is second
+    elif isinstance(first, int):
+        return isinstance(second, int) and first == second
+    elif first is None:
+        return second is None
     else:  # it's a string
         return isinstance(second, str) and first == second
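The order of the new branches in `yaml_equivalent` matters: `bool` is a subclass of `int` in Python, so an `int` check placed first would match `True`/`False`, and `1 == True` would make them compare equal. A small demonstration of the pitfall the `bool`-first ordering avoids:

    # bool is a subclass of int, so an int check placed first would match True/False:
    assert isinstance(True, int) and 1 == True

    def equiv(first, second) -> bool:
        if isinstance(first, bool):  # must precede the int branch
            return isinstance(second, bool) and first is second
        if isinstance(first, int):
            return isinstance(second, int) and first == second
        return first == second

    assert equiv(1, 1)
    assert not equiv(True, 1)  # with the int branch first this would be True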
@@ -12,7 +12,7 @@
 Currently the following hooks are supported:

     * pre_install(spec)
-    * post_install(spec)
+    * post_install(spec, explicit)
     * pre_uninstall(spec)
     * post_uninstall(spec)
     * on_install_start(spec)
@@ -131,7 +131,7 @@ def find_and_patch_sonames(prefix, exclude_list, patchelf):
     return patch_sonames(patchelf, prefix, relative_paths)


-def post_install(spec):
+def post_install(spec, explicit=None):
     # Skip if disabled
     if not spack.config.get("config:shared_linking:bind", False):
         return
@@ -169,7 +169,7 @@ def write_license_file(pkg, license_path):
     f.close()


-def post_install(spec):
+def post_install(spec, explicit=None):
     """This hook symlinks local licenses to the global license for
     licensed software.
     """
@@ -10,7 +10,7 @@
 import spack.modules.common


-def _for_each_enabled(spec, method_name):
+def _for_each_enabled(spec, method_name, explicit=None):
     """Calls a method for each enabled module"""
     set_names = set(spack.config.get("modules", {}).keys())
     # If we have old-style modules enabled, we put those in the default set
@@ -27,7 +27,7 @@ def _for_each_enabled(spec, method_name):
             continue

         for type in enabled:
-            generator = spack.modules.module_types[type](spec, name)
+            generator = spack.modules.module_types[type](spec, name, explicit)
             try:
                 getattr(generator, method_name)()
             except RuntimeError as e:
@@ -36,7 +36,7 @@ def _for_each_enabled(spec, method_name):
         tty.warn(msg.format(method_name, str(e)))


-def post_install(spec):
+def post_install(spec, explicit):
     import spack.environment as ev  # break import cycle

     if ev.active_environment():
@@ -45,7 +45,7 @@ def post_install(spec):
         # can manage interactions between env views and modules
         return

-    _for_each_enabled(spec, "write")
+    _for_each_enabled(spec, "write", explicit)


 def post_uninstall(spec):
@@ -8,7 +8,7 @@
 import spack.util.file_permissions as fp


-def post_install(spec):
+def post_install(spec, explicit=None):
     if not spec.external:
         fp.set_permissions_by_spec(spec.prefix, spec)
@@ -224,7 +224,7 @@ def install_sbang():
     os.rename(sbang_tmp_path, sbang_path)


-def post_install(spec):
+def post_install(spec, explicit=None):
     """This hook edits scripts so that they call /bin/bash
     $spack_prefix/bin/sbang instead of something longer than the
     shebang limit.
@@ -6,6 +6,6 @@
 import spack.verify


-def post_install(spec):
+def post_install(spec, explicit=None):
     if not spec.external:
         spack.verify.write_manifest(spec)
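Every `post_install` hook above gains an `explicit` parameter defaulting to `None`, so the hook runner can pass the flag positionally while hooks that do not care about it remain backward compatible. A minimal sketch of that calling convention (hypothetical names, not Spack's actual hook machinery):

    from typing import Callable, List, Optional

    def write_manifest_hook(spec, explicit=None):
        # Hook that ignores the new argument; the default keeps old call sites working.
        print(f"manifest for {spec}")

    def module_file_hook(spec, explicit=None):
        # Hook that uses the flag to mark the module file as explicitly installed.
        print(f"module for {spec}, explicit={explicit}")

    _post_install_hooks: List[Callable] = [write_manifest_hook, module_file_hook]

    def post_install(spec, explicit: Optional[bool] = None) -> None:
        """Run every registered post-install hook with the explicitness flag."""
        for hook in _post_install_hooks:
            hook(spec, explicit)

    post_install("zlib@1.2.13", explicit=True)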
@@ -315,7 +315,7 @@ def _install_from_cache(pkg, cache_only, explicit, unsigned=False):
     tty.debug("Successfully extracted {0} from binary cache".format(pkg_id))
     _print_timer(pre=_log_prefix(pkg.name), pkg_id=pkg_id, timer=t)
     _print_installed_pkg(pkg.spec.prefix)
-    spack.hooks.post_install(pkg.spec)
+    spack.hooks.post_install(pkg.spec, explicit)
     return True


@@ -353,7 +353,7 @@ def _process_external_package(pkg, explicit):
     # For external packages we just need to run
     # post-install hooks to generate module files.
     tty.debug("{0} generating module file".format(pre))
-    spack.hooks.post_install(spec)
+    spack.hooks.post_install(spec, explicit)

     # Add to the DB
     tty.debug("{0} registering into DB".format(pre))
@@ -1260,6 +1260,10 @@ def _install_task(self, task):
         if not pkg.unit_test_check():
             return

+        # Injecting information to know if this installation request is the root one
+        # to determine in BuildProcessInstaller whether installation is explicit or not
+        install_args["is_root"] = task.is_root
+
         try:
             self._setup_install_dir(pkg)
@@ -1879,6 +1883,9 @@ def __init__(self, pkg, install_args):
         # whether to enable echoing of build output initially or not
         self.verbose = install_args.get("verbose", False)

+        # whether installation was explicitly requested by the user
+        self.explicit = install_args.get("is_root", False) and install_args.get("explicit", True)
+
         # env before starting installation
         self.unmodified_env = install_args.get("unmodified_env", {})
@@ -1939,7 +1946,7 @@ def run(self):
             self.timer.write_json(timelog)

         # Run post install hooks before build stage is removed.
-        spack.hooks.post_install(self.pkg.spec)
+        spack.hooks.post_install(self.pkg.spec, self.explicit)

         _print_timer(pre=self.pre, pkg_id=self.pkg_id, timer=self.timer)
         _print_installed_pkg(self.pkg.prefix)
@@ -428,12 +428,17 @@ class BaseConfiguration(object):

     default_projections = {"all": "{name}-{version}-{compiler.name}-{compiler.version}"}

-    def __init__(self, spec, module_set_name):
+    def __init__(self, spec, module_set_name, explicit=None):
         # Module where type(self) is defined
         self.module = inspect.getmodule(self)
         # Spec for which we want to generate a module file
         self.spec = spec
         self.name = module_set_name
+        # Software installation has been explicitly asked (get this information from
+        # db when querying an existing module, like during a refresh or rm operations)
+        if explicit is None:
+            explicit = spec._installed_explicitly()
+        self.explicit = explicit
         # Dictionary of configuration options that should be applied
         # to the spec
         self.conf = merge_config_rules(self.module.configuration(self.name), self.spec)
@@ -519,8 +524,7 @@ def excluded(self):
         # Should I exclude the module because it's implicit?
         # DEPRECATED: remove 'blacklist_implicits' in v0.20
         exclude_implicits = get_deprecated(conf, "exclude_implicits", "blacklist_implicits", None)
-        installed_implicitly = not spec._installed_explicitly()
-        excluded_as_implicit = exclude_implicits and installed_implicitly
+        excluded_as_implicit = exclude_implicits and not self.explicit

         def debug_info(line_header, match_list):
             if match_list:
@@ -699,7 +703,7 @@ def configure_options(self):

         if os.path.exists(pkg.install_configure_args_path):
             with open(pkg.install_configure_args_path, "r") as args_file:
-                return args_file.read()
+                return spack.util.path.padding_filter(args_file.read())

         # Returning a false-like value makes the default templates skip
         # the configure option section
@@ -788,7 +792,8 @@ def autoload(self):
     def _create_module_list_of(self, what):
         m = self.conf.module
         name = self.conf.name
-        return [m.make_layout(x, name).use_name for x in getattr(self.conf, what)]
+        explicit = self.conf.explicit
+        return [m.make_layout(x, name, explicit).use_name for x in getattr(self.conf, what)]

     @tengine.context_property
     def verbose(self):
@@ -797,7 +802,7 @@ def verbose(self):


 class BaseModuleFileWriter(object):
-    def __init__(self, spec, module_set_name):
+    def __init__(self, spec, module_set_name, explicit=None):
         self.spec = spec

         # This class is meant to be derived. Get the module of the
@@ -806,9 +811,9 @@ def __init__(self, spec, module_set_name):
         m = self.module

         # Create the triplet of configuration/layout/context
-        self.conf = m.make_configuration(spec, module_set_name)
-        self.layout = m.make_layout(spec, module_set_name)
-        self.context = m.make_context(spec, module_set_name)
+        self.conf = m.make_configuration(spec, module_set_name, explicit)
+        self.layout = m.make_layout(spec, module_set_name, explicit)
+        self.context = m.make_context(spec, module_set_name, explicit)

         # Check if a default template has been defined,
         # throw if not found
@@ -930,6 +935,7 @@ def remove(self):
         if os.path.exists(mod_file):
             try:
                 os.remove(mod_file)  # Remove the module file
+                self.remove_module_defaults()  # Remove default targeting module file
                 os.removedirs(
                     os.path.dirname(mod_file)
                 )  # Remove all the empty directories from the leaf up
@@ -937,6 +943,18 @@ def remove(self):
                 # removedirs throws OSError on first non-empty directory found
                 pass

+    def remove_module_defaults(self):
+        if not any(self.spec.satisfies(default) for default in self.conf.defaults):
+            return
+
+        # This spec matches a default, symlink needs to be removed as we remove the module
+        # file it targets.
+        default_symlink = os.path.join(os.path.dirname(self.layout.filename), "default")
+        try:
+            os.unlink(default_symlink)
+        except OSError:
+            pass
+

 @contextlib.contextmanager
 def disable_modules():
@@ -33,24 +33,26 @@ def configuration(module_set_name):
 configuration_registry: Dict[str, Any] = {}


-def make_configuration(spec, module_set_name):
+def make_configuration(spec, module_set_name, explicit):
     """Returns the lmod configuration for spec"""
-    key = (spec.dag_hash(), module_set_name)
+    key = (spec.dag_hash(), module_set_name, explicit)
     try:
         return configuration_registry[key]
     except KeyError:
-        return configuration_registry.setdefault(key, LmodConfiguration(spec, module_set_name))
+        return configuration_registry.setdefault(
+            key, LmodConfiguration(spec, module_set_name, explicit)
+        )


-def make_layout(spec, module_set_name):
+def make_layout(spec, module_set_name, explicit):
     """Returns the layout information for spec"""
-    conf = make_configuration(spec, module_set_name)
+    conf = make_configuration(spec, module_set_name, explicit)
     return LmodFileLayout(conf)


-def make_context(spec, module_set_name):
+def make_context(spec, module_set_name, explicit):
     """Returns the context information for spec"""
-    conf = make_configuration(spec, module_set_name)
+    conf = make_configuration(spec, module_set_name, explicit)
     return LmodContext(conf)
@@ -409,7 +411,7 @@ def missing(self):
     @tengine.context_property
     def unlocked_paths(self):
         """Returns the list of paths that are unlocked unconditionally."""
-        layout = make_layout(self.spec, self.conf.name)
+        layout = make_layout(self.spec, self.conf.name, self.conf.explicit)
         return [os.path.join(*parts) for parts in layout.unlocked_paths[None]]

     @tengine.context_property
@@ -417,7 +419,7 @@ def conditionally_unlocked_paths(self):
         """Returns the list of paths that are unlocked conditionally.
         Each item in the list is a tuple with the structure (condition, path).
         """
-        layout = make_layout(self.spec, self.conf.name)
+        layout = make_layout(self.spec, self.conf.name, self.conf.explicit)
         value = []
         conditional_paths = layout.unlocked_paths
         conditional_paths.pop(None)
@@ -30,24 +30,26 @@ def configuration(module_set_name):
 configuration_registry: Dict[str, Any] = {}


-def make_configuration(spec, module_set_name):
+def make_configuration(spec, module_set_name, explicit):
     """Returns the tcl configuration for spec"""
-    key = (spec.dag_hash(), module_set_name)
+    key = (spec.dag_hash(), module_set_name, explicit)
     try:
         return configuration_registry[key]
     except KeyError:
-        return configuration_registry.setdefault(key, TclConfiguration(spec, module_set_name))
+        return configuration_registry.setdefault(
+            key, TclConfiguration(spec, module_set_name, explicit)
+        )


-def make_layout(spec, module_set_name):
+def make_layout(spec, module_set_name, explicit):
     """Returns the layout information for spec"""
-    conf = make_configuration(spec, module_set_name)
+    conf = make_configuration(spec, module_set_name, explicit)
     return TclFileLayout(conf)


-def make_context(spec, module_set_name):
+def make_context(spec, module_set_name, explicit):
     """Returns the context information for spec"""
-    conf = make_configuration(spec, module_set_name)
+    conf = make_configuration(spec, module_set_name, explicit)
     return TclContext(conf)
@@ -57,7 +57,7 @@
 from spack.filesystem_view import YamlFilesystemView
 from spack.install_test import TestFailure, TestSuite
 from spack.installer import InstallError, PackageInstaller
-from spack.stage import ResourceStage, Stage, StageComposite, stage_prefix
+from spack.stage import ResourceStage, Stage, StageComposite, compute_stage_name
 from spack.util.executable import ProcessError, which
 from spack.util.package_hash import package_hash
 from spack.util.prefix import Prefix
@@ -1022,8 +1022,7 @@ def _make_root_stage(self, fetcher):
         )
         # Construct a path where the stage should build..
         s = self.spec
-        stage_name = "{0}{1}-{2}-{3}".format(stage_prefix, s.name, s.version, s.dag_hash())
-
+        stage_name = compute_stage_name(s)
         stage = Stage(
             fetcher,
             mirror_paths=mirror_paths,
@@ -675,7 +675,7 @@ def relocate_text_bin(binaries, prefixes):
     Raises:
       spack.relocate_text.BinaryTextReplaceError: when the new path is longer than the old path
     """
-    BinaryFilePrefixReplacer.from_strings_or_bytes(prefixes).apply(binaries)
+    return BinaryFilePrefixReplacer.from_strings_or_bytes(prefixes).apply(binaries)


 def is_relocatable(spec):
@@ -73,24 +73,28 @@ def is_noop(self) -> bool:
         """Returns true when the prefix to prefix map
         is mapping everything to the same location (identity)
        or there are no prefixes to replace."""
-        return not bool(self.prefix_to_prefix)
+        return not self.prefix_to_prefix

     def apply(self, filenames: list):
+        """Returns a list of files that were modified"""
+        changed_files = []
         if self.is_noop:
-            return
+            return []
         for filename in filenames:
-            self.apply_to_filename(filename)
+            if self.apply_to_filename(filename):
+                changed_files.append(filename)
+        return changed_files

     def apply_to_filename(self, filename):
         if self.is_noop:
-            return
+            return False
         with open(filename, "rb+") as f:
-            self.apply_to_file(f)
+            return self.apply_to_file(f)

     def apply_to_file(self, f):
         if self.is_noop:
-            return
-        self._apply_to_file(f)
+            return False
+        return self._apply_to_file(f)


 class TextFilePrefixReplacer(PrefixReplacer):
@@ -122,10 +126,11 @@ def _apply_to_file(self, f):
         data = f.read()
         new_data = re.sub(self.regex, replacement, data)
         if id(data) == id(new_data):
-            return
+            return False
         f.seek(0)
         f.write(new_data)
         f.truncate()
+        return True


 class BinaryFilePrefixReplacer(PrefixReplacer):
@@ -194,6 +199,9 @@ def _apply_to_file(self, f):

         Arguments:
             f: file opened in rb+ mode
+
+        Returns:
+            bool: True if file was modified
         """
         assert f.tell() == 0
@@ -201,6 +209,8 @@ def _apply_to_file(self, f):
         # but it's nasty to deal with matches across boundaries, so let's stick to
         # something simple.

+        modified = False
+
         for match in self.regex.finditer(f.read()):
             # The matching prefix (old) and its replacement (new)
             old = match.group(1)
@@ -243,6 +253,9 @@ def _apply_to_file(self, f):

             f.seek(match.start())
             f.write(replacement)
+            modified = True
+
+        return modified


 class BinaryStringReplacementError(spack.error.SpackError):
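After these changes each layer of the replacer reports whether it modified anything: `_apply_to_file` returns a bool, `apply_to_filename` propagates it, and `apply` collects the paths that changed. A standalone sketch of that contract from a caller's point of view (the stand-in class below is illustrative, not the real replacer):

    from typing import List

    class Replacer:
        """Stand-in showing the return-value contract of PrefixReplacer.apply."""
        def apply_to_filename(self, filename: str) -> bool:
            # Pretend only .txt files contain a prefix worth rewriting.
            return filename.endswith(".txt")

        def apply(self, filenames: List[str]) -> List[str]:
            changed = []
            for name in filenames:
                if self.apply_to_filename(name):  # bool bubbles up from the file level
                    changed.append(name)
            return changed

    changed = Replacer().apply(["a.txt", "b.bin"])
    assert changed == ["a.txt"]  # callers can now log or relink only what changed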
@@ -1368,7 +1368,7 @@ def create(configuration):


 #: Singleton repo path instance
-path = llnl.util.lang.Singleton(_path)
+path: Union[RepoPath, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(_path)

 # Add the finder to sys.meta_path
 REPOS_FINDER = ReposFinder()
@@ -119,7 +119,7 @@ def rewire_node(spec, explicit):
     spack.store.db.add(spec, spack.store.layout, explicit=explicit)

     # run post install hooks
-    spack.hooks.post_install(spec)
+    spack.hooks.post_install(spec, explicit)


 class RewireError(spack.error.SpackError):
@@ -11,6 +11,8 @@

 from llnl.util.lang import union_dicts

+import spack.schema.gitlab_ci
+
 # Schema for script fields
 # List of lists and/or strings
 # This is similar to what is allowed in
@@ -20,24 +22,27 @@
     "items": {"anyOf": [{"type": "string"}, {"type": "array", "items": {"type": "string"}}]},
 }

+# Schema for CI image
+image_schema = {
+    "oneOf": [
+        {"type": "string"},
+        {
+            "type": "object",
+            "properties": {
+                "name": {"type": "string"},
+                "entrypoint": {"type": "array", "items": {"type": "string"}},
+            },
+        },
+    ]
+}
+
 # Additional attributes are allowed
 # and will be forwarded directly to the
 # CI target YAML for each job.
 attributes_schema = {
     "type": "object",
     "properties": {
-        "image": {
-            "oneOf": [
-                {"type": "string"},
-                {
-                    "type": "object",
-                    "properties": {
-                        "name": {"type": "string"},
-                        "entrypoint": {"type": "array", "items": {"type": "string"}},
-                    },
-                },
-            ]
-        },
+        "image": image_schema,
         "tags": {"type": "array", "items": {"type": "string"}},
         "variables": {
             "type": "object",
@@ -169,7 +174,15 @@
|
||||
}
|
||||
|
||||
#: Properties for inclusion in other schemas
|
||||
properties = {"ci": ci_properties}
|
||||
properties = {
|
||||
"ci": {
|
||||
"oneOf": [
|
||||
ci_properties,
|
||||
# Allow legacy format under `ci` for `config update ci`
|
||||
spack.schema.gitlab_ci.gitlab_ci_properties,
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
#: Full schema with metadata
|
||||
schema = {
|
||||
@@ -179,3 +192,21 @@
|
||||
"additionalProperties": False,
|
||||
"properties": properties,
|
||||
}
|
||||
|
||||
|
||||
def update(data):
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.ci
|
||||
import spack.environment as ev
|
||||
|
||||
# Warn if deprecated section is still in the environment
|
||||
ci_env = ev.active_environment()
|
||||
if ci_env:
|
||||
env_config = ev.config_dict(ci_env.yaml)
|
||||
if "gitlab-ci" in env_config:
|
||||
tty.die("Error: `gitlab-ci` section detected with `ci`, these are not compatible")
|
||||
|
||||
# Detect if the ci section is using the new pipeline-gen
|
||||
# If it is, assume it has already been converted
|
||||
return spack.ci.translate_deprecated_config(data)
|
||||
|
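A hedged sketch of what the `oneOf` wrapper buys (assumes the `jsonschema` package; the two schemas below are toy stand-ins for `ci_properties` and the legacy `gitlab_ci_properties`, not the real definitions):

```python
import jsonschema

# Toy stand-ins: the new pipeline-gen layout and the legacy gitlab-ci layout
new_format = {"type": "object", "required": ["pipeline-gen"]}
legacy_format = {"type": "object", "required": ["mappings"]}
ci_schema = {"oneOf": [new_format, legacy_format]}

jsonschema.validate({"pipeline-gen": []}, ci_schema)  # new layout validates
jsonschema.validate({"mappings": []}, ci_schema)      # legacy layout also validates
```

Accepting both shapes under `ci` is what lets `spack config update ci` read a legacy document before translating it.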
@@ -61,6 +61,7 @@
         "build_stage": {
             "oneOf": [{"type": "string"}, {"type": "array", "items": {"type": "string"}}]
         },
+        "stage_name": {"type": "string"},
         "test_stage": {"type": "string"},
         "extensions": {"type": "array", "items": {"type": "string"}},
         "template_dirs": {"type": "array", "items": {"type": "string"}},

@@ -63,6 +63,8 @@
         },
         # Add labels to the image
         "labels": {"type": "object"},
+        # Use a custom template to render the recipe
+        "template": {"type": "string", "default": None},
         # Add a custom extra section at the bottom of a stage
         "extra_instructions": {
             "type": "object",
@@ -82,6 +84,16 @@
         },
     },
     "docker": {"type": "object", "additionalProperties": False, "default": {}},
+    "depfile": {"type": "boolean", "default": False},
 },
+"deprecatedProperties": {
+    "properties": ["extra_instructions"],
+    "message": (
+        "container:extra_instructions has been deprecated and will be removed "
+        "in Spack v0.21. Set container:template appropriately to use custom Jinja2 "
+        "templates instead."
+    ),
+    "error": False,
+},
 }

@@ -10,6 +10,7 @@
 """
 from llnl.util.lang import union_dicts

+import spack.schema.gitlab_ci  # DEPRECATED
 import spack.schema.merged
 import spack.schema.packages
 import spack.schema.projections
@@ -52,6 +53,8 @@
                 "default": {},
                 "additionalProperties": False,
                 "properties": union_dicts(
+                    # Include deprecated "gitlab-ci" section
+                    spack.schema.gitlab_ci.properties,
                     # merged configuration scope schemas
                     spack.schema.merged.properties,
                     # extra environment schema properties
@@ -130,6 +133,15 @@ def update(data):
     Returns:
         True if data was changed, False otherwise
     """
+    import spack.ci
+
+    if "gitlab-ci" in data:
+        data["ci"] = data.pop("gitlab-ci")
+
+    if "ci" in data:
+        return spack.ci.translate_deprecated_config(data["ci"])
+
     # There are not currently any deprecated attributes in this section
     # that have not been removed
     return False

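The renaming step in `update()` above is plain dictionary surgery; a minimal sketch (illustration only, data is made up):

```python
# An environment section still using the deprecated key
data = {"gitlab-ci": {"mappings": []}, "specs": ["zlib"]}

# Same move as update(): relocate the section under its new name
if "gitlab-ci" in data:
    data["ci"] = data.pop("gitlab-ci")

assert "ci" in data and "gitlab-ci" not in data
```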
@@ -103,6 +103,7 @@ def getter(node):
     "dev_spec",
     "external",
     "packages_yaml",
+    "package_requirements",
     "package_py",
     "installed",
 ]
@@ -115,9 +116,10 @@ def getter(node):
     "VersionProvenance", version_origin_fields
 )(**{name: i for i, name in enumerate(version_origin_fields)})

 #: Named tuple to contain information on declared versions
 DeclaredVersion = collections.namedtuple("DeclaredVersion", ["version", "idx", "origin"])


 # Below numbers are used to map names of criteria to the order
 # they appear in the solution. See concretize.lp

@@ -584,6 +586,54 @@ def extract_args(model, predicate_name):
     return [stringify(sym.arguments) for sym in model if sym.name == predicate_name]


+class ErrorHandler:
+    def __init__(self, model):
+        self.model = model
+        self.error_args = extract_args(model, "error")
+
+    def multiple_values_error(self, attribute, pkg):
+        return f'Cannot select a single "{attribute}" for package "{pkg}"'
+
+    def no_value_error(self, attribute, pkg):
+        return f'Cannot select a single "{attribute}" for package "{pkg}"'
+
+    def handle_error(self, msg, *args):
+        """Handle an error state derived by the solver."""
+        if msg == "multiple_values_error":
+            return self.multiple_values_error(*args)
+
+        if msg == "no_value_error":
+            return self.no_value_error(*args)
+
+        # For variant formatting, we sometimes have to construct specs
+        # to format values properly. Find/replace all occurrences of
+        # Spec(...) with the string representation of the spec mentioned
+        msg = msg.format(*args)
+        specs_to_construct = re.findall(r"Spec\(([^)]*)\)", msg)
+        for spec_str in specs_to_construct:
+            msg = msg.replace("Spec(%s)" % spec_str, str(spack.spec.Spec(spec_str)))
+
+        return msg
+
+    def message(self, errors) -> str:
+        messages = [
+            f"  {idx+1: 2}. {self.handle_error(msg, *args)}"
+            for idx, (_, msg, args) in enumerate(errors)
+        ]
+        header = "concretization failed for the following reasons:\n"
+        return "\n".join([header] + messages)
+
+    def raise_if_errors(self):
+        if not self.error_args:
+            return
+
+        errors = sorted(
+            [(int(priority), msg, args) for priority, msg, *args in self.error_args], reverse=True
+        )
+        msg = self.message(errors)
+        raise UnsatisfiableSpecError(msg)


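To make the sort/format flow concrete, here is a sketch with hand-written stand-ins for the `error` atoms the solver would emit (the tuples below are invented examples, not real solver output):

```python
# (priority, message, *args) tuples as extract_args(model, "error") would return them
error_args = [
    ("10", "Cannot satisfy '{0}@{1}'", "zlib", "2.0"),
    ("100", "no_value_error", "version", "mpich"),
]

# Same ordering as raise_if_errors(): highest-priority errors first
errors = sorted(((int(p), msg, args) for p, msg, *args in error_args), reverse=True)
for idx, (_, msg, args) in enumerate(errors):
    print(f"  {idx + 1}. {msg} {args}")
```

All collected errors are then joined into one `UnsatisfiableSpecError` message, instead of raising on the first one as the old `PyclingoDriver.handle_error` did (removed just below).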
 class PyclingoDriver(object):
     def __init__(self, cores=True):
         """Driver for the Python clingo interface.
@@ -639,20 +689,6 @@ def fact(self, head):
         if choice:
             self.assumptions.append(atom)

-    def handle_error(self, msg, *args):
-        """Handle an error state derived by the solver."""
-        msg = msg.format(*args)
-
-        # For variant formatting, we sometimes have to construct specs
-        # to format values properly. Find/replace all occurrences of
-        # Spec(...) with the string representation of the spec mentioned
-        specs_to_construct = re.findall(r"Spec\(([^)]*)\)", msg)
-        for spec_str in specs_to_construct:
-            msg = msg.replace("Spec(%s)" % spec_str, str(spack.spec.Spec(spec_str)))
-
-        # TODO: this raises early -- we should handle multiple errors if there are any.
-        raise UnsatisfiableSpecError(msg)
-
     def solve(self, setup, specs, reuse=None, output=None, control=None):
         """Set up the input and solve for dependencies of ``specs``.

@@ -754,10 +790,8 @@ def on_model(model):
         min_cost, best_model = min(models)

         # first check for errors
-        error_args = extract_args(best_model, "error")
-        errors = sorted((int(priority), msg, args) for priority, msg, *args in error_args)
-        for _, msg, args in errors:
-            self.handle_error(msg, *args)
+        error_handler = ErrorHandler(best_model)
+        error_handler.raise_if_errors()

         # build specs from spec attributes in the model
         spec_attrs = [(name, tuple(rest)) for name, *rest in extract_args(best_model, "attr")]

@@ -872,30 +906,6 @@ def key_fn(version):
             )
         )

-        for v in most_to_least_preferred:
-            # There are two paths for creating the ref_version in GitVersions.
-            # The first uses a lookup to supply a tag and distance as a version.
-            # The second is user specified and can be resolved as a standard version.
-            # This second option is constrained such that the user version must be known to Spack
-            if (
-                isinstance(v.version, spack.version.GitVersion)
-                and v.version.user_supplied_reference
-            ):
-                ref_version = spack.version.Version(v.version.ref_version_str)
-                self.gen.fact(fn.version_equivalent(pkg.name, v.version, ref_version))
-                # disqualify any git supplied version from user if they weren't already known
-                # versions in spack
-                if not any(ref_version == dv.version for dv in most_to_least_preferred if v != dv):
-                    msg = (
-                        "The reference version '{version}' for package '{package}' is not defined."
-                        " Either choose another reference version or define '{version}' in your"
-                        " version preferences or package.py file for {package}.".format(
-                            package=pkg.name, version=str(ref_version)
-                        )
-                    )
-
-                    raise UnsatisfiableSpecError(msg)
-
         # Declare deprecated versions for this package, if any
         deprecated = self.deprecated_versions[pkg.name]
         for v in sorted(deprecated):

@@ -1601,7 +1611,12 @@ def key_fn(item):
             # When COMPARING VERSIONS, the '@develop' version is always
             # larger than other versions. BUT when CONCRETIZING, the largest
             # NON-develop version is selected by default.
-            return info.get("preferred", False), not version.isdevelop(), version
+            return (
+                info.get("preferred", False),
+                not info.get("deprecated", False),
+                not version.isdevelop(),
+                version,
+            )

         for idx, item in enumerate(sorted(pkg_cls.versions.items(), key=key_fn, reverse=True)):
             v, version_info = item

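The enlarged key makes preferred versions win first, then non-deprecated, then non-develop, then the highest version. A toy illustration of the ordering (version names and flags below are made up):

```python
versions = {
    "develop": {},
    "2.0": {"deprecated": True},
    "1.9": {},
    "1.5": {"preferred": True},
}

def key_fn(item):
    version, info = item
    return (
        info.get("preferred", False),
        not info.get("deprecated", False),
        version != "develop",  # stand-in for `not version.isdevelop()`
        version,
    )

print([v for v, _ in sorted(versions.items(), key=key_fn, reverse=True)])
# -> ['1.5', '1.9', 'develop', '2.0']  (deprecated 2.0 sinks below develop)
```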
@@ -1651,6 +1666,15 @@ def add_concrete_versions_from_specs(self, specs, origin):
                 DeclaredVersion(version=dep.version, idx=0, origin=origin)
             )
             self.possible_versions[dep.name].add(dep.version)
+            if (
+                isinstance(dep.version, spack.version.GitVersion)
+                and dep.version.user_supplied_reference
+            ):
+                defined_version = spack.version.Version(dep.version.ref_version_str)
+                self.declared_versions[dep.name].append(
+                    DeclaredVersion(version=defined_version, idx=1, origin=origin)
+                )
+                self.possible_versions[dep.name].add(defined_version)

     def _supported_targets(self, compiler_name, compiler_version, targets):
         """Get a list of which targets are supported by the compiler.

@@ -1887,7 +1911,11 @@ def define_version_constraints(self):

             # This is needed to account for a variable number of
             # numbers e.g. if both 1.0 and 1.0.2 are possible versions
-            exact_match = [v for v in allowed_versions if v == versions]
+            exact_match = [
+                v
+                for v in allowed_versions
+                if v == versions and not isinstance(v, spack.version.GitVersion)
+            ]
             if exact_match:
                 allowed_versions = exact_match

@@ -2089,6 +2117,11 @@ def setup(self, driver, specs, reuse=None):
         self.add_concrete_versions_from_specs(specs, version_provenance.spec)
         self.add_concrete_versions_from_specs(dev_specs, version_provenance.dev_spec)

+        req_version_specs = _get_versioned_specs_from_pkg_requirements()
+        self.add_concrete_versions_from_specs(
+            req_version_specs, version_provenance.package_requirements
+        )
+
         self.gen.h1("Concrete input spec definitions")
         self.define_concrete_input_specs(specs, possible)

@@ -2163,6 +2196,55 @@ def literal_specs(self, specs):
         self.gen.fact(fn.concretize_everything())

+def _get_versioned_specs_from_pkg_requirements():
+    """If package requirements mention versions that are not mentioned
+    elsewhere, then we need to collect those to mark them as possible
+    versions.
+    """
+    req_version_specs = list()
+    config = spack.config.get("packages")
+    for pkg_name, d in config.items():
+        if pkg_name == "all":
+            continue
+        if "require" in d:
+            req_version_specs.extend(_specs_from_requires(pkg_name, d["require"]))
+    return req_version_specs
+
+
+def _specs_from_requires(pkg_name, section):
+    if isinstance(section, str):
+        spec = spack.spec.Spec(section)
+        if not spec.name:
+            spec.name = pkg_name
+        extracted_specs = [spec]
+    else:
+        spec_strs = []
+        for spec_group in section:
+            if isinstance(spec_group, str):
+                spec_strs.append(spec_group)
+            else:
+                # Otherwise it is a one_of or any_of: get the values
+                (x,) = spec_group.values()
+                spec_strs.extend(x)
+
+        extracted_specs = []
+        for spec_str in spec_strs:
+            spec = spack.spec.Spec(spec_str)
+            if not spec.name:
+                spec.name = pkg_name
+            extracted_specs.append(spec)
+
+    version_specs = []
+    for spec in extracted_specs:
+        try:
+            spec.version
+            version_specs.append(spec)
+        except spack.error.SpecError:
+            pass
+
+    return version_specs
+
+
 class SpecBuilder(object):
     """Class with actions to rebuild a spec from ASP results."""

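The `require` section this parser walks can be a bare string or a list mixing strings and `one_of`/`any_of` groups. A self-contained sketch of the group-unpacking step, using plain data with no Spack imports (the values are invented):

```python
section = ["%gcc", {"one_of": ["@2.2", "@2.3"]}]

spec_strs = []
for spec_group in section:
    if isinstance(spec_group, str):
        spec_strs.append(spec_group)
    else:
        # a one_of/any_of group is a single-key dict; grab its value list
        (x,) = spec_group.values()
        spec_strs.extend(x)

assert spec_strs == ["%gcc", "@2.2", "@2.3"]
```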
@@ -2237,10 +2319,8 @@ def variant_value(self, pkg, name, value):
     def version(self, pkg, version):
         self._specs[pkg].versions = spack.version.ver([version])

-    def node_compiler(self, pkg, compiler):
-        self._specs[pkg].compiler = spack.spec.CompilerSpec(compiler)
-
     def node_compiler_version(self, pkg, compiler, version):
         self._specs[pkg].compiler = spack.spec.CompilerSpec(compiler)
         self._specs[pkg].compiler.versions = spack.version.VersionList([version])

     def node_flag_compiler_default(self, pkg):
@@ -2345,7 +2425,6 @@ def sort_fn(function_tuple):

         hash attributes are handled first, since they imply entire concrete specs
         node attributes are handled next, since they instantiate nodes
-        node_compiler attributes are handled next to ensure they come before node_compiler_version
         external_spec_selected attributes are handled last, so that external extensions can find
         the concrete specs on which they depend because all nodes are fully constructed before we
         consider which ones are external.
@@ -2355,8 +2434,6 @@ def sort_fn(function_tuple):
             return (-5, 0)
         elif name == "node":
             return (-4, 0)
-        elif name == "node_compiler":
-            return (-3, 0)
         elif name == "node_flag":
             return (-2, 0)
         elif name == "external_spec_selected":

@@ -40,6 +40,24 @@ attr(Name, A1, A2, A3, A4) :- literal(LiteralID, Name, A1, A2, A3, A4), literal_
 #defined literal/5.
 #defined literal/6.

+% Attributes for node packages which must have a single value
+attr_single_value("version").
+attr_single_value("node_platform").
+attr_single_value("node_os").
+attr_single_value("node_target").
+
+% Error when no attribute value is selected
+error(100, no_value_error, Attribute, Package)
+  :- attr("node", Package),
+     attr_single_value(Attribute),
+     not attr(Attribute, Package, _).
+
+% Error when multiple values are selected for a single-valued attribute
+error(100, multiple_values_error, Attribute, Package)
+  :- attr("node", Package),
+     attr_single_value(Attribute),
+     2 { attr(Attribute, Package, Version) }.
+
 %-----------------------------------------------------------------------------
 % Version semantics
 %-----------------------------------------------------------------------------

@@ -77,21 +95,11 @@ version_satisfies(Package, Constraint, HashVersion) :- version_satisfies(Package
 % possible
 { attr("version", Package, Version) : version_declared(Package, Version) }
   :- attr("node", Package).
-error(2, "No version for '{0}' satisfies '@{1}' and '@{2}'", Package, Version1, Version2)
-  :- attr("node", Package),
-     attr("version", Package, Version1),
-     attr("version", Package, Version2),
-     Version1 < Version2. % see[1]
-
-error(2, "No versions available for package '{0}'", Package)
-  :- attr("node", Package), not attr("version", Package, _).

 % A virtual package may or may not have a version, but never has more than one
-error(2, "No version for '{0}' satisfies '@{1}' and '@{2}'", Virtual, Version1, Version2)
+error(100, "Cannot select a single version for virtual '{0}'", Virtual)
   :- attr("virtual_node", Virtual),
-     attr("version", Virtual, Version1),
-     attr("version", Virtual, Version2),
-     Version1 < Version2. % see[1]
+     2 { attr("version", Virtual, Version) }.

 % If we select a deprecated version, mark the package as deprecated
 attr("deprecated", Package, Version) :-

@@ -144,10 +152,10 @@ possible_version_weight(Package, Weight)

 % More specific error message if the version cannot satisfy some constraint
 % Otherwise covered by `no_version_error` and `versions_conflict_error`.
-error(1, "No valid version for '{0}' satisfies '@{1}'", Package, Constraint)
+error(10, "Cannot satisfy '{0}@{1}'", Package, Constraint)
   :- attr("node_version_satisfies", Package, Constraint),
-     C = #count{ Version : attr("version", Package, Version), version_satisfies(Package, Constraint, Version)},
-     C < 1.
+     attr("version", Package, Version),
+     not version_satisfies(Package, Constraint, Version).

 attr("node_version_satisfies", Package, Constraint)
   :- attr("version", Package, Version), version_satisfies(Package, Constraint, Version).

@@ -253,7 +261,7 @@ attr("node", Dependency) :- attr("node", Package), depends_on(Package, Dependenc
 % dependencies) and get a two-node unconnected graph
 needed(Package) :- attr("root", Package).
 needed(Dependency) :- needed(Package), depends_on(Package, Dependency).
-error(1, "'{0}' is not a valid dependency for any package in the DAG", Package)
+error(10, "'{0}' is not a valid dependency for any package in the DAG", Package)
   :- attr("node", Package),
      not needed(Package).

@@ -262,7 +270,7 @@ error(1, "'{0}' is not a valid dependency for any package in the DAG", Package)
 % this ensures that we solve around them
 path(Parent, Child) :- depends_on(Parent, Child).
 path(Parent, Descendant) :- path(Parent, A), depends_on(A, Descendant).
-error(2, "Cyclic dependency detected between '{0}' and '{1}'\n    Consider changing variants to avoid the cycle", A, B)
+error(100, "Cyclic dependency detected between '{0}' and '{1}' (consider changing variants to avoid the cycle)", A, B)
  :- path(A, B),
     path(B, A).

@@ -272,7 +280,7 @@ error(2, "Cyclic dependency detected between '{0}' and '{1}'\n    Consider chang
 %-----------------------------------------------------------------------------
 % Conflicts
 %-----------------------------------------------------------------------------
-error(0, Msg) :- attr("node", Package),
+error(1, Msg) :- attr("node", Package),
    conflict(Package, TriggerID, ConstraintID, Msg),
    condition_holds(TriggerID),
    condition_holds(ConstraintID),

@@ -301,15 +309,14 @@ attr("virtual_node", Virtual)
 % The provider must be selected among the possible providers.
 { provider(Package, Virtual) : possible_provider(Package, Virtual) }
   :- attr("virtual_node", Virtual).
-error(2, "Cannot find valid provider for virtual {0}", Virtual)
+
+error(100, "Cannot find valid provider for virtual {0}", Virtual)
   :- attr("virtual_node", Virtual),
-     P = #count{ Package : provider(Package, Virtual)},
-     P < 1.
-error(2, "Spec cannot include multiple providers for virtual '{0}'\n    Requested '{1}' and '{2}'", Virtual, P1, P2)
+     not provider(_, Virtual).
+
+error(100, "Cannot select a single provider for virtual '{0}'", Virtual)
   :- attr("virtual_node", Virtual),
-     provider(P1, Virtual),
-     provider(P2, Virtual),
-     P1 < P2.
+     2 { provider(P, Virtual) }.

 % virtual roots imply virtual nodes, and that one provider is a root
 attr("virtual_node", Virtual) :- attr("virtual_root", Virtual).

@@ -398,14 +405,13 @@ possible_provider_weight(Dependency, Virtual, 100, "fallback") :- provider(Depen
 { external_version(Package, Version, Weight):
     version_declared(Package, Version, Weight, "external") }
   :- external(Package).
-error(2, "Attempted to use external for '{0}' which does not satisfy any configured external spec", Package)
+
+error(100, "Attempted to use external for '{0}' which does not satisfy any configured external spec", Package)
   :- external(Package),
      not external_version(Package, _, _).
-error(2, "Attempted to use external for '{0}' which does not satisfy any configured external spec", Package)
+
+error(100, "Attempted to use external for '{0}' which does not satisfy any configured external spec", Package)
   :- external(Package),
-     external_version(Package, Version1, Weight1),
-     external_version(Package, Version2, Weight2),
-     (Version1, Weight1) < (Version2, Weight2). % see[1]
+     2 { external_version(Package, Version, Weight) }.

 version_weight(Package, Weight) :- external_version(Package, Version, Weight).
 attr("version", Package, Version) :- external_version(Package, Version, Weight).

@@ -440,7 +446,7 @@ external_conditions_hold(Package, LocalIndex) :-

 % it cannot happen that a spec is external, but none of the external specs
 % conditions hold.
-error(2, "Attempted to use external for '{0}' which does not satisfy any configured external spec", Package)
+error(100, "Attempted to use external for '{0}' which does not satisfy any configured external spec", Package)
   :- external(Package),
      not external_conditions_hold(Package, _).

@@ -488,7 +494,7 @@ requirement_weight(Package, Group, W) :-
   requirement_policy(Package, Group, "any_of"),
   requirement_group_satisfied(Package, Group).

-error(2, "Cannot satisfy the requirements in packages.yaml for the '{0}' package. You may want to delete them to proceed with concretization. To check where the requirements are defined run 'spack config blame packages'", Package) :-
+error(100, "Cannot satisfy the requirements in packages.yaml for package '{0}'. You may want to delete them to proceed with concretization. To check where the requirements are defined run 'spack config blame packages'", Package) :-
   activate_requirement_rules(Package),
   requirement_group(Package, X),
   not requirement_group_satisfied(Package, X).

@@ -518,20 +524,20 @@ attr("variant_value", Package, Variant, Value) :-
   attr("variant_propagate", Package, Variant, Value, _),
   variant_possible_value(Package, Variant, Value).

-error(2, "{0} and {1} cannot both propagate variant '{2}' to package {3} with values '{4}' and '{5}'", Source1, Source2, Variant, Package, Value1, Value2) :-
+error(100, "{0} and {1} cannot both propagate variant '{2}' to package {3} with values '{4}' and '{5}'", Source1, Source2, Variant, Package, Value1, Value2) :-
   attr("variant_propagate", Package, Variant, Value1, Source1),
   attr("variant_propagate", Package, Variant, Value2, Source2),
   variant(Package, Variant),
   Value1 < Value2.

 % a variant cannot be set if it is not a variant on the package
-error(2, "Cannot set variant '{0}' for package '{1}' because the variant condition cannot be satisfied for the given spec", Variant, Package)
+error(100, "Cannot set variant '{0}' for package '{1}' because the variant condition cannot be satisfied for the given spec", Variant, Package)
   :- attr("variant_set", Package, Variant),
      not variant(Package, Variant),
      build(Package).

 % a variant cannot take on a value if it is not a variant of the package
-error(2, "Cannot set variant '{0}' for package '{1}' because the variant condition cannot be satisfied for the given spec", Variant, Package)
+error(100, "Cannot set variant '{0}' for package '{1}' because the variant condition cannot be satisfied for the given spec", Variant, Package)
   :- attr("variant_value", Package, Variant, _),
      not variant(Package, Variant),
      build(Package).

@@ -554,20 +560,18 @@ attr("variant_value", Package, Variant, Value) :-
   build(Package).


-error(2, "'{0}' required multiple values for single-valued variant '{1}'\n    Requested 'Spec({1}={2})' and 'Spec({1}={3})'", Package, Variant, Value1, Value2)
+error(100, "'{0}' required multiple values for single-valued variant '{1}'", Package, Variant)
   :- attr("node", Package),
      variant(Package, Variant),
      variant_single_value(Package, Variant),
      build(Package),
-     attr("variant_value", Package, Variant, Value1),
-     attr("variant_value", Package, Variant, Value2),
-     Value1 < Value2. % see[1]
-error(2, "No valid value for variant '{1}' of package '{0}'", Package, Variant)
+     2 { attr("variant_value", Package, Variant, Value) }.
+
+error(100, "No valid value for variant '{1}' of package '{0}'", Package, Variant)
   :- attr("node", Package),
      variant(Package, Variant),
      build(Package),
-     C = #count{ Value : attr("variant_value", Package, Variant, Value) },
-     C < 1.
+     not attr("variant_value", Package, Variant, _).

 % if a variant is set to anything, it is considered 'set'.
 attr("variant_set", Package, Variant) :- attr("variant_set", Package, Variant, _).

@@ -575,7 +579,7 @@ attr("variant_set", Package, Variant) :- attr("variant_set", Package, Variant, _
 % A variant cannot have a value that is not also a possible value
 % This only applies to packages we need to build -- concrete packages may
 % have been built w/different variants from older/different package versions.
-error(1, "'Spec({1}={2})' is not a valid value for '{0}' variant '{1}'", Package, Variant, Value)
+error(10, "'Spec({1}={2})' is not a valid value for '{0}' variant '{1}'", Package, Variant, Value)
   :- attr("variant_value", Package, Variant, Value),
      not variant_possible_value(Package, Variant, Value),
      build(Package).

@@ -583,7 +587,7 @@ error(1, "'Spec({1}={2})' is not a valid value for '{0}' variant '{1}'", Package
 % Some multi valued variants accept multiple values from disjoint sets.
 % Ensure that we respect that constraint and we don't pick values from more
 % than one set at once
-error(2, "{0} variant '{1}' cannot have values '{2}' and '{3}' as they come from disjoing value sets", Package, Variant, Value1, Value2)
+error(100, "{0} variant '{1}' cannot have values '{2}' and '{3}' as they come from disjoint value sets", Package, Variant, Value1, Value2)
   :- attr("variant_value", Package, Variant, Value1),
      attr("variant_value", Package, Variant, Value2),
      variant_value_from_disjoint_sets(Package, Variant, Value1, Set1),

@@ -618,6 +622,7 @@ variant_not_default(Package, Variant, Value)
 % A default variant value that is not used
 variant_default_not_used(Package, Variant, Value)
   :- variant_default_value(Package, Variant, Value),
+     variant(Package, Variant),
      not attr("variant_value", Package, Variant, Value),
      attr("node", Package).

@@ -650,7 +655,7 @@ variant_default_value(Package, Variant, Value) :-

 % Treat 'none' in a special way - it cannot be combined with other
 % values even if the variant is multi-valued
-error(2, "{0} variant '{1}' cannot have values '{2}' and 'none'", Package, Variant, Value)
+error(100, "{0} variant '{1}' cannot have values '{2}' and 'none'", Package, Variant, Value)
   :- attr("variant_value", Package, Variant, Value),
      attr("variant_value", Package, Variant, "none"),
      Value != "none",

@@ -698,18 +703,6 @@ attr("node_platform", Package, Platform)
 % platform is set if set to anything
 attr("node_platform_set", Package) :- attr("node_platform_set", Package, _).

-% each node must have a single platform
-error(2, "No valid platform found for {0}", Package)
-  :- attr("node", Package),
-     C = #count{ Platform : attr("node_platform", Package, Platform)},
-     C < 1.
-
-error(2, "Cannot concretize {0} with multiple platforms\n    Requested 'platform={1}' and 'platform={2}'", Package, Platform1, Platform2)
-  :- attr("node", Package),
-     attr("node_platform", Package, Platform1),
-     attr("node_platform", Package, Platform2),
-     Platform1 < Platform2. % see[1]
-
 %-----------------------------------------------------------------------------
 % OS semantics
 %-----------------------------------------------------------------------------

@@ -719,25 +712,14 @@ os(OS) :- os(OS, _).
 % one os per node
 { attr("node_os", Package, OS) : os(OS) } :- attr("node", Package).

-error(2, "Cannot find valid operating system for '{0}'", Package)
-  :- attr("node", Package),
-     C = #count{ OS : attr("node_os", Package, OS)},
-     C < 1.
-
-error(2, "Cannot concretize {0} with multiple operating systems\n    Requested 'os={1}' and 'os={2}'", Package, OS1, OS2)
-  :- attr("node", Package),
-     attr("node_os", Package, OS1),
-     attr("node_os", Package, OS2),
-     OS1 < OS2. %see [1]
-
 % can't have a non-buildable OS on a node we need to build
-error(2, "Cannot concretize '{0} os={1}'. Operating system '{1}' is not buildable", Package, OS)
+error(100, "Cannot select '{0} os={1}' (operating system '{1}' is not buildable)", Package, OS)
   :- build(Package),
      attr("node_os", Package, OS),
      not buildable_os(OS).

 % can't have dependencies on incompatible OS's
-error(2, "{0} and dependency {1} have incompatible operating systems 'os={2}' and 'os={3}'", Package, Dependency, PackageOS, DependencyOS)
+error(100, "{0} and dependency {1} have incompatible operating systems 'os={2}' and 'os={3}'", Package, Dependency, PackageOS, DependencyOS)
   :- depends_on(Package, Dependency),
      attr("node_os", Package, PackageOS),
      attr("node_os", Dependency, DependencyOS),

@@ -781,19 +763,8 @@ attr("node_os", Package, OS) :- attr("node_os_set", Package, OS), attr("node", P
 % Each node has only one target chosen among the known targets
 { attr("node_target", Package, Target) : target(Target) } :- attr("node", Package).

-error(2, "Cannot find valid target for '{0}'", Package)
-  :- attr("node", Package),
-     C = #count{Target : attr("node_target", Package, Target)},
-     C < 1.
-
-error(2, "Cannot concretize '{0}' with multiple targets\n    Requested 'target={1}' and 'target={2}'", Package, Target1, Target2)
-  :- attr("node", Package),
-     attr("node_target", Package, Target1),
-     attr("node_target", Package, Target2),
-     Target1 < Target2. % see[1]
-
 % If a node must satisfy a target constraint, enforce it
-error(1, "'{0} target={1}' cannot satisfy constraint 'target={2}'", Package, Target, Constraint)
+error(10, "'{0} target={1}' cannot satisfy constraint 'target={2}'", Package, Target, Constraint)
   :- attr("node_target", Package, Target),
      attr("node_target_satisfies", Package, Constraint),
      not target_satisfies(Constraint, Target).

@@ -804,7 +775,7 @@ attr("node_target_satisfies", Package, Constraint)
   :- attr("node_target", Package, Target), target_satisfies(Constraint, Target).

 % If a node has a target, all of its dependencies must be compatible with that target
-error(2, "Cannot find compatible targets for {0} and {1}", Package, Dependency)
+error(100, "Cannot find compatible targets for {0} and {1}", Package, Dependency)
   :- depends_on(Package, Dependency),
      attr("node_target", Package, Target),
      not node_target_compatible(Dependency, Target).

@@ -819,7 +790,7 @@ node_target_compatible(Package, Target)
 #defined target_satisfies/2.

 % can't use targets on node if the compiler for the node doesn't support them
-error(2, "{0} compiler '{2}@{3}' incompatible with 'target={1}'", Package, Target, Compiler, Version)
+error(100, "{0} compiler '{2}@{3}' incompatible with 'target={1}'", Package, Target, Compiler, Version)
   :- attr("node_target", Package, Target),
      node_compiler(Package, CompilerID),
      not compiler_supports_target(CompilerID, Target),

@@ -848,7 +819,7 @@ node_target_mismatch(Parent, Dependency)
      not node_target_match(Parent, Dependency).

 % disallow reusing concrete specs that don't have a compatible target
-error(2, "'{0} target={1}' is not compatible with this machine", Package, Target)
+error(100, "'{0} target={1}' is not compatible with this machine", Package, Target)
   :- attr("node", Package),
      attr("node_target", Package, Target),
      not target(Target).

@@ -881,41 +852,34 @@ attr("node_compiler_version", Package, CompilerName, CompilerVersion)
 attr("node_compiler", Package, CompilerName)
   :- attr("node_compiler_version", Package, CompilerName, CompilerVersion).

-error(2, "No valid compiler version found for '{0}'", Package)
+error(100, "No valid compiler version found for '{0}'", Package)
   :- attr("node", Package),
      not node_compiler(Package, _).

-error(2, "Cannot concretize {0} with two compilers {1}@{2} and {3}@{4}", Package, C1, V1, C2, V2)
-  :- attr("node", Package),
-     attr("node_compiler_version", Package, C1, V1),
-     attr("node_compiler_version", Package, C2, V2),
-     (C1, V1) < (C2, V2). % see[1]
-
-% We can't have a compiler be enforced and select the version from another compiler
-error(2, "Cannot concretize {0} with two compilers {1}@{2} and {3}@{4}", Package, C1, V1, C2, V2)
-  :- attr("node_compiler_version", Package, C1, V1),
-     attr("node_compiler_version", Package, C2, V2),
-     (C1, V1) < (C2, V2).
+error(100, "Cannot select a single compiler for package {0}", Package)
+  :- attr("node", Package),
+     2 { attr("node_compiler_version", Package, C, V) }.

-error(2, "Cannot concretize {0} with two compilers {1} and {2}@{3}", Package, Compiler1, Compiler2, Version)
+error(100, "Cannot concretize {0} with two compilers {1} and {2}@{3}", Package, Compiler1, Compiler2, Version)
   :- attr("node_compiler", Package, Compiler1),
      attr("node_compiler_version", Package, Compiler2, Version),
      Compiler1 != Compiler2.

 % If the compiler of a node cannot be satisfied, raise
-error(1, "No valid compiler for {0} satisfies '%{1}'", Package, Compiler)
+error(10, "No valid compiler for {0} satisfies '%{1}'", Package, Compiler)
   :- attr("node", Package),
      attr("node_compiler_version_satisfies", Package, Compiler, ":"),
      not compiler_version_satisfies(Compiler, ":", _).

 % If the compiler of a node must satisfy a constraint, then its version
 % must be chosen among the ones that satisfy said constraint
-error(2, "No valid version for '{0}' compiler '{1}' satisfies '@{2}'", Package, Compiler, Constraint)
+error(100, "No valid version for '{0}' compiler '{1}' satisfies '@{2}'", Package, Compiler, Constraint)
   :- attr("node", Package),
      attr("node_compiler_version_satisfies", Package, Compiler, Constraint),
      not compiler_version_satisfies(Compiler, Constraint, _).

-error(2, "No valid version for '{0}' compiler '{1}' satisfies '@{2}'", Package, Compiler, Constraint)
+error(100, "No valid version for '{0}' compiler '{1}' satisfies '@{2}'", Package, Compiler, Constraint)
   :- attr("node", Package),
      attr("node_compiler_version_satisfies", Package, Compiler, Constraint),
      not compiler_version_satisfies(Compiler, Constraint, ID),

@@ -941,7 +905,7 @@ attr("node_compiler_version_satisfies", Package, Compiler, Constraint)
 % Cannot select a compiler if it is not supported on the OS
 % Compilers that are explicitly marked as allowed
 % are excluded from this check
-error(2, "{0} compiler '%{1}@{2}' incompatible with 'os={3}'", Package, Compiler, Version, OS)
+error(100, "{0} compiler '%{1}@{2}' incompatible with 'os={3}'", Package, Compiler, Version, OS)
   :- attr("node_os", Package, OS),
      node_compiler(Package, CompilerID),
      compiler_name(CompilerID, Compiler),

@@ -990,7 +954,7 @@ compiler_weight(Package, 100)
      not default_compiler_preference(CompilerID, _).

 % For the time being, be strict and reuse only if the compiler match one we have on the system
-error(2, "Compiler {1}@{2} requested for {0} cannot be found. Set install_missing_compilers:true if intended.", Package, Compiler, Version)
+error(100, "Compiler {1}@{2} requested for {0} cannot be found. Set install_missing_compilers:true if intended.", Package, Compiler, Version)
   :- attr("node_compiler_version", Package, Compiler, Version),
      not node_compiler(Package, _).

@@ -1018,7 +982,7 @@ node_flag_inherited(Dependency, FlagType, Flag)
   can_inherit_flags(Package, Dependency, FlagType),
   attr("node_flag_propagate", Package, FlagType).

-error(2, "{0} and {1} cannot both propagate compiler flags '{2}' to {3}", Source1, Source2, Package, FlagType) :-
+error(100, "{0} and {1} cannot both propagate compiler flags '{2}' to {3}", Source1, Source2, Package, FlagType) :-
   depends_on(Source1, Package),
   depends_on(Source2, Package),
   attr("node_flag_propagate", Source1, FlagType),

@@ -1076,7 +1040,7 @@ attr("no_flags", Package, FlagType)
 % You can't install a hash, if it is not installed
 :- attr("hash", Package, Hash), not installed_hash(Package, Hash).
 % This should be redundant given the constraint above
-:- attr("hash", Package, Hash1), attr("hash", Package, Hash2), Hash1 < Hash2.
+:- attr("node", Package), 2 { attr("hash", Package, Hash) }.

 % if a hash is selected, we impose all the constraints that implies
 impose(Hash) :- attr("hash", Package, Hash).

@@ -1127,18 +1091,14 @@ build_priority(Package, 0) :- attr("node", Package), not optimize_for_reuse().
 % Optimization to avoid errors
 %-----------------------------------------------------------------
 % Some errors are handled as rules instead of constraints because
-% it allows us to explain why something failed. Here we optimize
-% HEAVILY against the facts generated by those rules.
+% it allows us to explain why something failed.
 #minimize{ 0@1000: #true}.
-#minimize{ 0@1001: #true}.
-#minimize{ 0@1002: #true}.
-
-#minimize{ 1000@1000+Priority,Msg: error(Priority, Msg) }.
-#minimize{ 1000@1000+Priority,Msg,Arg1: error(Priority, Msg, Arg1) }.
-#minimize{ 1000@1000+Priority,Msg,Arg1,Arg2: error(Priority, Msg, Arg1, Arg2) }.
-#minimize{ 1000@1000+Priority,Msg,Arg1,Arg2,Arg3: error(Priority, Msg, Arg1, Arg2, Arg3) }.
-#minimize{ 1000@1000+Priority,Msg,Arg1,Arg2,Arg3,Arg4: error(Priority, Msg, Arg1, Arg2, Arg3, Arg4) }.
-#minimize{ 1000@1000+Priority,Msg,Arg1,Arg2,Arg3,Arg4,Arg5: error(Priority, Msg, Arg1, Arg2, Arg3, Arg4, Arg5) }.
+#minimize{ Weight@1000,Msg: error(Weight, Msg) }.
+#minimize{ Weight@1000,Msg,Arg1: error(Weight, Msg, Arg1) }.
+#minimize{ Weight@1000,Msg,Arg1,Arg2: error(Weight, Msg, Arg1, Arg2) }.
+#minimize{ Weight@1000,Msg,Arg1,Arg2,Arg3: error(Weight, Msg, Arg1, Arg2, Arg3) }.
+#minimize{ Weight@1000,Msg,Arg1,Arg2,Arg3,Arg4: error(Weight, Msg, Arg1, Arg2, Arg3, Arg4) }.

 %-----------------------------------------------------------------------------
 % How to optimize the spec (high to low priority)

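The change collapses the per-priority optimization levels into one: the error's first argument is now a weight paid at a single level 1000, so a priority-100 error costs ten times a priority-10 one instead of dominating lexicographically. A toy illustration of the pattern (assumes the `clingo` Python package; the program below is invented, not the real concretizer):

```python
import clingo

program = r"""
{ pick(a); pick(b) }.
:- not pick(a), not pick(b).
error(100, "picked a") :- pick(a).
error(10,  "picked b") :- pick(b).
% pay each error's weight at one optimization level
#minimize { W@1000, M : error(W, M) }.
"""

ctl = clingo.Control()
ctl.add("base", [], program)
ctl.ground([("base", [])])
with ctl.solve(yield_=True) as handle:
    for model in handle:
        # the optimum picks only b, since its error weighs 10 rather than 100
        print(model.symbols(shown=True), model.cost)
```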
@@ -23,6 +23,5 @@
 #show error/4.
 #show error/5.
 #show error/6.
-#show error/7.

 % debug

@@ -35,6 +35,7 @@
 import spack.fetch_strategy as fs
 import spack.mirror
 import spack.paths
+import spack.spec
 import spack.util.lock
 import spack.util.path as sup
 import spack.util.pattern as pattern
@@ -49,6 +50,13 @@
 stage_prefix = "spack-stage-"


+def compute_stage_name(spec):
+    """Determine stage name given a spec"""
+    default_stage_structure = "spack-stage-{name}-{version}-{hash}"
+    stage_name_structure = spack.config.get("config:stage_name", default=default_stage_structure)
+    return spec.format(format_string=stage_name_structure)
+
+
 def create_stage_root(path: str) -> None:
     """Create the stage root directory and ensure appropriate access perms."""
     assert os.path.isabs(path) and len(path.strip()) > 1

@@ -150,7 +158,10 @@ def _resolve_paths(candidates):

         # Ensure the path is unique per user.
         can_path = sup.canonicalize_path(path)
-        if user not in can_path:
+        # When multiple users share a stage root, we can avoid conflicts between
+        # them by adding a per-user subdirectory.
+        # Avoid doing this on Windows to keep stage absolute path as short as possible.
+        if user not in can_path and not sys.platform == "win32":
             can_path = os.path.join(can_path, user)

         paths.append(can_path)

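`compute_stage_name` just runs `Spec.format` over a user-configurable template pulled from `config:stage_name`. A sketch of the formatting contract with plain string formatting standing in for `Spec.format` (the field values are made up):

```python
# Stand-in for Spec.format(format_string=...) over the configured template
default_stage_structure = "spack-stage-{name}-{version}-{hash}"

fields = {"name": "zlib", "version": "1.2.13", "hash": "abc123"}
print(default_stage_structure.format(**fields))
# -> spack-stage-zlib-1.2.13-abc123
```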
@@ -20,6 +20,7 @@
 import contextlib
 import os
 import re
+from typing import Union

 import llnl.util.lang
 import llnl.util.tty as tty
@@ -196,7 +197,7 @@ def _store():


 #: Singleton store instance
-store = llnl.util.lang.Singleton(_store)
+store: Union[Store, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(_store)


 def _store_root():

@@ -352,6 +352,53 @@ def test_Wl_parsing(wrapper_environment):
     )


+def test_Xlinker_parsing(wrapper_environment):
+    # -Xlinker <x> ... -Xlinker <y> may have compiler flags in between, like -O3 in this
+    # example. Also check that a trailing -Xlinker (which is a compiler error) is not
+    # dropped or given an empty argument.
+    check_args(
+        cc,
+        [
+            "-Xlinker",
+            "-rpath",
+            "-O3",
+            "-Xlinker",
+            "/a",
+            "-Xlinker",
+            "--flag",
+            "-Xlinker",
+            "-rpath=/b",
+            "-Xlinker",
+        ],
+        [real_cc]
+        + target_args
+        + [
+            "-Wl,--disable-new-dtags",
+            "-Wl,-rpath,/a",
+            "-Wl,-rpath,/b",
+            "-O3",
+            "-Xlinker",
+            "--flag",
+            "-Xlinker",
+        ],
+    )
+
+
+def test_rpath_without_value(wrapper_environment):
+    # cc -Wl,-rpath without a value shouldn't drop -Wl,-rpath;
+    # same for -Xlinker
+    check_args(
+        cc,
+        ["-Wl,-rpath", "-O3", "-g"],
+        [real_cc] + target_args + ["-Wl,--disable-new-dtags", "-O3", "-g", "-Wl,-rpath"],
+    )
+    check_args(
+        cc,
+        ["-Xlinker", "-rpath", "-O3", "-g"],
+        [real_cc] + target_args + ["-Wl,--disable-new-dtags", "-O3", "-g", "-Xlinker", "-rpath"],
+    )
+
+
 def test_dep_rpath(wrapper_environment):
     """Ensure RPATHs for root package are added."""
     check_args(cc, test_args, [real_cc] + target_args + common_compile_args)

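The behavior the test pins down: rpaths passed via `-Xlinker` pairs are recognized even with ordinary flags in between, unrelated `-Xlinker` pairs pass through, and a dangling `-Xlinker` is preserved. A rough Python rendering of that pairing logic (illustration only, not the actual `cc` shell wrapper):

```python
def extract_xlinker_rpaths(args):
    """Sketch: collect rpaths given as '-Xlinker -rpath [-Xlinker <dir>]'
    or '-Xlinker -rpath=<dir>'; leave everything else (including a dangling
    -Xlinker) untouched."""
    rpaths, rest = [], []
    expect_dir = False  # saw '-Xlinker -rpath', waiting for its value
    it = iter(args)
    for a in it:
        if a != "-Xlinker":
            rest.append(a)  # ordinary compiler flag passes through
            continue
        val = next(it, None)
        if val is None:
            rest.append("-Xlinker")  # trailing -Xlinker is kept, not dropped
        elif expect_dir:
            rpaths.append(val)
            expect_dir = False
        elif val == "-rpath":
            expect_dir = True
        elif val.startswith("-rpath="):
            rpaths.append(val[len("-rpath="):])
        else:
            rest.extend(["-Xlinker", val])  # unrelated -Xlinker pair kept
    return rpaths, rest

print(extract_xlinker_rpaths(
    ["-Xlinker", "-rpath", "-O3", "-Xlinker", "/a",
     "-Xlinker", "--flag", "-Xlinker", "-rpath=/b", "-Xlinker"]))
# -> (['/a', '/b'], ['-O3', '-Xlinker', '--flag', '-Xlinker'])
```

The collected rpaths are what the wrapper then re-emits as `-Wl,-rpath,/a` and `-Wl,-rpath,/b` in the expected output above.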
@@ -33,6 +33,7 @@
 from spack.spec import CompilerSpec, Spec
 from spack.util.pattern import Bunch

+config_cmd = spack.main.SpackCommand("config")
 ci_cmd = spack.main.SpackCommand("ci")
 env_cmd = spack.main.SpackCommand("env")
 mirror_cmd = spack.main.SpackCommand("mirror")
@@ -412,7 +413,7 @@ def test_ci_generate_with_env_missing_section(
     """
     )

-    expect_out = 'Error: Environment yaml does not have "ci" section'
+    expect_out = "Environment does not have `ci` a configuration"

     with tmpdir.as_cwd():
         env_cmd("create", "test", "./spack.yaml")
@@ -1842,12 +1843,11 @@ def test_ci_generate_prune_env_vars(
 spack:
   specs:
     - libelf
-  gitlab-ci:
-    mappings:
+  ci:
+    pipeline-gen:
+    - submapping:
       - match:
           - arch=test-debian6-core2
-        runner-attributes:
+        build-job:
           tags:
             - donotcare
           image: donotcare
@@ -2290,3 +2290,124 @@ def test_cmd_first_line():
     )

     assert spack.cmd.first_line(doc) == first
+
+
+legacy_spack_yaml_contents = """
+spack:
+  definitions:
+    - bootstrap:
+        - cmake@3.4.3
+    - old-gcc-pkgs:
+        - archive-files
+        - callpath
+        # specify ^openblas-with-lapack to ensure that builtin.mock repo flake8
+        # package (which can also provide lapack) is not chosen, as it violates
+        # a package-level check which requires exactly one fetch strategy (this
+        # is apparently not an issue for other tests that use it).
+        - hypre@0.2.15 ^openblas-with-lapack
+  specs:
+    - matrix:
+        - [$old-gcc-pkgs]
+  mirrors:
+    test-mirror: file:///some/fake/mirror
+  {0}:
+    bootstrap:
+      - name: bootstrap
+        compiler-agnostic: true
+    match_behavior: first
+    mappings:
+      - match:
+          - arch=test-debian6-core2
+        runner-attributes:
+          tags:
+            - donotcare
+          image: donotcare
+      - match:
+          - arch=test-debian6-m1
+        runner-attributes:
+          tags:
+            - donotcare
+          image: donotcare
+    service-job-attributes:
+      image: donotcare
+      tags: [donotcare]
+  cdash:
+    build-group: Not important
+    url: https://my.fake.cdash
+    project: Not used
+    site: Nothing
+"""
+
+
+@pytest.mark.regression("36409")
+def test_gitlab_ci_deprecated(
+    tmpdir,
+    mutable_mock_env_path,
+    install_mockery,
+    mock_packages,
+    monkeypatch,
+    ci_base_environment,
+    mock_binary_index,
+):
+    mirror_url = "file:///some/fake/mirror"
+    filename = str(tmpdir.join("spack.yaml"))
+    with open(filename, "w") as f:
+        f.write(legacy_spack_yaml_contents.format("gitlab-ci"))
+
+    with tmpdir.as_cwd():
+        env_cmd("create", "test", "./spack.yaml")
+        outputfile = "generated-pipeline.yaml"
+
+        with ev.read("test"):
+            ci_cmd("generate", "--output-file", outputfile)
+
+        with open(outputfile) as f:
+            contents = f.read()
+            yaml_contents = syaml.load(contents)
+
+            found_spec = False
+            for ci_key in yaml_contents.keys():
+                if "(bootstrap)" in ci_key:
+                    found_spec = True
+                    assert "cmake" in ci_key
+            assert found_spec
+            assert "stages" in yaml_contents
+            assert len(yaml_contents["stages"]) == 6
+            assert yaml_contents["stages"][0] == "stage-0"
+            assert yaml_contents["stages"][5] == "stage-rebuild-index"
+
+            assert "rebuild-index" in yaml_contents
+            rebuild_job = yaml_contents["rebuild-index"]
+            expected = "spack buildcache update-index --keys --mirror-url {0}".format(mirror_url)
+            assert rebuild_job["script"][0] == expected
+
+            assert "variables" in yaml_contents
+            assert "SPACK_ARTIFACTS_ROOT" in yaml_contents["variables"]
+            artifacts_root = yaml_contents["variables"]["SPACK_ARTIFACTS_ROOT"]
+            assert artifacts_root == "jobs_scratch_dir"
+
+
+@pytest.mark.regression("36045")
+def test_gitlab_ci_update(
+    tmpdir,
+    mutable_mock_env_path,
+    install_mockery,
+    mock_packages,
+    monkeypatch,
+    ci_base_environment,
+    mock_binary_index,
+):
+    filename = str(tmpdir.join("spack.yaml"))
+    with open(filename, "w") as f:
+        f.write(legacy_spack_yaml_contents.format("ci"))
+
+    with tmpdir.as_cwd():
+        env_cmd("update", "-y", ".")
+
+        with open("spack.yaml") as f:
+            contents = f.read()
+            yaml_contents = syaml.load(contents)
+
+            ci_root = yaml_contents["spack"]["ci"]
+
+            assert "pipeline-gen" in ci_root

@@ -13,6 +13,7 @@
 import spack.database
 import spack.environment as ev
 import spack.main
+import spack.schema.config
 import spack.spec
 import spack.store
 import spack.util.spack_yaml as syaml
@@ -648,3 +649,26 @@ def test_config_prefer_upstream(

     # Make sure a message about the conflicting hdf5's was given.
     assert "- hdf5" in output
+
+
+def test_environment_config_update(tmpdir, mutable_config, monkeypatch):
+    with open(tmpdir.join("spack.yaml"), "w") as f:
+        f.write(
+            """\
+spack:
+  config:
+    ccache: true
+"""
+        )
+
+    def update_config(data):
+        data["ccache"] = False
+        return True
+
+    monkeypatch.setattr(spack.schema.config, "update", update_config)
+
+    with ev.Environment(str(tmpdir)):
+        config("update", "-y", "config")
+
+    with ev.Environment(str(tmpdir)) as e:
+        assert not e.raw_yaml["spack"]["config"]["ccache"]

@@ -295,7 +295,7 @@ def test_env_install_same_spec_twice(install_mockery, mock_fetch):

     # The second installation reports all packages already installed
     out = install("cmake-client")
-    assert "already installed" in out
+    assert "Executing phase" not in out


 def test_env_definition_symlink(install_mockery, mock_fetch, tmpdir):
@@ -2554,10 +2554,10 @@ def test_lockfile_not_deleted_on_write_error(tmpdir, monkeypatch):

     # If I run concretize again and there's an error during write,
     # the spack.lock file shouldn't disappear from disk
-    def _write_helper_raise(self, x, y):
+    def _write_helper_raise(self):
         raise RuntimeError("some error")

-    monkeypatch.setattr(ev.Environment, "_update_and_write_manifest", _write_helper_raise)
+    monkeypatch.setattr(ev.Environment, "update_manifest", _write_helper_raise)
     with ev.Environment(str(tmpdir)) as e:
         e.concretize(force=True)
         with pytest.raises(RuntimeError):

@@ -741,6 +741,17 @@ def test_install_deps_then_package(tmpdir, mock_fetch, install_mockery):
     assert os.path.exists(root.prefix)


+@pytest.mark.regression("30224")
+def test_install_overwrite_in_env(tmpdir, mock_fetch, install_mockery, mutable_mock_env_path):
+    env("create", "test")
+
+    with ev.read("test"):
+        install("--add", "dependency-install")
+        output = install("-y", "--overwrite", "dependency-install")
+
+    assert "Executing phase" in output
+
+
 @pytest.mark.regression("12002")
 def test_install_only_dependencies_in_env(
     tmpdir, mock_fetch, install_mockery, mutable_mock_env_path

@@ -22,7 +22,6 @@
 import spack.repo
 import spack.variant as vt
 from spack.concretize import find_spec
-from spack.solver.asp import UnsatisfiableSpecError
 from spack.spec import Spec
 from spack.version import ver

@@ -1763,8 +1762,7 @@ def test_misleading_error_message_on_version(self, mutable_database):
         solver = spack.solver.asp.Solver()
         setup = spack.solver.asp.SpackSolverSetup()
         with pytest.raises(
-            spack.solver.asp.UnsatisfiableSpecError,
-            match="'dep-with-variants' satisfies '@999'",
+            spack.solver.asp.UnsatisfiableSpecError, match="'dep-with-variants@999'"
         ):
             solver.driver.solve(setup, [root_spec], reuse=reusable_specs)

@@ -1845,16 +1843,13 @@ def test_git_ref_version_is_equivalent_to_specified_version(self, git_ref):
         assert s.satisfies("@0.1:")

     @pytest.mark.parametrize("git_ref", ("a" * 40, "0.2.15", "fbranch"))
-    def test_git_ref_version_errors_if_unknown_version(self, git_ref):
+    def test_git_ref_version_succeeds_with_unknown_version(self, git_ref):
         if spack.config.get("config:concretizer") == "original":
             pytest.skip("Original concretizer cannot account for git hashes")
         # main is not defined in the package.py for this file
         s = Spec("develop-branch-version@git.%s=main" % git_ref)
-        with pytest.raises(
-            UnsatisfiableSpecError,
-            match="The reference version 'main' for package 'develop-branch-version'",
-        ):
-            s.concretized()
+        s.concretize()
+        assert s.satisfies("develop-branch-version@main")

     @pytest.mark.regression("31484")
     def test_installed_externals_are_reused(self, mutable_database, repo_with_changing_recipe):
@@ -2114,7 +2109,6 @@ def test_compiler_match_constraints_when_selected(self):
                         "fc": "/usr/bin/gfortran",
                     },
                     "operating_system": "debian6",
-                    "target": "x86_64",
                     "modules": [],
                 }
             },
@@ -2128,7 +2122,6 @@ def test_compiler_match_constraints_when_selected(self):
                         "fc": "/usr/bin/gfortran",
                     },
                     "operating_system": "debian6",
-                    "target": "x86_64",
                     "modules": [],
                 }
             },
@@ -2150,7 +2143,6 @@ def test_compiler_with_custom_non_numeric_version(self, mock_executable):
                 "spec": "gcc@foo",
                 "paths": {"cc": gcc_path, "cxx": gcc_path, "f77": None, "fc": None},
                 "operating_system": "debian6",
-                "target": "x86_64",
                 "modules": [],
             }
         }

@@ -13,6 +13,7 @@
 import spack.util.spack_yaml as syaml
 from spack.solver.asp import UnsatisfiableSpecError
 from spack.spec import Spec
+from spack.util.url import path_to_file_url

 pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="Windows uses old concretizer")

@@ -140,6 +141,151 @@ def test_requirement_isnt_optional(concretize_scope, test_repo):
         Spec("x@1.1").concretize()


+def test_git_user_supplied_reference_satisfaction(
+    concretize_scope, test_repo, mock_git_version_info, monkeypatch
+):
+    repo_path, filename, commits = mock_git_version_info
+
+    monkeypatch.setattr(
+        spack.package_base.PackageBase, "git", path_to_file_url(repo_path), raising=False
+    )
+
+    specs = ["v@{commit0}=2.2", "v@{commit0}", "v@2.2", "v@{commit0}=2.3"]
+
+    format_info = {"commit0": commits[0]}
+
+    hash_eq_ver, just_hash, just_ver, hash_eq_other_ver = [
+        Spec(x.format(**format_info)) for x in specs
+    ]
+
+    assert hash_eq_ver.satisfies(just_hash)
+    assert not just_hash.satisfies(hash_eq_ver)
+    assert hash_eq_ver.satisfies(just_ver)
+    assert not just_ver.satisfies(hash_eq_ver)
+    assert not hash_eq_ver.satisfies(hash_eq_other_ver)
+    assert not hash_eq_other_ver.satisfies(hash_eq_ver)

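The idea the assertions above encode: a version like `@<hash>=2.2` pins both a git reference and its numeric equivalent, so it carries strictly more information and satisfaction only flows one way. A hedged standalone sketch of the same checks (assumes a Spack checkout on `PYTHONPATH` and a mock package `v` with a configured git repo, as in the fixture above):

```python
from spack.spec import Spec

commit = "a" * 40  # placeholder 40-character reference
hash_eq_ver = Spec(f"v@{commit}=2.2")  # reference pinned to a known version
just_hash = Spec(f"v@{commit}")        # reference alone
just_ver = Spec("v@2.2")               # version alone

assert hash_eq_ver.satisfies(just_hash) and hash_eq_ver.satisfies(just_ver)
assert not just_hash.satisfies(hash_eq_ver)  # the reverse does not hold
```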
+def test_requirement_adds_new_version(
+    concretize_scope, test_repo, mock_git_version_info, monkeypatch
+):
+    if spack.config.get("config:concretizer") == "original":
+        pytest.skip("Original concretizer does not support configuration" " requirements")
+
+    repo_path, filename, commits = mock_git_version_info
+    monkeypatch.setattr(
+        spack.package_base.PackageBase, "git", path_to_file_url(repo_path), raising=False
+    )
+
+    a_commit_hash = commits[0]
+    conf_str = """\
+packages:
+  v:
+    require: "@{0}=2.2"
+""".format(
+        a_commit_hash
+    )
+    update_packages_config(conf_str)
+
+    s1 = Spec("v").concretized()
+    assert s1.satisfies("@2.2")
+    assert s1.satisfies("@{0}".format(a_commit_hash))
+    # Make sure the git commit info is retained
+    assert isinstance(s1.version, spack.version.GitVersion)
+    assert s1.version.ref == a_commit_hash
+
+
+def test_requirement_adds_git_hash_version(
+    concretize_scope, test_repo, mock_git_version_info, monkeypatch
+):
+    if spack.config.get("config:concretizer") == "original":
+        pytest.skip("Original concretizer does not support configuration" " requirements")
+
+    repo_path, filename, commits = mock_git_version_info
+    monkeypatch.setattr(
+        spack.package_base.PackageBase, "git", path_to_file_url(repo_path), raising=False
+    )
+
+    a_commit_hash = commits[0]
+    conf_str = """\
+packages:
+  v:
+    require: "@{0}"
+""".format(
+        a_commit_hash
+    )
+    update_packages_config(conf_str)
+
+    s1 = Spec("v").concretized()
+    assert s1.satisfies("@{0}".format(a_commit_hash))
+
+
+def test_requirement_adds_multiple_new_versions(
+    concretize_scope, test_repo, mock_git_version_info, monkeypatch
+):
+    if spack.config.get("config:concretizer") == "original":
+        pytest.skip("Original concretizer does not support configuration" " requirements")
+
+    repo_path, filename, commits = mock_git_version_info
+    monkeypatch.setattr(
+        spack.package_base.PackageBase, "git", path_to_file_url(repo_path), raising=False
+    )
+
+    conf_str = """\
+packages:
+  v:
+    require:
+    - one_of: ["@{0}=2.2", "@{1}=2.3"]
+""".format(
+        commits[0], commits[1]
+    )
+    update_packages_config(conf_str)
+
+    s1 = Spec("v").concretized()
+    assert s1.satisfies("@2.2")
+
+    s2 = Spec("v@{0}".format(commits[1])).concretized()
+    assert s2.satisfies("@{0}".format(commits[1]))
+    assert s2.satisfies("@2.3")
+
+
+# TODO: this belongs in the concretize_preferences test module but uses
+# fixtures defined only here
+def test_preference_adds_new_version(
+    concretize_scope, test_repo, mock_git_version_info, monkeypatch
+):
+    if spack.config.get("config:concretizer") == "original":
+        pytest.skip("Original concretizer does not support configuration" " requirements")
+
+    repo_path, filename, commits = mock_git_version_info
+    monkeypatch.setattr(
+        spack.package_base.PackageBase, "git", path_to_file_url(repo_path), raising=False
+    )
+
+    conf_str = """\
+packages:
+  v:
+    version: ["{0}=2.2", "{1}=2.3"]
+""".format(
+        commits[0], commits[1]
+    )
+    update_packages_config(conf_str)
+
+    s1 = Spec("v").concretized()
+    assert s1.satisfies("@2.2")
+    assert s1.satisfies("@{0}".format(commits[0]))
+
+    s2 = Spec("v@2.3").concretized()
+    # Note: this check will fail: the command-line spec version is preferred
+    # assert s2.satisfies("@{0}".format(commits[1]))
+    assert s2.satisfies("@2.3")
+
+    s3 = Spec("v@{0}".format(commits[1])).concretized()
+    assert s3.satisfies("@{0}".format(commits[1]))
+    # Note: this check will fail: the command-line spec version is preferred
+    # assert s3.satisfies("@2.3")
+
+
 def test_requirement_is_successfully_applied(concretize_scope, test_repo):
     """If a simple requirement can be satisfied, make sure the
     concretization succeeds and the requirement spec is applied.
|
||||
|

@@ -0,0 +1,5 @@
enable:
- tcl
tcl:
  all:
    autoload: none

@@ -161,3 +161,31 @@ def test_environment_cant_modify_environments_root(tmpdir):
    with pytest.raises(ev.SpackEnvironmentError):
        e = ev.Environment(tmpdir.strpath)
        ev.activate(e)


@pytest.mark.regression("35420")
@pytest.mark.parametrize(
    "original_content",
    [
        (
            """\
spack:
  specs:
  - matrix:
    # test
    - - a
  concretizer:
    unify: false"""
        )
    ],
)
def test_roundtrip_spack_yaml_with_comments(original_content, mock_packages, config, tmp_path):
    """Ensure that round-tripping a spack.yaml file doesn't change its content."""
    spack_yaml = tmp_path / "spack.yaml"
    spack_yaml.write_text(original_content)

    e = ev.Environment(str(tmp_path))
    e.update_manifest()

    content = spack_yaml.read_text()
    assert content == original_content
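The guarantee this test checks comes from round-trip YAML parsing. A minimal sketch of the property, assuming a stock ruamel.yaml installation (Spack uses its own vendored copy, so the import path here is an assumption):

import io

from ruamel.yaml import YAML

yaml = YAML()  # round-trip mode is the default and preserves comments
original = "a: 1  # keep me\n"
data = yaml.load(original)
buf = io.StringIO()
yaml.dump(data, buf)
assert buf.getvalue() == original  # the comment survives the load/dump cycle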

@@ -871,3 +871,34 @@ def test_filesummary(tmpdir):
    assert fs.filesummary(p, print_bytes=8) == (26, b"abcdefgh...stuvwxyz")
    assert fs.filesummary(p, print_bytes=13) == (26, b"abcdefghijklmnopqrstuvwxyz")
    assert fs.filesummary(p, print_bytes=100) == (26, b"abcdefghijklmnopqrstuvwxyz")


@pytest.mark.parametrize("bfs_depth", [1, 2, 10])
def test_find_first_file(tmpdir, bfs_depth):
    # Create a structure: a/a/a/{file1,file2}, b/a, c/a, d/{a,file1}, e
    tmpdir.join("a", "a", "a").ensure(dir=True)
    tmpdir.join("b", "a").ensure(dir=True)
    tmpdir.join("c", "a").ensure(dir=True)
    tmpdir.join("d", "a").ensure(dir=True)
    tmpdir.join("e").ensure(dir=True)

    fs.touch(tmpdir.join("a", "a", "a", "file1"))
    fs.touch(tmpdir.join("a", "a", "a", "file2"))
    fs.touch(tmpdir.join("d", "file1"))

    root = str(tmpdir)

    # Iterative deepening: should find the low-depth file1 first.
    assert os.path.samefile(
        fs.find_first(root, "file*", bfs_depth=bfs_depth), os.path.join(root, "d", "file1")
    )

    assert fs.find_first(root, "nonexisting", bfs_depth=bfs_depth) is None

    assert os.path.samefile(
        fs.find_first(root, ["nonexisting", "file2"], bfs_depth=bfs_depth),
        os.path.join(root, "a", "a", "a", "file2"),
    )

    # Should find the first dir
    assert os.path.samefile(fs.find_first(root, "a", bfs_depth=bfs_depth), os.path.join(root, "a"))
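The "iterative deepening" the test mentions means shallow matches are preferred over deep ones. Below is a hedged, simplified sketch of that search order as a plain level-by-level BFS; it is not Spack's fs.find_first, which additionally takes the bfs_depth knob seen above to tune traversal strategy rather than which file wins:

import fnmatch
import os


def find_first_bfs(root, patterns):
    """Illustrative only: first entry under root matching any pattern,
    visiting shallow directories before deep ones."""
    if isinstance(patterns, str):
        patterns = [patterns]
    frontier = [root]
    while frontier:
        next_frontier = []
        for parent in frontier:
            for entry in sorted(os.listdir(parent)):
                path = os.path.join(parent, entry)
                if any(fnmatch.fnmatch(entry, pat) for pat in patterns):
                    return path
                if os.path.isdir(path):
                    next_frontier.append(path)
        frontier = next_frontier
    return None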

@@ -67,7 +67,7 @@ def test_modules_default_symlink(
    module_type, mock_packages, mock_module_filename, mock_module_defaults, config
):
    spec = spack.spec.Spec("mpileaks@2.3").concretized()
    mock_module_defaults(spec.format("{name}{@version}"))
    mock_module_defaults(spec.format("{name}{@version}"), True)

    generator_cls = spack.modules.module_types[module_type]
    generator = generator_cls(spec, "default")

@@ -77,6 +77,9 @@ def test_modules_default_symlink(
    assert os.path.islink(link_path)
    assert os.readlink(link_path) == mock_module_filename

    generator.remove()
    assert not os.path.lexists(link_path)


class MockDb(object):
    def __init__(self, db_ids, spec_hash_to_db):
@@ -19,11 +19,11 @@ def modulefile_content(request):

    writer_cls = getattr(request.module, "writer_cls")

    def _impl(spec_str, module_set_name="default"):
    def _impl(spec_str, module_set_name="default", explicit=True):
        # Write the module file
        spec = spack.spec.Spec(spec_str)
        spec.concretize()
        generator = writer_cls(spec, module_set_name)
        generator = writer_cls(spec, module_set_name, explicit)
        generator.write(overwrite=True)

        # Get its filename

@@ -56,10 +56,10 @@ def factory(request):
    # Class of the module file writer
    writer_cls = getattr(request.module, "writer_cls")

    def _mock(spec_string, module_set_name="default"):
    def _mock(spec_string, module_set_name="default", explicit=True):
        spec = spack.spec.Spec(spec_string)
        spec.concretize()
        return writer_cls(spec, module_set_name), spec
        return writer_cls(spec, module_set_name, explicit), spec

    return _mock

@@ -2,7 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import re

import sys

import pytest

@@ -88,7 +88,7 @@ def test_compilers_provided_different_name(self, factory, module_configuration):
        assert provides["compiler"] == spack.spec.CompilerSpec("oneapi@3.0")

    def test_simple_case(self, modulefile_content, module_configuration):
        """Tests the generation of a simple Tcl module file."""
        """Tests the generation of a simple Lua module file."""

        module_configuration("autoload_direct")
        content = modulefile_content(mpich_spec_string)

@@ -128,21 +128,49 @@ def test_alter_environment(self, modulefile_content, module_configuration, confi

        content = modulefile_content("libdwarf platform=test target=core2")

        assert len([x for x in content if x.startswith('prepend-path("CMAKE_PREFIX_PATH"')]) == 0
        assert len([x for x in content if x.startswith('prepend_path("CMAKE_PREFIX_PATH"')]) == 0
        assert len([x for x in content if 'setenv("FOO", "foo")' in x]) == 0
        assert len([x for x in content if 'unsetenv("BAR")' in x]) == 0

    def test_prepend_path_separator(self, modulefile_content, module_configuration):
        """Tests modifications to run-time environment."""
        """Tests that we can use custom delimiters to manipulate path lists."""

        module_configuration("module_path_separator")
        content = modulefile_content("module-path-separator")

        for line in content:
            if re.match(r'[a-z]+_path\("COLON"', line):
                assert line.endswith('"foo", ":")')
            elif re.match(r'[a-z]+_path\("SEMICOLON"', line):
                assert line.endswith('"bar", ";")')
        assert len([x for x in content if 'append_path("COLON", "foo", ":")' in x]) == 1
        assert len([x for x in content if 'prepend_path("COLON", "foo", ":")' in x]) == 1
        assert len([x for x in content if 'remove_path("COLON", "foo", ":")' in x]) == 1
        assert len([x for x in content if 'append_path("SEMICOLON", "bar", ";")' in x]) == 1
        assert len([x for x in content if 'prepend_path("SEMICOLON", "bar", ";")' in x]) == 1
        assert len([x for x in content if 'remove_path("SEMICOLON", "bar", ";")' in x]) == 1
        assert len([x for x in content if 'append_path("SPACE", "qux", " ")' in x]) == 1
        assert len([x for x in content if 'remove_path("SPACE", "qux", " ")' in x]) == 1

    def test_help_message(self, modulefile_content, module_configuration):
        """Tests the generation of module help message."""

        module_configuration("autoload_direct")
        content = modulefile_content("mpileaks target=core2")

        help_msg = (
            "help([[Name : mpileaks]])"
            "help([[Version: 2.3]])"
            "help([[Target : core2]])"
            "help()"
            "help([[Mpileaks is a mock package that passes audits]])"
        )
        assert help_msg in "".join(content)

        content = modulefile_content("libdwarf target=core2")

        help_msg = (
            "help([[Name : libdwarf]])"
            "help([[Version: 20130729]])"
            "help([[Target : core2]])"
            "depends_on("
        )
        assert help_msg in "".join(content)

    @pytest.mark.parametrize("config_name", ["exclude", "blacklist"])
    def test_exclude(self, modulefile_content, module_configuration, config_name):
@@ -108,6 +108,49 @@ def test_alter_environment(self, modulefile_content, module_configuration, confi
        assert len([x for x in content if "module load foo/bar" in x]) == 1
        assert len([x for x in content if "setenv LIBDWARF_ROOT" in x]) == 1

    def test_prepend_path_separator(self, modulefile_content, module_configuration):
        """Tests that we can use custom delimiters to manipulate path lists."""

        module_configuration("module_path_separator")
        content = modulefile_content("module-path-separator")

        assert len([x for x in content if 'append-path --delim ":" COLON "foo"' in x]) == 1
        assert len([x for x in content if 'prepend-path --delim ":" COLON "foo"' in x]) == 1
        assert len([x for x in content if 'remove-path --delim ":" COLON "foo"' in x]) == 1
        assert len([x for x in content if 'append-path --delim ";" SEMICOLON "bar"' in x]) == 1
        assert len([x for x in content if 'prepend-path --delim ";" SEMICOLON "bar"' in x]) == 1
        assert len([x for x in content if 'remove-path --delim ";" SEMICOLON "bar"' in x]) == 1
        assert len([x for x in content if 'append-path --delim " " SPACE "qux"' in x]) == 1
        assert len([x for x in content if 'remove-path --delim " " SPACE "qux"' in x]) == 1

    def test_help_message(self, modulefile_content, module_configuration):
        """Tests the generation of module help message."""

        module_configuration("autoload_direct")
        content = modulefile_content("mpileaks target=core2")

        help_msg = (
            "proc ModulesHelp { } {"
            '    puts stderr "Name : mpileaks"'
            '    puts stderr "Version: 2.3"'
            '    puts stderr "Target : core2"'
            '    puts stderr ""'
            '    puts stderr "Mpileaks is a mock package that passes audits"'
            "}"
        )
        assert help_msg in "".join(content)

        content = modulefile_content("libdwarf target=core2")

        help_msg = (
            "proc ModulesHelp { } {"
            '    puts stderr "Name : libdwarf"'
            '    puts stderr "Version: 20130729"'
            '    puts stderr "Target : core2"'
            "}"
        )
        assert help_msg in "".join(content)

    @pytest.mark.parametrize("config_name", ["exclude", "blacklist"])
    def test_exclude(self, modulefile_content, module_configuration, config_name):
        """Tests excluding the generation of selected modules."""

@@ -313,9 +356,7 @@ def test_extend_context(self, modulefile_content, module_configuration):
    @pytest.mark.regression("4400")
    @pytest.mark.db
    @pytest.mark.parametrize("config_name", ["exclude_implicits", "blacklist_implicits"])
    def test_exclude_implicits(
        self, modulefile_content, module_configuration, database, config_name
    ):
    def test_exclude_implicits(self, module_configuration, database, config_name):
        module_configuration(config_name)

        # mpileaks has been installed explicitly when setting up

@@ -332,6 +373,23 @@ def test_exclude_implicits(
        writer = writer_cls(item, "default")
        assert writer.conf.excluded

    @pytest.mark.regression("12105")
    @pytest.mark.parametrize("config_name", ["exclude_implicits", "blacklist_implicits"])
    def test_exclude_implicits_with_arg(self, module_configuration, config_name):
        module_configuration(config_name)

        # mpileaks is defined as explicit with explicit argument set on writer
        mpileaks_spec = spack.spec.Spec("mpileaks")
        mpileaks_spec.concretize()
        writer = writer_cls(mpileaks_spec, "default", True)
        assert not writer.conf.excluded

        # callpath is defined as implicit with explicit argument set on writer
        callpath_spec = spack.spec.Spec("callpath")
        callpath_spec.concretize()
        writer = writer_cls(callpath_spec, "default", False)
        assert writer.conf.excluded

    @pytest.mark.regression("9624")
    @pytest.mark.db
    def test_autoload_with_constraints(self, modulefile_content, module_configuration, database):

@@ -765,8 +765,11 @@ def test_resolve_paths(self):

        # resolved path without user appends user
        paths = [os.path.join(os.path.sep, "a", "b", "c")]
        can_paths = [paths[0]]
        user = getpass.getuser()
        can_paths = [os.path.join(paths[0], user)]

        if sys.platform != "win32":
            can_paths = [os.path.join(paths[0], user)]
        assert spack.stage._resolve_paths(paths) == can_paths

        # resolved path with node including user does not append user

@@ -789,7 +792,7 @@ def test_resolve_paths(self):
            res_paths[1] = can_tempdir
            res_paths[2] = os.path.join(can_tempdir, user)
            res_paths[3] = os.path.join(can_tempdir, "stage", user)
        else:
        elif sys.platform != "win32":
            res_paths[0] = os.path.join(res_paths[0], user)

        assert spack.stage._resolve_paths(paths) == res_paths

@@ -444,7 +444,7 @@ def padding_filter(string):
        r"(/{pad})+"  # the padding string repeated one or more times
        r"(/{longest_prefix})?(?=/)"  # trailing prefix of padding as path component
    )
    regex = regex.replace("/", os.sep)
    regex = regex.replace("/", re.escape(os.sep))
    regex = regex.format(pad=pad, longest_prefix=longest_prefix)
    _filter_re = re.compile(regex)
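The second regex.replace line is the fix: on POSIX os.sep is "/", so the old code only worked by accident; on Windows os.sep is "\", which re treats as the start of an escape sequence unless it is escaped first. A standalone sketch of the difference (not Spack code):

import re

sep = "\\"  # what os.sep is on Windows
template = "(/{pad})+"

# Unescaped: the backslash fuses with the next character into an escape
# sequence, so the compiled pattern is wrong (or raises re.error).
broken = template.replace("/", sep)
# Escaped: the separator is matched literally.
fixed = template.replace("/", re.escape(sep))

print(broken)  # (\{pad})+
print(fixed)   # (\\{pad})+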

@@ -627,7 +627,20 @@ def satisfies(self, other):
        self_cmp = self._cmp(other.ref_lookup)
        other_cmp = other._cmp(self.ref_lookup)

        if other.is_ref:
        if self.is_ref and other.is_ref:
            if self.ref != other.ref:
                return False
            elif self.user_supplied_reference and other.user_supplied_reference:
                return self.ref_version == other.ref_version
            elif other.user_supplied_reference:
                return False
            else:
                # In this case, 'other' does not supply a version equivalence
                # with "=" and the commit strings are equal. 'self' may specify
                # a version equivalence, but that is extra info and will
                # satisfy no matter what it is.
                return True
        elif other.is_ref:
            # if other is a ref then satisfaction requires an exact version match
            # i.e. the GitRef must match this.version for satisfaction
            # this creates an asymmetric comparison:

@@ -9,15 +9,10 @@ default:
    SPACK_TARGET_PLATFORM: "darwin"
    SPACK_TARGET_ARCH: "x86_64"

.linux_x86_64:
.linux_x86_64_v3:
  variables:
    SPACK_TARGET_PLATFORM: "linux"
    SPACK_TARGET_ARCH: "x86_64"

.linux_x86_64_v4:
  variables:
    SPACK_TARGET_PLATFORM: "linux"
    SPACK_TARGET_ARCH: "x86_64_v4"
    SPACK_TARGET_ARCH: "x86_64_v3"

.linux_aarch64:
  variables:

@@ -107,6 +102,37 @@ default:
  extends: [ ".generate-base" ]
  tags: ["spack", "public", "medium", "x86_64"]

.generate-deprecated:
  stage: generate
  script:
    - uname -a || true
    - grep -E 'vendor|model name' /proc/cpuinfo 2>/dev/null | sort -u || head -n10 /proc/cpuinfo 2>/dev/null || true
    - nproc || true
    - . "./share/spack/setup-env.sh"
    - spack --version
    - cd share/spack/gitlab/cloud_pipelines/stacks/${SPACK_CI_STACK_NAME}
    - spack env activate --without-view .
    - spack
      ci generate --check-index-only
      --buildcache-destination "${SPACK_BUILDCACHE_DESTINATION}"
      --artifacts-root "${CI_PROJECT_DIR}/jobs_scratch_dir"
      --output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/cloud-ci-pipeline.yml"
  after_script:
    - cat /proc/loadavg || true
  artifacts:
    paths:
      - "${CI_PROJECT_DIR}/jobs_scratch_dir"
  variables:
    KUBERNETES_CPU_REQUEST: 4000m
    KUBERNETES_MEMORY_REQUEST: 16G
  interruptible: true
  timeout: 60 minutes
  retry:
    max: 2
    when:
      - always
  tags: ["spack", "public", "medium", "x86_64"]

.generate-aarch64:
  extends: [ ".generate" ]
  tags: ["spack", "public", "medium", "aarch64"]

@@ -114,12 +140,18 @@ default:
.pr-generate:
  extends: [ ".pr", ".generate" ]

.pr-generate-deprecated:
  extends: [ ".pr", ".generate-deprecated" ]

.pr-generate-aarch64:
  extends: [ ".pr", ".generate-aarch64" ]

.protected-generate:
  extends: [ ".protected", ".generate" ]

.protected-generate-deprecated:
  extends: [ ".protected", ".generate-deprecated" ]

.protected-generate-aarch64:
  extends: [ ".protected", ".generate-aarch64" ]

@@ -143,6 +175,8 @@ protected-publish:
  variables:
    AWS_ACCESS_KEY_ID: ${PROTECTED_MIRRORS_AWS_ACCESS_KEY_ID}
    AWS_SECRET_ACCESS_KEY: ${PROTECTED_MIRRORS_AWS_SECRET_ACCESS_KEY}
    KUBERNETES_CPU_REQUEST: 4000m
    KUBERNETES_MEMORY_REQUEST: 16G
  script:
    - . "./share/spack/setup-env.sh"
    - spack --version

@@ -169,7 +203,7 @@ protected-publish:
# My Super Cool Pipeline
########################################
# .my-super-cool-stack:
#   extends: [ ".linux_x86_64" ]
#   extends: [ ".linux_x86_64_v3" ]
#   variables:
#     SPACK_CI_STACK_NAME: my-super-cool-stack
#   tags: [ "all", "tags", "your", "job", "needs"]

@@ -324,7 +358,7 @@ protected-publish:
# E4S pipeline
########################################
.e4s:
  extends: [ ".linux_x86_64" ]
  extends: [ ".linux_x86_64_v3" ]
  variables:
    SPACK_CI_STACK_NAME: e4s

@@ -362,7 +396,7 @@ e4s-protected-build:
# GPU Testing Pipeline
########################################
.gpu-tests:
  extends: [ ".linux_x86_64" ]
  extends: [ ".linux_x86_64_v3" ]
  variables:
    SPACK_CI_STACK_NAME: gpu-tests

@@ -400,7 +434,7 @@ gpu-tests-protected-build:
# E4S OneAPI Pipeline
########################################
.e4s-oneapi:
  extends: [ ".linux_x86_64" ]
  extends: [ ".linux_x86_64_v3" ]
  variables:
    SPACK_CI_STACK_NAME: e4s-oneapi

@@ -478,7 +512,7 @@ e4s-power-protected-build:
# Build tests for different build-systems
#########################################
.build_systems:
  extends: [ ".linux_x86_64" ]
  extends: [ ".linux_x86_64_v3" ]
  variables:
    SPACK_CI_STACK_NAME: build_systems

@@ -514,7 +548,7 @@ build_systems-protected-build:
# RADIUSS
#########################################
.radiuss:
  extends: [ ".linux_x86_64" ]
  extends: [ ".linux_x86_64_v3" ]
  variables:
    SPACK_CI_STACK_NAME: radiuss

@@ -562,7 +596,7 @@ radiuss-protected-build:
  image: { "name": "ghcr.io/spack/e4s-amazonlinux-2:v2023-03-09", "entrypoint": [""] }

.radiuss-aws:
  extends: [ ".linux_x86_64_v4" ]
  extends: [ ".linux_x86_64_v3" ]
  variables:
    SPACK_CI_STACK_NAME: radiuss-aws

@@ -634,7 +668,7 @@ radiuss-aws-aarch64-protected-build:
# ECP Data & Vis SDK
########################################
.data-vis-sdk:
  extends: [ ".linux_x86_64" ]
  extends: [ ".linux_x86_64_v3" ]
  variables:
    SPACK_CI_STACK_NAME: data-vis-sdk

@@ -679,7 +713,7 @@ data-vis-sdk-protected-build:
  image: { "name": "ghcr.io/spack/e4s-amazonlinux-2:v2023-03-09", "entrypoint": [""] }

.aws-ahug:
  extends: [ ".linux_x86_64_v4" ]
  extends: [ ".linux_x86_64_v3" ]
  variables:
    SPACK_CI_STACK_NAME: aws-ahug

@@ -757,7 +791,7 @@ aws-ahug-aarch64-protected-build:
  image: { "name": "ghcr.io/spack/e4s-amazonlinux-2:v2023-03-09", "entrypoint": [""] }

.aws-isc:
  extends: [ ".linux_x86_64_v4" ]
  extends: [ ".linux_x86_64_v3" ]
  variables:
    SPACK_CI_STACK_NAME: aws-isc

@@ -830,7 +864,7 @@ aws-isc-aarch64-protected-build:
# Spack Tutorial
########################################
.tutorial:
  extends: [ ".linux_x86_64" ]
  extends: [ ".linux_x86_64_v3" ]
  variables:
    SPACK_CI_STACK_NAME: tutorial

@@ -866,7 +900,7 @@ tutorial-protected-build:
# Machine Learning - Linux x86_64 (CPU)
#######################################
.ml-linux-x86_64-cpu:
  extends: [ ".linux_x86_64_v4" ]
  extends: [ ".linux_x86_64_v3" ]
  variables:
    SPACK_CI_STACK_NAME: ml-linux-x86_64-cpu

@@ -906,7 +940,7 @@ ml-linux-x86_64-cpu-protected-build:
# Machine Learning - Linux x86_64 (CUDA)
########################################
.ml-linux-x86_64-cuda:
  extends: [ ".linux_x86_64_v4" ]
  extends: [ ".linux_x86_64_v3" ]
  variables:
    SPACK_CI_STACK_NAME: ml-linux-x86_64-cuda

@@ -946,7 +980,7 @@ ml-linux-x86_64-cuda-protected-build:
# Machine Learning - Linux x86_64 (ROCm)
########################################
.ml-linux-x86_64-rocm:
  extends: [ ".linux_x86_64_v4" ]
  extends: [ ".linux_x86_64_v3" ]
  variables:
    SPACK_CI_STACK_NAME: ml-linux-x86_64-rocm

@@ -981,3 +1015,39 @@ ml-linux-x86_64-rocm-protected-build:
    needs:
      - artifacts: True
        job: ml-linux-x86_64-rocm-protected-generate


########################################
# Deprecated CI testing
########################################
.deprecated-ci:
  variables:
    SPACK_CI_STACK_NAME: deprecated

deprecated-ci-pr-generate:
  extends: [ ".pr-generate-deprecated", ".deprecated-ci" ]

deprecated-ci-protected-generate:
  extends: [ ".protected-generate-deprecated", ".deprecated-ci" ]

deprecated-ci-pr-build:
  extends: [ ".pr-build", ".deprecated-ci" ]
  trigger:
    include:
      - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
        job: deprecated-ci-pr-generate
    strategy: depend
  needs:
    - artifacts: True
      job: deprecated-ci-pr-generate

deprecated-ci-protected-build:
  extends: [ ".protected-build", ".deprecated-ci" ]
  trigger:
    include:
      - artifact: jobs_scratch_dir/cloud-ci-pipeline.yml
        job: deprecated-ci-protected-generate
    strategy: depend
  needs:
    - artifacts: True
      job: deprecated-ci-protected-generate

@@ -5,4 +5,4 @@ ci:
      - - curl -LfsS "https://github.com/JuliaBinaryWrappers/GNUMake_jll.jl/releases/download/GNUMake-v4.3.0+1/GNUMake.v4.3.0.x86_64-linux-gnu.tar.gz" -o gmake.tar.gz
        - printf "fef1f59e56d2d11e6d700ba22d3444b6e583c663d6883fd0a4f63ab8bd280f0f  gmake.tar.gz" | sha256sum --check --strict --quiet
        - tar -xzf gmake.tar.gz -C /usr bin/make 2> /dev/null
      tags: ["x86_64"]
      tags: ["x86_64_v3"]

@@ -1,8 +0,0 @@
ci:
  pipeline-gen:
  - build-job:
      before_script:
      - - curl -LfsS "https://github.com/JuliaBinaryWrappers/GNUMake_jll.jl/releases/download/GNUMake-v4.3.0+1/GNUMake.v4.3.0.x86_64-linux-gnu.tar.gz" -o gmake.tar.gz
        - printf "fef1f59e56d2d11e6d700ba22d3444b6e583c663d6883fd0a4f63ab8bd280f0f  gmake.tar.gz" | sha256sum --check --strict --quiet
        - tar -xzf gmake.tar.gz -C /usr bin/make 2> /dev/null
      tags: ["x86_64_v4"]

@@ -1,5 +1,8 @@
spack:
  view: false
  packages:
    all:
      require: target=x86_64_v3
  definitions:
  - default_specs:
    - 'uncrustify build_system=autotools'

@@ -11,7 +14,7 @@ spack:
    - r-rcpp  # RPackage
    - ruby-rake  # RubyPackage
  - arch:
    - '%gcc target=x86_64'
    - '%gcc'

  specs:
  - matrix:

@@ -10,7 +10,7 @@ spack:
    libglx:
      require: ^mesa +glx
    all:
      target: [x86_64]
      require: target=x86_64_v3

  definitions:
  - paraview_specs:

@@ -52,6 +52,7 @@ spack:
    - matrix:
      - [$sdk_base_spec]
      - [$^paraview_specs]
      - - ^hdf5@1.14  # Non-VisIt can build HDF5 1.14
  # Test ParaView builds with different GL backends
    - matrix:
      - [$sdk_base_spec]

@@ -0,0 +1,94 @@
###
# Spack pipeline for testing deprecated gitlab-ci configuration
###
spack:
  view: false
  concretizer:
    reuse: false
    unify: false
  config:
    concretizer: clingo
    db_lock_timeout: 120
    install_tree:
      padded_length: 256
      projections:
        all: '{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'
    deprecated: true
  packages:
    all:
      require: target=x86_64
  specs:
  - readline

  mirrors:
    mirror: s3://spack-binaries/develop/deprecated
  gitlab-ci:
    broken-tests-packages:
    - gptune
    broken-specs-url: s3://spack-binaries/broken-specs
    image: ghcr.io/spack/tutorial-ubuntu-18.04:v2021-11-02
    before_script:
    - curl -LfsS "https://github.com/JuliaBinaryWrappers/GNUMake_jll.jl/releases/download/GNUMake-v4.3.0+1/GNUMake.v4.3.0.x86_64-linux-gnu.tar.gz"
      -o gmake.tar.gz
    - printf "fef1f59e56d2d11e6d700ba22d3444b6e583c663d6883fd0a4f63ab8bd280f0f  gmake.tar.gz"
      | sha256sum --check --strict --quiet
    - tar -xzf gmake.tar.gz -C /usr bin/make 2> /dev/null
    - uname -a || true
    - grep -E "vendor|model name" /proc/cpuinfo 2>/dev/null | sort -u || head -n10
      /proc/cpuinfo 2>/dev/null || true
    - nproc
    - . "./share/spack/setup-env.sh"
    - spack --version
    - spack arch
    script:
    - spack compiler find
    - cd ${SPACK_CONCRETE_ENV_DIR}
    - spack env activate --without-view .
    - if [ -n "$SPACK_BUILD_JOBS" ]; then spack config add "config:build_jobs:$SPACK_BUILD_JOBS";
      fi
    - spack config add "config:install_tree:projections:${SPACK_JOB_SPEC_PKG_NAME}:'morepadding/{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}'"
    - mkdir -p ${SPACK_ARTIFACTS_ROOT}/user_data
    # AWS runners mount E4S public key (verification), UO runners mount public/private (signing/verification)
    - if [[ -r /mnt/key/e4s.gpg ]]; then spack gpg trust /mnt/key/e4s.gpg; fi
    # UO runners mount intermediate ci public key (verification), AWS runners mount public/private (signing/verification)
    - if [[ -r /mnt/key/intermediate_ci_signing_key.gpg ]]; then spack gpg trust /mnt/key/intermediate_ci_signing_key.gpg;
      fi
    - if [[ -r /mnt/key/spack_public_key.gpg ]]; then spack gpg trust /mnt/key/spack_public_key.gpg;
      fi
    - spack --color=always --backtrace ci rebuild --tests > >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_out.txt)
      2> >(tee ${SPACK_ARTIFACTS_ROOT}/user_data/pipeline_err.txt >&2)
    after_script:
    - cat /proc/loadavg || true
    match_behavior: first
    mappings:
    - match:
      - '@:'
      runner-attributes:
        tags: [spack, public, small, x86_64]
        variables:
          CI_JOB_SIZE: small
          SPACK_BUILD_JOBS: '1'
          KUBERNETES_CPU_REQUEST: 500m
          KUBERNETES_MEMORY_REQUEST: 500M
    signing-job-attributes:
      image: {name: 'ghcr.io/spack/notary:latest', entrypoint: ['']}
      tags: [aws]
      script:
      - aws s3 sync --exclude "*" --include "*spec.json*" ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache
        /tmp
      - /sign.sh
      - aws s3 sync --exclude "*" --include "*spec.json.sig*" /tmp ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache
      - aws s3 cp /tmp/public_keys ${SPACK_REMOTE_MIRROR_OVERRIDE}/build_cache/_pgp
        --recursive --exclude "*" --include "*.pub"

    service-job-attributes:
      image: ghcr.io/spack/tutorial-ubuntu-18.04:v2021-11-02
      before_script:
      - . "./share/spack/setup-env.sh"
      - spack --version
      tags: [spack, public, x86_64]
  cdash:
    build-group: Spack Deprecated CI
    url: https://cdash.spack.io
    project: Spack Testing
    site: Cloud Gitlab Infrastructure

@@ -62,7 +62,7 @@ spack:
  - conduit
  - datatransferkit
  - dyninst
  - ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 ~paraview +pnetcdf +sz +unifyfs +veloc +visit +vtkm +zfp  # +paraview fails: FAILED: VTK/Filters/Statistics/CMakeFiles/FiltersStatistics-objects.dir/vtkPCAStatistics.cxx.o: /tmp/ccgvkIk5.s: Assembler messages: /tmp/ccgvkIk5.s:260012: Error: invalid machine `power10'
  - ecp-data-vis-sdk ~cuda ~rocm +adios2 +ascent +cinema +darshan +faodel +hdf5 ~paraview +pnetcdf +sz +unifyfs +veloc ~visit +vtkm +zfp  # +paraview fails: FAILED: VTK/Filters/Statistics/CMakeFiles/FiltersStatistics-objects.dir/vtkPCAStatistics.cxx.o: /tmp/ccgvkIk5.s: Assembler messages: /tmp/ccgvkIk5.s:260012: Error: invalid machine `power10'
  - exaworks
  - flecsi
  - flit

@@ -6,7 +6,7 @@ spack:
      providers:
        blas: [openblas]
        mpi: [mpich]
      target: [x86_64]
      require: target=x86_64_v3
      variants: +mpi amdgpu_target=gfx90a cuda_arch=80
    tbb:
      require: "intel-tbb"

@@ -7,7 +7,7 @@ spack:
      providers:
        blas: [openblas]
        mpi: [mpich]
      target: [x86_64]
      require: target=x86_64_v3
      variants: +mpi amdgpu_target=gfx90a cuda_arch=80
    tbb:
      require: "intel-tbb"

@@ -2,84 +2,77 @@ spack:
  view: false
  packages:
    all:
      target: [x86_64_v3]
      require: target=x86_64_v3
      variants: ~cuda~rocm

  definitions:
  - packages:
    # Horovod
    - py-horovod

    # Hugging Face
    - py-transformers

    # JAX
    - py-jax
    - py-jaxlib

    # Keras
    - py-keras
    - py-keras-applications
    - py-keras-preprocessing
    - py-keras2onnx

    # PyTorch
    - py-botorch
    - py-efficientnet-pytorch
    - py-gpytorch
    - py-kornia
    - py-lightning
    - py-pytorch-gradual-warmup-lr
    - py-pytorch-lightning
    - py-segmentation-models-pytorch
    - py-timm
    - py-torch
    - py-torch-cluster
    - py-torch-geometric
    - py-torch-nvidia-apex
    - py-torch-scatter
    - py-torch-sparse
    - py-torch-spline-conv
    - py-torchaudio
    - py-torchdata
    - py-torchfile
    - py-torchgeo
    - py-torchmeta
    - py-torchmetrics
    - py-torchtext
    - py-torchvision
    - py-vector-quantize-pytorch

    # scikit-learn
    - py-scikit-learn
    - py-scikit-learn-extra

    # TensorBoard
    - py-tensorboard
    - py-tensorboard-data-server
    - py-tensorboard-plugin-wit
    - py-tensorboardx

    # TensorFlow
    - py-tensorflow
    - py-tensorflow-datasets
    - py-tensorflow-estimator
    - py-tensorflow-hub
    - py-tensorflow-metadata
    - py-tensorflow-probability

    # XGBoost
    - py-xgboost
    # - r-xgboost
    - xgboost

  - arch:
    - target=x86_64_v3
    mpi:
      require: openmpi

  specs:
  - matrix:
    - [$packages]
    - [$arch]
  # Horovod
  - py-horovod

  # Hugging Face
  - py-transformers

  # JAX
  - py-jax
  - py-jaxlib

  # Keras
  - py-keras
  - py-keras-applications
  - py-keras-preprocessing
  - py-keras2onnx

  # PyTorch
  - py-botorch
  - py-efficientnet-pytorch
  - py-gpytorch
  - py-kornia
  - py-lightning
  - py-pytorch-gradual-warmup-lr
  - py-pytorch-lightning
  - py-segmentation-models-pytorch
  - py-timm
  - py-torch
  - py-torch-cluster
  - py-torch-geometric
  - py-torch-nvidia-apex
  - py-torch-scatter
  - py-torch-sparse
  - py-torch-spline-conv
  - py-torchaudio
  - py-torchdata
  - py-torchfile
  - py-torchgeo
  - py-torchmeta
  - py-torchmetrics
  - py-torchtext
  - py-torchvision
  - py-vector-quantize-pytorch

  # scikit-learn
  - py-scikit-learn
  - py-scikit-learn-extra

  # TensorBoard
  - py-tensorboard
  - py-tensorboard-data-server
  - py-tensorboard-plugin-wit
  - py-tensorboardx

  # TensorFlow
  - py-tensorflow
  - py-tensorflow-datasets
  - py-tensorflow-estimator
  - py-tensorflow-hub
  - py-tensorflow-metadata
  - py-tensorflow-probability

  # XGBoost
  - py-xgboost
  # - r-xgboost
  - xgboost

  mirrors: { "mirror": "s3://spack-binaries/develop/ml-linux-x86_64-cpu" }

@@ -2,87 +2,80 @@ spack:
  view: false
  packages:
    all:
      target: [x86_64_v3]
      require: target=x86_64_v3
      variants: ~rocm+cuda cuda_arch=80
    llvm:
      # https://github.com/spack/spack/issues/27999
      require: ~cuda

  definitions:
  - packages:
    # Horovod
    - py-horovod

    # Hugging Face
    - py-transformers

    # JAX
    - py-jax
    - py-jaxlib

    # Keras
    - py-keras
    - py-keras-applications
    - py-keras-preprocessing
    - py-keras2onnx

    # PyTorch
    - py-botorch
    - py-efficientnet-pytorch
    - py-gpytorch
    - py-kornia
    - py-lightning
    - py-pytorch-gradual-warmup-lr
    - py-pytorch-lightning
    - py-segmentation-models-pytorch
    - py-timm
    - py-torch
    - py-torch-cluster
    - py-torch-geometric
    - py-torch-nvidia-apex
    - py-torch-scatter
    - py-torch-sparse
    - py-torch-spline-conv
    - py-torchaudio
    - py-torchdata
    - py-torchfile
    - py-torchgeo
    - py-torchmeta
    - py-torchmetrics
    - py-torchtext
    - py-torchvision
    - py-vector-quantize-pytorch

    # scikit-learn
    - py-scikit-learn
    - py-scikit-learn-extra

    # TensorBoard
    - py-tensorboard
    - py-tensorboard-data-server
    - py-tensorboard-plugin-wit
    - py-tensorboardx

    # TensorFlow
    - py-tensorflow
    - py-tensorflow-datasets
    - py-tensorflow-estimator
    - py-tensorflow-hub
    - py-tensorflow-metadata
    - py-tensorflow-probability

    # XGBoost
    - py-xgboost
    # - r-xgboost
    - xgboost

  - arch:
    - target=x86_64_v3
    mpi:
      require: openmpi

  specs:
  - matrix:
    - [$packages]
    - [$arch]
  # Horovod
  - py-horovod

  # Hugging Face
  - py-transformers

  # JAX
  - py-jax
  - py-jaxlib

  # Keras
  - py-keras
  - py-keras-applications
  - py-keras-preprocessing
  - py-keras2onnx

  # PyTorch
  - py-botorch
  - py-efficientnet-pytorch
  - py-gpytorch
  - py-kornia
  - py-lightning
  - py-pytorch-gradual-warmup-lr
  - py-pytorch-lightning
  - py-segmentation-models-pytorch
  - py-timm
  - py-torch
  - py-torch-cluster
  - py-torch-geometric
  - py-torch-nvidia-apex
  - py-torch-scatter
  - py-torch-sparse
  - py-torch-spline-conv
  - py-torchaudio
  - py-torchdata
  - py-torchfile
  - py-torchgeo
  - py-torchmeta
  - py-torchmetrics
  - py-torchtext
  - py-torchvision
  - py-vector-quantize-pytorch

  # scikit-learn
  - py-scikit-learn
  - py-scikit-learn-extra

  # TensorBoard
  - py-tensorboard
  - py-tensorboard-data-server
  - py-tensorboard-plugin-wit
  - py-tensorboardx

  # TensorFlow
  - py-tensorflow
  - py-tensorflow-datasets
  - py-tensorflow-estimator
  - py-tensorflow-hub
  - py-tensorflow-metadata
  - py-tensorflow-probability

  # XGBoost
  - py-xgboost
  # - r-xgboost
  - xgboost

  mirrors: { "mirror": "s3://spack-binaries/develop/ml-linux-x86_64-cuda" }

@@ -2,90 +2,83 @@ spack:
  view: false
  packages:
    all:
      target: [x86_64_v3]
      require: target=x86_64_v3
      variants: ~cuda+rocm amdgpu_target=gfx90a
    gl:
      require: "osmesa"
    py-torch:
      # Does not yet support Spack-installed ROCm
      require: ~rocm

  definitions:
  - packages:
    # Horovod
    - py-horovod

    # Hugging Face
    - py-transformers

    # JAX
    - py-jax
    - py-jaxlib

    # Keras
    - py-keras
    - py-keras-applications
    - py-keras-preprocessing
    - py-keras2onnx

    # PyTorch
    # Does not yet support Spack-installed ROCm
    # - py-botorch
    # - py-efficientnet-pytorch
    # - py-gpytorch
    # - py-kornia
    # - py-lightning
    # - py-pytorch-gradual-warmup-lr
    # - py-pytorch-lightning
    # - py-segmentation-models-pytorch
    # - py-timm
    # - py-torch
    # - py-torch-cluster
    # - py-torch-geometric
    # - py-torch-nvidia-apex
    # - py-torch-scatter
    # - py-torch-sparse
    # - py-torch-spline-conv
    # - py-torchaudio
    # - py-torchdata
    # - py-torchfile
    # - py-torchgeo
    # - py-torchmeta
    # - py-torchmetrics
    # - py-torchtext
    # - py-torchvision
    # - py-vector-quantize-pytorch

    # scikit-learn
    - py-scikit-learn
    - py-scikit-learn-extra

    # TensorBoard
    - py-tensorboard
    - py-tensorboard-data-server
    - py-tensorboard-plugin-wit
    - py-tensorboardx

    # TensorFlow
    - py-tensorflow
    - py-tensorflow-datasets
    - py-tensorflow-estimator
    - py-tensorflow-hub
    - py-tensorflow-metadata
    - py-tensorflow-probability

    # XGBoost
    - py-xgboost
    # - r-xgboost
    - xgboost

  - arch:
    - target=x86_64_v3
    mpi:
      require: openmpi

  specs:
  - matrix:
    - [$packages]
    - [$arch]
  # Horovod
  - py-horovod

  # Hugging Face
  - py-transformers

  # JAX
  - py-jax
  - py-jaxlib

  # Keras
  - py-keras
  - py-keras-applications
  - py-keras-preprocessing
  - py-keras2onnx

  # PyTorch
  # Does not yet support Spack-installed ROCm
  # - py-botorch
  # - py-efficientnet-pytorch
  # - py-gpytorch
  # - py-kornia
  # - py-lightning
  # - py-pytorch-gradual-warmup-lr
  # - py-pytorch-lightning
  # - py-segmentation-models-pytorch
  # - py-timm
  # - py-torch
  # - py-torch-cluster
  # - py-torch-geometric
  # - py-torch-nvidia-apex
  # - py-torch-scatter
  # - py-torch-sparse
  # - py-torch-spline-conv
  # - py-torchaudio
  # - py-torchdata
  # - py-torchfile
  # - py-torchgeo
  # - py-torchmeta
  # - py-torchmetrics
  # - py-torchtext
  # - py-torchvision
  # - py-vector-quantize-pytorch

  # scikit-learn
  - py-scikit-learn
  - py-scikit-learn-extra

  # TensorBoard
  - py-tensorboard
  - py-tensorboard-data-server
  - py-tensorboard-plugin-wit
  - py-tensorboardx

  # TensorFlow
  - py-tensorflow
  - py-tensorflow-datasets
  - py-tensorflow-estimator
  - py-tensorflow-hub
  - py-tensorflow-metadata
  - py-tensorflow-probability

  # XGBoost
  - py-xgboost
  # - r-xgboost
  - xgboost

  mirrors: { "mirror": "s3://spack-binaries/develop/ml-linux-x86_64-rocm" }

@@ -11,6 +11,7 @@ spack:
      - openmpi
      - mpich
      variants: +mpi cuda_arch=70
      require: target=x86_64_v3

  definitions:
  - radiuss:

@@ -2,7 +2,7 @@ spack:
  view: false
  packages:
    all:
      target: [x86_64]
      require: target=x86_64_v3

      providers:
        mpi: [mvapich2]

@@ -7,7 +7,7 @@ spack:
  view: false
  packages:
    all:
      target: [x86_64]
      require: target=x86_64_v3
    tbb:
      require: intel-tbb
  definitions:

@@ -6,6 +6,7 @@
# Build stage with Spack pre-installed and ready to be used
FROM {{ build.image }} as builder

{% block build_stage %}
{% if os_packages_build %}
# Install OS packages needed to build the software
RUN {% if os_package_update %}{{ os_packages_build.update }} \

@@ -19,7 +20,11 @@ RUN mkdir {{ paths.environment }} \
    {{ manifest }} > {{ paths.environment }}/spack.yaml

# Install the software, remove unnecessary deps
{% if depfile %}
RUN cd {{ paths.environment }} && spack env activate . && spack concretize && spack env depfile -o Makefile && make -j $(nproc) && spack gc -y
{% else %}
RUN cd {{ paths.environment }} && spack env activate . && spack install --fail-fast && spack gc -y
{% endif %}
{% if strip %}

# Strip all the binaries

@@ -37,7 +42,9 @@ RUN cd {{ paths.environment }} && \
{% if extra_instructions.build %}
{{ extra_instructions.build }}
{% endif %}
{% endblock build_stage %}
{% endif %}

{% if render_phase.final %}
# Bare OS image to run the installed executables
FROM {{ run.image }}

@@ -48,6 +55,8 @@ COPY --from=builder {{ paths.hidden_view }} {{ paths.hidden_view }}
COPY --from=builder {{ paths.view }} {{ paths.view }}
COPY --from=builder /etc/profile.d/z10_spack_environment.sh /etc/profile.d/z10_spack_environment.sh

{% block final_stage %}

{% if os_packages_final %}
RUN {% if os_package_update %}{{ os_packages_final.update }} \
    && {% endif %}{{ os_packages_final.install }} {{ os_packages_final.list | join | replace('\n', ' ') }} \

@@ -57,6 +66,7 @@ RUN {% if os_package_update %}{{ os_packages_final.update }} \

{{ extra_instructions.final }}
{% endif %}
{% endblock final_stage %}
{% for label, value in labels.items() %}
LABEL "{{ label }}"="{{ value }}"
{% endfor %}

@@ -3,6 +3,7 @@ From: {{ build.image }}
Stage: build

%post
{% block build_stage %}
{% if os_packages_build.list %}
  # Update, install and cleanup of system packages needed at build-time
{% if os_package_update %}

@@ -20,10 +21,14 @@ EOF

  # Install all the required software
  . /opt/spack/share/spack/setup-env.sh
  spack env activate .
  spack install --fail-fast
  spack -e . concretize
{% if depfile %}
  spack -e . env depfile -o Makefile
  make -j $(nproc)
{% else %}
  spack -e . install
{% endif %}
  spack gc -y
  spack env deactivate
  spack env activate --sh -d . >> {{ paths.environment }}/environment_modifications.sh
{% if strip %}

@@ -37,7 +42,7 @@ EOF
{% if extra_instructions.build %}
{{ extra_instructions.build }}
{% endif %}

{% endblock build_stage %}
{% if apps %}
{% for application, help_text in apps.items() %}

@@ -61,6 +66,7 @@ Stage: final
    {{ paths.environment }}/environment_modifications.sh {{ paths.environment }}/environment_modifications.sh

%post
{% block final_stage %}
{% if os_packages_final.list %}
  # Update, install and cleanup of system packages needed at run-time
{% if os_package_update %}

@@ -74,6 +80,7 @@ Stage: final
{% if extra_instructions.final %}
{{ extra_instructions.final }}
{% endif %}
{% endblock final_stage %}
{% if runscript %}

%runscript

@@ -15,7 +15,11 @@ whatis([[Short description : {{ short_description }}]])
whatis([[Configure options : {{ configure_options }}]])
{% endif %}

help([[Name : {{ spec.name }}]])
help([[Version: {{ spec.version }}]])
help([[Target : {{ spec.target }}]])
{% if long_description %}
help()
help([[{{ long_description| textwrap(72)| join() }}]])
{% endif %}
{% endblock %}

@@ -70,9 +74,9 @@ depends_on("{{ module }}")
{% for command_name, cmd in environment_modifications %}
{% if command_name == 'PrependPath' %}
prepend_path("{{ cmd.name }}", "{{ cmd.value }}", "{{ cmd.separator }}")
{% elif command_name == 'AppendPath' %}
{% elif command_name in ('AppendPath', 'AppendFlagsEnv') %}
append_path("{{ cmd.name }}", "{{ cmd.value }}", "{{ cmd.separator }}")
{% elif command_name == 'RemovePath' %}
{% elif command_name in ('RemovePath', 'RemoveFlagsEnv') %}
remove_path("{{ cmd.name }}", "{{ cmd.value }}", "{{ cmd.separator }}")
{% elif command_name == 'SetEnv' %}
setenv("{{ cmd.name }}", "{{ cmd.value }}")

@@ -14,11 +14,15 @@
module-whatis "{{ short_description }}"
{% endif %}

{% if long_description %}
proc ModulesHelp { } {
{{ long_description| textwrap(72)| quote()| prepend_to_line('puts stderr ')| join() }}
}
    puts stderr "Name : {{ spec.name }}"
    puts stderr "Version: {{ spec.version }}"
    puts stderr "Target : {{ spec.target }}"
{% if long_description %}
    puts stderr ""
{{ long_description| textwrap(72)| quote()| prepend_to_line('    puts stderr ')| join() }}
{% endif %}
}
{% endblock %}

{% block autoloads %}

@@ -41,34 +45,18 @@ conflict {{ name }}

{% block environment %}
{% for command_name, cmd in environment_modifications %}
{% if cmd.separator != ':' %}
{# A non-standard separator is required #}
{% if command_name == 'PrependPath' %}
prepend-path --delim "{{ cmd.separator }}" {{ cmd.name }} "{{ cmd.value }}"
{% elif command_name == 'AppendPath' %}
{% elif command_name in ('AppendPath', 'AppendFlagsEnv') %}
append-path --delim "{{ cmd.separator }}" {{ cmd.name }} "{{ cmd.value }}"
{% elif command_name == 'RemovePath' %}
{% elif command_name in ('RemovePath', 'RemoveFlagsEnv') %}
remove-path --delim "{{ cmd.separator }}" {{ cmd.name }} "{{ cmd.value }}"
{% elif command_name == 'SetEnv' %}
setenv --delim "{{ cmd.separator }}" {{ cmd.name }} "{{ cmd.value }}"
{% elif command_name == 'UnsetEnv' %}
unsetenv {{ cmd.name }}
{% endif %}
{% else %}
{# We are using the usual separator #}
{% if command_name == 'PrependPath' %}
prepend-path {{ cmd.name }} "{{ cmd.value }}"
{% elif command_name == 'AppendPath' %}
append-path {{ cmd.name }} "{{ cmd.value }}"
{% elif command_name == 'RemovePath' %}
remove-path {{ cmd.name }} "{{ cmd.value }}"
{% elif command_name == 'SetEnv' %}
setenv {{ cmd.name }} "{{ cmd.value }}"
{% elif command_name == 'UnsetEnv' %}
unsetenv {{ cmd.name }}
{% endif %}
{# #}
{% endif %}
{% endfor %}
{% endblock %}

@@ -20,3 +20,6 @@ def setup_run_environment(self, env):
        env.append_path("SEMICOLON", "bar", separator=";")
        env.prepend_path("SEMICOLON", "bar", separator=";")
        env.remove_path("SEMICOLON", "bar", separator=";")

        env.append_flags("SPACE", "qux")
        env.remove_flags("SPACE", "qux")

@@ -70,7 +70,7 @@ def plat_arch(self):
        return arch

    def is_64bit(self):
        return "64" in self.pkg.spec.target.family
        return "64" in str(self.pkg.spec.target.family)

    def build(self, spec, prefix):
        link_type = "1" if "static" in spec.variants["link_type"].value else "0"

@@ -15,6 +15,9 @@ class AbseilCpp(CMakePackage):
    maintainers("jcftang")
    tags = ["windows"]

    version(
        "20230125.2", sha256="9a2b5752d7bfade0bdeee2701de17c9480620f8b237e1964c1b9967c75374906"
    )
    version(
        "20220623.0", sha256="4208129b49006089ba1d6710845a45e31c59b0ab6bff9e5788a87f55c5abd602"
    )

@@ -55,16 +58,18 @@ class AbseilCpp(CMakePackage):

    variant(
        "cxxstd",
        values=("11", "14", "17", "20"),
        default="11",
        values=(conditional("11", when="@:2022"), "14", "17", "20"),
        default="14",
        description="C++ standard used during compilation",
    )

    depends_on("cmake@3.10:", when="@2023:", type="build")
    depends_on("cmake@3.5:", when="@2019:", type="build")
    depends_on("cmake@3.1:", type="build")

    def cmake_args(self):
        shared = "ON" if "+shared" in self.spec else "OFF"
        cxxstd = self.spec.variants["cxxstd"].value
        return [
            self.define("BUILD_TESTING", "OFF"),
            self.define("BUILD_SHARED_LIBS:Bool", shared),
            self.define("CMAKE_CXX_STANDARD", cxxstd),
            self.define("BUILD_TESTING", False),
            self.define_from_variant("BUILD_SHARED_LIBS", "shared"),
            self.define_from_variant("CMAKE_CXX_STANDARD", "cxxstd"),
        ]
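For reference, the helper-based form is equivalent to the hand-built flags it replaces. A hedged illustration of what these calls typically expand to (exact formatting can vary between Spack versions):

# self.define("BUILD_TESTING", False)
#     -> "-DBUILD_TESTING:BOOL=OFF"
# self.define_from_variant("BUILD_SHARED_LIBS", "shared")
#     -> "-DBUILD_SHARED_LIBS:BOOL=ON"   with +shared in the spec
#     -> "-DBUILD_SHARED_LIBS:BOOL=OFF"  with ~shared
# self.define_from_variant("CMAKE_CXX_STANDARD", "cxxstd")
#     -> "-DCMAKE_CXX_STANDARD:STRING=14"  (value read from the variant)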

@@ -265,6 +265,7 @@ class Acts(CMakePackage, CudaPackage):
    depends_on("eigen @3.3.7:3.3.99", when="@:15.0")
    depends_on("geant4", when="+fatras_geant4")
    depends_on("geant4", when="+geant4")
    depends_on("git-lfs", when="@12.0.0:")
    depends_on("gperftools", when="+profilecpu")
    depends_on("gperftools", when="+profilemem")
    depends_on("hepmc3 @3.2.1:", when="+hepmc3")

@@ -26,7 +26,7 @@ class Amdblis(BlisBase):
    """

    _name = "amdblis"
    homepage = "https://developer.amd.com/amd-aocl/blas-library/"
    homepage = "https://www.amd.com/en/developer/aocl/blis.html"
    url = "https://github.com/amd/blis/archive/3.0.tar.gz"
    git = "https://github.com/amd/blis.git"

@@ -31,7 +31,7 @@ class Amdfftw(FftwBase):
    """

    _name = "amdfftw"
    homepage = "https://developer.amd.com/amd-aocl/fftw/"
    homepage = "https://www.amd.com/en/developer/aocl/fftw.html"
    url = "https://github.com/amd/amd-fftw/archive/3.0.tar.gz"
    git = "https://github.com/amd/amd-fftw.git"

Some files were not shown because too many files have changed in this diff.