Compare commits
243 Commits
e4s-22.02
...
versions/g
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
dd668047fb | ||
|
|
2ac11812ba | ||
|
|
f6ea56276f | ||
|
|
cfae035a7b | ||
|
|
9dfaa3d8fd | ||
|
|
858c780e1f | ||
|
|
afc397be3a | ||
|
|
cd2cb4aef5 | ||
|
|
ebf92b428e | ||
|
|
1834ab971d | ||
|
|
ed6274a0bf | ||
|
|
450bbdd854 | ||
|
|
49069e4f58 | ||
|
|
20471b8420 | ||
|
|
5cd1e08e8a | ||
|
|
c72735229f | ||
|
|
79c0f631de | ||
|
|
01c1e6860f | ||
|
|
cbfe0d7492 | ||
|
|
a6aff211d2 | ||
|
|
dfdb11bc71 | ||
|
|
2c331a1d7f | ||
|
|
916c94fd65 | ||
|
|
1599d841c0 | ||
|
|
e6dcd382ee | ||
|
|
a94f11a2b2 | ||
|
|
a1f32cdaff | ||
|
|
8d118104c7 | ||
|
|
a43e3f3ffb | ||
|
|
bab331ff34 | ||
|
|
d9113eb5ec | ||
|
|
4831daa2dc | ||
|
|
d5dd6471fc | ||
|
|
277f578707 | ||
|
|
3478b06262 | ||
|
|
7268ab75f4 | ||
|
|
f2c5092588 | ||
|
|
d05560ee32 | ||
|
|
71f081366b | ||
|
|
327fcf7a54 | ||
|
|
8cefe38b0e | ||
|
|
88eb437d94 | ||
|
|
a155975b8b | ||
|
|
b20df12d09 | ||
|
|
f60c6ca485 | ||
|
|
a3947381c7 | ||
|
|
6eef12cd10 | ||
|
|
2bd016895d | ||
|
|
10f3113b3c | ||
|
|
9b298fd7e4 | ||
|
|
8f4e029e3a | ||
|
|
d29253bd7a | ||
|
|
8eefab4033 | ||
|
|
abfd300eef | ||
|
|
dd5943bc6f | ||
|
|
00ed99dc16 | ||
|
|
db2340007a | ||
|
|
2610423e78 | ||
|
|
888eb11565 | ||
|
|
1cc5391443 | ||
|
|
b082c33c85 | ||
|
|
d62b8f0bf3 | ||
|
|
c17f8d938e | ||
|
|
c6556b7a06 | ||
|
|
31c8567007 | ||
|
|
8c1e54180e | ||
|
|
dc01f9597e | ||
|
|
8f372fc88f | ||
|
|
6cc2e7bcd4 | ||
|
|
d7b9ad6456 | ||
|
|
ceea479b56 | ||
|
|
8bd9527a71 | ||
|
|
205e9f7d73 | ||
|
|
a8b1fbde41 | ||
|
|
3243731c1c | ||
|
|
0ed94f9529 | ||
|
|
96236f229f | ||
|
|
dccc58c0ad | ||
|
|
2e0cf6f9ee | ||
|
|
b2a0b6d6c3 | ||
|
|
fddc58387c | ||
|
|
0b4f40ab79 | ||
|
|
b21d30d640 | ||
|
|
b1f223d224 | ||
|
|
9fef13ce95 | ||
|
|
e4ba7bb044 | ||
|
|
ab1e9d717e | ||
|
|
8e4ccf91e4 | ||
|
|
125e4e00b4 | ||
|
|
1ddad522a4 | ||
|
|
1cb82dc542 | ||
|
|
17c065a750 | ||
|
|
a9ba40164a | ||
|
|
535262844b | ||
|
|
ed447e1ac7 | ||
|
|
d840c3a069 | ||
|
|
6a259ecd85 | ||
|
|
36b0730fac | ||
|
|
800933bbdf | ||
|
|
2516885615 | ||
|
|
2446771b63 | ||
|
|
f9843367ed | ||
|
|
b08ed91309 | ||
|
|
0d3ecff903 | ||
|
|
aca6b73a6c | ||
|
|
92970c2006 | ||
|
|
f47e24381d | ||
|
|
2941afe9e0 | ||
|
|
e6521e7379 | ||
|
|
37f021ef3c | ||
|
|
2ab1ace5f4 | ||
|
|
8daee48231 | ||
|
|
8485474140 | ||
|
|
51488dbff5 | ||
|
|
1953d986ae | ||
|
|
fef58db792 | ||
|
|
36c64c8012 | ||
|
|
8cd95b9f35 | ||
|
|
7912a8e90b | ||
|
|
2210f84a91 | ||
|
|
b8d042273a | ||
|
|
9173fd7c61 | ||
|
|
275608e2f2 | ||
|
|
f744640289 | ||
|
|
b9d26caab8 | ||
|
|
0dd5d493d5 | ||
|
|
e7894b4863 | ||
|
|
94d75d0327 | ||
|
|
96fceb6e38 | ||
|
|
fc8c3ada56 | ||
|
|
60fe21ddd7 | ||
|
|
0ffce33447 | ||
|
|
043794362a | ||
|
|
f36a6f3fc0 | ||
|
|
7995b7eac4 | ||
|
|
509f1cc00a | ||
|
|
7459aa6c95 | ||
|
|
2852126196 | ||
|
|
9e2d78cffc | ||
|
|
e19f29da66 | ||
|
|
39fcafaf45 | ||
|
|
22d07b328e | ||
|
|
f37855f5bb | ||
|
|
f33770553f | ||
|
|
e886a61a6c | ||
|
|
bbb81d5d68 | ||
|
|
99707beae7 | ||
|
|
be53b5db96 | ||
|
|
45f3b2fc52 | ||
|
|
e5f6914bd2 | ||
|
|
7fd94fc4bc | ||
|
|
2ed52d32c7 | ||
|
|
31e538795e | ||
|
|
0ad3319243 | ||
|
|
4efd47d0c3 | ||
|
|
43016d0ff4 | ||
|
|
5796de8bcb | ||
|
|
60e9af6e0f | ||
|
|
ae84ce535b | ||
|
|
5bdc72e2ed | ||
|
|
7a1364fcb2 | ||
|
|
6c61c2695a | ||
|
|
b768fb85c6 | ||
|
|
b3f5f55f95 | ||
|
|
00f0d11b8f | ||
|
|
e6142e8183 | ||
|
|
98fa2c6f10 | ||
|
|
38643dcd7e | ||
|
|
e7e6a16064 | ||
|
|
fefe65a35b | ||
|
|
4d669bfdf4 | ||
|
|
d93f9b82ac | ||
|
|
fa132614e0 | ||
|
|
a0bd6c8817 | ||
|
|
dcdf5022ad | ||
|
|
b021cf39aa | ||
|
|
8f5fcc6e95 | ||
|
|
e8c5f195a7 | ||
|
|
36020d69d7 | ||
|
|
b1ff9c05bc | ||
|
|
d33973df6c | ||
|
|
a2b8e0c3e9 | ||
|
|
f155de7462 | ||
|
|
800ed16e7a | ||
|
|
1903e45eec | ||
|
|
87a3b72ef0 | ||
|
|
884da5e326 | ||
|
|
9fc9386944 | ||
|
|
e84a2db23d | ||
|
|
5c1edbe00a | ||
|
|
5272e72344 | ||
|
|
3c1b2c0fc9 | ||
|
|
e6ea4c788a | ||
|
|
5cd8fc37ba | ||
|
|
3e69449ecd | ||
|
|
ef921e4107 | ||
|
|
6d6641f706 | ||
|
|
8f19bf2f31 | ||
|
|
fe5cb90f83 | ||
|
|
b0dc83afff | ||
|
|
4c8582dfc8 | ||
|
|
8654cc93ef | ||
|
|
5d58b94322 | ||
|
|
924be97a9b | ||
|
|
dfd83e60ac | ||
|
|
0f9f636f38 | ||
|
|
2fa892daa2 | ||
|
|
c433a35fdb | ||
|
|
23ddfba16d | ||
|
|
52d4e209e2 | ||
|
|
5b34b947a8 | ||
|
|
dea9766336 | ||
|
|
939e94790f | ||
|
|
9da8f18e3a | ||
|
|
f707987275 | ||
|
|
39c4af5f79 | ||
|
|
9cd311c82d | ||
|
|
53ca65b103 | ||
|
|
c987d06a19 | ||
|
|
79f22423b8 | ||
|
|
cebe4fdf1d | ||
|
|
d61c1f623c | ||
|
|
55996d3ad4 | ||
|
|
3640c258dc | ||
|
|
a8d440d3ab | ||
|
|
7fc9c16f9e | ||
|
|
6864b28fd8 | ||
|
|
cf0c9affff | ||
|
|
09a8656f1f | ||
|
|
9f1c6c0c29 | ||
|
|
fdec3b47cc | ||
|
|
c313a72e76 | ||
|
|
28caa0225f | ||
|
|
30fafa63e0 | ||
|
|
08cad7d0ee | ||
|
|
9165e3fb86 | ||
|
|
d54a5d9dd8 | ||
|
|
76489eb213 | ||
|
|
64dd6378d4 | ||
|
|
67ea14098d | ||
|
|
5b80c4ab6c | ||
|
|
1377c02e26 | ||
|
|
13013d0291 |
18
.github/ISSUE_TEMPLATE/build_error.yml
vendored
18
.github/ISSUE_TEMPLATE/build_error.yml
vendored
@@ -16,19 +16,29 @@ body:
|
||||
attributes:
|
||||
label: Steps to reproduce the issue
|
||||
description: |
|
||||
Fill in the exact spec you are trying to build and the relevant part of the error message
|
||||
placeholder: |
|
||||
Fill in the console output from the exact spec you are trying to build.
|
||||
value: |
|
||||
```console
|
||||
$ spack install <spec>
|
||||
$ spack spec -I <spec>
|
||||
...
|
||||
```
|
||||
- type: textarea
|
||||
id: error
|
||||
attributes:
|
||||
label: Error message
|
||||
description: |
|
||||
Please post the error message from spack inside the `<details>` tag below:
|
||||
value: |
|
||||
<details><summary>Error message</summary><pre>
|
||||
...
|
||||
</pre></details>
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: information
|
||||
attributes:
|
||||
label: Information on your system
|
||||
description: Please include the output of `spack debug report`
|
||||
description: Please include the output of `spack debug report`.
|
||||
validations:
|
||||
required: true
|
||||
- type: markdown
|
||||
|
||||
26
.github/workflows/bootstrap.yml
vendored
26
.github/workflows/bootstrap.yml
vendored
@@ -31,7 +31,7 @@ jobs:
|
||||
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
|
||||
make patch unzip which xz python3 python3-devel tree \
|
||||
cmake bison bison-devel libstdc++-static
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
git --version
|
||||
@@ -61,7 +61,7 @@ jobs:
|
||||
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree \
|
||||
cmake bison
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
git --version
|
||||
@@ -90,7 +90,7 @@ jobs:
|
||||
apt-get install -y \
|
||||
bzip2 curl file g++ gcc gfortran git gnupg2 gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
git --version
|
||||
@@ -118,7 +118,7 @@ jobs:
|
||||
bzip2 curl file gcc-c++ gcc gcc-fortran tar git gpg2 gzip \
|
||||
make patch unzip which xz python3 python3-devel tree \
|
||||
cmake bison
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
git --version
|
||||
@@ -138,7 +138,7 @@ jobs:
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
brew install cmake bison@2.7 tree
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- name: Bootstrap clingo
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
@@ -157,8 +157,8 @@ jobs:
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
brew install tree
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- uses: actions/setup-python@f38219332975fe8f9c04cca981d674bf22aea1d3 # @v2
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Bootstrap clingo
|
||||
@@ -174,8 +174,8 @@ jobs:
|
||||
matrix:
|
||||
python-version: ['2.7', '3.5', '3.6', '3.7', '3.8', '3.9']
|
||||
steps:
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- uses: actions/setup-python@f38219332975fe8f9c04cca981d674bf22aea1d3 # @v2
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Setup repo and non-root user
|
||||
@@ -202,7 +202,7 @@ jobs:
|
||||
apt-get install -y \
|
||||
bzip2 curl file g++ gcc patchelf gfortran git gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
git --version
|
||||
@@ -231,7 +231,7 @@ jobs:
|
||||
bzip2 curl file g++ gcc patchelf gfortran git gzip \
|
||||
make patch unzip xz-utils python3 python3-dev tree \
|
||||
gawk
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
git --version
|
||||
@@ -256,7 +256,7 @@ jobs:
|
||||
brew install tree
|
||||
# Remove GnuPG since we want to bootstrap it
|
||||
sudo rm -rf /usr/local/bin/gpg
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846
|
||||
- name: Bootstrap GnuPG
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
@@ -272,7 +272,7 @@ jobs:
|
||||
brew install gawk tree
|
||||
# Remove GnuPG since we want to bootstrap it
|
||||
sudo rm -rf /usr/local/bin/gpg
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846
|
||||
- name: Bootstrap GnuPG
|
||||
run: |
|
||||
source share/spack/setup-env.sh
|
||||
|
||||
6
.github/workflows/build-containers.yml
vendored
6
.github/workflows/build-containers.yml
vendored
@@ -37,7 +37,7 @@ jobs:
|
||||
name: Build ${{ matrix.dockerfile[0] }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
|
||||
- name: Set Container Tag Normal (Nightly)
|
||||
run: |
|
||||
@@ -67,7 +67,7 @@ jobs:
|
||||
uses: docker/setup-buildx-action@94ab11c41e45d028884a99163086648e898eed25 # @v1
|
||||
|
||||
- name: Log in to GitHub Container Registry
|
||||
uses: docker/login-action@42d299face0c5c43a0487c477f595ac9cf22f1a7 # @v1
|
||||
uses: docker/login-action@dd4fa0671be5250ee6f50aedf4cb05514abda2c7 # @v1
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
@@ -75,7 +75,7 @@ jobs:
|
||||
|
||||
- name: Log in to DockerHub
|
||||
if: ${{ github.event_name != 'pull_request' }}
|
||||
uses: docker/login-action@42d299face0c5c43a0487c477f595ac9cf22f1a7 # @v1
|
||||
uses: docker/login-action@dd4fa0671be5250ee6f50aedf4cb05514abda2c7 # @v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
12
.github/workflows/macos_python.yml
vendored
12
.github/workflows/macos_python.yml
vendored
@@ -24,8 +24,8 @@ jobs:
|
||||
name: gcc with clang
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- uses: actions/setup-python@f38219332975fe8f9c04cca981d674bf22aea1d3 # @v2
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: spack install
|
||||
@@ -39,8 +39,8 @@ jobs:
|
||||
runs-on: macos-latest
|
||||
timeout-minutes: 700
|
||||
steps:
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- uses: actions/setup-python@f38219332975fe8f9c04cca981d674bf22aea1d3 # @v2
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: spack install
|
||||
@@ -52,8 +52,8 @@ jobs:
|
||||
name: scipy, mpl, pd
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- uses: actions/setup-python@f38219332975fe8f9c04cca981d674bf22aea1d3 # @v2
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: spack install
|
||||
|
||||
32
.github/workflows/unit_tests.yaml
vendored
32
.github/workflows/unit_tests.yaml
vendored
@@ -15,8 +15,8 @@ jobs:
|
||||
validate:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- uses: actions/setup-python@f38219332975fe8f9c04cca981d674bf22aea1d3 # @v2
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python Packages
|
||||
@@ -31,10 +31,10 @@ jobs:
|
||||
style:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@f38219332975fe8f9c04cca981d674bf22aea1d3 # @v2
|
||||
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
@@ -57,7 +57,7 @@ jobs:
|
||||
packages: ${{ steps.filter.outputs.packages }}
|
||||
with_coverage: ${{ steps.coverage.outputs.with_coverage }}
|
||||
steps:
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
if: ${{ github.event_name == 'push' }}
|
||||
with:
|
||||
fetch-depth: 0
|
||||
@@ -106,10 +106,10 @@ jobs:
|
||||
- python-version: 3.9
|
||||
concretizer: original
|
||||
steps:
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@f38219332975fe8f9c04cca981d674bf22aea1d3 # @v2
|
||||
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install System packages
|
||||
@@ -171,10 +171,10 @@ jobs:
|
||||
needs: [ validate, style, changes ]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@f38219332975fe8f9c04cca981d674bf22aea1d3 # @v2
|
||||
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install System packages
|
||||
@@ -218,7 +218,7 @@ jobs:
|
||||
dnf install -y \
|
||||
bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
|
||||
make patch tcl unzip which xz
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- name: Setup repo and non-root user
|
||||
run: |
|
||||
git --version
|
||||
@@ -237,10 +237,10 @@ jobs:
|
||||
needs: [ validate, style, changes ]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@f38219332975fe8f9c04cca981d674bf22aea1d3 # @v2
|
||||
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install System packages
|
||||
@@ -286,10 +286,10 @@ jobs:
|
||||
matrix:
|
||||
python-version: [3.8]
|
||||
steps:
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- uses: actions/setup-python@f38219332975fe8f9c04cca981d674bf22aea1d3 # @v2
|
||||
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install Python packages
|
||||
@@ -331,8 +331,8 @@ jobs:
|
||||
needs: [ validate, style, changes ]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@ec3a7ce113134d7a93b817d10a8272cb61118579 # @v2
|
||||
- uses: actions/setup-python@f38219332975fe8f9c04cca981d674bf22aea1d3 # @v2
|
||||
- uses: actions/checkout@a12a3943b4bdde767164f792f33f40b04645d846 # @v2
|
||||
- uses: actions/setup-python@0ebf233433c08fb9061af664d501c3f3ff0e9e20 # @v2
|
||||
with:
|
||||
python-version: 3.9
|
||||
- name: Install Python packages
|
||||
|
||||
17
etc/spack/defaults/concretizer.yaml
Normal file
17
etc/spack/defaults/concretizer.yaml
Normal file
@@ -0,0 +1,17 @@
|
||||
# -------------------------------------------------------------------------
|
||||
# This is the default spack configuration file.
|
||||
#
|
||||
# Settings here are versioned with Spack and are intended to provide
|
||||
# sensible defaults out of the box. Spack maintainers should edit this
|
||||
# file to keep it current.
|
||||
#
|
||||
# Users can override these settings by editing
|
||||
# `$SPACK_ROOT/etc/spack/concretizer.yaml`, `~/.spack/concretizer.yaml`,
|
||||
# or by adding a `concretizer:` section to an environment.
|
||||
# -------------------------------------------------------------------------
|
||||
concretizer:
|
||||
# Whether to consider installed packages or packages from buildcaches when
|
||||
# concretizing specs. If `true`, we'll try to use as many installs/binaries
|
||||
# as possible, rather than building. If `false`, we'll always give you a fresh
|
||||
# concretization.
|
||||
reuse: false
|
||||
@@ -155,14 +155,17 @@ config:
|
||||
|
||||
# The concretization algorithm to use in Spack. Options are:
|
||||
#
|
||||
# 'original': Spack's original greedy, fixed-point concretizer. This
|
||||
# algorithm can make decisions too early and will not backtrack
|
||||
# sufficiently for many specs.
|
||||
#
|
||||
# 'clingo': Uses a logic solver under the hood to solve DAGs with full
|
||||
# backtracking and optimization for user preferences. Spack will
|
||||
# try to bootstrap the logic solver, if not already available.
|
||||
#
|
||||
# 'original': Spack's original greedy, fixed-point concretizer. This
|
||||
# algorithm can make decisions too early and will not backtrack
|
||||
# sufficiently for many specs. This will soon be deprecated in
|
||||
# favor of clingo.
|
||||
#
|
||||
# See `concretizer.yaml` for more settings you can fine-tune when
|
||||
# using clingo.
|
||||
concretizer: clingo
|
||||
|
||||
|
||||
|
||||
@@ -194,9 +194,9 @@ Reusing installed dependencies
|
||||
|
||||
.. warning::
|
||||
|
||||
The ``--reuse`` option described here is experimental, and it will
|
||||
likely be replaced with a different option and configuration settings
|
||||
in the next Spack release.
|
||||
The ``--reuse`` option described here will become the default installation
|
||||
method in the next Spack version, and you will be able to get the current
|
||||
behavior by using ``spack install --fresh``.
|
||||
|
||||
By default, when you run ``spack install``, Spack tries to build a new
|
||||
version of the package you asked for, along with updated versions of
|
||||
@@ -216,6 +216,9 @@ the ``mpich`` will be build with the installed versions, if possible.
|
||||
You can use the :ref:`spack spec -I <cmd-spack-spec>` command to see what
|
||||
will be reused and what will be built before you install.
|
||||
|
||||
You can configure Spack to use the ``--reuse`` behavior by default in
|
||||
``concretizer.yaml``.
|
||||
|
||||
.. _cmd-spack-uninstall:
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
@@ -1280,7 +1283,7 @@ Normally users don't have to bother specifying the architecture if they
|
||||
are installing software for their current host, as in that case the
|
||||
values will be detected automatically. If you need fine-grained control
|
||||
over which packages use which targets (or over *all* packages' default
|
||||
target), see :ref:`concretization-preferences`.
|
||||
target), see :ref:`package-preferences`.
|
||||
|
||||
.. admonition:: Cray machines
|
||||
|
||||
|
||||
@@ -209,11 +209,49 @@ Specific limitations include:
|
||||
then Spack will not add a new external entry (``spack config blame packages``
|
||||
can help locate all external entries).
|
||||
|
||||
.. _concretization-preferences:
|
||||
.. _concretizer-options:
|
||||
|
||||
--------------------------
|
||||
Concretization Preferences
|
||||
--------------------------
|
||||
----------------------
|
||||
Concretizer options
|
||||
----------------------
|
||||
|
||||
``packages.yaml`` gives the concretizer preferences for specific packages,
|
||||
but you can also use ``concretizer.yaml`` to customize aspects of the
|
||||
algorithm it uses to select the dependencies you install:
|
||||
|
||||
.. _code-block: yaml
|
||||
|
||||
concretizer:
|
||||
# Whether to consider installed packages or packages from buildcaches when
|
||||
# concretizing specs. If `true`, we'll try to use as many installs/binaries
|
||||
# as possible, rather than building. If `false`, we'll always give you a fresh
|
||||
# concretization.
|
||||
reuse: false
|
||||
|
||||
^^^^^^^^^^^^^^^^
|
||||
``reuse``
|
||||
^^^^^^^^^^^^^^^^
|
||||
|
||||
This controls whether Spack will prefer to use installed packages (``true``), or
|
||||
whether it will do a "fresh" installation and prefer the latest settings from
|
||||
``package.py`` files and ``packages.yaml`` (``false``). .
|
||||
|
||||
You can use ``spack install --reuse`` to enable reuse for a single installation,
|
||||
and you can use ``spack install --fresh`` to do a fresh install if ``reuse`` is
|
||||
enabled by default.
|
||||
|
||||
.. note::
|
||||
|
||||
``reuse: false`` is the current default, but ``reuse: true`` will be the default
|
||||
in the next Spack release. You will still be able to use ``spack install --fresh``
|
||||
to get the old behavior.
|
||||
|
||||
|
||||
.. _package-preferences:
|
||||
|
||||
-------------------
|
||||
Package Preferences
|
||||
-------------------
|
||||
|
||||
Spack can be configured to prefer certain compilers, package
|
||||
versions, dependencies, and variants during concretization.
|
||||
@@ -269,6 +307,7 @@ concretization rules. A provider lists a value that packages may
|
||||
``depend_on`` (e.g, MPI) and a list of rules for fulfilling that
|
||||
dependency.
|
||||
|
||||
|
||||
.. _package_permissions:
|
||||
|
||||
-------------------
|
||||
|
||||
@@ -649,7 +649,7 @@ follow `the next section <intel-install-libs_>`_ instead.
|
||||
|
||||
* If you specified a custom variant (for example ``+vtune``) you may want to add this as your
|
||||
preferred variant in the packages configuration for the ``intel-parallel-studio`` package
|
||||
as described in :ref:`concretization-preferences`. Otherwise you will have to specify
|
||||
as described in :ref:`package-preferences`. Otherwise you will have to specify
|
||||
the variant everytime ``intel-parallel-studio`` is being used as ``mkl``, ``fftw`` or ``mpi``
|
||||
implementation to avoid pulling in a different variant.
|
||||
|
||||
@@ -811,13 +811,13 @@ by one of the following means:
|
||||
$ spack install libxc@3.0.0%intel
|
||||
|
||||
|
||||
* Alternatively, request Intel compilers implicitly by concretization preferences.
|
||||
* Alternatively, request Intel compilers implicitly by package preferences.
|
||||
Configure the order of compilers in the appropriate ``packages.yaml`` file,
|
||||
under either an ``all:`` or client-package-specific entry, in a
|
||||
``compiler:`` list. Consult the Spack documentation for
|
||||
`Configuring Package Preferences <https://spack-tutorial.readthedocs.io/en/latest/tutorial_configuration.html#configuring-package-preferences>`_
|
||||
and
|
||||
:ref:`Concretization Preferences <concretization-preferences>`.
|
||||
:ref:`Package Preferences <package-preferences>`.
|
||||
|
||||
Example: ``etc/spack/packages.yaml`` might simply contain:
|
||||
|
||||
@@ -867,7 +867,7 @@ virtual package, in order of decreasing preference. To learn more about the
|
||||
``providers:`` settings, see the Spack tutorial for
|
||||
`Configuring Package Preferences <https://spack-tutorial.readthedocs.io/en/latest/tutorial_configuration.html#configuring-package-preferences>`_
|
||||
and the section
|
||||
:ref:`Concretization Preferences <concretization-preferences>`.
|
||||
:ref:`Package Preferences <package-preferences>`.
|
||||
|
||||
Example: The following fairly minimal example for ``packages.yaml`` shows how
|
||||
to exclusively use the standalone ``intel-mkl`` package for all the linear
|
||||
|
||||
@@ -13,12 +13,16 @@ Spack has many configuration files. Here is a quick list of them, in
|
||||
case you want to skip directly to specific docs:
|
||||
|
||||
* :ref:`compilers.yaml <compiler-config>`
|
||||
* :ref:`concretizer.yaml <concretizer-options>`
|
||||
* :ref:`config.yaml <config-yaml>`
|
||||
* :ref:`mirrors.yaml <mirrors>`
|
||||
* :ref:`modules.yaml <modules>`
|
||||
* :ref:`packages.yaml <build-settings>`
|
||||
* :ref:`repos.yaml <repositories>`
|
||||
|
||||
You can also add any of these as inline configuration in ``spack.yaml``
|
||||
in an :ref:`environment <environment-configuration>`.
|
||||
|
||||
-----------
|
||||
YAML Format
|
||||
-----------
|
||||
|
||||
@@ -705,7 +705,8 @@ as follows:
|
||||
|
||||
#. The following special strings are considered larger than any other
|
||||
numeric or non-numeric version component, and satisfy the following
|
||||
order between themselves: ``develop > main > master > head > trunk``.
|
||||
order between themselves:
|
||||
``develop > main > master > head > trunk > stable``.
|
||||
|
||||
#. Numbers are ordered numerically, are less than special strings, and
|
||||
larger than other non-numeric components.
|
||||
@@ -2469,6 +2470,24 @@ Now, the ``py-numpy`` package can be used as an argument to ``spack
|
||||
activate``. When it is activated, all the files in its prefix will be
|
||||
symbolically linked into the prefix of the python package.
|
||||
|
||||
A package can only extend one other package at a time. To support packages
|
||||
that may extend one of a list of other packages, Spack supports multiple
|
||||
``extends`` directives as long as at most one of them is selected as
|
||||
a dependency during concretization. For example, a lua package could extend
|
||||
either lua or luajit, but not both:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
class LuaLpeg(Package):
|
||||
...
|
||||
variant('use_lua', default=True)
|
||||
extends('lua', when='+use_lua')
|
||||
extends('lua-luajit', when='~use_lua')
|
||||
...
|
||||
|
||||
Now, a user can install, and activate, the ``lua-lpeg`` package for either
|
||||
lua or luajit.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Adding additional constraints
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
@@ -2858,7 +2877,7 @@ be concretized on their system. For example, one user may prefer packages
|
||||
built with OpenMPI and the Intel compiler. Another user may prefer
|
||||
packages be built with MVAPICH and GCC.
|
||||
|
||||
See the :ref:`concretization-preferences` section for more details.
|
||||
See the :ref:`package-preferences` section for more details.
|
||||
|
||||
|
||||
.. _group_when_spec:
|
||||
|
||||
@@ -1638,12 +1638,18 @@ def find_libraries(libraries, root, shared=True, recursive=False):
|
||||
raise TypeError(message)
|
||||
|
||||
# Construct the right suffix for the library
|
||||
if shared is True:
|
||||
suffix = 'dylib' if sys.platform == 'darwin' else 'so'
|
||||
if shared:
|
||||
# Used on both Linux and macOS
|
||||
suffixes = ['so']
|
||||
if sys.platform == 'darwin':
|
||||
# Only used on macOS
|
||||
suffixes.append('dylib')
|
||||
else:
|
||||
suffix = 'a'
|
||||
suffixes = ['a']
|
||||
|
||||
# List of libraries we are searching with suffixes
|
||||
libraries = ['{0}.{1}'.format(lib, suffix) for lib in libraries]
|
||||
libraries = ['{0}.{1}'.format(lib, suffix) for lib in libraries
|
||||
for suffix in suffixes]
|
||||
|
||||
if not recursive:
|
||||
# If not recursive, look for the libraries directly in root
|
||||
|
||||
@@ -5,6 +5,7 @@
|
||||
|
||||
from __future__ import division
|
||||
|
||||
import contextlib
|
||||
import functools
|
||||
import inspect
|
||||
import os
|
||||
@@ -12,9 +13,10 @@
|
||||
import sys
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
import six
|
||||
from six import string_types
|
||||
|
||||
from llnl.util.compat import Hashable, MutableMapping, zip_longest
|
||||
from llnl.util.compat import MutableMapping, zip_longest
|
||||
|
||||
# Ignore emacs backups when listing modules
|
||||
ignore_modules = [r'^\.#', '~$']
|
||||
@@ -164,6 +166,19 @@ def union_dicts(*dicts):
|
||||
return result
|
||||
|
||||
|
||||
# Used as a sentinel that disambiguates tuples passed in *args from coincidentally
|
||||
# matching tuples formed from kwargs item pairs.
|
||||
_kwargs_separator = (object(),)
|
||||
|
||||
|
||||
def stable_args(*args, **kwargs):
|
||||
"""A key factory that performs a stable sort of the parameters."""
|
||||
key = args
|
||||
if kwargs:
|
||||
key += _kwargs_separator + tuple(sorted(kwargs.items()))
|
||||
return key
|
||||
|
||||
|
||||
def memoized(func):
|
||||
"""Decorator that caches the results of a function, storing them in
|
||||
an attribute of that function.
|
||||
@@ -171,15 +186,23 @@ def memoized(func):
|
||||
func.cache = {}
|
||||
|
||||
@functools.wraps(func)
|
||||
def _memoized_function(*args):
|
||||
if not isinstance(args, Hashable):
|
||||
# Not hashable, so just call the function.
|
||||
return func(*args)
|
||||
def _memoized_function(*args, **kwargs):
|
||||
key = stable_args(*args, **kwargs)
|
||||
|
||||
if args not in func.cache:
|
||||
func.cache[args] = func(*args)
|
||||
|
||||
return func.cache[args]
|
||||
try:
|
||||
return func.cache[key]
|
||||
except KeyError:
|
||||
ret = func(*args, **kwargs)
|
||||
func.cache[key] = ret
|
||||
return ret
|
||||
except TypeError as e:
|
||||
# TypeError is raised when indexing into a dict if the key is unhashable.
|
||||
raise six.raise_from(
|
||||
UnhashableArguments(
|
||||
"args + kwargs '{}' was not hashable for function '{}'"
|
||||
.format(key, func.__name__),
|
||||
),
|
||||
e)
|
||||
|
||||
return _memoized_function
|
||||
|
||||
@@ -921,3 +944,15 @@ def elide_list(line_list, max_num=10):
|
||||
return line_list[:max_num - 1] + ['...'] + line_list[-1:]
|
||||
else:
|
||||
return line_list
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def nullcontext(*args, **kwargs):
|
||||
"""Empty context manager.
|
||||
TODO: replace with contextlib.nullcontext() if we ever require python 3.7.
|
||||
"""
|
||||
yield
|
||||
|
||||
|
||||
class UnhashableArguments(TypeError):
|
||||
"""Raise when an @memoized function receives unhashable arg or kwarg values."""
|
||||
|
||||
@@ -821,13 +821,12 @@ def setup_package(pkg, dirty, context='build'):
|
||||
for mod in pkg.compiler.modules:
|
||||
load_module(mod)
|
||||
|
||||
# kludge to handle cray mpich and libsci being automatically loaded by
|
||||
# PrgEnv modules on cray platform. Module unload does no damage when
|
||||
# kludge to handle cray libsci being automatically loaded by PrgEnv
|
||||
# modules on cray platform. Module unload does no damage when
|
||||
# unnecessary
|
||||
on_cray, _ = _on_cray()
|
||||
if on_cray:
|
||||
for mod in ['cray-mpich', 'cray-libsci']:
|
||||
module('unload', mod)
|
||||
module('unload', 'cray-libsci')
|
||||
|
||||
if target.module_name:
|
||||
load_module(target.module_name)
|
||||
|
||||
@@ -106,7 +106,9 @@ def cuda_flags(arch_list):
|
||||
# This implies that the last one in the list has to be updated at
|
||||
# each release of a new cuda minor version.
|
||||
conflicts('%gcc@10:', when='+cuda ^cuda@:11.0')
|
||||
conflicts('%gcc@11:', when='+cuda ^cuda@:11.4.0')
|
||||
conflicts('%gcc@12:', when='+cuda ^cuda@:11.6')
|
||||
conflicts('%clang@12:', when='+cuda ^cuda@:11.4.0')
|
||||
conflicts('%clang@13:', when='+cuda ^cuda@:11.5')
|
||||
conflicts('%clang@14:', when='+cuda ^cuda@:11.6')
|
||||
|
||||
|
||||
@@ -24,6 +24,7 @@
|
||||
install,
|
||||
)
|
||||
|
||||
import spack.error
|
||||
from spack.build_environment import dso_suffix
|
||||
from spack.package import InstallError, PackageBase, run_after
|
||||
from spack.util.environment import EnvironmentModifications
|
||||
@@ -1333,5 +1334,42 @@ def uninstall_ism(self):
|
||||
debug_print(os.getcwd())
|
||||
return
|
||||
|
||||
@property
|
||||
def base_lib_dir(self):
|
||||
"""Provide the library directory located in the base of Intel installation.
|
||||
"""
|
||||
d = self.normalize_path('')
|
||||
d = os.path.join(d, 'lib')
|
||||
|
||||
debug_print(d)
|
||||
return d
|
||||
|
||||
@run_after('install')
|
||||
def modify_LLVMgold_rpath(self):
|
||||
"""Add libimf.so and other required libraries to the RUNPATH of LLVMgold.so.
|
||||
|
||||
These are needed explicitly at dependent link time when
|
||||
`ld -plugin LLVMgold.so` is called by the compiler.
|
||||
"""
|
||||
if self._has_compilers:
|
||||
LLVMgold_libs = find_libraries('LLVMgold', self.base_lib_dir,
|
||||
shared=True, recursive=True)
|
||||
# Ignore ia32 entries as they mostly ignore throughout the rest
|
||||
# of the file.
|
||||
# The first entry in rpath preserves the original, the seconds entry
|
||||
# is the location of libimf.so. If this relative location is changed
|
||||
# in compiler releases, then we need to search for libimf.so instead
|
||||
# of this static path.
|
||||
for lib in LLVMgold_libs:
|
||||
if not self.spec.satisfies('^patchelf'):
|
||||
raise spack.error.SpackError(
|
||||
'Attempting to patch RPATH in LLVMgold.so.'
|
||||
+ '`patchelf` dependency should be set in package.py'
|
||||
)
|
||||
patchelf = Executable('patchelf')
|
||||
rpath = ':'.join([patchelf('--print-rpath', lib, output=str).strip(),
|
||||
'$ORIGIN/../compiler/lib/intel64_lin'])
|
||||
patchelf('--set-rpath', rpath, lib)
|
||||
|
||||
# Check that self.prefix is there after installation
|
||||
run_after('install')(PackageBase.sanity_check_prefix)
|
||||
|
||||
@@ -1114,6 +1114,10 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
|
||||
if pr_mirror_url:
|
||||
output_object['variables']['SPACK_PR_MIRROR_URL'] = pr_mirror_url
|
||||
|
||||
spack_stack_name = os.environ.get('SPACK_CI_STACK_NAME', None)
|
||||
if spack_stack_name:
|
||||
output_object['variables']['SPACK_CI_STACK_NAME'] = spack_stack_name
|
||||
|
||||
sorted_output = {}
|
||||
for output_key, output_value in sorted(output_object.items()):
|
||||
sorted_output[output_key] = output_value
|
||||
|
||||
@@ -154,7 +154,6 @@ def parse_specs(args, **kwargs):
|
||||
concretize = kwargs.get('concretize', False)
|
||||
normalize = kwargs.get('normalize', False)
|
||||
tests = kwargs.get('tests', False)
|
||||
reuse = kwargs.get('reuse', False)
|
||||
|
||||
try:
|
||||
sargs = args
|
||||
@@ -163,7 +162,7 @@ def parse_specs(args, **kwargs):
|
||||
specs = spack.spec.parse(sargs)
|
||||
for spec in specs:
|
||||
if concretize:
|
||||
spec.concretize(tests=tests, reuse=reuse) # implies normalize
|
||||
spec.concretize(tests=tests) # implies normalize
|
||||
elif normalize:
|
||||
spec.normalize(tests=tests)
|
||||
|
||||
|
||||
@@ -322,11 +322,68 @@ def add_cdash_args(subparser, add_help):
|
||||
)
|
||||
|
||||
|
||||
@arg
|
||||
def reuse():
|
||||
return Args(
|
||||
'--reuse', action='store_true', default=False,
|
||||
help='reuse installed dependencies'
|
||||
class ConfigSetAction(argparse.Action):
|
||||
"""Generic action for setting spack config options from CLI.
|
||||
|
||||
This works like a ``store_const`` action but you can set the
|
||||
``dest`` to some Spack configuration path (like ``concretizer:reuse``)
|
||||
and the ``const`` will be stored there using ``spack.config.set()``
|
||||
"""
|
||||
def __init__(self,
|
||||
option_strings,
|
||||
dest,
|
||||
const,
|
||||
default=None,
|
||||
required=False,
|
||||
help=None,
|
||||
metavar=None):
|
||||
# save the config option we're supposed to set
|
||||
self.config_path = dest
|
||||
|
||||
# destination is translated to a legal python identifier by
|
||||
# substituting '_' for ':'.
|
||||
dest = dest.replace(":", "_")
|
||||
|
||||
super(ConfigSetAction, self).__init__(
|
||||
option_strings=option_strings,
|
||||
dest=dest,
|
||||
nargs=0,
|
||||
const=const,
|
||||
default=default,
|
||||
required=required,
|
||||
help=help
|
||||
)
|
||||
|
||||
def __call__(self, parser, namespace, values, option_string):
|
||||
# Retrieve the name of the config option and set it to
|
||||
# the const from the constructor or a value from the CLI.
|
||||
# Note that this is only called if the argument is actually
|
||||
# specified on the command line.
|
||||
spack.config.set(self.config_path, self.const, scope="command_line")
|
||||
|
||||
|
||||
def add_concretizer_args(subparser):
|
||||
"""Add a subgroup of arguments for controlling concretization.
|
||||
|
||||
These will appear in a separate group called 'concretizer arguments'.
|
||||
There's no need to handle them in your command logic -- they all use
|
||||
``ConfigSetAction``, which automatically handles setting configuration
|
||||
options.
|
||||
|
||||
If you *do* need to access a value passed on the command line, you can
|
||||
get at, e.g., the ``concretizer:reuse`` via ``args.concretizer_reuse``.
|
||||
Just substitute ``_`` for ``:``.
|
||||
"""
|
||||
subgroup = subparser.add_argument_group("concretizer arguments")
|
||||
subgroup.add_argument(
|
||||
'-U', '--fresh', action=ConfigSetAction, dest="concretizer:reuse",
|
||||
const=False, default=None,
|
||||
help='do not reuse installed deps; build newest configuration'
|
||||
)
|
||||
subgroup.add_argument(
|
||||
'--reuse', action=ConfigSetAction, dest="concretizer:reuse",
|
||||
const=True, default=None,
|
||||
help='reuse installed dependencies/buildcaches when possible'
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -13,7 +13,6 @@
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
spack.cmd.common.arguments.add_common_arguments(subparser, ['reuse'])
|
||||
subparser.add_argument(
|
||||
'-f', '--force', action='store_true',
|
||||
help="Re-concretize even if already concretized.")
|
||||
@@ -24,6 +23,8 @@ def setup_parser(subparser):
|
||||
dependencies are only added for the environment's root specs. When 'all' is
|
||||
chosen, test dependencies are enabled for all packages in the environment.""")
|
||||
|
||||
spack.cmd.common.arguments.add_concretizer_args(subparser)
|
||||
|
||||
|
||||
def concretize(parser, args):
|
||||
env = spack.cmd.require_active_env(cmd_name='concretize')
|
||||
@@ -36,8 +37,6 @@ def concretize(parser, args):
|
||||
tests = False
|
||||
|
||||
with env.write_transaction():
|
||||
concretized_specs = env.concretize(
|
||||
force=args.force, tests=tests, reuse=args.reuse
|
||||
)
|
||||
concretized_specs = env.concretize(force=args.force, tests=tests)
|
||||
ev.display_specs(concretized_specs)
|
||||
env.write()
|
||||
|
||||
@@ -19,7 +19,7 @@
|
||||
|
||||
|
||||
def setup_parser(subparser):
|
||||
arguments.add_common_arguments(subparser, ['jobs', 'reuse'])
|
||||
arguments.add_common_arguments(subparser, ['jobs'])
|
||||
subparser.add_argument(
|
||||
'-d', '--source-path', dest='source_path', default=None,
|
||||
help="path to source directory. defaults to the current directory")
|
||||
@@ -59,6 +59,8 @@ def setup_parser(subparser):
|
||||
cd_group = subparser.add_mutually_exclusive_group()
|
||||
arguments.add_common_arguments(cd_group, ['clean', 'dirty'])
|
||||
|
||||
spack.cmd.common.arguments.add_concretizer_args(subparser)
|
||||
|
||||
|
||||
def dev_build(self, args):
|
||||
if not args.spec:
|
||||
@@ -86,7 +88,7 @@ def dev_build(self, args):
|
||||
# Forces the build to run out of the source directory.
|
||||
spec.constrain('dev_path=%s' % source_path)
|
||||
|
||||
spec.concretize(reuse=args.reuse)
|
||||
spec.concretize()
|
||||
package = spack.repo.get(spec)
|
||||
|
||||
if package.installed:
|
||||
|
||||
@@ -39,8 +39,17 @@ def setup_parser(subparser):
|
||||
'--scope', choices=scopes, metavar=scopes_metavar,
|
||||
default=spack.config.default_modify_scope('packages'),
|
||||
help="configuration scope to modify")
|
||||
find_parser.add_argument(
|
||||
'--all', action='store_true',
|
||||
help="search for all packages that Spack knows about"
|
||||
)
|
||||
spack.cmd.common.arguments.add_common_arguments(find_parser, ['tags'])
|
||||
find_parser.add_argument('packages', nargs=argparse.REMAINDER)
|
||||
find_parser.epilog = (
|
||||
'The search is by default on packages tagged with the "build-tools" or '
|
||||
'"core-packages" tags. Use the --all option to search for every possible '
|
||||
'package Spack knows how to find.'
|
||||
)
|
||||
|
||||
sp.add_parser(
|
||||
'list', help='list detectable packages, by repository and name'
|
||||
@@ -48,6 +57,14 @@ def setup_parser(subparser):
|
||||
|
||||
|
||||
def external_find(args):
|
||||
# If the user didn't specify anything, search for build tools by default
|
||||
if not args.tags and not args.all and not args.packages:
|
||||
args.tags = ['core-packages', 'build-tools']
|
||||
|
||||
# If the user specified both --all and --tag, then --all has precedence
|
||||
if args.all and args.tags:
|
||||
args.tags = []
|
||||
|
||||
# Construct the list of possible packages to be detected
|
||||
packages_to_check = []
|
||||
|
||||
@@ -64,9 +81,10 @@ def external_find(args):
|
||||
# Since tags are cached it's much faster to construct what we need
|
||||
# to search directly, rather than filtering after the fact
|
||||
packages_to_check = [
|
||||
spack.repo.get(pkg) for pkg in
|
||||
spack.repo.path.packages_with_tags(*args.tags)
|
||||
spack.repo.get(pkg) for tag in args.tags for pkg in
|
||||
spack.repo.path.packages_with_tags(tag)
|
||||
]
|
||||
packages_to_check = list(set(packages_to_check))
|
||||
|
||||
# If the list of packages is empty, search for every possible package
|
||||
if not args.tags and not packages_to_check:
|
||||
|
||||
@@ -78,7 +78,7 @@ def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'-u', '--until', type=str, dest='until', default=None,
|
||||
help="phase to stop after when installing (default None)")
|
||||
arguments.add_common_arguments(subparser, ['jobs', 'reuse'])
|
||||
arguments.add_common_arguments(subparser, ['jobs'])
|
||||
subparser.add_argument(
|
||||
'--overwrite', action='store_true',
|
||||
help="reinstall an existing spec, even if it has dependents")
|
||||
@@ -182,6 +182,8 @@ def setup_parser(subparser):
|
||||
arguments.add_cdash_args(subparser, False)
|
||||
arguments.add_common_arguments(subparser, ['yes_to_all', 'spec'])
|
||||
|
||||
spack.cmd.common.arguments.add_concretizer_args(subparser)
|
||||
|
||||
|
||||
def default_log_file(spec):
|
||||
"""Computes the default filename for the log file and creates
|
||||
@@ -339,7 +341,7 @@ def get_tests(specs):
|
||||
|
||||
if not args.only_concrete:
|
||||
with env.write_transaction():
|
||||
concretized_specs = env.concretize(tests=tests, reuse=args.reuse)
|
||||
concretized_specs = env.concretize(tests=tests)
|
||||
ev.display_specs(concretized_specs)
|
||||
|
||||
# save view regeneration for later, so that we only do it
|
||||
@@ -397,9 +399,7 @@ def get_tests(specs):
|
||||
kwargs['tests'] = tests
|
||||
|
||||
try:
|
||||
specs = spack.cmd.parse_specs(
|
||||
args.spec, concretize=True, tests=tests, reuse=args.reuse
|
||||
)
|
||||
specs = spack.cmd.parse_specs(args.spec, concretize=True, tests=tests)
|
||||
except SpackError as e:
|
||||
tty.debug(e)
|
||||
reporter.concretization_report(e.message)
|
||||
|
||||
@@ -44,7 +44,7 @@ def setup_parser(subparser):
|
||||
|
||||
# Below are arguments w.r.t. spec display (like spack spec)
|
||||
arguments.add_common_arguments(
|
||||
subparser, ['long', 'very_long', 'install_status', 'reuse']
|
||||
subparser, ['long', 'very_long', 'install_status']
|
||||
)
|
||||
subparser.add_argument(
|
||||
'-y', '--yaml', action='store_const', dest='format', default=None,
|
||||
@@ -71,6 +71,8 @@ def setup_parser(subparser):
|
||||
subparser.add_argument(
|
||||
'specs', nargs=argparse.REMAINDER, help="specs of packages")
|
||||
|
||||
spack.cmd.common.arguments.add_concretizer_args(subparser)
|
||||
|
||||
|
||||
def solve(parser, args):
|
||||
# these are the same options as `spack spec`
|
||||
@@ -86,11 +88,11 @@ def solve(parser, args):
|
||||
'hashes': args.long or args.very_long
|
||||
}
|
||||
|
||||
# process dump options
|
||||
dump = re.split(r'\s*,\s*', args.show)
|
||||
if 'all' in dump:
|
||||
dump = show_options
|
||||
for d in dump:
|
||||
# process output options
|
||||
show = re.split(r'\s*,\s*', args.show)
|
||||
if 'all' in show:
|
||||
show = show_options
|
||||
for d in show:
|
||||
if d not in show_options:
|
||||
raise ValueError(
|
||||
"Invalid option for '--show': '%s'\nchoose from: (%s)"
|
||||
@@ -102,21 +104,29 @@ def solve(parser, args):
|
||||
|
||||
specs = spack.cmd.parse_specs(args.specs)
|
||||
|
||||
# dump generated ASP program
|
||||
result = asp.solve(
|
||||
specs, dump=dump, models=models, timers=args.timers, stats=args.stats,
|
||||
reuse=args.reuse,
|
||||
# set up solver parameters
|
||||
# Note: reuse and other concretizer prefs are passed as configuration
|
||||
solver = asp.Solver()
|
||||
output = sys.stdout if "asp" in show else None
|
||||
result = solver.solve(
|
||||
specs,
|
||||
out=output,
|
||||
models=models,
|
||||
timers=args.timers,
|
||||
stats=args.stats,
|
||||
setup_only=(set(show) == {'asp'})
|
||||
)
|
||||
if 'solutions' not in dump:
|
||||
if 'solutions' not in show:
|
||||
return
|
||||
|
||||
# die if no solution was found
|
||||
result.raise_if_unsat()
|
||||
|
||||
# dump the solutions as concretized specs
|
||||
if 'solutions' in dump:
|
||||
# show the solutions as concretized specs
|
||||
if 'solutions' in show:
|
||||
opt, _, _ = min(result.answers)
|
||||
if ("opt" in dump) and (not args.format):
|
||||
|
||||
if ("opt" in show) and (not args.format):
|
||||
tty.msg("Best of %d considered solutions." % result.nmodels)
|
||||
tty.msg("Optimization Criteria:")
|
||||
|
||||
|
||||
@@ -5,14 +5,15 @@
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import contextlib
|
||||
import sys
|
||||
|
||||
import llnl.util.lang as lang
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack
|
||||
import spack.cmd
|
||||
import spack.cmd.common.arguments as arguments
|
||||
import spack.environment as ev
|
||||
import spack.hash_types as ht
|
||||
import spack.spec
|
||||
import spack.store
|
||||
@@ -24,11 +25,14 @@
|
||||
|
||||
def setup_parser(subparser):
|
||||
subparser.epilog = """\
|
||||
when an environment is active and no specs are provided, the environment root \
|
||||
specs are used instead
|
||||
|
||||
for further documentation regarding the spec syntax, see:
|
||||
spack help --spec
|
||||
"""
|
||||
arguments.add_common_arguments(
|
||||
subparser, ['long', 'very_long', 'install_status', 'reuse']
|
||||
subparser, ['long', 'very_long', 'install_status']
|
||||
)
|
||||
subparser.add_argument(
|
||||
'-y', '--yaml', action='store_const', dest='format', default=None,
|
||||
@@ -52,13 +56,7 @@ def setup_parser(subparser):
|
||||
help='show dependency types')
|
||||
arguments.add_common_arguments(subparser, ['specs'])
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def nullcontext():
|
||||
"""Empty context manager.
|
||||
TODO: replace with contextlib.nullcontext() if we ever require python 3.7.
|
||||
"""
|
||||
yield
|
||||
spack.cmd.common.arguments.add_concretizer_args(subparser)
|
||||
|
||||
|
||||
def spec(parser, args):
|
||||
@@ -75,41 +73,46 @@ def spec(parser, args):
|
||||
|
||||
# use a read transaction if we are getting install status for every
|
||||
# spec in the DAG. This avoids repeatedly querying the DB.
|
||||
tree_context = nullcontext
|
||||
tree_context = lang.nullcontext
|
||||
if args.install_status:
|
||||
tree_context = spack.store.db.read_transaction
|
||||
|
||||
if not args.specs:
|
||||
tty.die("spack spec requires at least one spec")
|
||||
# Use command line specified specs, otherwise try to use environment specs.
|
||||
if args.specs:
|
||||
input_specs = spack.cmd.parse_specs(args.specs)
|
||||
specs = [(s, s.concretized()) for s in input_specs]
|
||||
else:
|
||||
env = ev.active_environment()
|
||||
if env:
|
||||
env.concretize()
|
||||
specs = env.concretized_specs()
|
||||
else:
|
||||
tty.die("spack spec requires at least one spec or an active environment")
|
||||
|
||||
concretize_kwargs = {
|
||||
'reuse': args.reuse
|
||||
}
|
||||
|
||||
for spec in spack.cmd.parse_specs(args.specs):
|
||||
for (input, output) in specs:
|
||||
# With -y, just print YAML to output.
|
||||
if args.format:
|
||||
if spec.name in spack.repo.path or spec.virtual:
|
||||
spec.concretize(**concretize_kwargs)
|
||||
|
||||
# The user can specify the hash type to use
|
||||
hash_type = getattr(ht, args.hash_type)
|
||||
|
||||
if args.format == 'yaml':
|
||||
# use write because to_yaml already has a newline.
|
||||
sys.stdout.write(spec.to_yaml(hash=hash_type))
|
||||
sys.stdout.write(output.to_yaml(hash=hash_type))
|
||||
else:
|
||||
print(spec.to_json(hash=hash_type))
|
||||
print(output.to_json(hash=hash_type))
|
||||
continue
|
||||
|
||||
with tree_context():
|
||||
tree_kwargs['hashes'] = False # Always False for input spec
|
||||
print("Input spec")
|
||||
print("--------------------------------")
|
||||
print(spec.tree(**tree_kwargs))
|
||||
# Only show the headers for input specs that are not concrete to avoid
|
||||
# repeated output. This happens because parse_specs outputs concrete
|
||||
# specs for `/hash` inputs.
|
||||
if not input.concrete:
|
||||
tree_kwargs['hashes'] = False # Always False for input spec
|
||||
print("Input spec")
|
||||
print("--------------------------------")
|
||||
print(input.tree(**tree_kwargs))
|
||||
print("Concretized")
|
||||
print("--------------------------------")
|
||||
|
||||
tree_kwargs['hashes'] = args.long or args.very_long
|
||||
print("Concretized")
|
||||
print("--------------------------------")
|
||||
spec.concretize(**concretize_kwargs)
|
||||
print(spec.tree(**tree_kwargs))
|
||||
print(output.tree(**tree_kwargs))
|
||||
|
||||
@@ -53,6 +53,10 @@ def setup_parser(subparser):
|
||||
'--fail-first', action='store_true',
|
||||
help="Stop after the first failed package."
|
||||
)
|
||||
run_parser.add_argument(
|
||||
'--externals', action='store_true',
|
||||
help="Test packages that are externally installed."
|
||||
)
|
||||
run_parser.add_argument(
|
||||
'--keep-stage',
|
||||
action='store_true',
|
||||
@@ -203,7 +207,8 @@ def test_run(args):
|
||||
with reporter('test', test_suite.stage):
|
||||
test_suite(remove_directory=not args.keep_stage,
|
||||
dirty=args.dirty,
|
||||
fail_first=args.fail_first)
|
||||
fail_first=args.fail_first,
|
||||
externals=args.externals)
|
||||
|
||||
|
||||
def test_list(args):
|
||||
@@ -332,9 +337,17 @@ def _report_suite_results(test_suite, args, constraints):
|
||||
pkg_id, status = line.split()
|
||||
results[pkg_id] = status
|
||||
|
||||
failed, skipped, untested = 0, 0, 0
|
||||
for pkg_id in test_specs:
|
||||
if pkg_id in results:
|
||||
status = results[pkg_id]
|
||||
if status == 'FAILED':
|
||||
failed += 1
|
||||
elif status == 'NO-TESTS':
|
||||
untested += 1
|
||||
elif status == 'SKIPPED':
|
||||
skipped += 1
|
||||
|
||||
if args.failed and status != 'FAILED':
|
||||
continue
|
||||
|
||||
@@ -346,6 +359,9 @@ def _report_suite_results(test_suite, args, constraints):
|
||||
with open(log_file, 'r') as f:
|
||||
msg += '\n{0}'.format(''.join(f.readlines()))
|
||||
tty.msg(msg)
|
||||
|
||||
spack.install_test.write_test_summary(
|
||||
failed, skipped, untested, len(test_specs))
|
||||
else:
|
||||
msg = "Test %s has no results.\n" % test_suite.name
|
||||
msg += " Check if it is running with "
|
||||
|
||||
@@ -701,7 +701,7 @@ def find_spec(spec, condition, default=None):
|
||||
visited.add(id(relative))
|
||||
|
||||
# Then search all other relatives in the DAG *except* spec
|
||||
for relative in spec.root.traverse(deptypes=all):
|
||||
for relative in spec.root.traverse(deptype='all'):
|
||||
if relative is spec:
|
||||
continue
|
||||
if id(relative) in visited:
|
||||
@@ -748,11 +748,11 @@ def concretize_specs_together(*abstract_specs, **kwargs):
|
||||
|
||||
def _concretize_specs_together_new(*abstract_specs, **kwargs):
|
||||
import spack.solver.asp
|
||||
concretization_kwargs = {
|
||||
'tests': kwargs.get('tests', False),
|
||||
'reuse': kwargs.get('reuse', False)
|
||||
}
|
||||
result = spack.solver.asp.solve(abstract_specs, **concretization_kwargs)
|
||||
|
||||
solver = spack.solver.asp.Solver()
|
||||
solver.tests = kwargs.get('tests', False)
|
||||
|
||||
result = solver.solve(abstract_specs)
|
||||
result.raise_if_unsat()
|
||||
return [s.copy() for s in result.specs]
|
||||
|
||||
@@ -788,15 +788,10 @@ def make_concretization_repository(abstract_specs):
|
||||
abstract_specs = [spack.spec.Spec(s) for s in abstract_specs]
|
||||
concretization_repository = make_concretization_repository(abstract_specs)
|
||||
|
||||
concretization_kwargs = {
|
||||
'tests': kwargs.get('tests', False),
|
||||
'reuse': kwargs.get('reuse', False)
|
||||
}
|
||||
|
||||
with spack.repo.additional_repository(concretization_repository):
|
||||
# Spec from a helper package that depends on all the abstract_specs
|
||||
concretization_root = spack.spec.Spec('concretizationroot')
|
||||
concretization_root.concretize(**concretization_kwargs)
|
||||
concretization_root.concretize(tests=kwargs.get("tests", False))
|
||||
# Retrieve the direct dependencies
|
||||
concrete_specs = [
|
||||
concretization_root[spec.name].copy() for spec in abstract_specs
|
||||
|
||||
@@ -53,6 +53,7 @@
|
||||
import spack.schema
|
||||
import spack.schema.bootstrap
|
||||
import spack.schema.compilers
|
||||
import spack.schema.concretizer
|
||||
import spack.schema.config
|
||||
import spack.schema.env
|
||||
import spack.schema.mirrors
|
||||
@@ -69,6 +70,7 @@
|
||||
#: Dict from section names -> schema for that section
|
||||
section_schemas = {
|
||||
'compilers': spack.schema.compilers.schema,
|
||||
'concretizer': spack.schema.concretizer.schema,
|
||||
'mirrors': spack.schema.mirrors.schema,
|
||||
'repos': spack.schema.repos.schema,
|
||||
'packages': spack.schema.packages.schema,
|
||||
@@ -101,7 +103,7 @@
|
||||
'dirty': False,
|
||||
'build_jobs': min(16, cpus_available()),
|
||||
'build_stage': '$tempdir/spack-stage',
|
||||
'concretizer': 'original',
|
||||
'concretizer': 'clingo',
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -38,6 +38,7 @@
|
||||
pass
|
||||
|
||||
import llnl.util.filesystem as fs
|
||||
import llnl.util.lang as lang
|
||||
import llnl.util.tty as tty
|
||||
|
||||
import spack.hash_types as ht
|
||||
@@ -52,12 +53,6 @@
|
||||
from spack.util.crypto import bit_length
|
||||
from spack.version import Version
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def nullcontext(*args, **kwargs):
|
||||
yield
|
||||
|
||||
|
||||
# TODO: Provide an API automatically retyring a build after detecting and
|
||||
# TODO: clearing a failure.
|
||||
|
||||
@@ -404,8 +399,8 @@ def __init__(self, root, db_dir=None, upstream_dbs=None,
|
||||
self._write_transaction_impl = lk.WriteTransaction
|
||||
self._read_transaction_impl = lk.ReadTransaction
|
||||
else:
|
||||
self._write_transaction_impl = nullcontext
|
||||
self._read_transaction_impl = nullcontext
|
||||
self._write_transaction_impl = lang.nullcontext
|
||||
self._read_transaction_impl = lang.nullcontext
|
||||
|
||||
self._record_fields = record_fields
|
||||
|
||||
|
||||
@@ -1083,7 +1083,7 @@ def is_develop(self, spec):
|
||||
"""Returns true when the spec is built from local sources"""
|
||||
return spec.name in self.dev_specs
|
||||
|
||||
def concretize(self, force=False, tests=False, reuse=False):
|
||||
def concretize(self, force=False, tests=False):
|
||||
"""Concretize user_specs in this environment.
|
||||
|
||||
Only concretizes specs that haven't been concretized yet unless
|
||||
@@ -1097,8 +1097,6 @@ def concretize(self, force=False, tests=False, reuse=False):
|
||||
already concretized
|
||||
tests (bool or list or set): False to run no tests, True to test
|
||||
all packages, or a list of package names to run tests for some
|
||||
reuse (bool): if True try to maximize reuse of already installed
|
||||
specs, if False don't account for installation status.
|
||||
|
||||
Returns:
|
||||
List of specs that have been concretized. Each entry is a tuple of
|
||||
@@ -1112,15 +1110,15 @@ def concretize(self, force=False, tests=False, reuse=False):
|
||||
|
||||
# Pick the right concretization strategy
|
||||
if self.concretization == 'together':
|
||||
return self._concretize_together(tests=tests, reuse=reuse)
|
||||
return self._concretize_together(tests=tests)
|
||||
|
||||
if self.concretization == 'separately':
|
||||
return self._concretize_separately(tests=tests, reuse=reuse)
|
||||
return self._concretize_separately(tests=tests)
|
||||
|
||||
msg = 'concretization strategy not implemented [{0}]'
|
||||
raise SpackEnvironmentError(msg.format(self.concretization))
|
||||
|
||||
def _concretize_together(self, tests=False, reuse=False):
|
||||
def _concretize_together(self, tests=False):
|
||||
"""Concretization strategy that concretizes all the specs
|
||||
in the same DAG.
|
||||
"""
|
||||
@@ -1153,14 +1151,14 @@ def _concretize_together(self, tests=False, reuse=False):
|
||||
self.specs_by_hash = {}
|
||||
|
||||
concrete_specs = spack.concretize.concretize_specs_together(
|
||||
*self.user_specs, tests=tests, reuse=reuse
|
||||
*self.user_specs, tests=tests
|
||||
)
|
||||
concretized_specs = [x for x in zip(self.user_specs, concrete_specs)]
|
||||
for abstract, concrete in concretized_specs:
|
||||
self._add_concrete_spec(abstract, concrete)
|
||||
return concretized_specs
|
||||
|
||||
def _concretize_separately(self, tests=False, reuse=False):
|
||||
def _concretize_separately(self, tests=False):
|
||||
"""Concretization strategy that concretizes separately one
|
||||
user spec after the other.
|
||||
"""
|
||||
@@ -1185,7 +1183,7 @@ def _concretize_separately(self, tests=False, reuse=False):
|
||||
):
|
||||
if uspec not in old_concretized_user_specs:
|
||||
root_specs.append(uspec)
|
||||
arguments.append((uspec_constraints, tests, reuse))
|
||||
arguments.append((uspec_constraints, tests))
|
||||
|
||||
# Ensure we don't try to bootstrap clingo in parallel
|
||||
if spack.config.get('config:concretizer') == 'clingo':
|
||||
@@ -2009,7 +2007,7 @@ def _tree_to_display(spec):
|
||||
print('')
|
||||
|
||||
|
||||
def _concretize_from_constraints(spec_constraints, tests=False, reuse=False):
|
||||
def _concretize_from_constraints(spec_constraints, tests=False):
|
||||
# Accept only valid constraints from list and concretize spec
|
||||
# Get the named spec even if out of order
|
||||
root_spec = [s for s in spec_constraints if s.name]
|
||||
@@ -2028,7 +2026,7 @@ def _concretize_from_constraints(spec_constraints, tests=False, reuse=False):
|
||||
if c not in invalid_constraints:
|
||||
s.constrain(c)
|
||||
try:
|
||||
return s.concretized(tests=tests, reuse=reuse)
|
||||
return s.concretized(tests=tests)
|
||||
except spack.spec.InvalidDependencyError as e:
|
||||
invalid_deps_string = ['^' + d for d in e.invalid_deps]
|
||||
invalid_deps = [c for c in spec_constraints
|
||||
@@ -2048,9 +2046,9 @@ def _concretize_from_constraints(spec_constraints, tests=False, reuse=False):
|
||||
|
||||
|
||||
def _concretize_task(packed_arguments):
|
||||
spec_constraints, tests, reuse = packed_arguments
|
||||
spec_constraints, tests = packed_arguments
|
||||
with tty.SuppressOutput(msg_enabled=False):
|
||||
return _concretize_from_constraints(spec_constraints, tests, reuse)
|
||||
return _concretize_from_constraints(spec_constraints, tests)
|
||||
|
||||
|
||||
def make_repo_path(root):
|
||||
|
||||
@@ -73,7 +73,7 @@ def deactivate_header(shell):
|
||||
cmds += 'if [ ! -z ${SPACK_ENV+x} ]; then\n'
|
||||
cmds += 'unset SPACK_ENV; export SPACK_ENV;\n'
|
||||
cmds += 'fi;\n'
|
||||
cmds += 'unalias despacktivate;\n'
|
||||
cmds += 'alias despacktivate > /dev/null 2>&1 && unalias despacktivate;\n'
|
||||
cmds += 'if [ ! -z ${SPACK_OLD_PS1+x} ]; then\n'
|
||||
cmds += ' if [ "$SPACK_OLD_PS1" = \'$$$$\' ]; then\n'
|
||||
cmds += ' unset PS1; export PS1;\n'
|
||||
|
||||
@@ -94,6 +94,16 @@ def write_test_suite_file(suite):
|
||||
sjson.dump(suite.to_dict(), stream=f)
|
||||
|
||||
|
||||
def write_test_summary(num_failed, num_skipped, num_untested, num_specs):
|
||||
failed = "{0} failed, ".format(num_failed) if num_failed else ''
|
||||
skipped = "{0} skipped, ".format(num_skipped) if num_skipped else ''
|
||||
no_tests = "{0} no-tests, ".format(num_untested) if num_untested else ''
|
||||
num_passed = num_specs - num_failed - num_untested - num_skipped
|
||||
|
||||
print("{:=^80}".format(" {0}{1}{2}{3} passed of {4} specs "
|
||||
.format(failed, no_tests, skipped, num_passed, num_specs)))
|
||||
|
||||
|
||||
class TestSuite(object):
|
||||
def __init__(self, specs, alias=None):
|
||||
# copy so that different test suites have different package objects
|
||||
@@ -128,7 +138,9 @@ def __call__(self, *args, **kwargs):
|
||||
remove_directory = kwargs.get('remove_directory', True)
|
||||
dirty = kwargs.get('dirty', False)
|
||||
fail_first = kwargs.get('fail_first', False)
|
||||
externals = kwargs.get('externals', False)
|
||||
|
||||
skipped, untested = 0, 0
|
||||
for spec in self.specs:
|
||||
try:
|
||||
if spec.package.test_suite:
|
||||
@@ -149,9 +161,7 @@ def __call__(self, *args, **kwargs):
|
||||
fs.mkdirp(test_dir)
|
||||
|
||||
# run the package tests
|
||||
spec.package.do_test(
|
||||
dirty=dirty
|
||||
)
|
||||
spec.package.do_test(dirty=dirty, externals=externals)
|
||||
|
||||
# Clean up on success
|
||||
if remove_directory:
|
||||
@@ -160,7 +170,17 @@ def __call__(self, *args, **kwargs):
|
||||
# Log test status based on whether any non-pass-only test
|
||||
# functions were called
|
||||
tested = os.path.exists(self.tested_file_for_spec(spec))
|
||||
status = 'PASSED' if tested else 'NO-TESTS'
|
||||
if tested:
|
||||
status = 'PASSED'
|
||||
else:
|
||||
self.ensure_stage()
|
||||
if spec.external and not externals:
|
||||
status = 'SKIPPED'
|
||||
skipped += 1
|
||||
else:
|
||||
status = 'NO-TESTS'
|
||||
untested += 1
|
||||
|
||||
self.write_test_result(spec, status)
|
||||
except BaseException as exc:
|
||||
self.fails += 1
|
||||
@@ -179,6 +199,8 @@ def __call__(self, *args, **kwargs):
|
||||
self.current_test_spec = None
|
||||
self.current_base_spec = None
|
||||
|
||||
write_test_summary(self.fails, skipped, untested, len(self.specs))
|
||||
|
||||
if self.fails:
|
||||
raise TestSuiteFailure(self.fails)
|
||||
|
||||
|
||||
@@ -632,9 +632,14 @@ def __init__(self, pkg_count):
|
||||
# Counters used for showing status information in the terminal title
|
||||
self.pkg_num = 0
|
||||
self.pkg_count = pkg_count
|
||||
self.pkg_ids = set()
|
||||
|
||||
def next_pkg(self):
|
||||
self.pkg_num += 1
|
||||
def next_pkg(self, pkg):
|
||||
pkg_id = package_id(pkg)
|
||||
|
||||
if pkg_id not in self.pkg_ids:
|
||||
self.pkg_num += 1
|
||||
self.pkg_ids.add(pkg_id)
|
||||
|
||||
def set(self, text):
|
||||
if not spack.config.get('config:terminal_title', False):
|
||||
@@ -1548,8 +1553,6 @@ def install(self):
|
||||
term_status = TermStatusLine(enabled=sys.stdout.isatty() and not tty.is_debug())
|
||||
|
||||
while self.build_pq:
|
||||
term_title.next_pkg()
|
||||
|
||||
task = self._pop_task()
|
||||
if task is None:
|
||||
continue
|
||||
@@ -1559,6 +1562,7 @@ def install(self):
|
||||
keep_prefix = install_args.get('keep_prefix')
|
||||
|
||||
pkg, pkg_id, spec = task.pkg, task.pkg_id, task.pkg.spec
|
||||
term_title.next_pkg(pkg)
|
||||
term_title.set('Processing {0}'.format(pkg.name))
|
||||
tty.debug('Processing {0}: task={1}'.format(pkg_id, task))
|
||||
# Ensure that the current spec has NO uninstalled dependencies,
|
||||
|
||||
@@ -447,6 +447,9 @@ def make_argument_parser(**kwargs):
|
||||
parser.add_argument(
|
||||
'-m', '--mock', action='store_true',
|
||||
help="use mock packages instead of real ones")
|
||||
parser.add_argument(
|
||||
'-b', '--bootstrap', action='store_true',
|
||||
help="use bootstrap configuration (bootstrap store, config, externals)")
|
||||
parser.add_argument(
|
||||
'-p', '--profile', action='store_true', dest='spack_profile',
|
||||
help="profile execution using cProfile")
|
||||
@@ -856,9 +859,22 @@ def _main(argv=None):
|
||||
cmd_name = args.command[0]
|
||||
cmd_name = aliases.get(cmd_name, cmd_name)
|
||||
|
||||
command = parser.add_command(cmd_name)
|
||||
# set up a bootstrap context, if asked.
|
||||
# bootstrap context needs to include parsing the command, b/c things
|
||||
# like `ConstraintAction` and `ConfigSetAction` happen at parse time.
|
||||
bootstrap_context = llnl.util.lang.nullcontext()
|
||||
if args.bootstrap:
|
||||
import spack.bootstrap as bootstrap # avoid circular imports
|
||||
bootstrap_context = bootstrap.ensure_bootstrap_configuration()
|
||||
|
||||
# Re-parse with the proper sub-parser added.
|
||||
with bootstrap_context:
|
||||
return finish_parse_and_run(parser, cmd_name, env_format_error)
|
||||
|
||||
|
||||
def finish_parse_and_run(parser, cmd_name, env_format_error):
|
||||
"""Finish parsing after we know the command to run."""
|
||||
# add the found command to the parser and re-run then re-parse
|
||||
command = parser.add_command(cmd_name)
|
||||
args, unknown = parser.parse_known_args()
|
||||
|
||||
# Now that we know what command this is and what its args are, determine
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import os
|
||||
import platform as py_platform
|
||||
import re
|
||||
|
||||
@@ -12,10 +13,49 @@
|
||||
from ._operating_system import OperatingSystem
|
||||
|
||||
|
||||
# FIXME: store versions inside OperatingSystem as a Version instead of string
|
||||
def macos_version():
|
||||
"""temporary workaround to return a macOS version as a Version object
|
||||
"""Get the current macOS version as a version object.
|
||||
|
||||
This has three mechanisms for determining the macOS version, which is used
|
||||
for spack identification (the ``os`` in the spec's ``arch``) and indirectly
|
||||
for setting the value of ``MACOSX_DEPLOYMENT_TARGET``, which affects the
|
||||
``minos`` value of the ``LC_BUILD_VERSION`` macho header. Mixing ``minos``
|
||||
values can lead to lots of linker warnings, and using a consistent version
|
||||
(pinned to the major OS version) allows distribution across clients that
|
||||
might be slightly behind.
|
||||
|
||||
The version determination is made with three mechanisms in decreasing
|
||||
priority:
|
||||
|
||||
1. The ``MACOSX_DEPLOYMENT_TARGET`` variable overrides the actual operating
|
||||
system version, just like the value can be used to build for older macOS
|
||||
targets on newer systems. Spack currently will truncate this value when
|
||||
building packages, but at least the major version will be the same.
|
||||
|
||||
2. The system ``sw_vers`` command reports the actual operating system
|
||||
version.
|
||||
|
||||
3. The Python ``platform.mac_ver`` function is a fallback if the operating
|
||||
system identification fails, because some Python versions and/or
|
||||
installations report the OS
|
||||
on which Python was *built* rather than the one on which it is running.
|
||||
"""
|
||||
env_ver = os.environ.get('MACOSX_DEPLOYMENT_TARGET', None)
|
||||
if env_ver:
|
||||
return Version(env_ver)
|
||||
|
||||
try:
|
||||
output = Executable('sw_vers')(output=str, fail_on_error=False)
|
||||
except Exception:
|
||||
# FileNotFoundError, or spack.util.executable.ProcessError
|
||||
pass
|
||||
else:
|
||||
match = re.search(r'ProductVersion:\s*([0-9.]+)', output)
|
||||
if match:
|
||||
return Version(match.group(1))
|
||||
|
||||
# Fall back to python-reported version, which can be inaccurate around
|
||||
# macOS 11 (e.g. showing 10.16 for macOS 12)
|
||||
return Version(py_platform.mac_ver()[0])
|
||||
|
||||
|
||||
@@ -26,7 +66,7 @@ def macos_cltools_version():
|
||||
SDK path.
|
||||
"""
|
||||
pkgutil = Executable('pkgutil')
|
||||
output = pkgutil('--pkg-info=com.apple.pkg.CLTools_Executables',
|
||||
output = pkgutil('--pkg-info=com.apple.pkg.cltools_executables',
|
||||
output=str, fail_on_error=False)
|
||||
match = re.search(r'version:\s*([0-9.]+)', output)
|
||||
if match:
|
||||
@@ -99,11 +139,13 @@ def __init__(self):
|
||||
'12': 'monterey',
|
||||
}
|
||||
|
||||
version = macos_version()
|
||||
|
||||
# Big Sur versions go 11.0, 11.0.1, 11.1 (vs. prior versions that
|
||||
# only used the minor component)
|
||||
part = 1 if macos_version() >= Version('11') else 2
|
||||
part = 1 if version >= Version('11') else 2
|
||||
|
||||
mac_ver = str(macos_version().up_to(part))
|
||||
mac_ver = str(version.up_to(part))
|
||||
name = mac_releases.get(mac_ver, "macos")
|
||||
super(MacOs, self).__init__(name, mac_ver)
|
||||
|
||||
|
||||
@@ -1186,22 +1186,27 @@ def extendee_spec(self):
|
||||
if not self.extendees:
|
||||
return None
|
||||
|
||||
# TODO: allow more than one extendee.
|
||||
name = next(iter(self.extendees))
|
||||
deps = []
|
||||
|
||||
# If the extendee is in the spec's deps already, return that.
|
||||
for dep in self.spec.traverse(deptypes=('link', 'run')):
|
||||
if name == dep.name:
|
||||
return dep
|
||||
for dep in self.spec.traverse(deptype=('link', 'run')):
|
||||
if dep.name in self.extendees:
|
||||
deps.append(dep)
|
||||
|
||||
# TODO: allow more than one active extendee.
|
||||
if deps:
|
||||
assert len(deps) == 1
|
||||
return deps[0]
|
||||
|
||||
# if the spec is concrete already, then it extends something
|
||||
# that is an *optional* dependency, and the dep isn't there.
|
||||
if self.spec._concrete:
|
||||
return None
|
||||
else:
|
||||
# TODO: do something sane here with more than one extendee
|
||||
# If it's not concrete, then return the spec from the
|
||||
# extends() directive since that is all we know so far.
|
||||
spec, kwargs = self.extendees[name]
|
||||
spec, kwargs = next(iter(self.extendees.items()))
|
||||
return spec
|
||||
|
||||
@property
|
||||
@@ -1791,7 +1796,7 @@ def cache_extra_test_sources(self, srcs):
|
||||
fsys.mkdirp(os.path.dirname(dest_path))
|
||||
fsys.copy(src_path, dest_path)
|
||||
|
||||
def do_test(self, dirty=False):
|
||||
def do_test(self, dirty=False, externals=False):
|
||||
if self.test_requires_compiler:
|
||||
compilers = spack.compilers.compilers_for_spec(
|
||||
self.spec.compiler, arch_spec=self.spec.architecture)
|
||||
@@ -1808,7 +1813,12 @@ def do_test(self, dirty=False):
|
||||
self.tested_file = self.test_suite.tested_file_for_spec(self.spec)
|
||||
fsys.touch(self.test_log_file) # Otherwise log_parse complains
|
||||
|
||||
kwargs = {'dirty': dirty, 'fake': False, 'context': 'test'}
|
||||
kwargs = {
|
||||
'dirty': dirty, 'fake': False, 'context': 'test',
|
||||
'externals': externals
|
||||
}
|
||||
if tty.is_verbose():
|
||||
kwargs['verbose'] = True
|
||||
spack.build_environment.start_build_process(self, test_process, kwargs)
|
||||
|
||||
def test(self):
|
||||
@@ -2553,7 +2563,11 @@ def fetch_remote_versions(self, concurrency=128):
|
||||
|
||||
try:
|
||||
return spack.util.web.find_versions_of_archive(
|
||||
self.all_urls, self.list_url, self.list_depth, concurrency
|
||||
self.all_urls,
|
||||
self.list_url,
|
||||
self.list_depth,
|
||||
concurrency,
|
||||
reference_package=self,
|
||||
)
|
||||
except spack.util.web.NoNetworkConnectionError as e:
|
||||
tty.die("Package.fetch_versions couldn't connect to:", e.url,
|
||||
@@ -2629,12 +2643,26 @@ def has_test_method(pkg):
|
||||
)
|
||||
|
||||
|
||||
def print_test_message(logger, msg, verbose):
|
||||
if verbose:
|
||||
with logger.force_echo():
|
||||
print(msg)
|
||||
else:
|
||||
print(msg)
|
||||
|
||||
|
||||
def test_process(pkg, kwargs):
|
||||
with tty.log.log_output(pkg.test_log_file) as logger:
|
||||
verbose = kwargs.get('verbose', False)
|
||||
externals = kwargs.get('externals', False)
|
||||
with tty.log.log_output(pkg.test_log_file, verbose) as logger:
|
||||
with logger.force_echo():
|
||||
tty.msg('Testing package {0}'
|
||||
.format(pkg.test_suite.test_pkg_id(pkg.spec)))
|
||||
|
||||
if pkg.spec.external and not externals:
|
||||
print_test_message(logger, 'Skipped external package', verbose)
|
||||
return
|
||||
|
||||
# use debug print levels for log file to record commands
|
||||
old_debug = tty.is_debug()
|
||||
tty.set_debug(True)
|
||||
@@ -2715,6 +2743,8 @@ def test_process(pkg, kwargs):
|
||||
# non-pass-only methods
|
||||
if ran_actual_test_function:
|
||||
fsys.touch(pkg.tested_file)
|
||||
else:
|
||||
print_test_message(logger, 'No tests to run', verbose)
|
||||
|
||||
|
||||
inject_flags = PackageBase.inject_flags
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
import platform
|
||||
import platform as py_platform
|
||||
|
||||
import archspec.cpu
|
||||
|
||||
@@ -38,7 +38,7 @@ def __init__(self):
|
||||
|
||||
@classmethod
|
||||
def detect(cls):
|
||||
return 'darwin' in platform.system().lower()
|
||||
return 'darwin' in py_platform.system().lower()
|
||||
|
||||
def setup_platform_environment(self, pkg, env):
|
||||
"""Specify deployment target based on target OS version.
|
||||
@@ -60,4 +60,13 @@ def setup_platform_environment(self, pkg, env):
|
||||
"""
|
||||
|
||||
os = self.operating_sys[pkg.spec.os]
|
||||
env.set('MACOSX_DEPLOYMENT_TARGET', str(os.version))
|
||||
version = os.version
|
||||
if len(version) == 1:
|
||||
# Version has only one component: add a minor version to prevent
|
||||
# potential errors with `ld`,
|
||||
# which fails with `-macosx_version_min 11`
|
||||
# but succeeds with `-macosx_version_min 11.0`.
|
||||
# Most compilers seem to perform this translation automatically,
|
||||
# but older GCC does not.
|
||||
version = str(version) + '.0'
|
||||
env.set('MACOSX_DEPLOYMENT_TARGET', str(version))
|
||||
|
||||
@@ -171,7 +171,13 @@ def wrapper(instance, *args, **kwargs):
|
||||
value = None
|
||||
try:
|
||||
value = do_fn(instance, *args, **kwargs)
|
||||
package['result'] = 'success'
|
||||
|
||||
externals = kwargs.get('externals', False)
|
||||
skip_externals = pkg.spec.external and not externals
|
||||
if do_fn.__name__ == 'do_test' and skip_externals:
|
||||
package['result'] = 'skipped'
|
||||
else:
|
||||
package['result'] = 'success'
|
||||
package['stdout'] = fetch_log(pkg, do_fn, self.dir)
|
||||
package['installed_from_binary_cache'] = \
|
||||
pkg.installed_from_binary_cache
|
||||
|
||||
30
lib/spack/spack/schema/concretizer.py
Normal file
30
lib/spack/spack/schema/concretizer.py
Normal file
@@ -0,0 +1,30 @@
|
||||
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
"""Schema for concretizer.yaml configuration file.
|
||||
|
||||
.. literalinclude:: _spack_root/lib/spack/spack/schema/concretizer.py
|
||||
:lines: 13-
|
||||
"""
|
||||
|
||||
properties = {
|
||||
'concretizer': {
|
||||
'type': 'object',
|
||||
'additionalProperties': False,
|
||||
'properties': {
|
||||
'reuse': {'type': 'boolean'},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
#: Full schema with metadata
|
||||
schema = {
|
||||
'$schema': 'http://json-schema.org/draft-07/schema#',
|
||||
'title': 'Spack concretizer configuration file schema',
|
||||
'type': 'object',
|
||||
'additionalProperties': False,
|
||||
'properties': properties,
|
||||
}
|
||||
@@ -9,7 +9,6 @@
|
||||
import itertools
|
||||
import os
|
||||
import pprint
|
||||
import sys
|
||||
import types
|
||||
import warnings
|
||||
|
||||
@@ -25,7 +24,7 @@
|
||||
# There may be a better way to detect this
|
||||
clingo_cffi = hasattr(clingo.Symbol, '_rep')
|
||||
except ImportError:
|
||||
clingo = None
|
||||
clingo = None # type: ignore
|
||||
clingo_cffi = False
|
||||
|
||||
import llnl.util.lang
|
||||
@@ -474,18 +473,16 @@ def bootstrap_clingo():
|
||||
|
||||
|
||||
class PyclingoDriver(object):
|
||||
def __init__(self, cores=True, asp=None):
|
||||
def __init__(self, cores=True):
|
||||
"""Driver for the Python clingo interface.
|
||||
|
||||
Arguments:
|
||||
cores (bool): whether to generate unsatisfiable cores for better
|
||||
error reporting.
|
||||
asp (file-like): optional stream to write a text-based ASP program
|
||||
for debugging or verification.
|
||||
"""
|
||||
bootstrap_clingo()
|
||||
|
||||
self.out = asp or llnl.util.lang.Devnull()
|
||||
self.out = llnl.util.lang.Devnull()
|
||||
self.cores = cores
|
||||
|
||||
def title(self, name, char):
|
||||
@@ -528,9 +525,30 @@ def fact(self, head, assumption=False):
|
||||
self.assumptions.append(atom)
|
||||
|
||||
def solve(
|
||||
self, solver_setup, specs, dump=None, nmodels=0,
|
||||
timers=False, stats=False, tests=False, reuse=False,
|
||||
self,
|
||||
setup,
|
||||
specs,
|
||||
nmodels=0,
|
||||
timers=False,
|
||||
stats=False,
|
||||
out=None,
|
||||
setup_only=False
|
||||
):
|
||||
"""Set up the input and solve for dependencies of ``specs``.
|
||||
|
||||
Arguments:
|
||||
setup (SpackSolverSetup): An object to set up the ASP problem.
|
||||
specs (list): List of ``Spec`` objects to solve for.
|
||||
nmodels (list): Number of models to consider (default 0 for unlimited).
|
||||
timers (bool): Print out coarse timers for different solve phases.
|
||||
stats (bool): Whether to output Clingo's internal solver statistics.
|
||||
out: Optional output stream for the generated ASP program.
|
||||
setup_only (bool): if True, stop after setup and don't solve (default False).
|
||||
"""
|
||||
# allow solve method to override the output stream
|
||||
if out is not None:
|
||||
self.out = out
|
||||
|
||||
timer = spack.util.timer.Timer()
|
||||
|
||||
# Initialize the control object for the solver
|
||||
@@ -545,7 +563,7 @@ def solve(
|
||||
self.assumptions = []
|
||||
with self.control.backend() as backend:
|
||||
self.backend = backend
|
||||
solver_setup.setup(self, specs, tests=tests, reuse=reuse)
|
||||
setup.setup(self, specs)
|
||||
timer.phase("setup")
|
||||
|
||||
# read in the main ASP program and display logic -- these are
|
||||
@@ -566,6 +584,10 @@ def visit(node):
|
||||
path = os.path.join(parent_dir, 'concretize.lp')
|
||||
parse_files([path], visit)
|
||||
|
||||
# If we're only doing setup, just return an empty solve result
|
||||
if setup_only:
|
||||
return Result(specs)
|
||||
|
||||
# Load the file itself
|
||||
self.control.load(os.path.join(parent_dir, 'concretize.lp'))
|
||||
self.control.load(os.path.join(parent_dir, "display.lp"))
|
||||
@@ -640,7 +662,7 @@ def stringify(x):
|
||||
class SpackSolverSetup(object):
|
||||
"""Class to set up and run a Spack concretization solve."""
|
||||
|
||||
def __init__(self):
|
||||
def __init__(self, reuse=False, tests=False):
|
||||
self.gen = None # set by setup()
|
||||
|
||||
self.declared_versions = {}
|
||||
@@ -665,6 +687,12 @@ def __init__(self):
|
||||
# Caches to optimize the setup phase of the solver
|
||||
self.target_specs_cache = None
|
||||
|
||||
# whether to add installed/binary hashes to the solve
|
||||
self.reuse = reuse
|
||||
|
||||
# whether to add installed/binary hashes to the solve
|
||||
self.tests = tests
|
||||
|
||||
def pkg_version_rules(self, pkg):
|
||||
"""Output declared versions of a package.
|
||||
|
||||
@@ -866,7 +894,7 @@ def pkg_rules(self, pkg, tests):
|
||||
self.package_provider_rules(pkg)
|
||||
|
||||
# dependencies
|
||||
self.package_dependencies_rules(pkg, tests)
|
||||
self.package_dependencies_rules(pkg)
|
||||
|
||||
# virtual preferences
|
||||
self.virtual_preferences(
|
||||
@@ -932,17 +960,17 @@ def package_provider_rules(self, pkg):
|
||||
))
|
||||
self.gen.newline()
|
||||
|
||||
def package_dependencies_rules(self, pkg, tests):
|
||||
def package_dependencies_rules(self, pkg):
|
||||
"""Translate 'depends_on' directives into ASP logic."""
|
||||
for _, conditions in sorted(pkg.dependencies.items()):
|
||||
for cond, dep in sorted(conditions.items()):
|
||||
deptypes = dep.type.copy()
|
||||
# Skip test dependencies if they're not requested
|
||||
if not tests:
|
||||
if not self.tests:
|
||||
deptypes.discard("test")
|
||||
|
||||
# ... or if they are requested only for certain packages
|
||||
if not isinstance(tests, bool) and pkg.name not in tests:
|
||||
if not isinstance(self.tests, bool) and pkg.name not in self.tests:
|
||||
deptypes.discard("test")
|
||||
|
||||
# if there are no dependency types to be considered
|
||||
@@ -1288,16 +1316,26 @@ def key_fn(item):
|
||||
|
||||
for spec in specs:
|
||||
for dep in spec.traverse():
|
||||
if dep.versions.concrete:
|
||||
# Concrete versions used in abstract specs from cli. They
|
||||
# all have idx equal to 0, which is the best possible. In
|
||||
# any case they will be used due to being set from the cli.
|
||||
self.declared_versions[dep.name].append(DeclaredVersion(
|
||||
version=dep.version,
|
||||
idx=0,
|
||||
origin=version_provenance.spec
|
||||
))
|
||||
self.possible_versions[dep.name].add(dep.version)
|
||||
if not dep.versions.concrete:
|
||||
continue
|
||||
|
||||
known_versions = self.possible_versions[dep.name]
|
||||
if (not dep.version.is_commit and
|
||||
any(v.satisfies(dep.version) for v in known_versions)):
|
||||
# some version we know about satisfies this constraint, so we
|
||||
# should use that one. e.g, if the user asks for qt@5 and we
|
||||
# know about qt@5.5.
|
||||
continue
|
||||
|
||||
# if there is a concrete version on the CLI *that we know nothing
|
||||
# about*, add it to the known versions. Use idx=0, which is the
|
||||
# best possible, so they're guaranteed to be used preferentially.
|
||||
self.declared_versions[dep.name].append(DeclaredVersion(
|
||||
version=dep.version,
|
||||
idx=0,
|
||||
origin=version_provenance.spec
|
||||
))
|
||||
self.possible_versions[dep.name].add(dep.version)
|
||||
|
||||
def _supported_targets(self, compiler_name, compiler_version, targets):
|
||||
"""Get a list of which targets are supported by the compiler.
|
||||
@@ -1589,8 +1627,9 @@ def define_variant_values(self):
|
||||
|
||||
"""
|
||||
# Tell the concretizer about possible values from specs we saw in
|
||||
# spec_clauses()
|
||||
for pkg, variant, value in sorted(self.variant_values_from_specs):
|
||||
# spec_clauses(). We might want to order these facts by pkg and name
|
||||
# if we are debugging.
|
||||
for pkg, variant, value in self.variant_values_from_specs:
|
||||
self.gen.fact(fn.variant_possible_value(pkg, variant, value))
|
||||
|
||||
def _facts_from_concrete_spec(self, spec, possible):
|
||||
@@ -1642,7 +1681,7 @@ def define_installed_packages(self, specs, possible):
|
||||
# TODO: (or any mirror really) doesn't have binaries.
|
||||
pass
|
||||
|
||||
def setup(self, driver, specs, tests=False, reuse=False):
|
||||
def setup(self, driver, specs):
|
||||
"""Generate an ASP program with relevant constraints for specs.
|
||||
|
||||
This calls methods on the solve driver to set up the problem with
|
||||
@@ -1689,7 +1728,7 @@ def setup(self, driver, specs, tests=False, reuse=False):
|
||||
self.gen.h1("Concrete input spec definitions")
|
||||
self.define_concrete_input_specs(specs, possible)
|
||||
|
||||
if reuse:
|
||||
if self.reuse:
|
||||
self.gen.h1("Installed packages")
|
||||
self.gen.fact(fn.optimize_for_reuse())
|
||||
self.gen.newline()
|
||||
@@ -1713,7 +1752,7 @@ def setup(self, driver, specs, tests=False, reuse=False):
|
||||
self.gen.h1('Package Constraints')
|
||||
for pkg in sorted(pkgs):
|
||||
self.gen.h2('Package rules: %s' % pkg)
|
||||
self.pkg_rules(pkg, tests=tests)
|
||||
self.pkg_rules(pkg, tests=self.tests)
|
||||
self.gen.h2('Package preferences: %s' % pkg)
|
||||
self.preferred_variants(pkg)
|
||||
self.preferred_targets(pkg)
|
||||
@@ -2016,33 +2055,64 @@ def _develop_specs_from_env(spec, env):
|
||||
spec.constrain(dev_info['spec'])
|
||||
|
||||
|
||||
#
|
||||
# These are handwritten parts for the Spack ASP model.
|
||||
#
|
||||
def solve(specs, dump=(), models=0, timers=False, stats=False, tests=False,
|
||||
reuse=False):
|
||||
"""Solve for a stable model of specs.
|
||||
class Solver(object):
|
||||
"""This is the main external interface class for solving.
|
||||
|
||||
It manages solver configuration and preferences in once place. It sets up the solve
|
||||
and passes the setup method to the driver, as well.
|
||||
|
||||
Properties of interest:
|
||||
|
||||
``reuse (bool)``
|
||||
Whether to try to reuse existing installs/binaries
|
||||
|
||||
Arguments:
|
||||
specs (list): list of Specs to solve.
|
||||
dump (tuple): what to dump
|
||||
models (int): number of models to search (default: 0)
|
||||
"""
|
||||
driver = PyclingoDriver()
|
||||
if "asp" in dump:
|
||||
driver.out = sys.stdout
|
||||
def __init__(self):
|
||||
self.driver = PyclingoDriver()
|
||||
|
||||
# Check upfront that the variants are admissible
|
||||
for root in specs:
|
||||
for s in root.traverse():
|
||||
if s.virtual:
|
||||
continue
|
||||
spack.spec.Spec.ensure_valid_variants(s)
|
||||
# These properties are settable via spack configuration, and overridable
|
||||
# by setting them directly as properties.
|
||||
self.reuse = spack.config.get("concretizer:reuse", False)
|
||||
|
||||
setup = SpackSolverSetup()
|
||||
return driver.solve(
|
||||
setup, specs, dump, models, timers, stats, tests, reuse
|
||||
)
|
||||
def solve(
|
||||
self,
|
||||
specs,
|
||||
out=None,
|
||||
models=0,
|
||||
timers=False,
|
||||
stats=False,
|
||||
tests=False,
|
||||
setup_only=False,
|
||||
):
|
||||
"""
|
||||
Arguments:
|
||||
specs (list): List of ``Spec`` objects to solve for.
|
||||
out: Optionally write the generate ASP program to a file-like object.
|
||||
models (int): Number of models to search (default: 0 for unlimited).
|
||||
timers (bool): Print out coarse fimers for different solve phases.
|
||||
stats (bool): Print out detailed stats from clingo.
|
||||
tests (bool or tuple): If True, concretize test dependencies for all packages.
|
||||
If a tuple of package names, concretize test dependencies for named
|
||||
packages (defaults to False: do not concretize test dependencies).
|
||||
setup_only (bool): if True, stop after setup and don't solve (default False).
|
||||
"""
|
||||
# Check upfront that the variants are admissible
|
||||
for root in specs:
|
||||
for s in root.traverse():
|
||||
if s.virtual:
|
||||
continue
|
||||
spack.spec.Spec.ensure_valid_variants(s)
|
||||
|
||||
setup = SpackSolverSetup(reuse=self.reuse, tests=tests)
|
||||
return self.driver.solve(
|
||||
setup,
|
||||
specs,
|
||||
nmodels=models,
|
||||
timers=timers,
|
||||
stats=stats,
|
||||
out=out,
|
||||
setup_only=setup_only,
|
||||
)
|
||||
|
||||
|
||||
class UnsatisfiableSpecError(spack.error.UnsatisfiableSpecError):
|
||||
|
||||
@@ -2605,7 +2605,7 @@ def ensure_no_deprecated(root):
|
||||
msg += " For each package listed, choose another spec\n"
|
||||
raise SpecDeprecatedError(msg)
|
||||
|
||||
def _new_concretize(self, tests=False, reuse=False):
|
||||
def _new_concretize(self, tests=False):
|
||||
import spack.solver.asp
|
||||
|
||||
if not self.name:
|
||||
@@ -2615,7 +2615,8 @@ def _new_concretize(self, tests=False, reuse=False):
|
||||
if self._concrete:
|
||||
return
|
||||
|
||||
result = spack.solver.asp.solve([self], tests=tests, reuse=reuse)
|
||||
solver = spack.solver.asp.Solver()
|
||||
result = solver.solve([self], tests=tests)
|
||||
result.raise_if_unsat()
|
||||
|
||||
# take the best answer
|
||||
@@ -2633,23 +2634,17 @@ def _new_concretize(self, tests=False, reuse=False):
|
||||
self._dup(concretized)
|
||||
self._mark_concrete()
|
||||
|
||||
def concretize(self, tests=False, reuse=False):
|
||||
def concretize(self, tests=False):
|
||||
"""Concretize the current spec.
|
||||
|
||||
Args:
|
||||
tests (bool or list): if False disregard 'test' dependencies,
|
||||
if a list of names activate them for the packages in the list,
|
||||
if True activate 'test' dependencies for all packages.
|
||||
reuse (bool): if True try to maximize reuse of already installed
|
||||
specs, if False don't account for installation status.
|
||||
"""
|
||||
if spack.config.get('config:concretizer') == "clingo":
|
||||
self._new_concretize(tests, reuse=reuse)
|
||||
self._new_concretize(tests)
|
||||
else:
|
||||
if reuse:
|
||||
msg = ('maximizing reuse of installed specs is not '
|
||||
'possible with the original concretizer')
|
||||
raise spack.error.SpecError(msg)
|
||||
self._old_concretize(tests)
|
||||
|
||||
def _mark_root_concrete(self, value=True):
|
||||
@@ -2674,7 +2669,7 @@ def _mark_concrete(self, value=True):
|
||||
s.clear_cached_hashes()
|
||||
s._mark_root_concrete(value)
|
||||
|
||||
def concretized(self, tests=False, reuse=False):
|
||||
def concretized(self, tests=False):
|
||||
"""This is a non-destructive version of concretize().
|
||||
|
||||
First clones, then returns a concrete version of this package
|
||||
@@ -2684,11 +2679,9 @@ def concretized(self, tests=False, reuse=False):
|
||||
tests (bool or list): if False disregard 'test' dependencies,
|
||||
if a list of names activate them for the packages in the list,
|
||||
if True activate 'test' dependencies for all packages.
|
||||
reuse (bool): if True try to maximize reuse of already installed
|
||||
specs, if False don't account for installation status.
|
||||
"""
|
||||
clone = self.copy(caches=True)
|
||||
clone.concretize(tests=tests, reuse=reuse)
|
||||
clone.concretize(tests=tests)
|
||||
return clone
|
||||
|
||||
def flat_dependencies(self, **kwargs):
|
||||
@@ -4279,7 +4272,7 @@ def tree(self, **kwargs):
|
||||
|
||||
out = ""
|
||||
for d, dep_spec in self.traverse_edges(
|
||||
order='pre', cover=cover, depth=True, deptypes=deptypes):
|
||||
order='pre', cover=cover, depth=True, deptype=deptypes):
|
||||
node = dep_spec.spec
|
||||
|
||||
if prefix is not None:
|
||||
|
||||
@@ -103,16 +103,8 @@ def test_bootstrap_search_for_compilers_with_environment_active(
|
||||
|
||||
@pytest.mark.regression('26189')
|
||||
def test_config_yaml_is_preserved_during_bootstrap(mutable_config):
|
||||
# Mock the command line scope
|
||||
expected_dir = '/tmp/test'
|
||||
internal_scope = spack.config.InternalConfigScope(
|
||||
name='command_line', data={
|
||||
'config': {
|
||||
'test_stage': expected_dir
|
||||
}
|
||||
}
|
||||
)
|
||||
spack.config.config.push_scope(internal_scope)
|
||||
spack.config.set("config:test_stage", expected_dir, scope="command_line")
|
||||
|
||||
assert spack.config.get('config:test_stage') == expected_dir
|
||||
with spack.bootstrap.ensure_bootstrap_configuration():
|
||||
|
||||
@@ -11,6 +11,7 @@
|
||||
import spack.cmd.common.arguments as arguments
|
||||
import spack.config
|
||||
import spack.environment as ev
|
||||
import spack.main
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
@@ -112,3 +113,18 @@ def test_root_and_dep_match_returns_root(mock_packages, mutable_mock_env_path):
|
||||
env_spec2 = spack.cmd.matching_spec_from_env(
|
||||
spack.cmd.parse_specs(['b@1.0'])[0])
|
||||
assert env_spec2
|
||||
|
||||
|
||||
def test_concretizer_arguments(mutable_config, mock_packages):
|
||||
"""Ensure that ConfigSetAction is doing the right thing."""
|
||||
spec = spack.main.SpackCommand("spec")
|
||||
|
||||
assert spack.config.get("concretizer:reuse", None) is None
|
||||
|
||||
spec("--reuse", "zlib")
|
||||
|
||||
assert spack.config.get("concretizer:reuse", None) is True
|
||||
|
||||
spec("--fresh", "zlib")
|
||||
|
||||
assert spack.config.get("concretizer:reuse", None) is False
|
||||
|
||||
@@ -291,11 +291,11 @@ def test_env_install_two_specs_same_dep(
|
||||
assert 'depb: Executing phase:' in out
|
||||
assert 'a: Executing phase:' in out
|
||||
|
||||
depb = spack.repo.path.get_pkg_class('depb')
|
||||
assert depb.installed, 'Expected depb to be installed'
|
||||
depb = spack.store.db.query_one('depb', installed=True)
|
||||
assert depb, 'Expected depb to be installed'
|
||||
|
||||
a = spack.repo.path.get_pkg_class('a')
|
||||
assert a.installed, 'Expected a to be installed'
|
||||
a = spack.store.db.query_one('a', installed=True)
|
||||
assert a, 'Expected a to be installed'
|
||||
|
||||
|
||||
def test_remove_after_concretize():
|
||||
|
||||
@@ -124,7 +124,7 @@ def test_find_external_cmd_not_buildable(
|
||||
|
||||
def test_find_external_cmd_full_repo(
|
||||
mutable_config, working_env, mock_executable, mutable_mock_repo):
|
||||
"""Test invoking 'spack external find' with no additional arguments, which
|
||||
"""Test invoking 'spack external find --all' with no additional arguments
|
||||
iterates through each package in the repository.
|
||||
"""
|
||||
|
||||
@@ -134,7 +134,7 @@ def test_find_external_cmd_full_repo(
|
||||
prefix = os.path.dirname(os.path.dirname(exe_path1))
|
||||
|
||||
os.environ['PATH'] = ':'.join([os.path.dirname(exe_path1)])
|
||||
external('find')
|
||||
external('find', '--all')
|
||||
|
||||
pkgs_cfg = spack.config.get('packages')
|
||||
pkg_cfg = pkgs_cfg['find-externals1']
|
||||
|
||||
@@ -248,11 +248,11 @@ def test_install_overwrite_not_installed(
|
||||
|
||||
def test_install_commit(
|
||||
mock_git_version_info, install_mockery, mock_packages, monkeypatch):
|
||||
"""
|
||||
Test installing a git package from a commit.
|
||||
"""Test installing a git package from a commit.
|
||||
|
||||
This ensures Spack associates commit versions with their packages in time to do
|
||||
version lookups. Details of version lookup tested elsewhere.
|
||||
|
||||
This ensures Spack appropriately associates commit versions with their
|
||||
packages in time to do version lookups. Details of version lookup tested elsewhere
|
||||
"""
|
||||
repo_path, filename, commits = mock_git_version_info
|
||||
monkeypatch.setattr(spack.package.PackageBase,
|
||||
@@ -262,6 +262,7 @@ def test_install_commit(
|
||||
commit = commits[-1]
|
||||
spec = spack.spec.Spec('git-test-commit@%s' % commit)
|
||||
spec.concretize()
|
||||
print(spec)
|
||||
spec.package.do_install()
|
||||
|
||||
# Ensure first commit file contents were written
|
||||
|
||||
@@ -7,8 +7,10 @@
|
||||
|
||||
import pytest
|
||||
|
||||
import spack.environment as ev
|
||||
import spack.spec
|
||||
from spack.main import SpackCommand
|
||||
import spack.store
|
||||
from spack.main import SpackCommand, SpackCommandError
|
||||
|
||||
pytestmark = pytest.mark.usefixtures('config', 'mutable_mock_repo')
|
||||
|
||||
@@ -26,6 +28,33 @@ def test_spec():
|
||||
assert 'mpich@3.0.4' in output
|
||||
|
||||
|
||||
def test_spec_concretizer_args(mutable_config, mutable_database):
|
||||
"""End-to-end test of CLI concretizer prefs.
|
||||
|
||||
It's here to make sure that everything works from CLI
|
||||
options to `solver.py`, and that config options are not
|
||||
lost along the way.
|
||||
"""
|
||||
if spack.config.get('config:concretizer') == 'original':
|
||||
pytest.xfail('Known failure of the original concretizer')
|
||||
|
||||
# remove two non-preferred mpileaks installations
|
||||
# so that reuse will pick up the zmpi one
|
||||
uninstall = SpackCommand("uninstall")
|
||||
uninstall("-y", "mpileaks^mpich")
|
||||
uninstall("-y", "mpileaks^mpich2")
|
||||
|
||||
# get the hash of mpileaks^zmpi
|
||||
mpileaks_zmpi = spack.store.db.query_one("mpileaks^zmpi")
|
||||
h = mpileaks_zmpi.dag_hash()[:7]
|
||||
|
||||
output = spec("--fresh", "-l", "mpileaks")
|
||||
assert h not in output
|
||||
|
||||
output = spec("--reuse", "-l", "mpileaks")
|
||||
assert h in output
|
||||
|
||||
|
||||
def test_spec_yaml():
|
||||
output = spec('--yaml', 'mpileaks')
|
||||
|
||||
@@ -85,6 +114,20 @@ def test_spec_deptypes_edges():
|
||||
|
||||
|
||||
def test_spec_returncode():
|
||||
with pytest.raises(spack.main.SpackCommandError):
|
||||
with pytest.raises(SpackCommandError):
|
||||
spec()
|
||||
assert spec.returncode == 1
|
||||
|
||||
|
||||
def test_env_aware_spec(mutable_mock_env_path):
|
||||
env = ev.create('test')
|
||||
env.add('mpileaks')
|
||||
|
||||
with env:
|
||||
output = spec()
|
||||
assert 'mpileaks@2.3' in output
|
||||
assert 'callpath@1.0' in output
|
||||
assert 'dyninst@8.2' in output
|
||||
assert 'libdwarf@20130729' in output
|
||||
assert 'libelf@0.8.1' in output
|
||||
assert 'mpich@3.0.4' in output
|
||||
|
||||
@@ -211,6 +211,7 @@ def test_test_list_all(mock_packages):
|
||||
"printing-package",
|
||||
"py-extension1",
|
||||
"py-extension2",
|
||||
"simple-standalone-test",
|
||||
"test-error",
|
||||
"test-fail",
|
||||
])
|
||||
@@ -251,3 +252,41 @@ def test_hash_change(mock_test_stage, mock_packages, mock_archive, mock_fetch,
|
||||
# The results should be obtainable
|
||||
results_output = spack_test('results')
|
||||
assert 'PASSED' in results_output
|
||||
|
||||
|
||||
def test_test_results_none(mock_packages, mock_test_stage):
|
||||
name = 'trivial'
|
||||
spec = spack.spec.Spec('trivial-smoke-test').concretized()
|
||||
suite = spack.install_test.TestSuite([spec], name)
|
||||
suite.ensure_stage()
|
||||
spack.install_test.write_test_suite_file(suite)
|
||||
results = spack_test('results', name)
|
||||
assert 'has no results' in results
|
||||
assert 'if it is running' in results
|
||||
|
||||
|
||||
@pytest.mark.parametrize('status,expected', [
|
||||
('FAILED', '1 failed'),
|
||||
('NO-TESTS', '1 no-tests'),
|
||||
('SKIPPED', '1 skipped'),
|
||||
('PASSED', '1 passed'),
|
||||
])
|
||||
def test_test_results_status(mock_packages, mock_test_stage, status, expected):
|
||||
name = 'trivial'
|
||||
spec = spack.spec.Spec('trivial-smoke-test').concretized()
|
||||
suite = spack.install_test.TestSuite([spec], name)
|
||||
suite.ensure_stage()
|
||||
spack.install_test.write_test_suite_file(suite)
|
||||
suite.write_test_result(spec, status)
|
||||
|
||||
for opt in ['', '--failed', '--log']:
|
||||
args = ['results', name]
|
||||
if opt:
|
||||
args.insert(1, opt)
|
||||
|
||||
results = spack_test(*args)
|
||||
if opt == '--failed' and status != 'FAILED':
|
||||
assert status not in results
|
||||
else:
|
||||
assert status in results
|
||||
assert expected in results
|
||||
|
||||
@@ -1345,7 +1345,7 @@ def test_non_default_provider_of_multiple_virtuals(self):
|
||||
('mpich~debug', True)
|
||||
])
|
||||
def test_concrete_specs_are_not_modified_on_reuse(
|
||||
self, mutable_database, spec_str, expect_installed
|
||||
self, mutable_database, spec_str, expect_installed, config
|
||||
):
|
||||
if spack.config.get('config:concretizer') == 'original':
|
||||
pytest.skip('Original concretizer cannot reuse specs')
|
||||
@@ -1354,7 +1354,8 @@ def test_concrete_specs_are_not_modified_on_reuse(
|
||||
# when reused specs are added to the mix. This prevents things
|
||||
# like additional constraints being added to concrete specs in
|
||||
# the answer set produced by clingo.
|
||||
s = spack.spec.Spec(spec_str).concretized(reuse=True)
|
||||
with spack.config.override("concretizer:reuse", True):
|
||||
s = spack.spec.Spec(spec_str).concretized()
|
||||
assert s.package.installed is expect_installed
|
||||
assert s.satisfies(spec_str, strict=True)
|
||||
|
||||
@@ -1375,3 +1376,16 @@ def test_sticky_variant_in_package(self):
|
||||
|
||||
s = spack.spec.Spec('sticky-variant %clang').concretized()
|
||||
assert s.satisfies('%clang') and s.satisfies('~allow-gcc')
|
||||
|
||||
def test_do_not_invent_new_concrete_versions_unless_necessary(self):
|
||||
if spack.config.get('config:concretizer') == 'original':
|
||||
pytest.skip(
|
||||
"Original concretizer doesn't resolve concrete versions to known ones"
|
||||
)
|
||||
|
||||
# ensure we select a known satisfying version rather than creating
|
||||
# a new '2.7' version.
|
||||
assert ver("2.7.11") == Spec("python@2.7").concretized().version
|
||||
|
||||
# Here there is no known satisfying version - use the one on the spec.
|
||||
assert ver("2.7.21") == Spec("python@2.7.21").concretized().version
|
||||
|
||||
@@ -612,19 +612,23 @@ def configuration_dir(tmpdir_factory, linux_os):
|
||||
shutil.rmtree(str(tmpdir))
|
||||
|
||||
|
||||
def _create_mock_configuration_scopes(configuration_dir):
|
||||
"""Create the configuration scopes used in `config` and `mutable_config`."""
|
||||
scopes = [
|
||||
spack.config.InternalConfigScope('_builtin', spack.config.config_defaults),
|
||||
]
|
||||
scopes += [
|
||||
spack.config.ConfigScope(name, str(configuration_dir.join(name)))
|
||||
for name in ['site', 'system', 'user']
|
||||
]
|
||||
scopes += [spack.config.InternalConfigScope('command_line')]
|
||||
return scopes
|
||||
|
||||
|
||||
@pytest.fixture(scope='session')
|
||||
def mock_configuration_scopes(configuration_dir):
|
||||
"""Create a persistent Configuration object from the configuration_dir."""
|
||||
defaults = spack.config.InternalConfigScope(
|
||||
'_builtin', spack.config.config_defaults
|
||||
)
|
||||
test_scopes = [defaults]
|
||||
test_scopes += [
|
||||
spack.config.ConfigScope(name, str(configuration_dir.join(name)))
|
||||
for name in ['site', 'system', 'user']]
|
||||
test_scopes.append(spack.config.InternalConfigScope('command_line'))
|
||||
|
||||
yield test_scopes
|
||||
yield _create_mock_configuration_scopes(configuration_dir)
|
||||
|
||||
|
||||
@pytest.fixture(scope='function')
|
||||
@@ -640,9 +644,7 @@ def mutable_config(tmpdir_factory, configuration_dir):
|
||||
mutable_dir = tmpdir_factory.mktemp('mutable_config').join('tmp')
|
||||
configuration_dir.copy(mutable_dir)
|
||||
|
||||
scopes = [spack.config.ConfigScope(name, str(mutable_dir.join(name)))
|
||||
for name in ['site', 'system', 'user']]
|
||||
|
||||
scopes = _create_mock_configuration_scopes(mutable_dir)
|
||||
with spack.config.use_configuration(*scopes) as cfg:
|
||||
yield cfg
|
||||
|
||||
@@ -662,6 +664,8 @@ def mutable_empty_config(tmpdir_factory, configuration_dir):
|
||||
def no_compilers_yaml(mutable_config):
|
||||
"""Creates a temporary configuration without compilers.yaml"""
|
||||
for scope, local_config in mutable_config.scopes.items():
|
||||
if not local_config.path: # skip internal scopes
|
||||
continue
|
||||
compilers_yaml = os.path.join(local_config.path, 'compilers.yaml')
|
||||
if os.path.exists(compilers_yaml):
|
||||
os.remove(compilers_yaml)
|
||||
|
||||
@@ -7,5 +7,6 @@
|
||||
|
||||
<a href="foo-4.5.tar.gz">foo-4.5.tar.gz.</a>
|
||||
<a href="foo-4.5-rc5.tar.gz">foo-4.1-rc5.tar.gz.</a>
|
||||
<a href="foo-4.5.0.tar.gz">foo-4.5.0.tar.gz.</a>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
@@ -231,32 +231,12 @@ def _spec(spec, preferred_mirrors=None):
|
||||
|
||||
def test_try_install_from_binary_cache(install_mockery, mock_packages,
|
||||
monkeypatch):
|
||||
"""Tests SystemExit path for_try_install_from_binary_cache.
|
||||
|
||||
This test does not make sense. We tell spack there is a mirror
|
||||
with a binary for this spec and then expect it to die because there
|
||||
are no mirrors configured."""
|
||||
# def _mirrors_for_spec(spec, full_hash_match=False):
|
||||
# spec = spack.spec.Spec('mpi').concretized()
|
||||
# return [{
|
||||
# 'mirror_url': 'notused',
|
||||
# 'spec': spec,
|
||||
# }]
|
||||
|
||||
"""Test return false when no match exists in the mirror"""
|
||||
spec = spack.spec.Spec('mpich')
|
||||
spec.concretize()
|
||||
|
||||
# monkeypatch.setattr(
|
||||
# spack.binary_distribution, 'get_mirrors_for_spec', _mirrors_for_spec)
|
||||
|
||||
# with pytest.raises(SystemExit):
|
||||
# inst._try_install_from_binary_cache(spec.package, False, False)
|
||||
result = inst._try_install_from_binary_cache(spec.package, False, False)
|
||||
assert(not result)
|
||||
|
||||
# captured = capsys.readouterr()
|
||||
# assert 'add a spack mirror to allow download' in str(captured)
|
||||
|
||||
|
||||
def test_installer_repr(install_mockery):
|
||||
const_arg = installer_args(['trivial-install-test-package'], {})
|
||||
|
||||
@@ -10,7 +10,7 @@
|
||||
import pytest
|
||||
|
||||
import llnl.util.lang
|
||||
from llnl.util.lang import match_predicate, pretty_date
|
||||
from llnl.util.lang import match_predicate, memoized, pretty_date, stable_args
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
@@ -205,3 +205,63 @@ def _cmp_key(self):
|
||||
assert hash(a) == hash(a2)
|
||||
assert hash(b) == hash(b)
|
||||
assert hash(b) == hash(b2)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"args1,kwargs1,args2,kwargs2",
|
||||
[
|
||||
# Ensure tuples passed in args are disambiguated from equivalent kwarg items.
|
||||
(('a', 3), {}, (), {'a': 3})
|
||||
],
|
||||
)
|
||||
def test_unequal_args(args1, kwargs1, args2, kwargs2):
|
||||
assert stable_args(*args1, **kwargs1) != stable_args(*args2, **kwargs2)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"args1,kwargs1,args2,kwargs2",
|
||||
[
|
||||
# Ensure that kwargs are stably sorted.
|
||||
((), {'a': 3, 'b': 4}, (), {'b': 4, 'a': 3}),
|
||||
],
|
||||
)
|
||||
def test_equal_args(args1, kwargs1, args2, kwargs2):
|
||||
assert stable_args(*args1, **kwargs1) == stable_args(*args2, **kwargs2)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"args, kwargs",
|
||||
[
|
||||
((1,), {}),
|
||||
((), {'a': 3}),
|
||||
((1,), {'a': 3}),
|
||||
],
|
||||
)
|
||||
def test_memoized(args, kwargs):
|
||||
@memoized
|
||||
def f(*args, **kwargs):
|
||||
return 'return-value'
|
||||
assert f(*args, **kwargs) == 'return-value'
|
||||
key = stable_args(*args, **kwargs)
|
||||
assert list(f.cache.keys()) == [key]
|
||||
assert f.cache[key] == 'return-value'
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"args, kwargs",
|
||||
[
|
||||
(([1],), {}),
|
||||
((), {'a': [1]})
|
||||
],
|
||||
)
|
||||
def test_memoized_unhashable(args, kwargs):
|
||||
"""Check that an exception is raised clearly"""
|
||||
@memoized
|
||||
def f(*args, **kwargs):
|
||||
return None
|
||||
with pytest.raises(llnl.util.lang.UnhashableArguments) as exc_info:
|
||||
f(*args, **kwargs)
|
||||
exc_msg = str(exc_info.value)
|
||||
key = stable_args(*args, **kwargs)
|
||||
assert str(key) in exc_msg
|
||||
assert "function 'f'" in exc_msg
|
||||
|
||||
@@ -18,10 +18,9 @@
|
||||
|
||||
import pytest
|
||||
|
||||
import llnl.util.tty.log
|
||||
from llnl.util.lang import uniq
|
||||
from llnl.util.tty.log import log_output
|
||||
from llnl.util.tty.pty import PseudoShell
|
||||
import llnl.util.tty.log as log
|
||||
import llnl.util.lang as lang
|
||||
import llnl.util.tty.pty as pty
|
||||
|
||||
from spack.util.executable import which
|
||||
|
||||
@@ -33,14 +32,9 @@
|
||||
pass
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def nullcontext():
|
||||
yield
|
||||
|
||||
|
||||
def test_log_python_output_with_echo(capfd, tmpdir):
|
||||
with tmpdir.as_cwd():
|
||||
with log_output('foo.txt', echo=True):
|
||||
with log.log_output('foo.txt', echo=True):
|
||||
print('logged')
|
||||
|
||||
# foo.txt has output
|
||||
@@ -53,7 +47,7 @@ def test_log_python_output_with_echo(capfd, tmpdir):
|
||||
|
||||
def test_log_python_output_without_echo(capfd, tmpdir):
|
||||
with tmpdir.as_cwd():
|
||||
with log_output('foo.txt'):
|
||||
with log.log_output('foo.txt'):
|
||||
print('logged')
|
||||
|
||||
# foo.txt has output
|
||||
@@ -66,7 +60,7 @@ def test_log_python_output_without_echo(capfd, tmpdir):
|
||||
|
||||
def test_log_python_output_with_invalid_utf8(capfd, tmpdir):
|
||||
with tmpdir.as_cwd():
|
||||
with log_output('foo.txt'):
|
||||
with log.log_output('foo.txt'):
|
||||
sys.stdout.buffer.write(b'\xc3\x28\n')
|
||||
|
||||
# python2 and 3 treat invalid UTF-8 differently
|
||||
@@ -85,7 +79,7 @@ def test_log_python_output_with_invalid_utf8(capfd, tmpdir):
|
||||
def test_log_python_output_and_echo_output(capfd, tmpdir):
|
||||
with tmpdir.as_cwd():
|
||||
# echo two lines
|
||||
with log_output('foo.txt') as logger:
|
||||
with log.log_output('foo.txt') as logger:
|
||||
with logger.force_echo():
|
||||
print('force echo')
|
||||
print('logged')
|
||||
@@ -104,7 +98,7 @@ def _log_filter_fn(string):
|
||||
|
||||
def test_log_output_with_filter(capfd, tmpdir):
|
||||
with tmpdir.as_cwd():
|
||||
with log_output('foo.txt', filter_fn=_log_filter_fn):
|
||||
with log.log_output('foo.txt', filter_fn=_log_filter_fn):
|
||||
print('foo blah')
|
||||
print('blah foo')
|
||||
print('foo foo')
|
||||
@@ -118,7 +112,7 @@ def test_log_output_with_filter(capfd, tmpdir):
|
||||
|
||||
# now try with echo
|
||||
with tmpdir.as_cwd():
|
||||
with log_output('foo.txt', echo=True, filter_fn=_log_filter_fn):
|
||||
with log.log_output('foo.txt', echo=True, filter_fn=_log_filter_fn):
|
||||
print('foo blah')
|
||||
print('blah foo')
|
||||
print('foo foo')
|
||||
@@ -140,7 +134,7 @@ def test_log_subproc_and_echo_output_no_capfd(capfd, tmpdir):
|
||||
# here, and echoing in test_log_subproc_and_echo_output_capfd below.
|
||||
with capfd.disabled():
|
||||
with tmpdir.as_cwd():
|
||||
with log_output('foo.txt') as logger:
|
||||
with log.log_output('foo.txt') as logger:
|
||||
with logger.force_echo():
|
||||
echo('echo')
|
||||
print('logged')
|
||||
@@ -157,7 +151,7 @@ def test_log_subproc_and_echo_output_capfd(capfd, tmpdir):
|
||||
# interferes with the logged data. See
|
||||
# test_log_subproc_and_echo_output_no_capfd for tests on the logfile.
|
||||
with tmpdir.as_cwd():
|
||||
with log_output('foo.txt') as logger:
|
||||
with log.log_output('foo.txt') as logger:
|
||||
with logger.force_echo():
|
||||
echo('echo')
|
||||
print('logged')
|
||||
@@ -177,7 +171,7 @@ def handler(signum, frame):
|
||||
signal.signal(signal.SIGUSR1, handler)
|
||||
|
||||
log_path = kwargs["log_path"]
|
||||
with log_output(log_path):
|
||||
with log.log_output(log_path):
|
||||
while running[0]:
|
||||
print("line")
|
||||
time.sleep(1e-3)
|
||||
@@ -306,25 +300,25 @@ def mock_shell_fg_bg_no_termios(proc, ctl, **kwargs):
|
||||
|
||||
@contextlib.contextmanager
|
||||
def no_termios():
|
||||
saved = llnl.util.tty.log.termios
|
||||
llnl.util.tty.log.termios = None
|
||||
saved = log.termios
|
||||
log.termios = None
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
llnl.util.tty.log.termios = saved
|
||||
log.termios = saved
|
||||
|
||||
|
||||
@pytest.mark.skipif(not which("ps"), reason="requires ps utility")
|
||||
@pytest.mark.skipif(not termios, reason="requires termios support")
|
||||
@pytest.mark.parametrize('test_fn,termios_on_or_off', [
|
||||
# tests with termios
|
||||
(mock_shell_fg, nullcontext),
|
||||
(mock_shell_bg, nullcontext),
|
||||
(mock_shell_bg_fg, nullcontext),
|
||||
(mock_shell_fg_bg, nullcontext),
|
||||
(mock_shell_tstp_cont, nullcontext),
|
||||
(mock_shell_tstp_tstp_cont, nullcontext),
|
||||
(mock_shell_tstp_tstp_cont_cont, nullcontext),
|
||||
(mock_shell_fg, lang.nullcontext),
|
||||
(mock_shell_bg, lang.nullcontext),
|
||||
(mock_shell_bg_fg, lang.nullcontext),
|
||||
(mock_shell_fg_bg, lang.nullcontext),
|
||||
(mock_shell_tstp_cont, lang.nullcontext),
|
||||
(mock_shell_tstp_tstp_cont, lang.nullcontext),
|
||||
(mock_shell_tstp_tstp_cont_cont, lang.nullcontext),
|
||||
# tests without termios
|
||||
(mock_shell_fg_no_termios, no_termios),
|
||||
(mock_shell_bg, no_termios),
|
||||
@@ -342,7 +336,7 @@ def test_foreground_background(test_fn, termios_on_or_off, tmpdir):
|
||||
process stop and start.
|
||||
|
||||
"""
|
||||
shell = PseudoShell(test_fn, simple_logger)
|
||||
shell = pty.PseudoShell(test_fn, simple_logger)
|
||||
log_path = str(tmpdir.join("log.txt"))
|
||||
|
||||
# run the shell test
|
||||
@@ -375,7 +369,7 @@ def handler(signum, frame):
|
||||
v_lock = kwargs["v_lock"]
|
||||
|
||||
sys.stderr.write(os.getcwd() + "\n")
|
||||
with log_output(log_path) as logger:
|
||||
with log.log_output(log_path) as logger:
|
||||
with logger.force_echo():
|
||||
print("forced output")
|
||||
|
||||
@@ -446,7 +440,7 @@ def mock_shell_v_v_no_termios(proc, ctl, **kwargs):
|
||||
@pytest.mark.skipif(not which("ps"), reason="requires ps utility")
|
||||
@pytest.mark.skipif(not termios, reason="requires termios support")
|
||||
@pytest.mark.parametrize('test_fn,termios_on_or_off', [
|
||||
(mock_shell_v_v, nullcontext),
|
||||
(mock_shell_v_v, lang.nullcontext),
|
||||
(mock_shell_v_v_no_termios, no_termios),
|
||||
])
|
||||
def test_foreground_background_output(
|
||||
@@ -457,7 +451,7 @@ def test_foreground_background_output(
|
||||
|
||||
return
|
||||
|
||||
shell = PseudoShell(test_fn, synchronized_logger)
|
||||
shell = pty.PseudoShell(test_fn, synchronized_logger)
|
||||
log_path = str(tmpdir.join("log.txt"))
|
||||
|
||||
# Locks for synchronizing with minion
|
||||
@@ -485,8 +479,8 @@ def test_foreground_background_output(
|
||||
|
||||
# also get lines of log file
|
||||
assert os.path.exists(log_path)
|
||||
with open(log_path) as log:
|
||||
log = log.read().strip().split("\n")
|
||||
with open(log_path) as logfile:
|
||||
log_data = logfile.read().strip().split("\n")
|
||||
|
||||
# Controller and minion process coordinate with locks such that the
|
||||
# minion writes "off" when echo is off, and "on" when echo is on. The
|
||||
@@ -494,12 +488,12 @@ def test_foreground_background_output(
|
||||
# lines if the controller is slow. The important thing to observe
|
||||
# here is that we started seeing 'on' in the end.
|
||||
assert (
|
||||
['forced output', 'on'] == uniq(output) or
|
||||
['forced output', 'off', 'on'] == uniq(output)
|
||||
['forced output', 'on'] == lang.uniq(output) or
|
||||
['forced output', 'off', 'on'] == lang.uniq(output)
|
||||
)
|
||||
|
||||
# log should be off for a while, then on, then off
|
||||
assert (
|
||||
['forced output', 'off', 'on', 'off'] == uniq(log) and
|
||||
log.count("off") > 2 # ensure some "off" lines were omitted
|
||||
['forced output', 'off', 'on', 'off'] == lang.uniq(log_data) and
|
||||
log_data.count("off") > 2 # ensure some "off" lines were omitted
|
||||
)
|
||||
|
||||
@@ -974,7 +974,6 @@ def test_canonical_deptype(self):
|
||||
canonical_deptype(('foo',))
|
||||
|
||||
def test_invalid_literal_spec(self):
|
||||
|
||||
# Can't give type 'build' to a top-level spec
|
||||
with pytest.raises(spack.spec.SpecParseError):
|
||||
Spec.from_literal({'foo:build': None})
|
||||
@@ -982,3 +981,11 @@ def test_invalid_literal_spec(self):
|
||||
# Can't use more than one ':' separator
|
||||
with pytest.raises(KeyError):
|
||||
Spec.from_literal({'foo': {'bar:build:link': None}})
|
||||
|
||||
def test_spec_tree_respect_deptypes(self):
|
||||
# Version-test-root uses version-test-pkg as a build dependency
|
||||
s = Spec('version-test-root').concretized()
|
||||
out = s.tree(deptypes='all')
|
||||
assert 'version-test-pkg' in out
|
||||
out = s.tree(deptypes=('link', 'run'))
|
||||
assert 'version-test-pkg' not in out
|
||||
|
||||
@@ -87,6 +87,34 @@ def test_do_test(mock_packages, install_mockery, mock_test_stage):
|
||||
assert os.path.exists(data_filename)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('arguments,status,msg', [
|
||||
({}, 'SKIPPED', 'Skipped'),
|
||||
({'externals': True}, 'NO-TESTS', 'No tests'),
|
||||
])
|
||||
def test_test_external(mock_packages, install_mockery, mock_test_stage,
|
||||
arguments, status, msg):
|
||||
def ensure_results(filename, expected):
|
||||
assert os.path.exists(filename)
|
||||
with open(filename, 'r') as fd:
|
||||
lines = fd.readlines()
|
||||
have = False
|
||||
for line in lines:
|
||||
if expected in line:
|
||||
have = True
|
||||
break
|
||||
assert have
|
||||
|
||||
name = 'trivial-smoke-test'
|
||||
spec = spack.spec.Spec(name).concretized()
|
||||
spec.external_path = '/path/to/external/{0}'.format(name)
|
||||
|
||||
test_suite = spack.install_test.TestSuite([spec])
|
||||
test_suite(**arguments)
|
||||
|
||||
ensure_results(test_suite.results_file, status)
|
||||
ensure_results(test_suite.log_file_for_spec(spec), msg)
|
||||
|
||||
|
||||
def test_test_stage_caches(mock_packages, install_mockery, mock_test_stage):
|
||||
def ensure_current_cache_fail(test_suite):
|
||||
with pytest.raises(spack.install_test.TestSuiteSpecError):
|
||||
@@ -121,6 +149,23 @@ def test_test_spec_run_once(mock_packages, install_mockery, mock_test_stage):
|
||||
test_suite()
|
||||
|
||||
|
||||
def test_test_spec_verbose(mock_packages, install_mockery, mock_test_stage):
|
||||
spec = spack.spec.Spec('simple-standalone-test').concretized()
|
||||
test_suite = spack.install_test.TestSuite([spec])
|
||||
|
||||
test_suite(verbose=True)
|
||||
passed, msg = False, False
|
||||
with open(test_suite.log_file_for_spec(spec), 'r') as fd:
|
||||
for line in fd:
|
||||
if 'simple stand-alone test' in line:
|
||||
msg = True
|
||||
elif 'PASSED' in line:
|
||||
passed = True
|
||||
|
||||
assert msg
|
||||
assert passed
|
||||
|
||||
|
||||
def test_get_test_suite():
|
||||
assert not spack.install_test.get_test_suite('nothing')
|
||||
|
||||
|
||||
@@ -195,22 +195,42 @@ def test_from_list_url(mock_packages, config, spec, url, digest, _fetch_method):
|
||||
assert fetch_strategy.extra_options == {'timeout': 60}
|
||||
|
||||
|
||||
@pytest.mark.parametrize('_fetch_method', ['curl', 'urllib'])
|
||||
def test_from_list_url_unspecified(mock_packages, config, _fetch_method):
|
||||
"""Test non-specific URLs from the url-list-test package."""
|
||||
with spack.config.override('config:url_fetch_method', _fetch_method):
|
||||
pkg = spack.repo.get('url-list-test')
|
||||
@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
|
||||
@pytest.mark.parametrize("requested_version,tarball,digest", [
|
||||
# This version is in the web data path (test/data/web/4.html), but not in the
|
||||
# url-list-test package. We expect Spack to generate a URL with the new version.
|
||||
("4.5.0", "foo-4.5.0.tar.gz", None),
|
||||
# This version is in web data path and not in the package file, BUT the 2.0.0b2
|
||||
# version in the package file satisfies 2.0.0, so Spack will use the known version.
|
||||
# TODO: this is *probably* not what the user wants, but it's here as an example
|
||||
# TODO: for that reason. We can't express "exactly 2.0.0" right now, and we don't
|
||||
# TODO: have special cases that would make 2.0.0b2 less than 2.0.0. We should
|
||||
# TODO: probably revisit this in our versioning scheme.
|
||||
("2.0.0", "foo-2.0.0b2.tar.gz", "000000000000000000000000000200b2"),
|
||||
])
|
||||
def test_new_version_from_list_url(
|
||||
mock_packages, config, _fetch_method, requested_version, tarball, digest
|
||||
):
|
||||
if spack.config.get('config:concretizer') == 'original':
|
||||
pytest.skip(
|
||||
"Original concretizer doesn't resolve concrete versions to known ones"
|
||||
)
|
||||
|
||||
spec = Spec('url-list-test @2.0.0').concretized()
|
||||
"""Test non-specific URLs from the url-list-test package."""
|
||||
with spack.config.override("config:url_fetch_method", _fetch_method):
|
||||
pkg = spack.repo.get("url-list-test")
|
||||
|
||||
spec = Spec("url-list-test @%s" % requested_version).concretized()
|
||||
pkg = spack.repo.get(spec)
|
||||
fetch_strategy = fs.from_list_url(pkg)
|
||||
|
||||
assert isinstance(fetch_strategy, fs.URLFetchStrategy)
|
||||
assert os.path.basename(fetch_strategy.url) == 'foo-2.0.0.tar.gz'
|
||||
assert fetch_strategy.digest is None
|
||||
assert os.path.basename(fetch_strategy.url) == tarball
|
||||
assert fetch_strategy.digest == digest
|
||||
assert fetch_strategy.extra_options == {}
|
||||
pkg.fetch_options = {'timeout': 60}
|
||||
pkg.fetch_options = {"timeout": 60}
|
||||
fetch_strategy = fs.from_list_url(pkg)
|
||||
assert fetch_strategy.extra_options == {'timeout': 60}
|
||||
assert fetch_strategy.extra_options == {"timeout": 60}
|
||||
|
||||
|
||||
def test_nosource_from_list_url(mock_packages, config):
|
||||
|
||||
@@ -562,7 +562,7 @@ def _urlopen(req, *args, **kwargs):
|
||||
|
||||
|
||||
def find_versions_of_archive(
|
||||
archive_urls, list_url=None, list_depth=0, concurrency=32
|
||||
archive_urls, list_url=None, list_depth=0, concurrency=32, reference_package=None
|
||||
):
|
||||
"""Scrape web pages for new versions of a tarball.
|
||||
|
||||
@@ -577,6 +577,10 @@ def find_versions_of_archive(
|
||||
list_depth (int): max depth to follow links on list_url pages.
|
||||
Defaults to 0.
|
||||
concurrency (int): maximum number of concurrent requests
|
||||
reference_package (spack.package.Package or None): a spack package
|
||||
used as a reference for url detection. Uses the url_for_version
|
||||
method on the package to produce reference urls which, if found,
|
||||
are preferred.
|
||||
"""
|
||||
if not isinstance(archive_urls, (list, tuple)):
|
||||
archive_urls = [archive_urls]
|
||||
@@ -638,11 +642,26 @@ def find_versions_of_archive(
|
||||
# Walk through archive_url links first.
|
||||
# Any conflicting versions will be overwritten by the list_url links.
|
||||
versions = {}
|
||||
matched = set()
|
||||
for url in archive_urls + sorted(links):
|
||||
if any(re.search(r, url) for r in regexes):
|
||||
try:
|
||||
ver = spack.url.parse_version(url)
|
||||
if ver in matched:
|
||||
continue
|
||||
versions[ver] = url
|
||||
# prevent this version from getting overwritten
|
||||
if url in archive_urls:
|
||||
matched.add(ver)
|
||||
elif reference_package is not None:
|
||||
if url == reference_package.url_for_version(ver):
|
||||
matched.add(ver)
|
||||
else:
|
||||
extrapolated_urls = [
|
||||
spack.url.substitute_version(u, ver) for u in archive_urls
|
||||
]
|
||||
if url in extrapolated_urls:
|
||||
matched.add(ver)
|
||||
except spack.url.UndetectableVersionError:
|
||||
continue
|
||||
|
||||
|
||||
@@ -403,15 +403,32 @@ def satisfies(self, other):
|
||||
"""
|
||||
super_sat = super(MultiValuedVariant, self).satisfies(other)
|
||||
|
||||
if not super_sat:
|
||||
return False
|
||||
|
||||
if '*' in other or '*' in self:
|
||||
return True
|
||||
|
||||
# allow prefix find on patches
|
||||
if self.name == 'patches':
|
||||
return all(any(w.startswith(v) for w in self.value) for v in other.value)
|
||||
|
||||
# Otherwise we want all the values in `other` to be also in `self`
|
||||
return super_sat and (all(v in self.value for v in other.value) or
|
||||
'*' in other or '*' in self)
|
||||
return all(v in self.value for v in other.value)
|
||||
|
||||
def append(self, value):
|
||||
"""Add another value to this multi-valued variant."""
|
||||
self._value = tuple(sorted((value,) + self._value))
|
||||
self._original_value = ",".join(self._value)
|
||||
|
||||
def __str__(self):
|
||||
# Special-case patches to not print the full 64 character hashes
|
||||
if self.name == 'patches':
|
||||
values_str = ','.join(x[:7] for x in self.value)
|
||||
else:
|
||||
values_str = ','.join(str(x) for x in self.value)
|
||||
return '{0}={1}'.format(self.name, values_str)
|
||||
|
||||
|
||||
class SingleValuedVariant(AbstractVariant):
|
||||
"""A variant that can hold multiple values, but one at a time."""
|
||||
|
||||
@@ -59,7 +59,7 @@
|
||||
"(?:[+][0-9A-Za-z-]+)?)")
|
||||
|
||||
# Infinity-like versions. The order in the list implies the comparison rules
|
||||
infinity_versions = ['develop', 'main', 'master', 'head', 'trunk']
|
||||
infinity_versions = ['develop', 'main', 'master', 'head', 'trunk', 'stable']
|
||||
|
||||
iv_min_len = min(len(s) for s in infinity_versions)
|
||||
|
||||
@@ -1090,14 +1090,16 @@ def lookup_commit(self, commit):
|
||||
# We may later design a custom error to re-raise
|
||||
self.fetcher.git('cat-file', '-e', '%s^{commit}' % commit)
|
||||
|
||||
|
||||
# Lookup of commits to spack versions
|
||||
commit_to_version = {}
|
||||
|
||||
### Associate tags with versions
|
||||
# List tags (refs) by date, so last reference of a tag is newest
|
||||
tag_info = self.fetcher.git(
|
||||
"for-each-ref", "--sort=creatordate", "--format",
|
||||
"%(objectname) %(refname)", "refs/tags", output=str).split('\n')
|
||||
|
||||
# Lookup of commits to spack versions
|
||||
commit_to_version = {}
|
||||
|
||||
for entry in tag_info:
|
||||
if not entry:
|
||||
continue
|
||||
@@ -1116,6 +1118,35 @@ def lookup_commit(self, commit):
|
||||
semver = match.groupdict()['semver']
|
||||
commit_to_version[tag_commit] = semver
|
||||
|
||||
### Associate commits on the tip of branches with versions
|
||||
branch_info = self.fetcher.git(
|
||||
'for-each-ref', '--formaat', '%(objectname) %(refname)', 'refs/branches',
|
||||
output=str).split('\n')
|
||||
|
||||
for entry in branch_info:
|
||||
if not entry:
|
||||
continue
|
||||
branch_commit, branch = entry.split()
|
||||
branch = branch.replace('refs/branches/', '', 1)
|
||||
|
||||
# For each branch, try to match to a version
|
||||
for v, v_args in self.pkg.versions.items():
|
||||
# If a known version has this branch, we've found it
|
||||
v_branch = v_args.get('branch', None)
|
||||
if v_branch:
|
||||
commit_to_version[branch_commit] = v.string
|
||||
break
|
||||
# If the branch name matches a known version, found it
|
||||
if v.string == branch or 'v' + v.string == branch:
|
||||
commit_to_version[branch_commit] = v.string
|
||||
break
|
||||
else:
|
||||
# If the branch matches semver, use that
|
||||
match = SEMVER_REGEX.match(branch)
|
||||
if match:
|
||||
semver = match.groupdict()['semver']
|
||||
commit_to_version[tag_commit] = semver
|
||||
|
||||
ancestor_commits = []
|
||||
for tag_commit in commit_to_version:
|
||||
self.fetcher.git(
|
||||
|
||||
@@ -49,6 +49,8 @@ spack:
|
||||
variants: +termlib
|
||||
openblas:
|
||||
variants: threads=openmp
|
||||
openturns:
|
||||
version: [1.18]
|
||||
trilinos:
|
||||
variants: +amesos +amesos2 +anasazi +aztec +belos +boost +epetra +epetraext +ifpack +ifpack2 +intrepid +intrepid2 +isorropia +kokkos +ml +minitensor +muelu +nox +piro +phalanx +rol +rythmos +sacado +stk +shards +shylu +stokhos +stratimikos +teko +tempus +tpetra +trilinoscouplings +zoltan +zoltan2 +superlu-dist gotype=long_long
|
||||
xz:
|
||||
|
||||
@@ -200,3 +200,6 @@ despacktivate
|
||||
echo "Correct error exit codes for activate and deactivate"
|
||||
fails spack env activate nonexisiting_environment
|
||||
fails spack env deactivate
|
||||
|
||||
echo "Correct error exit codes for unit-test when it fails"
|
||||
fails spack unit-test fail
|
||||
|
||||
@@ -335,7 +335,7 @@ _spacktivate() {
|
||||
_spack() {
|
||||
if $list_options
|
||||
then
|
||||
SPACK_COMPREPLY="-h --help -H --all-help --color -c --config -C --config-scope -d --debug --show-cores --timestamp --pdb -e --env -D --env-dir -E --no-env --use-env-repo -k --insecure -l --enable-locks -L --disable-locks -m --mock -p --profile --sorted-profile --lines -v --verbose --stacktrace -V --version --print-shell-vars"
|
||||
SPACK_COMPREPLY="-h --help -H --all-help --color -c --config -C --config-scope -d --debug --show-cores --timestamp --pdb -e --env -D --env-dir -E --no-env --use-env-repo -k --insecure -l --enable-locks -L --disable-locks -m --mock -b --bootstrap -p --profile --sorted-profile --lines -v --verbose --stacktrace -V --version --print-shell-vars"
|
||||
else
|
||||
SPACK_COMPREPLY="activate add analyze arch audit blame bootstrap build-env buildcache cd checksum ci clean clone commands compiler compilers concretize config containerize create deactivate debug dependencies dependents deprecate dev-build develop diff docs edit env extensions external fetch find gc gpg graph help info install license list load location log-parse maintainers mark mirror module monitor patch pkg providers pydoc python reindex remove rm repo resource restage solve spec stage style tags test test-env tutorial undevelop uninstall unit-test unload url verify versions view"
|
||||
fi
|
||||
@@ -709,7 +709,7 @@ _spack_compilers() {
|
||||
}
|
||||
|
||||
_spack_concretize() {
|
||||
SPACK_COMPREPLY="-h --help --reuse -f --force --test"
|
||||
SPACK_COMPREPLY="-h --help -f --force --test -U --fresh --reuse"
|
||||
}
|
||||
|
||||
_spack_config() {
|
||||
@@ -870,7 +870,7 @@ _spack_deprecate() {
|
||||
_spack_dev_build() {
|
||||
if $list_options
|
||||
then
|
||||
SPACK_COMPREPLY="-h --help -j --jobs --reuse -d --source-path -i --ignore-dependencies -n --no-checksum --deprecated --keep-prefix --skip-patch -q --quiet --drop-in --test -b --before -u --until --clean --dirty"
|
||||
SPACK_COMPREPLY="-h --help -j --jobs -d --source-path -i --ignore-dependencies -n --no-checksum --deprecated --keep-prefix --skip-patch -q --quiet --drop-in --test -b --before -u --until --clean --dirty -U --fresh --reuse"
|
||||
else
|
||||
_all_packages
|
||||
fi
|
||||
@@ -1024,7 +1024,7 @@ _spack_external() {
|
||||
_spack_external_find() {
|
||||
if $list_options
|
||||
then
|
||||
SPACK_COMPREPLY="-h --help --not-buildable --scope -t --tag"
|
||||
SPACK_COMPREPLY="-h --help --not-buildable --scope --all -t --tag"
|
||||
else
|
||||
_all_packages
|
||||
fi
|
||||
@@ -1166,7 +1166,7 @@ _spack_info() {
|
||||
_spack_install() {
|
||||
if $list_options
|
||||
then
|
||||
SPACK_COMPREPLY="-h --help --only -u --until -j --jobs --reuse --overwrite --fail-fast --keep-prefix --keep-stage --dont-restage --use-cache --no-cache --cache-only --monitor --monitor-save-local --monitor-tags --monitor-keep-going --monitor-host --monitor-prefix --include-build-deps --no-check-signature --require-full-hash-match --show-log-on-error --source -n --no-checksum --deprecated -v --verbose --fake --only-concrete --no-add -f --file --clean --dirty --test --run-tests --log-format --log-file --help-cdash --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp -y --yes-to-all"
|
||||
SPACK_COMPREPLY="-h --help --only -u --until -j --jobs --overwrite --fail-fast --keep-prefix --keep-stage --dont-restage --use-cache --no-cache --cache-only --monitor --monitor-save-local --monitor-tags --monitor-keep-going --monitor-host --monitor-prefix --include-build-deps --no-check-signature --require-full-hash-match --show-log-on-error --source -n --no-checksum --deprecated -v --verbose --fake --only-concrete --no-add -f --file --clean --dirty --test --run-tests --log-format --log-file --help-cdash --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp -y --yes-to-all -U --fresh --reuse"
|
||||
else
|
||||
_all_packages
|
||||
fi
|
||||
@@ -1652,7 +1652,7 @@ _spack_restage() {
|
||||
_spack_solve() {
|
||||
if $list_options
|
||||
then
|
||||
SPACK_COMPREPLY="-h --help --show --models -l --long -L --very-long -I --install-status --reuse -y --yaml -j --json -c --cover -N --namespaces -t --types --timers --stats"
|
||||
SPACK_COMPREPLY="-h --help --show --models -l --long -L --very-long -I --install-status -y --yaml -j --json -c --cover -N --namespaces -t --types --timers --stats -U --fresh --reuse"
|
||||
else
|
||||
_all_packages
|
||||
fi
|
||||
@@ -1661,7 +1661,7 @@ _spack_solve() {
|
||||
_spack_spec() {
|
||||
if $list_options
|
||||
then
|
||||
SPACK_COMPREPLY="-h --help -l --long -L --very-long -I --install-status --reuse -y --yaml -j --json -c --cover -N --namespaces --hash-type -t --types"
|
||||
SPACK_COMPREPLY="-h --help -l --long -L --very-long -I --install-status -y --yaml -j --json -c --cover -N --namespaces --hash-type -t --types -U --fresh --reuse"
|
||||
else
|
||||
_all_packages
|
||||
fi
|
||||
@@ -1706,7 +1706,7 @@ _spack_test() {
|
||||
_spack_test_run() {
|
||||
if $list_options
|
||||
then
|
||||
SPACK_COMPREPLY="-h --help --alias --fail-fast --fail-first --keep-stage --log-format --log-file --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp --help-cdash --clean --dirty"
|
||||
SPACK_COMPREPLY="-h --help --alias --fail-fast --fail-first --externals --keep-stage --log-format --log-file --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp --help-cdash --clean --dirty"
|
||||
else
|
||||
_installed_packages
|
||||
fi
|
||||
|
||||
@@ -0,0 +1,20 @@
|
||||
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
from spack import *
|
||||
|
||||
|
||||
class SimpleStandaloneTest(Package):
|
||||
"""This package has a simple stand-alone test features."""
|
||||
homepage = "http://www.example.com/simple_test"
|
||||
url = "http://www.unit-test-should-replace-this-url/simple_test-1.0.tar.gz"
|
||||
|
||||
version('1.0', '0123456789abcdef0123456789abcdef')
|
||||
|
||||
def test(self):
|
||||
msg = 'simple stand-alone test'
|
||||
self.run_test('echo', [msg],
|
||||
expected=[msg],
|
||||
purpose='test: running {0}'.format(msg))
|
||||
@@ -350,3 +350,37 @@ diff -uprN spack-src.org/src/98_main/abinit.F90 spack-src/src/98_main/abinit.F90
|
||||
codename='ABINIT'//repeat(' ',18)
|
||||
call herald(codename,abinit_version,ab_out)
|
||||
call herald(codename,abinit_version,std_out)
|
||||
diff -ru spack-src.org/config/specs/corelibs.conf spack-src/config/specs/corelibs.conf
|
||||
--- spack-src.org/config/specs/corelibs.conf 2021-10-14 11:14:26.000000000 +0900
|
||||
+++ spack-src/config/specs/corelibs.conf 2021-10-14 11:19:29.000000000 +0900
|
||||
@@ -242,7 +242,7 @@
|
||||
dependencies = gpu
|
||||
|
||||
[68_rsprc]
|
||||
-dependencies = bigdft
|
||||
+dependencies = bigdft netcdf
|
||||
|
||||
[69_wfdesc]
|
||||
abirules = no
|
||||
diff -ru spack-src.org/src/68_rsprc/Makefile.am spack-src/src/68_rsprc/Makefile.am
|
||||
--- spack-src.org/src/68_rsprc/Makefile.am 2021-10-14 11:15:36.000000000 +0900
|
||||
+++ spack-src/src/68_rsprc/Makefile.am 2021-10-14 11:20:13.000000000 +0900
|
||||
@@ -36,6 +36,7 @@
|
||||
@src_53_ffts_incs@ \
|
||||
@src_incs_incs@ \
|
||||
@lib_bigdft_incs@ \
|
||||
+ @lib_netcdf_incs@ \
|
||||
@fallbacks_incs@ \
|
||||
@abi_extra_incs@ \
|
||||
@fc_mod_incs@
|
||||
diff -u spack-src/src/68_rsprc/Makefile.in.org spack-src/src/68_rsprc/Makefile.am
|
||||
--- spack-src/src/68_rsprc/Makefile.in.org 2021-11-05 14:40:10.000000000 +0900
|
||||
+++ spack-src/src/68_rsprc/Makefile.in 2021-11-05 14:43:22.000000000 +0900
|
||||
@@ -845,6 +845,7 @@
|
||||
@src_53_ffts_incs@ \
|
||||
@src_incs_incs@ \
|
||||
@lib_bigdft_incs@ \
|
||||
+ @lib_netcdf_incs@ \
|
||||
@fallbacks_incs@ \
|
||||
@abi_extra_incs@ \
|
||||
@fc_mod_incs@
|
||||
|
||||
569
var/spack/repos/builtin/packages/abinit/fix_for_fujitsu.v9.patch
Normal file
569
var/spack/repos/builtin/packages/abinit/fix_for_fujitsu.v9.patch
Normal file
@@ -0,0 +1,569 @@
|
||||
diff --git a/configure b/configure
|
||||
--- a/configure 2021-04-30 18:25:20.000000000 +0900
|
||||
+++ b/configure 2021-10-14 16:05:13.000000000 +0900
|
||||
@@ -13019,6 +13019,30 @@
|
||||
|
||||
# Do some sanity checking of the arguments
|
||||
|
||||
+ cc_info_string=`${CC} --version 2>&1 | grep 'FCC'`
|
||||
+ abi_result=$cc_info_string
|
||||
+ if test "${abi_result}" = ""; then
|
||||
+ abi_result="no"
|
||||
+ cc_info_string=""
|
||||
+ abi_cc_vendor="unknown"
|
||||
+ abi_cc_version="unknown"
|
||||
+ else
|
||||
+
|
||||
+$as_echo "#define CC_FUJITSU 1" >>confdefs.h
|
||||
+
|
||||
+ abi_cc_vendor="fujitsu"
|
||||
+ abi_cc_version=`echo "${abi_result}" | sed -e 's/[^0-9]*\([0-9]\)/\1/' -e 's/ .*$//'`
|
||||
+ if test "${abi_cc_version}" = "${abi_result}"; then
|
||||
+ abi_cc_version="unknown"
|
||||
+ fi
|
||||
+ abi_result="yes"
|
||||
+ fi
|
||||
+
|
||||
+ fi
|
||||
+ if test "${abi_cc_vendor}" = "unknown"; then
|
||||
+
|
||||
+ # Do some sanity checking of the arguments
|
||||
+
|
||||
cc_info_string=`${CC} --version 2>/dev/null | head -n 1`
|
||||
abi_result=`echo "${cc_info_string}" | grep '[Cc]lang'`
|
||||
if test "${abi_result}" = ""; then
|
||||
@@ -13891,6 +13915,30 @@
|
||||
|
||||
# Do some sanity checking of the arguments
|
||||
|
||||
+ cxx_info_string=`${CXX} --version 2>&1|grep 'FCC'`
|
||||
+ abi_result=$cxx_info_string
|
||||
+ if test "${abi_result}" = ""; then
|
||||
+ abi_result="no"
|
||||
+ cxx_info_string=""
|
||||
+ abi_cxx_vendor="unknown"
|
||||
+ abi_cxx_version="unknown"
|
||||
+ else
|
||||
+
|
||||
+$as_echo "#define CXX_FUJITSU 1" >>confdefs.h
|
||||
+
|
||||
+ abi_cxx_vendor="fujitsu"
|
||||
+ abi_cxx_version=`echo "${abi_result}" | sed -e 's/[^0-9]*\([0-9]\)/\1/' -e 's/ .*$//'`
|
||||
+ if test "${abi_cxx_version}" = "${abi_result}"; then
|
||||
+ abi_cxx_version="unknown"
|
||||
+ fi
|
||||
+ abi_result="yes"
|
||||
+ fi
|
||||
+
|
||||
+ fi
|
||||
+ if test "${abi_cxx_vendor}" = "unknown"; then
|
||||
+
|
||||
+ # Do some sanity checking of the arguments
|
||||
+
|
||||
cxx_info_string=`${CXX} --version 2>/dev/null | head -n 1`
|
||||
abi_result=`echo "${cxx_info_string}" | grep '[Cc]lang'`
|
||||
if test "${abi_result}" = ""; then
|
||||
@@ -14697,6 +14745,32 @@
|
||||
fi
|
||||
echo "${fc_info_string}" >>"${tmp_fc_info_file}"
|
||||
|
||||
+ if test "${abi_fc_vendor}" = "unknown"; then
|
||||
+
|
||||
+ # Do some sanity checking of the arguments
|
||||
+
|
||||
+ fc_info_string=`${FC} --version 2>&1 | head -n 1 `
|
||||
+ abi_result=`echo "${fc_info_string}" | grep 'FRT'`
|
||||
+ if test "${abi_result}" = ""; then
|
||||
+ abi_result="no"
|
||||
+ fc_info_string=""
|
||||
+ abi_fc_vendor="unknown"
|
||||
+ abi_fc_version="unknown"
|
||||
+ else
|
||||
+
|
||||
+$as_echo "#define FC_FUJITSU 1" >>confdefs.h
|
||||
+
|
||||
+ abi_fc_vendor="fujitsu"
|
||||
+ abi_fc_version=`echo "${abi_result}" | sed -e 's/[^0-9]*\([0-9]\)/\1/' -e 's/ .*$//'`
|
||||
+ if test "${abi_fc_version}" = "${abi_result}"; then
|
||||
+ abi_fc_version="unknown"
|
||||
+ fi
|
||||
+ abi_result="yes"
|
||||
+ fi
|
||||
+
|
||||
+ fi
|
||||
+ echo "${fc_info_string}" >>"${tmp_fc_info_file}"
|
||||
+
|
||||
if test "${abi_fc_vendor}" = "unknown"; then
|
||||
|
||||
# Do some sanity checking of the arguments
|
||||
@@ -15049,6 +15123,7 @@
|
||||
# gfortran 4.3 outputs lines setting COLLECT_GCC_OPTIONS, COMPILER_PATH,
|
||||
# LIBRARY_PATH; skip all such settings.
|
||||
ac_fc_v_output=`eval $ac_link 5>&1 2>&1 |
|
||||
+ sed -r "s/(\-L)(\/[^ ]+)+(\/bin\/\.\.\/lib64\/nofjobj)//g" |
|
||||
sed '/^Driving:/d; /^Configured with:/d;
|
||||
'"/^[_$as_cr_Letters][_$as_cr_alnum]*=/d"`
|
||||
$as_echo "$ac_fc_v_output" >&5
|
||||
@@ -15157,6 +15232,7 @@
|
||||
# gfortran 4.3 outputs lines setting COLLECT_GCC_OPTIONS, COMPILER_PATH,
|
||||
# LIBRARY_PATH; skip all such settings.
|
||||
ac_fc_v_output=`eval $ac_link 5>&1 2>&1 |
|
||||
+ sed -r "s/(\-L)(\/[^ ]+)+(\/bin\/\.\.\/lib64\/nofjobj)//g" |
|
||||
sed '/^Driving:/d; /^Configured with:/d;
|
||||
'"/^[_$as_cr_Letters][_$as_cr_alnum]*=/d"`
|
||||
$as_echo "$ac_fc_v_output" >&5
|
||||
@@ -15982,6 +16058,13 @@
|
||||
openmp='-qopenmp'
|
||||
CFLAGS_PIC='-fPIC'
|
||||
;;
|
||||
+ fujitsu)
|
||||
+ abi_cc_vendor_hnt="fujitsu"
|
||||
+ abi_cc_version_hnt="default"
|
||||
+ abi_sys_spec_hnt="default"
|
||||
+ CFLAGS_PIC='-fPIC'
|
||||
+ CFLAGS_HINTS='-fopenmp'
|
||||
+ ;;
|
||||
llvm)
|
||||
abi_cc_vendor_hnt="llvm"
|
||||
abi_cc_version_hnt="default"
|
||||
@@ -16050,6 +16133,12 @@
|
||||
CXX_LDFLAGS_HINTS='-static-libgcc -static-intel'
|
||||
CXXFLAGS_PIC='-fPIC'
|
||||
;;
|
||||
+ fujitsu)
|
||||
+ abi_cxx_vendor_hnt="fujitsu"
|
||||
+ abi_cxx_version_hnt="default"
|
||||
+ abi_sys_spec_hnt="default"
|
||||
+ CXXFLAGS_PIC='-fPIC'
|
||||
+ ;;
|
||||
llvm)
|
||||
abi_cxx_vendor_hnt="llvm"
|
||||
abi_cxx_version_hnt="default"
|
||||
@@ -16222,6 +16311,27 @@
|
||||
FCFLAGS_HINTS='-Mextend'
|
||||
FC_LDFLAGS_HINTS=''
|
||||
;;
|
||||
+ fujitsu)
|
||||
+ abi_fc_vendor_hnt="fujitsu"
|
||||
+ abi_sys_spec_hnt="default"
|
||||
+ FCFLAGS_FIXEDFORM='-Fixed -X7'
|
||||
+ FCFLAGS_FREEFORM='-Free -X9'
|
||||
+ FCFLAGS_MODDIR='-M ../mods'
|
||||
+ FCFLAGS_PIC='-KPIC'
|
||||
+ FC_LDFLAGS_HINTS=''
|
||||
+ case "${abi_fc_version}" in
|
||||
+ 4.*)
|
||||
+ abi_fc_version_hnt="4.x"
|
||||
+ FCFLAGS_OPENMP='-Kopenmp'
|
||||
+ FCFLAGS_HINTS=''
|
||||
+ ;;
|
||||
+ *)
|
||||
+ abi_fc_version_hnt="default"
|
||||
+ FCFLAGS_HINTS='-Am -Ee -Ep'
|
||||
+ FCFLAGS_OPENMP='--openmp'
|
||||
+ ;;
|
||||
+ esac
|
||||
+ ;;
|
||||
esac # [case: abi_fc_vendor, indent: 0, item: True]
|
||||
|
||||
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: ${abi_fc_vendor_hnt}/${abi_fc_version_hnt}/${abi_sys_spec_hnt}" >&5
|
||||
@@ -16878,6 +16988,25 @@
|
||||
;;
|
||||
esac # [case: abi_optim_flavor, indent: 2, item: True]
|
||||
;;
|
||||
+ fujitsu)
|
||||
+ abi_cc_vendor_opt="fujitsu"
|
||||
+ abi_cc_version_opt="default"
|
||||
+ abi_cpu_spec_opt="default"
|
||||
+ case "${abi_optim_flavor}" in
|
||||
+ safe)
|
||||
+ abi_optim_flavor_opt="safe"
|
||||
+ CFLAGS_OPTIM="-O2"
|
||||
+ ;;
|
||||
+ standard)
|
||||
+ abi_optim_flavor_opt="standard"
|
||||
+ CFLAGS_OPTIM="-O2"
|
||||
+ ;;
|
||||
+ aggressive)
|
||||
+ abi_optim_flavor_opt="aggressive"
|
||||
+ CFLAGS_OPTIM="-Kfast"
|
||||
+ ;;
|
||||
+ esac # [case: abi_optim_flavor, indent: 2, item: True]
|
||||
+ ;;
|
||||
llvm)
|
||||
abi_cc_vendor_opt="llvm"
|
||||
abi_cc_version_opt="default"
|
||||
@@ -17031,6 +17160,25 @@
|
||||
;;
|
||||
esac # [case: abi_optim_flavor, indent: 2, item: True]
|
||||
;;
|
||||
+ fujitsu)
|
||||
+ abi_cxx_vendor_opt="fujitsu"
|
||||
+ abi_cxx_version_opt="default"
|
||||
+ abi_cpu_spec_opt="default"
|
||||
+ case "${abi_optim_flavor}" in
|
||||
+ safe)
|
||||
+ abi_optim_flavor_opt="safe"
|
||||
+ CXXFLAGS_OPTIM="-O2"
|
||||
+ ;;
|
||||
+ standard)
|
||||
+ abi_optim_flavor_opt="standard"
|
||||
+ CXXFLAGS_OPTIM="-O2"
|
||||
+ ;;
|
||||
+ aggressive)
|
||||
+ abi_optim_flavor_opt="aggressive"
|
||||
+ CXXFLAGS_OPTIM="-Kfast"
|
||||
+ ;;
|
||||
+ esac # [case: abi_optim_flavor, indent: 2, item: True]
|
||||
+ ;;
|
||||
llvm)
|
||||
abi_cxx_vendor_opt="llvm"
|
||||
abi_cxx_version_opt="default"
|
||||
@@ -17302,6 +17450,46 @@
|
||||
;;
|
||||
esac # [case: abi_optim_flavor, indent: 2, item: True]
|
||||
;;
|
||||
+ fujitsu)
|
||||
+ abi_fc_vendor_opt="fujitsu"
|
||||
+ abi_cpu_spec_opt="default"
|
||||
+ case "${abi_fc_version}" in
|
||||
+ 4.*)
|
||||
+ abi_fc_version_opt="4.X"
|
||||
+ case "${abi_optim_flavor}" in
|
||||
+ safe)
|
||||
+ abi_optim_flavor_opt="safe"
|
||||
+ FCFLAGS_OPTIM="-O2 -Koptmsg=2 -Nlst=t"
|
||||
+ ;;
|
||||
+ standard)
|
||||
+ abi_optim_flavor_opt="standard"
|
||||
+ FCFLAGS_OPTIM="-O2 -Koptmsg=2 -Nlst=t"
|
||||
+ ;;
|
||||
+ aggressive)
|
||||
+ abi_optim_flavor_opt="aggressive"
|
||||
+ FCFLAGS_OPTIM="-Kfast -Koptmsg=2 -Nlst=t"
|
||||
+ ;;
|
||||
+ esac
|
||||
+ ;;
|
||||
+ *)
|
||||
+ abi_fc_version_opt="default"
|
||||
+ case "${abi_optim_flavor}" in
|
||||
+ safe)
|
||||
+ abi_optim_flavor_opt="safe"
|
||||
+ FCFLAGS_OPTIM="-Of -X9 -Ps -Wv,-md"
|
||||
+ ;;
|
||||
+ standard)
|
||||
+ abi_optim_flavor_opt="standard"
|
||||
+ FCFLAGS_OPTIM="-Of -X9 -Ps -Wv,-md"
|
||||
+ ;;
|
||||
+ aggressive)
|
||||
+ abi_optim_flavor_opt="aggressive"
|
||||
+ FCFLAGS_OPTIM="-Of -X9 -Ps -Wv,-md"
|
||||
+ ;;
|
||||
+ esac
|
||||
+ ;;
|
||||
+ esac
|
||||
+ ;;
|
||||
esac # [case: abi_fc_vendor, indent: 0, item: True]
|
||||
|
||||
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: ${abi_fc_vendor_opt}/${abi_fc_version_opt}/${abi_cpu_spec_opt}" >&5
|
||||
index 3174381..ac7118f 100644
|
||||
diff --git a/shared/common/src/10_defs/defs_basis.F90 b/shared/common/src/10_defs/defs_basis.F90
|
||||
--- a/shared/common/src/10_defs/defs_basis.F90
|
||||
+++ b/shared/common/src/10_defs/defs_basis.F90
|
||||
@@ -80,7 +80,7 @@ module defs_basis
|
||||
! do not trim input strings and use character(len=500) :: msg
|
||||
|
||||
integer, parameter :: fnlen=264 ! maximum length of file name variables
|
||||
- integer, parameter :: strlen=2000000 ! maximum length of input string
|
||||
+ integer, parameter :: strlen=65000 ! maximum length of input string
|
||||
|
||||
! The input file used to run the code, set by parsefile.
|
||||
! It will be added to the netcdf files in ntck_open_create
|
||||
diff --git a/shared/common/src/14_hidewrite/m_cppopts_dumper.F90 b/shared/common/src/14_hidewrite/m_cppopts_dumper.F90
|
||||
index 9bf19f4..79d94a9 100644
|
||||
--- a/shared/common/src/14_hidewrite/m_cppopts_dumper.F90
|
||||
+++ b/shared/common/src/14_hidewrite/m_cppopts_dumper.F90
|
||||
@@ -89,6 +88,16 @@ subroutine dump_cpp_options(my_unit)
|
||||
msg = ""
|
||||
end if
|
||||
#endif
|
||||
+#if defined CC_FUJITSU
|
||||
+ write(tmp,"(1x,a25)") "CC_FUJITSU"
|
||||
+ msg = trim(msg)//trim(tmp)
|
||||
+ msg_index = msg_index + 1
|
||||
+ if ( mod(msg_index,3) == 0 ) then
|
||||
+ msg = trim(msg)//ch10
|
||||
+ write(my_unit,'(a)') msg
|
||||
+ msg = ""
|
||||
+ end if
|
||||
+#endif
|
||||
#if defined CC_GNU
|
||||
write(tmp,"(1x,a25)") "CC_GNU"
|
||||
msg = trim(msg)//trim(tmp)
|
||||
@@ -149,6 +158,16 @@ subroutine dump_cpp_options(my_unit)
|
||||
msg = ""
|
||||
end if
|
||||
#endif
|
||||
+#if defined CXX_FUJITSU
|
||||
+ write(tmp,"(1x,a25)") "CXX_FUJITSU"
|
||||
+ msg = trim(msg)//trim(tmp)
|
||||
+ msg_index = msg_index + 1
|
||||
+ if ( mod(msg_index,3) == 0 ) then
|
||||
+ msg = trim(msg)//ch10
|
||||
+ write(my_unit,'(a)') msg
|
||||
+ msg = ""
|
||||
+ end if
|
||||
+#endif
|
||||
#if defined CXX_GNU
|
||||
write(tmp,"(1x,a25)") "CXX_GNU"
|
||||
msg = trim(msg)//trim(tmp)
|
||||
@@ -259,6 +278,16 @@ subroutine dump_cpp_options(my_unit)
|
||||
msg = ""
|
||||
end if
|
||||
#endif
|
||||
+#if defined FC_FUJITSU
|
||||
+ write(tmp,"(1x,a25)") "FC_FUJITSU"
|
||||
+ msg = trim(msg)//trim(tmp)
|
||||
+ msg_index = msg_index + 1
|
||||
+ if ( mod(msg_index,3) == 0 ) then
|
||||
+ msg = trim(msg)//ch10
|
||||
+ write(my_unit,'(a)') msg
|
||||
+ msg = ""
|
||||
+ end if
|
||||
+#endif
|
||||
#if defined FC_GNU
|
||||
write(tmp,"(1x,a25)") "FC_GNU"
|
||||
msg = trim(msg)//trim(tmp)
|
||||
diff --git a/src/43_wvl_wrappers/m_wvl_denspot.F90 b/src/43_wvl_wrappers/m_wvl_denspot.F90
|
||||
index 18f8785..e06ac4a 100644
|
||||
--- a/src/43_wvl_wrappers/m_wvl_denspot.F90
|
||||
+++ b/src/43_wvl_wrappers/m_wvl_denspot.F90
|
||||
@@ -88,7 +88,7 @@ subroutine wvl_denspot_set(den,gth_params,ixc,natom,nsppol,rprimd,wvl,&
|
||||
real(dp), intent(in) :: rprimd(3, 3)
|
||||
real(dp), intent(in) :: wvl_frmult,wvl_crmult
|
||||
real(dp), intent(inout) :: xred(3,natom)
|
||||
- type(wvl_denspot_type), intent(out) :: den
|
||||
+ type(wvl_denspot_type), intent(inout) :: den
|
||||
type(wvl_internal_type),intent(in) :: wvl
|
||||
type(pseudopotential_gth_type),intent(in)::gth_params
|
||||
|
||||
diff --git a/src/43_wvl_wrappers/m_wvl_wfs.F90 b/src/43_wvl_wrappers/m_wvl_wfs.F90
|
||||
index 4643fff..a1307da 100644
|
||||
--- a/src/43_wvl_wrappers/m_wvl_wfs.F90
|
||||
+++ b/src/43_wvl_wrappers/m_wvl_wfs.F90
|
||||
@@ -96,7 +96,7 @@ subroutine wvl_wfs_set(alphadiis, spinmagntarget, kpt, me, natom, nband, nkpt, n
|
||||
integer, intent(in) :: natom, nkpt, nsppol, nspinor, nband, nwfshist,me,nproc
|
||||
real(dp), intent(in) :: spinmagntarget, wvl_crmult, wvl_frmult, alphadiis
|
||||
type(pseudopotential_type),intent(in) :: psps
|
||||
- type(wvl_wf_type),intent(out) :: wfs
|
||||
+ type(wvl_wf_type),intent(inout) :: wfs
|
||||
type(wvl_internal_type), intent(in) :: wvl
|
||||
!arrays
|
||||
real(dp), intent(in) :: kpt(3,nkpt)
|
||||
diff --git a/src/52_fft_mpi_noabirule/m_fftw3.F90 b/src/52_fft_mpi_noabirule/m_fftw3.F90
|
||||
index bdeb0ce..8d4e0e3 100644
|
||||
--- a/src/52_fft_mpi_noabirule/m_fftw3.F90
|
||||
+++ b/src/52_fft_mpi_noabirule/m_fftw3.F90
|
||||
@@ -4191,7 +4191,7 @@ subroutine fftw3_mpiback_wf(cplexwf,ndat,n1,n2,n3,nd1,nd2,nd3proc,&
|
||||
integer,intent(in) :: cplexwf,ndat,n1,n2,n3,nd1,nd2,nd3proc
|
||||
integer,intent(in) :: max1,max2,max3,m1,m2,m3,md1,md2proc,md3,comm_fft
|
||||
real(dp),intent(in) :: zf(2,md1,md3,md2proc,ndat)
|
||||
- real(dp),intent(out) :: zr(2,nd1,nd2,nd3proc,ndat)
|
||||
+ real(dp),intent(inout) :: zr(2,nd1,nd2,nd3proc,ndat)
|
||||
|
||||
#ifdef HAVE_FFTW3
|
||||
!Local variables-------------------------------
|
||||
@@ -4548,7 +4548,7 @@ subroutine fftw3_mpiforw_wf(cplexwf,ndat,n1,n2,n3,nd1,nd2,nd3proc,&
|
||||
integer,intent(in) :: max1,max2,max3,m1,m2,m3,md1,md2proc,md3,comm_fft
|
||||
!arrays
|
||||
real(dp),intent(inout) :: zr(2,nd1,nd2,nd3proc,ndat)
|
||||
- real(dp),intent(out) :: zf(2,md1,md3,md2proc,ndat)
|
||||
+ real(dp),intent(inout) :: zf(2,md1,md3,md2proc,ndat)
|
||||
|
||||
!Local variables-------------------------------
|
||||
!scalars
|
||||
@@ -4896,7 +4896,7 @@ subroutine fftw3_mpiback(cplex,ndat,n1,n2,n3,nd1,nd2,nd3,nd1eff,nd2proc,nd3proc,
|
||||
! real space input
|
||||
integer,intent(in) :: cplex,ndat,n1,n2,n3,nd1,nd2,nd3,nd1eff,nd2proc,nd3proc,option,comm_fft
|
||||
real(dp),intent(in) :: zf(2,nd1,nd3,nd2proc,ndat)
|
||||
- real(dp),intent(out) :: zr(2,nd1eff,nd2,nd3proc,ndat)
|
||||
+ real(dp),intent(inout) :: zr(2,nd1eff,nd2,nd3proc,ndat)
|
||||
|
||||
!Local variables-------------------------------
|
||||
!scalaras
|
||||
@@ -5197,7 +5197,7 @@ subroutine fftw3_mpiforw(cplex,ndat,n1,n2,n3,nd1,nd2,nd3,nd1eff,nd2proc,nd3proc,
|
||||
integer,intent(in) :: ndat,n1,n2,n3,nd1,nd2,nd3,nd1eff,nd2proc,nd3proc,option
|
||||
!arrays
|
||||
real(dp),intent(in) :: zr(2,nd1eff,nd2,nd3proc,ndat)
|
||||
- real(dp),intent(out) :: zf(2,nd1,nd3,nd2proc,ndat)
|
||||
+ real(dp),intent(inout) :: zf(2,nd1,nd3,nd2proc,ndat)
|
||||
|
||||
!Local variables-------------------------------
|
||||
!scalars
|
||||
@@ -6379,7 +6379,7 @@ subroutine fftw3_mpiback_manywf(cplexwf,ndat,n1,n2,n3,nd1,nd2,nd3proc,&
|
||||
integer,intent(in) :: cplexwf,ndat,n1,n2,n3,nd1,nd2,nd3proc
|
||||
integer,intent(in) :: max1,max2,max3,m1,m2,m3,md1,md2proc,md3,comm_fft
|
||||
real(dp),intent(in) :: zf(2,md1,md3,md2proc,ndat)
|
||||
- real(dp),intent(out) :: zr(2,nd1,nd2,nd3proc,ndat)
|
||||
+ real(dp),intent(inout) :: zr(2,nd1,nd2,nd3proc,ndat)
|
||||
|
||||
#ifdef HAVE_FFTW3
|
||||
!Local variables-------------------------------
|
||||
@@ -6733,7 +6733,7 @@ subroutine fftw3_mpiforw_manywf(cplexwf,ndat,n1,n2,n3,nd1,nd2,nd3proc,&
|
||||
integer,intent(in) :: max1,max2,max3,m1,m2,m3,md1,md2proc,md3,comm_fft
|
||||
!arrays
|
||||
real(dp),intent(inout) :: zr(2,nd1,nd2,nd3proc,ndat)
|
||||
- real(dp),intent(out) :: zf(2,md1,md3,md2proc,ndat)
|
||||
+ real(dp),intent(inout) :: zf(2,md1,md3,md2proc,ndat)
|
||||
|
||||
!Local variables-------------------------------
|
||||
!scalars
|
||||
diff --git a/src/62_poisson/m_psolver.F90 b/src/62_poisson/m_psolver.F90
|
||||
index 1f4a20a..b4ba3cf 100644
|
||||
--- a/src/62_poisson/m_psolver.F90
|
||||
+++ b/src/62_poisson/m_psolver.F90
|
||||
@@ -110,7 +110,7 @@ subroutine psolver_rhohxc(enhartr, enxc, envxc, icoulomb, ixc, &
|
||||
integer,intent(in) :: usexcnhat,usepaw,xclevel
|
||||
real(dp),intent(in) :: rprimd(3,3)
|
||||
real(dp), intent(in) :: xc_denpos
|
||||
- real(dp), intent(out) :: enxc, envxc, enhartr, vxcavg
|
||||
+ real(dp), intent(inout) :: enxc, envxc, enhartr, vxcavg
|
||||
type(mpi_type), intent(in) :: mpi_enreg
|
||||
type(wvl_internal_type), intent(in) :: wvl
|
||||
type(wvl_denspot_type), intent(inout) :: wvl_den
|
||||
@@ -120,8 +120,8 @@ subroutine psolver_rhohxc(enhartr, enxc, envxc, icoulomb, ixc, &
|
||||
real(dp),intent(in) :: xccc3d(n3xccc)
|
||||
real(dp),intent(in) :: nhat(nfft,nspden*nhatdim)
|
||||
real(dp),intent(inout) :: rhor(nfft, nspden)
|
||||
- real(dp),intent(out) :: vhartr(nfft)
|
||||
- real(dp),intent(out) :: vxc(nfft, nspden)
|
||||
+ real(dp),intent(inout) :: vhartr(nfft)
|
||||
+ real(dp),intent(inout) :: vxc(nfft, nspden)
|
||||
|
||||
!Local variables-------------------------------
|
||||
#if defined HAVE_BIGDFT
|
||||
@@ -562,12 +562,12 @@ subroutine psolver_hartree(enhartr, hgrid, icoulomb, me, mpi_comm, nfft, ngfft,
|
||||
!Arguments ------------------------------------
|
||||
!scalars
|
||||
integer, intent(in) :: nfft, nspden, icoulomb, usewvl, mpi_comm, me, nproc, nscforder
|
||||
- real(dp), intent(out) :: enhartr
|
||||
+ real(dp), intent(inout) :: enhartr
|
||||
!arrays
|
||||
integer, intent(in) :: ngfft(3)
|
||||
real(dp),intent(in) :: hgrid(3)
|
||||
real(dp),intent(in) :: rhor(nfft,nspden)
|
||||
- real(dp),intent(out) :: vhartr(nfft)
|
||||
+ real(dp),intent(inout) :: vhartr(nfft)
|
||||
|
||||
!Local variables-------------------------------
|
||||
#if defined HAVE_BIGDFT
|
||||
diff --git a/src/62_wvl_wfs/m_wvl_psi.F90 b/src/62_wvl_wfs/m_wvl_psi.F90
|
||||
index 7eb4b9c..bcb3b08 100644
|
||||
--- a/src/62_wvl_wfs/m_wvl_psi.F90
|
||||
+++ b/src/62_wvl_wfs/m_wvl_psi.F90
|
||||
@@ -234,16 +234,16 @@ subroutine wvl_psitohpsi(alphamix,eexctX, eexcu, ehart, ekin_sum, epot_sum, epro
|
||||
!scalars
|
||||
integer, intent(in) :: me, nproc, itrp, iter, iscf, natom, nfft, nspden
|
||||
real(dp), intent(in) :: alphamix
|
||||
- real(dp), intent(out) :: rpnrm
|
||||
+ real(dp), intent(inout) :: rpnrm
|
||||
logical, intent(in) :: scf
|
||||
logical, intent(in) :: wvlbigdft
|
||||
type(wvl_data), intent(inout) :: wvl
|
||||
real(dp), intent(inout) :: eexctX,eSIC_DC,ehart,eexcu,vexcu, ekin_sum, epot_sum, eproj_sum
|
||||
- real(dp), dimension(6), intent(out) :: xcstr
|
||||
+ real(dp), dimension(6), intent(inout) :: xcstr
|
||||
real(dp), intent(inout) :: xcart(3, natom)
|
||||
!arrays
|
||||
- real(dp),intent(out), optional :: vxc(nfft,nspden)
|
||||
- real(dp),intent(out), optional :: vtrial(nfft,nspden)
|
||||
+ real(dp),intent(inout), optional :: vxc(nfft,nspden)
|
||||
+ real(dp),intent(inout), optional :: vtrial(nfft,nspden)
|
||||
|
||||
!Local variables-------------------------------
|
||||
!scalars
|
||||
@@ -454,7 +454,7 @@ subroutine wvl_tail_corrections(dtset, energies, etotal, mpi_enreg, psps, wvl, x
|
||||
|
||||
!Arguments ------------------------------------
|
||||
!scalars
|
||||
- real(dp),intent(out) :: etotal
|
||||
+ real(dp),intent(inout) :: etotal
|
||||
type(MPI_type),intent(in) :: mpi_enreg
|
||||
type(dataset_type),intent(in) :: dtset
|
||||
type(energies_type),intent(inout) :: energies
|
||||
diff --git a/src/67_common/m_mklocl_realspace.F90 b/src/67_common/m_mklocl_realspace.F90
|
||||
index 423dd4d..ba43c6e 100644
|
||||
--- a/src/67_common/m_mklocl_realspace.F90
|
||||
+++ b/src/67_common/m_mklocl_realspace.F90
|
||||
@@ -1622,7 +1622,7 @@ subroutine local_forces_wvl(iproc,natom,rxyz,hxh,hyh,hzh,n1,n2,n3,n3pi,i3s,n1i,n
|
||||
!arrays
|
||||
real(dp),intent(in) :: rxyz(3,natom)
|
||||
real(dp),dimension(*),intent(in) :: rho,pot
|
||||
- real(dp),intent(out) :: floc(3,natom)
|
||||
+ real(dp),intent(inout) :: floc(3,natom)
|
||||
|
||||
!Local variables -------------------------
|
||||
#if defined HAVE_BIGDFT
|
||||
diff --git a/src/67_common/mkcore_wvl.F90 b/src/67_common/mkcore_wvl.F90
|
||||
index fc58fbc..6960e64 100644
|
||||
--- a/src/67_common/mkcore_wvl.F90
|
||||
+++ b/src/67_common/mkcore_wvl.F90
|
||||
@@ -127,7 +127,7 @@ subroutine mkcore_wvl(atindx1,corstr,grxc,natom,nattyp,nfft,nspden,ntypat,n1xccc
|
||||
integer,intent(in) :: atindx1(natom),nattyp(ntypat)
|
||||
real(dp),intent(in) :: rprimd(3,3),xccc1d(n1xccc,6,ntypat),xcccrc(ntypat),xred(3,natom)
|
||||
real(dp),intent(in),target :: vxc(nfft,nspden)
|
||||
- real(dp),intent(out) :: corstr(6),grxc(3,natom)
|
||||
+ real(dp),intent(inout) :: corstr(6),grxc(3,natom)
|
||||
real(dp),intent(inout) :: xccc3d(n3xccc)
|
||||
type(pawrad_type),intent(in) :: pawrad(:)
|
||||
type(pawtab_type),intent(in) :: pawtab(:)
|
||||
@@ -568,8 +568,8 @@ subroutine mkcore_wvl_old(atindx1,corstr,dyfrx2,geocode,grxc,h,natom,&
|
||||
real(dp),intent(in) :: psppar(0:4,0:6,ntypat),rprimd(3,3)
|
||||
real(dp),intent(in)::xred(3,natom)
|
||||
real(dp),intent(in)::vxc(nfft,nspden)
|
||||
- real(dp),intent(out)::xccc3d(n3xccc)
|
||||
- real(dp),intent(out) :: corstr(6),dyfrx2(3,3,natom),grxc(3,natom)
|
||||
+ real(dp),intent(inout)::xccc3d(n3xccc)
|
||||
+ real(dp),intent(inout) :: corstr(6),dyfrx2(3,3,natom),grxc(3,natom)
|
||||
type(pawtab_type),intent(in) :: pawtab(ntypat)
|
||||
type(pawrad_type),intent(in) :: pawrad(ntypat)
|
||||
|
||||
diff --git a/src/78_effpot/m_spmat_base.F90 b/src/78_effpot/m_spmat_base.F90
|
||||
index be4e8b9..0590a33 100644
|
||||
--- a/src/78_effpot/m_spmat_base.F90
|
||||
+++ b/src/78_effpot/m_spmat_base.F90
|
||||
@@ -113,7 +113,7 @@ contains
|
||||
subroutine base_mat2d_t_mv(self, x, b)
|
||||
class(base_mat2d_t), intent(in) :: self
|
||||
real(dp), intent(in) :: x(self%ncol)
|
||||
- real(dp), intent(out) :: b(self%nrow)
|
||||
+ real(dp), intent(inout) :: b(self%nrow)
|
||||
ABI_UNUSED_A(x)
|
||||
ABI_UNUSED_A(b)
|
||||
end subroutine base_mat2d_t_mv
|
||||
diff --git a/src/78_effpot/m_spmat_csr.F90 b/src/78_effpot/m_spmat_csr.F90
|
||||
index 389abd3..d5904e7 100644
|
||||
--- a/src/78_effpot/m_spmat_csr.F90
|
||||
+++ b/src/78_effpot/m_spmat_csr.F90
|
||||
@@ -201,7 +201,7 @@ contains
|
||||
subroutine CSR_mat_t_mv(self, x, b)
|
||||
class(CSR_mat_t), intent(in):: self
|
||||
real(dp), intent(in) :: x(self%ncol)
|
||||
- real(dp), intent(out) :: b(self%nrow)
|
||||
+ real(dp), intent(inout) :: b(self%nrow)
|
||||
integer::irow, i1, i2, i
|
||||
b(:)=0.0d0
|
||||
!$OMP PARALLEL DO private(i, i1, i2)
|
||||
diff --git a/src/78_effpot/m_spmat_dense.F90 b/src/78_effpot/m_spmat_dense.F90
|
||||
index c861e47..a53b46f 100644
|
||||
--- a/src/78_effpot/m_spmat_dense.F90
|
||||
+++ b/src/78_effpot/m_spmat_dense.F90
|
||||
@@ -114,7 +114,7 @@ contains
|
||||
subroutine dense_mat_t_mv(self, x, b)
|
||||
class(dense_mat_t), intent(in) :: self
|
||||
real(dp), intent(in) :: x(self%ncol)
|
||||
- real(dp), intent(out) :: b(self%nrow)
|
||||
+ real(dp), intent(inout) :: b(self%nrow)
|
||||
call dgemv("N", self%nrow, self%ncol, 1.0d0,self%mat , 2, x, 1, 0.0d0, b, 1)
|
||||
end subroutine dense_mat_t_mv
|
||||
|
||||
@@ -30,6 +30,7 @@ class Abinit(AutotoolsPackage):
|
||||
homepage = 'https://www.abinit.org/'
|
||||
url = 'https://www.abinit.org/sites/default/files/packages/abinit-8.6.3.tar.gz'
|
||||
|
||||
version('9.6.1', sha256='b6a12760fd728eb4aacca431ae12150609565bedbaa89763f219fcd869f79ac6')
|
||||
version('9.4.2', sha256='d40886f5c8b138bb4aa1ca05da23388eb70a682790cfe5020ecce4db1b1a76bc')
|
||||
version('8.10.3', sha256='ed626424b4472b93256622fbb9c7645fa3ffb693d4b444b07d488771ea7eaa75')
|
||||
version('8.10.2', sha256='4ee2e0329497bf16a9b2719fe0536cc50c5d5a07c65e18edaf15ba02251cbb73')
|
||||
@@ -99,10 +100,11 @@ class Abinit(AutotoolsPackage):
|
||||
# need openmp threading for abinit+openmp
|
||||
# TODO: The logic here can be reversed with the new concretizer. Instead of
|
||||
# using `conflicts`, `depends_on` could be used instead.
|
||||
for fftw in ['amdfftw', 'cray-fftw', 'fujitsu-fftw', 'fftw']:
|
||||
conflicts('+openmp', when='^{0}~openmp'.format(fftw),
|
||||
msg='Need to request {0} +openmp'.format(fftw))
|
||||
|
||||
mkl_message = 'Need to set dependent variant to threads=openmp'
|
||||
conflicts('+openmp',
|
||||
when='^fftw~openmp',
|
||||
msg='Need to request fftw +openmp')
|
||||
conflicts('+openmp',
|
||||
when='^intel-mkl threads=none',
|
||||
msg=mkl_message)
|
||||
@@ -113,13 +115,22 @@ class Abinit(AutotoolsPackage):
|
||||
when='^intel-parallel-studio +mkl threads=none',
|
||||
msg=mkl_message)
|
||||
|
||||
conflicts('+openmp',
|
||||
when='^fujitsu-ssl2 ~parallel',
|
||||
msg='Need to request fujitsu-ssl2 +parallel')
|
||||
|
||||
conflicts('~openmp',
|
||||
when='^fujitsu-ssl2 +parallel',
|
||||
msg='Need to request fujitsu-ssl2 ~parallel')
|
||||
|
||||
patch('rm_march_settings.patch', when='@:8')
|
||||
patch('rm_march_settings_v9.patch', when='@9:')
|
||||
|
||||
# Fix detection of Fujitsu compiler
|
||||
# Fix configure not to collect the option that causes an error
|
||||
# Fix intent(out) and unnecessary rewind to avoid compile error
|
||||
patch('fix_for_fujitsu.patch', when='%fj')
|
||||
patch('fix_for_fujitsu.patch', when='@:8 %fj')
|
||||
patch('fix_for_fujitsu.v9.patch', when='@9: %fj')
|
||||
|
||||
def configure_args(self):
|
||||
|
||||
@@ -129,8 +140,12 @@ def configure_args(self):
|
||||
options += self.with_or_without('libxml2')
|
||||
|
||||
oapp = options.append
|
||||
oapp('--with-optim-flavor={0}'
|
||||
.format(self.spec.variants['optimization-flavor'].value))
|
||||
if '@:8' in spec:
|
||||
oapp('--enable-optim={0}'
|
||||
.format(self.spec.variants['optimization-flavor'].value))
|
||||
else:
|
||||
oapp('--with-optim-flavor={0}'
|
||||
.format(self.spec.variants['optimization-flavor'].value))
|
||||
|
||||
if '+wannier90' in spec:
|
||||
if '@:8' in spec:
|
||||
@@ -184,6 +199,8 @@ def configure_args(self):
|
||||
linalg_flavor = 'mkl'
|
||||
elif '@9:' in spec and '^openblas' in spec:
|
||||
linalg_flavor = 'openblas'
|
||||
elif '@9:' in spec and '^fujitsu-ssl2' in spec:
|
||||
linalg_flavor = 'openblas'
|
||||
else:
|
||||
linalg_flavor = 'custom'
|
||||
|
||||
@@ -201,7 +218,7 @@ def configure_args(self):
|
||||
|
||||
if '^mkl' in spec:
|
||||
fftflavor = 'dfti'
|
||||
elif '^fftw' in spec:
|
||||
else:
|
||||
if '+openmp' in spec:
|
||||
fftflavor, fftlibs = 'fftw3-threads', '-lfftw3_omp -lfftw3 -lfftw3f'
|
||||
else:
|
||||
@@ -213,11 +230,11 @@ def configure_args(self):
|
||||
if '^mkl' in spec:
|
||||
oapp('--with-fft-incs={0}'.format(spec['fftw-api'].headers.cpp_flags))
|
||||
oapp('--with-fft-libs={0}'.format(spec['fftw-api'].libs.ld_flags))
|
||||
elif '^fftw' in spec:
|
||||
else:
|
||||
options.extend([
|
||||
'--with-fft-incs={0}'.format(spec['fftw'].headers.cpp_flags),
|
||||
'--with-fft-incs={0}'.format(spec['fftw-api'].headers.cpp_flags),
|
||||
'--with-fft-libs=-L{0} {1}'.format(
|
||||
spec['fftw'].prefix.lib, fftlibs),
|
||||
spec['fftw-api'].prefix.lib, fftlibs),
|
||||
])
|
||||
else:
|
||||
if '^mkl' in spec:
|
||||
@@ -225,11 +242,11 @@ def configure_args(self):
|
||||
'FFT_CPPFLAGS={0}'.format(spec['fftw-api'].headers.cpp_flags),
|
||||
'FFT_LIBs={0}'.format(spec['fftw-api'].libs.ld_flags),
|
||||
])
|
||||
elif '^fftw' in spec:
|
||||
else:
|
||||
options.extend([
|
||||
'FFTW3_CPPFLAGS={0}'.format(spec['fftw'].headers.cpp_flags),
|
||||
'FFTW3_CPPFLAGS={0}'.format(spec['fftw-api'].headers.cpp_flags),
|
||||
'FFTW3_LIBS=-L{0} {1}'.format(
|
||||
spec['fftw'].prefix.lib, fftlibs),
|
||||
spec['fftw-api'].prefix.lib, fftlibs),
|
||||
])
|
||||
|
||||
# LibXC library
|
||||
@@ -251,7 +268,9 @@ def configure_args(self):
|
||||
# Since version 8, Abinit started to use netcdf4 + hdf5 and we have
|
||||
# to link with the high level HDF5 library
|
||||
options.extend([
|
||||
'--with-netcdf-incs={0}'.format(netcdff.headers.cpp_flags),
|
||||
'--with-netcdf-incs={0}'.format(
|
||||
netcdfc.headers.cpp_flags + ' ' +
|
||||
netcdff.headers.cpp_flags),
|
||||
'--with-netcdf-libs={0}'.format(
|
||||
netcdff.libs.ld_flags + ' ' + hdf5.libs.ld_flags
|
||||
),
|
||||
|
||||
@@ -39,6 +39,8 @@ class Acts(CMakePackage, CudaPackage):
|
||||
# Supported Acts versions
|
||||
version('main', branch='main')
|
||||
version('master', branch='main', deprecated=True) # For compatibility
|
||||
version('17.1.0', commit='0d9c3a6da022da48d6401e10c273896a1f775a9e', submodules=True)
|
||||
version('17.0.0', commit='ccbf4c7d4ec3698bac4db9687fab2455a3f9c203', submodules=True)
|
||||
version('16.0.0', commit='9bd86921155e708189417b5a8019add10fd5b273', submodules=True)
|
||||
version('15.1.0', commit='a96e6db7de6075e85b6d5346bc89845eeb89b324', submodules=True)
|
||||
version('15.0.1', commit='b9469b8914f6a1bc47af0998eb7c9e8e20e4debc', submodules=True)
|
||||
@@ -115,7 +117,8 @@ class Acts(CMakePackage, CudaPackage):
|
||||
|
||||
# Variants that affect the core Acts library
|
||||
variant('benchmarks', default=False, description='Build the performance benchmarks', when='@0.16:')
|
||||
variant('examples', default=False, description='Build the examples', when='@0.23: +digitization +fatras +identification +json +tgeo')
|
||||
variant('examples', default=False, description='Build the examples', when='@0.23:16 +digitization +fatras +identification +json +tgeo')
|
||||
variant('examples', default=False, description='Build the examples', when='@17: +fatras +identification +json +tgeo')
|
||||
variant('integration_tests', default=False, description='Build the integration tests')
|
||||
variant('unit_tests', default=False, description='Build the unit tests')
|
||||
variant('log_failure_threshold', default='MAX', description='Log level above which examples should auto-crash')
|
||||
@@ -123,7 +126,7 @@ class Acts(CMakePackage, CudaPackage):
|
||||
# Variants that enable / disable Acts plugins
|
||||
variant('autodiff', default=False, description='Build the auto-differentiation plugin', when='@1.2:')
|
||||
variant('dd4hep', default=False, description='Build the DD4hep plugin', when='+tgeo')
|
||||
variant('digitization', default=False, description='Build the geometric digitization plugin')
|
||||
variant('digitization', default=False, description='Build the geometric digitization plugin', when='@:16')
|
||||
variant('fatras', default=False, description='Build the FAst TRAcking Simulation package', when='@0.16:')
|
||||
variant('fatras_geant4', default=False, description='Build Geant4 Fatras package')
|
||||
variant('identification', default=False, description='Build the Identification plugin')
|
||||
@@ -144,7 +147,7 @@ class Acts(CMakePackage, CudaPackage):
|
||||
# Build dependencies
|
||||
# FIXME: Use spack's vecmem package once there is one
|
||||
# (https://github.com/acts-project/acts/pull/998)
|
||||
depends_on('autodiff @0.6:', when='@develop +autodiff')
|
||||
depends_on('autodiff @0.6:', when='@17: +autodiff')
|
||||
depends_on('autodiff @0.5.11:0.5.99', when='@1.2:16 +autodiff')
|
||||
depends_on('boost @1.62:1.69 +program_options +test', when='@:0.10.3')
|
||||
depends_on('boost @1.71: +filesystem +program_options +test', when='@0.10.4:')
|
||||
@@ -202,7 +205,6 @@ def plugin_cmake_variant(plugin_name, spack_variant):
|
||||
cmake_variant("BENCHMARKS", "benchmarks"),
|
||||
plugin_cmake_variant("CUDA", "cuda"),
|
||||
plugin_cmake_variant("DD4HEP", "dd4hep"),
|
||||
plugin_cmake_variant("DIGITIZATION", "digitization"),
|
||||
cmake_variant("EXAMPLES", "examples"),
|
||||
example_cmake_variant("DD4HEP", "dd4hep"),
|
||||
example_cmake_variant("GEANT4", "geant4"),
|
||||
@@ -241,4 +243,7 @@ def plugin_cmake_variant(plugin_name, spack_variant):
|
||||
elif spec.satisfies('@0.14.0: +json'):
|
||||
args.append("-DACTS_USE_BUNDLED_NLOHMANN_JSON=OFF")
|
||||
|
||||
if spec.satisfies('@:16'):
|
||||
args.append(plugin_cmake_variant("DIGITIZATION", "digitization"))
|
||||
|
||||
return args
|
||||
|
||||
@@ -114,7 +114,7 @@ class Aomp(Package):
|
||||
depends_on('python@3:', type='build', when='@3.9.0:')
|
||||
depends_on('py-setuptools', when='@3.9.0:', type='build')
|
||||
|
||||
depends_on('mesa18~llvm@18.3:', type=('build', 'link'))
|
||||
depends_on('gl@4.5:', type=('build', 'link'))
|
||||
depends_on('py-pip', when='@3.8.0:', type='build')
|
||||
depends_on('py-wheel', when='@3.8.0:', type=('build', 'run'))
|
||||
depends_on('perl-data-dumper', type='build')
|
||||
|
||||
12
var/spack/repos/builtin/packages/apex/install-includes.patch
Normal file
12
var/spack/repos/builtin/packages/apex/install-includes.patch
Normal file
@@ -0,0 +1,12 @@
|
||||
diff --git a/src/apex/CMakeLists.standalone b/src/apex/CMakeLists.standalone
|
||||
index 5acfa34..bb43bd5 100644
|
||||
--- a/src/apex/CMakeLists.standalone
|
||||
+++ b/src/apex/CMakeLists.standalone
|
||||
@@ -143,6 +143,7 @@ INSTALL(FILES apex.h
|
||||
profiler.hpp
|
||||
task_wrapper.hpp
|
||||
task_identifier.hpp
|
||||
+ dependency_tree.hpp
|
||||
DESTINATION include)
|
||||
|
||||
INSTALL(TARGETS apex RUNTIME DESTINATION bin LIBRARY DESTINATION lib ARCHIVE DESTINATION lib)
|
||||
@@ -71,6 +71,12 @@ class Apex(CMakePackage):
|
||||
conflicts('+jemalloc', when='+gperftools')
|
||||
conflicts('+plugins', when='~activeharmony')
|
||||
|
||||
# Patches
|
||||
|
||||
# This patch ensures that the missing dependency_tree.hpp header is
|
||||
# installed
|
||||
patch('install-includes.patch', when='@2.3.2:2.4.1')
|
||||
|
||||
def cmake_args(self):
|
||||
args = []
|
||||
spec = self.spec
|
||||
|
||||
@@ -17,12 +17,17 @@ class Assimp(CMakePackage):
|
||||
maintainers = ['wdconinc']
|
||||
|
||||
version('master', branch='master')
|
||||
version('5.2.2', sha256='ad76c5d86c380af65a9d9f64e8fc57af692ffd80a90f613dfc6bd945d0b80bb4')
|
||||
version('5.2.1', sha256='c9cbbc8589639cd8c13f65e94a90422a70454e8fa150cf899b6038ba86e9ecff')
|
||||
version('5.1.4', sha256='bd32cdc27e1f8b7ac09d914ab92dd81d799c97e9e47315c1f40dcb7c6f7938c6')
|
||||
version('5.1.3', sha256='50a7bd2c8009945e1833c591d16f4f7c491a3c6190f69d9d007167aadb175c35')
|
||||
version('5.0.1', sha256='11310ec1f2ad2cd46b95ba88faca8f7aaa1efe9aa12605c55e3de2b977b3dbfc')
|
||||
version('4.0.1', sha256='60080d8ab4daaab309f65b3cffd99f19eb1af8d05623fff469b9b652818e286e')
|
||||
|
||||
patch('https://patch-diff.githubusercontent.com/raw/assimp/assimp/pull/4203.patch',
|
||||
sha256='a227714a215023184536e38b4bc7f8341f635e16bfb3b0ea029d420c29aacd2d',
|
||||
when='@5.1:5.2.2')
|
||||
|
||||
variant('shared', default=True,
|
||||
description='Enables the build of shared libraries')
|
||||
|
||||
|
||||
@@ -17,6 +17,9 @@ class Bazel(Package):
|
||||
url = "https://github.com/bazelbuild/bazel/releases/download/3.1.0/bazel-3.1.0-dist.zip"
|
||||
|
||||
maintainers = ['adamjstewart']
|
||||
|
||||
tags = ['build-tools']
|
||||
|
||||
version('4.0.0', sha256='d350f80e70654932db252db380d2ec0144a00e86f8d9f2b4c799ffdb48e9cdd1')
|
||||
version('3.7.2', sha256='de255bb42163a915312df9f4b86e5b874b46d9e8d4b72604b5123c3a845ed9b1')
|
||||
version('3.7.1', sha256='c9244e5905df6b0190113e26082c72d58b56b1b0dec66d076f083ce4089b0307')
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
import re
|
||||
|
||||
|
||||
class Binutils(AutotoolsPackage, GNUMirrorPackage):
|
||||
@@ -12,6 +13,11 @@ class Binutils(AutotoolsPackage, GNUMirrorPackage):
|
||||
|
||||
maintainers = ['alalazo']
|
||||
|
||||
tags = ['build-tools', 'core-packages']
|
||||
|
||||
executables = ['^nm$', '^readelf$']
|
||||
|
||||
version('2.38', sha256='070ec71cf077a6a58e0b959f05a09a35015378c2d8a51e90f3aeabfe30590ef8')
|
||||
version('2.37', sha256='67fc1a4030d08ee877a4867d3dcab35828148f87e1fd05da6db585ed5a166bd4')
|
||||
version('2.36.1', sha256='5b4bd2e79e30ce8db0abd76dd2c2eae14a94ce212cfc59d3c37d23e24bc6d7a3')
|
||||
version('2.35.2', sha256='cfa7644dbecf4591e136eb407c1c1da16578bd2b03f0c2e8acdceba194bb9d61')
|
||||
@@ -80,6 +86,12 @@ class Binutils(AutotoolsPackage, GNUMirrorPackage):
|
||||
# --disable-ld flag
|
||||
conflicts('~ld', '+gold')
|
||||
|
||||
@classmethod
|
||||
def determine_version(cls, exe):
|
||||
output = Executable(exe)('--version', output=str, error=str)
|
||||
match = re.search(r'GNU (nm|readelf).* (\S+)', output)
|
||||
return Version(match.group(2)).dotted.up_to(3) if match else None
|
||||
|
||||
def setup_build_environment(self, env):
|
||||
|
||||
if self.spec.satisfies('%cce'):
|
||||
|
||||
@@ -18,6 +18,8 @@ class Bison(AutotoolsPackage, GNUMirrorPackage):
|
||||
homepage = "https://www.gnu.org/software/bison/"
|
||||
gnu_mirror_path = "bison/bison-3.6.4.tar.gz"
|
||||
|
||||
tags = ['build-tools']
|
||||
|
||||
executables = ['^bison$']
|
||||
|
||||
version('3.8.2', sha256='06c9e13bdf7eb24d4ceb6b59205a4f67c2c7e7213119644430fe82fbd14a0abb')
|
||||
|
||||
@@ -19,9 +19,7 @@ class Blaze(CMakePackage):
|
||||
url = "https://bitbucket.org/blaze-lib/blaze/downloads/blaze-3.8.tar.gz"
|
||||
git = "https://bitbucket.org/blaze-lib/blaze.git"
|
||||
|
||||
# Blaze requires at least cmake 3.8.0 for C++14 features.
|
||||
depends_on('cmake@3.8.0:', type='build')
|
||||
depends_on('blas')
|
||||
maintainers = ['nilsvu']
|
||||
|
||||
version('master', branch='master')
|
||||
version('3.8', sha256='dfaae1a3a9fea0b3cc92e78c9858dcc6c93301d59f67de5d388a3a41c8a629ae')
|
||||
@@ -46,3 +44,57 @@ class Blaze(CMakePackage):
|
||||
version('1.2', sha256='16f56d4f61dca229fa7e17a0d1e348a1f3246c65cded2df5db33babebf8f9b9d')
|
||||
version('1.1', sha256='6add20eb9c176ea9f8091c49b101f46d1a1a6bd9c31553a6eff5e53603f0527f')
|
||||
version('1.0', sha256='ee13cfd467c1a4b0fe7cc58b61b846eae862167a90dd2e60559626a30418b5a3')
|
||||
|
||||
# These configuration options set defaults for dependent packages and
|
||||
# control Blaze dependencies. They can also be enabled or disabled with
|
||||
# compiler flags later by dependent packages, since Blaze is a header-only
|
||||
# library.
|
||||
# - BLAS mode is turned off by default in the Blaze CMake configuration (as
|
||||
# of v3.8), so we turn it off by default here as well.
|
||||
variant('blas', default=False, description='Enable BLAS kernels')
|
||||
# - LAPACK is only a link-time dependency, but Blaze provides a CMake
|
||||
# configuration check. It is enabled by default in the Blaze CMake
|
||||
# configuration (as of v3.8).
|
||||
variant('lapack', default=True, description='Enable LAPACK kernels')
|
||||
# - SMP mode is set to OpenMP by default in the Blaze CMake configuration
|
||||
# (as of v3.8), but isn't a required dependency.
|
||||
variant('smp', values=['none', 'openmp', 'cpp11', 'boost', 'hpx'],
|
||||
default='openmp', description='Shared memory parallelization mode')
|
||||
|
||||
# Blaze requires at least cmake 3.8.0 for C++14 features.
|
||||
depends_on('cmake@3.8.0:', type='build')
|
||||
depends_on('blas', when='+blas')
|
||||
depends_on('lapack', when='+lapack')
|
||||
depends_on('boost@1.54.0: +thread', when='smp=boost')
|
||||
depends_on('hpx', when='smp=hpx')
|
||||
|
||||
def cmake_args(self):
|
||||
args = [
|
||||
self.define_from_variant('BLAZE_BLAS_MODE', 'blas'),
|
||||
# These flags can be set at compile time, but it would be useful to
|
||||
# determine them from the BLAS provider if possible and pass them to
|
||||
# the CMake configuration:
|
||||
# - BLAZE_BLAS_IS_64BIT
|
||||
# - BLAZE_BLAS_IS_PARALLEL
|
||||
# The name of the header file is particularly important because
|
||||
# builds will fail if `BLAZE_BLAS_MODE` is enabled but the header
|
||||
# file is missing:
|
||||
# - BLAZE_BLAS_INCLUDE_FILE (defaults to <cblas.h>)
|
||||
self.define_from_variant('USE_LAPACK', 'lapack'),
|
||||
]
|
||||
|
||||
# SMP mode
|
||||
if self.spec.variants['smp'].value == 'none':
|
||||
args.append(self.define('BLAZE_SHARED_MEMORY_PARALLELIZATION', False))
|
||||
else:
|
||||
args.extend([
|
||||
self.define('BLAZE_SHARED_MEMORY_PARALLELIZATION', True),
|
||||
self.define('BLAZE_SMP_THREADS', {
|
||||
'openmp': 'OpenMP',
|
||||
'cpp11': 'C++11',
|
||||
'boost': 'Boost',
|
||||
'hpx': 'HPX',
|
||||
}[self.spec.variants['smp'].value])
|
||||
])
|
||||
|
||||
return args
|
||||
|
||||
@@ -11,10 +11,12 @@ class Bmi(AutotoolsPackage):
|
||||
|
||||
homepage = 'https://github.com/radix-io/bmi/'
|
||||
git = 'https://github.com/radix-io/bmi.git'
|
||||
url = 'https://github.com/radix-io/bmi/archive/v2.8.1.tar.gz'
|
||||
|
||||
maintainers = ['carns']
|
||||
|
||||
version('main', branch='main')
|
||||
version('2.8.1', sha256='28aa4341f0456cf20ee762f712d7c749ab8f864003329f9327c18ea03fc7ffdb')
|
||||
|
||||
depends_on('autoconf', type='build')
|
||||
depends_on('automake', type='build')
|
||||
|
||||
@@ -1,11 +0,0 @@
|
||||
--- a/tools/build/src/tools/intel-linux.jam 2021-12-01 22:47:38.000000000 -0800
|
||||
+++ b/tools/build/src/tools/intel-linux.jam 2022-05-03 13:40:41.569430070 -0700
|
||||
@@ -276,7 +276,7 @@
|
||||
#
|
||||
actions compile.c++.pch
|
||||
{
|
||||
- rm -f "$(<)" && LD_LIBRARY_PATH="$(RUN_PATH)" "$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -pch-create "$(<)" "$(>)"
|
||||
+ rm -f "$(<)" && LD_LIBRARY_PATH="$(RUN_PATH)" "$(CONFIG_COMMAND)" -x c++-header $(OPTIONS) $(USER_OPTIONS) -D$(DEFINES) -I"$(INCLUDES)" -c -Xclang -emit-pch -o "$(<)" "$(>)"
|
||||
}
|
||||
|
||||
actions compile.fortran
|
||||
@@ -314,9 +314,6 @@ def libs(self):
|
||||
# See https://github.com/spack/spack/issues/28273
|
||||
patch("pthread-stack-min-fix.patch", when="@1.69.0:1.72.0")
|
||||
|
||||
# https://www.intel.com/content/www/us/en/developer/articles/technical/building-boost-with-oneapi.html
|
||||
patch("1.78-intel-linux-jam.patch", when="@1.78 %oneapi")
|
||||
|
||||
def patch(self):
|
||||
# Disable SSSE3 and AVX2 when using the NVIDIA compiler
|
||||
if self.spec.satisfies('%nvhpc'):
|
||||
@@ -330,10 +327,6 @@ def patch(self):
|
||||
filter_file('-fast', '-O1', 'tools/build/src/tools/pgi.jam')
|
||||
filter_file('-fast', '-O1', 'tools/build/src/engine/build.sh')
|
||||
|
||||
# Fixes https://github.com/spack/spack/issues/29352
|
||||
if self.spec.satisfies('@1.78 %intel') or self.spec.satisfies('@1.78 %oneapi'):
|
||||
filter_file('-static', '', 'tools/build/src/engine/build.sh')
|
||||
|
||||
def url_for_version(self, version):
|
||||
if version >= Version('1.63.0'):
|
||||
url = "https://boostorg.jfrog.io/artifactory/main/release/{0}/source/boost_{1}.tar.bz2"
|
||||
|
||||
@@ -14,6 +14,7 @@ class CBlosc(CMakePackage):
|
||||
homepage = "https://www.blosc.org"
|
||||
url = "https://github.com/Blosc/c-blosc/archive/v1.11.1.tar.gz"
|
||||
|
||||
version('1.21.1', sha256='f387149eab24efa01c308e4cba0f59f64ccae57292ec9c794002232f7903b55b')
|
||||
version('1.21.0', sha256='b0ef4fda82a1d9cbd11e0f4b9685abf14372db51703c595ecd4d76001a8b342d')
|
||||
version('1.17.0', sha256='75d98c752b8cf0d4a6380a3089d56523f175b0afa2d0cf724a1bd0a1a8f975a4')
|
||||
version('1.16.3', sha256='bec56cb0956725beb93d50478e918aca09f489f1bfe543dbd3087827a7344396')
|
||||
|
||||
@@ -17,6 +17,7 @@ class CBlosc2(CMakePackage):
|
||||
maintainers = ['ax3l', 'robert-mijakovic']
|
||||
|
||||
version('develop', branch='master')
|
||||
version('2.0.4', sha256='90c78edcc262759dd16d243141513310624bb4fda3d98ac34dcfb78255e151c1')
|
||||
version('2.0.2', sha256='fba51ba601610441eea6046e384284b2d8d7884922060cf15369d01d713b9b77')
|
||||
version('2.0.1', sha256='35b93dfed479b1dfd9372d41d7843b60254ed1d71792577b95e489c28705874f')
|
||||
|
||||
|
||||
@@ -11,10 +11,9 @@ class Capstone(CMakePackage):
|
||||
multi-architecture disassembly framework."""
|
||||
|
||||
homepage = "https://www.capstone-engine.org/"
|
||||
url = "https://github.com/aquynh/capstone/archive/4.0.1.tar.gz"
|
||||
git = "https://github.com/aquynh/capstone.git"
|
||||
url = "https://github.com/capstone-engine/capstone/archive/4.0.1.tar.gz"
|
||||
git = "https://github.com/capstone-engine/capstone.git"
|
||||
|
||||
version('next', branch='next')
|
||||
version('master', branch='master')
|
||||
version('4.0.2', sha256='7c81d798022f81e7507f1a60d6817f63aa76e489aa4e7055255f21a22f5e526a')
|
||||
version('4.0.1', sha256='79bbea8dbe466bd7d051e037db5961fdb34f67c9fac5c3471dd105cfb1e05dc7')
|
||||
|
||||
@@ -11,7 +11,7 @@ class Cblas(Package):
|
||||
provide standard building blocks for performing basic vector and
|
||||
matrix operations."""
|
||||
|
||||
homepage = "http://www.netlib.org/blas/_cblas/"
|
||||
homepage = "http://www.netlib.org/blas/#_cblas"
|
||||
|
||||
# tarball has no version, but on the date below, this MD5 was correct.
|
||||
version('2015-06-06', sha256='0f6354fd67fabd909baf57ced2ef84e962db58fae126e4f41b21dd4fec60a2a3',
|
||||
|
||||
@@ -17,6 +17,8 @@ class Ccache(CMakePackage):
|
||||
url = "https://github.com/ccache/ccache/releases/download/v4.2.1/ccache-4.2.1.tar.gz"
|
||||
maintainers = ['haampie']
|
||||
|
||||
tags = ['build-tools']
|
||||
|
||||
executables = ['^ccache$']
|
||||
|
||||
version('4.5.1', sha256='f0d3cff5d555d6868f14a7d05696f0370074e475304fd5aa152b98f892364981')
|
||||
|
||||
@@ -20,6 +20,7 @@ class Cdo(AutotoolsPackage):
|
||||
|
||||
maintainers = ['skosukhin', 'Try2Code']
|
||||
|
||||
version('2.0.4', sha256='73c0c1e5348632e6e8452ea8e617c35499bc55c845ee2c1d42b912a7e00e5533', url='https://code.mpimet.mpg.de/attachments/download/26761/cdo-2.0.4.tar.gz')
|
||||
version('2.0.3', sha256='25520260ccb4e5324c27fa2160dfafc8152b180dd7f0133bd80425df3ef7c65a', url='https://code.mpimet.mpg.de/attachments/download/26676/cdo-2.0.3.tar.gz')
|
||||
version('2.0.2', sha256='34dfdd0d4126cfd35fc69e37e60901c8622d13ec5b3fa5f0fe6a1cc866cc5a70', url='https://code.mpimet.mpg.de/attachments/download/26654/cdo-2.0.2.tar.gz')
|
||||
version('2.0.1', sha256='d0794d261e22efa0adac8e6d18de2b60d54de5e1a4df6127c65fc417feb8fdac', url='https://code.mpimet.mpg.de/attachments/download/26477/cdo-2.0.1.tar.gz')
|
||||
|
||||
@@ -16,6 +16,8 @@ class Coreutils(AutotoolsPackage, GNUMirrorPackage):
|
||||
homepage = 'https://www.gnu.org/software/coreutils/'
|
||||
gnu_mirror_path = 'coreutils/coreutils-8.26.tar.xz'
|
||||
|
||||
tags = ['core-packages']
|
||||
|
||||
version('8.32', sha256='4458d8de7849df44ccab15e16b1548b285224dbba5f08fac070c1c0e0bcc4cfa')
|
||||
version('8.31', sha256='ff7a9c918edce6b4f4b2725e3f9b37b0c4d193531cac49a48b56c4d0d3a9e9fd')
|
||||
version('8.30', sha256='e831b3a86091496cdba720411f9748de81507798f6130adeaef872d206e1b057')
|
||||
|
||||
@@ -21,9 +21,10 @@ class Costa(CMakePackage):
|
||||
# note: The default archives produced with github do not have the archives
|
||||
# of the submodules.
|
||||
version('master', branch='master', submodules=True)
|
||||
version('2.0', sha256='ef283b904b1b77b7d0de401cbdc3d7850c77368f2b57249c6eaee3017794c4b8')
|
||||
version('2.0', sha256='de250197f31f7d23226c6956a687c3ff46fb0ff6c621a932428236c3f7925fe4')
|
||||
|
||||
variant('scalapack', default=False, description='Build with ScaLAPACK API')
|
||||
variant('shared', default=False, description="Build shared libraries")
|
||||
|
||||
depends_on('cmake@3.12:', type='build')
|
||||
depends_on('mpi@3:')
|
||||
@@ -53,5 +54,6 @@ def cmake_args(self):
|
||||
self.define('COSTA_WITH_APPS', 'OFF'),
|
||||
self.define('COSTA_WITH_TESTS', 'OFF'),
|
||||
self.define('COSTA_WITH_PROFILING', 'OFF'),
|
||||
self.define('COSTA_SCALAPACK', self.costa_scalapack_cmake_arg())
|
||||
self.define('COSTA_SCALAPACK', self.costa_scalapack_cmake_arg()),
|
||||
self.define('BUILD_SHARED_LIBS', '+shared' in self.spec)
|
||||
]
|
||||
|
||||
@@ -21,6 +21,7 @@ class Cp2k(MakefilePackage, CudaPackage):
|
||||
|
||||
maintainers = ['dev-zero']
|
||||
|
||||
version('9.1', sha256='fedb4c684a98ad857cd49b69a3ae51a73f85a9c36e9cb63e3b02320c74454ce6')
|
||||
version('8.2', sha256='2e24768720efed1a5a4a58e83e2aca502cd8b95544c21695eb0de71ed652f20a')
|
||||
version('8.1', sha256='7f37aead120730234a60b2989d0547ae5e5498d93b1e9b5eb548c041ee8e7772')
|
||||
version('7.1', sha256='ccd711a09a426145440e666310dd01cc5772ab103493c4ae6a3470898cd0addb')
|
||||
@@ -60,7 +61,7 @@ class Cp2k(MakefilePackage, CudaPackage):
|
||||
' with cuda_arch=35 for a K20x instead of a K40'))
|
||||
variant('cuda_fft', default=False,
|
||||
description=('Use CUDA also for FFTs in the PW part of CP2K'))
|
||||
variant('cuda_blas', default=False,
|
||||
variant('cuda_blas', default=False, when='@:7', # req in CP2K v8+
|
||||
description=('Use CUBLAS for general matrix operations in DBCSR'))
|
||||
|
||||
HFX_LMAX_RANGE = range(4, 8)
|
||||
@@ -85,8 +86,9 @@ class Cp2k(MakefilePackage, CudaPackage):
|
||||
depends_on('openblas threads=openmp', when='^openblas')
|
||||
|
||||
with when('smm=libxsmm'):
|
||||
depends_on('libxsmm@1.17:~header-only', when='@9.1:')
|
||||
# require libxsmm-1.11+ since 1.10 can leak file descriptors in Fortran
|
||||
depends_on('libxsmm@1.11:~header-only')
|
||||
depends_on('libxsmm@1.11:~header-only', when="@:8.9")
|
||||
# use pkg-config (support added in libxsmm-1.10) to link to libxsmm
|
||||
depends_on('pkgconfig', type='build')
|
||||
# please set variants: smm=blas by configuring packages.yaml or install
|
||||
@@ -108,7 +110,8 @@ class Cp2k(MakefilePackage, CudaPackage):
|
||||
depends_on('libxc@2.2.2:3', when='@:5', type='build')
|
||||
depends_on('libxc@4.0.3:4', when='@6.0:6.9', type='build')
|
||||
depends_on('libxc@4.0.3:4', when='@7.0:8.1')
|
||||
depends_on('libxc@5.1.3:5.1', when='@8.2:')
|
||||
depends_on('libxc@5.1.3:5.1', when='@8.2:8')
|
||||
depends_on('libxc@5.1.7:5.1', when='@9:')
|
||||
|
||||
with when('+mpi'):
|
||||
depends_on('mpi@2:')
|
||||
@@ -116,6 +119,7 @@ class Cp2k(MakefilePackage, CudaPackage):
|
||||
|
||||
with when('+cosma'):
|
||||
depends_on('cosma+scalapack')
|
||||
depends_on('cosma@2.5.1:', when='@9:')
|
||||
depends_on('cosma+cuda', when='+cuda')
|
||||
conflicts('~mpi')
|
||||
# COSMA support was introduced in 8+
|
||||
@@ -129,6 +133,7 @@ class Cp2k(MakefilePackage, CudaPackage):
|
||||
depends_on('elpa@2011.12:2017.11', when='@6.0:6')
|
||||
depends_on('elpa@2018.05:2020.11.001', when='@7.0:8.2')
|
||||
depends_on('elpa@2021.05:', when='@8.3:')
|
||||
depends_on('elpa@2021.11.001:', when='@9.1:')
|
||||
|
||||
with when('+plumed'):
|
||||
depends_on('plumed+shared')
|
||||
@@ -151,7 +156,8 @@ class Cp2k(MakefilePackage, CudaPackage):
|
||||
depends_on('sirius~openmp', when='~openmp')
|
||||
depends_on('sirius@:6', when='@:7')
|
||||
depends_on('sirius@7.0.0:7.0', when='@8:8.2')
|
||||
depends_on('sirius@7.2:', when='@8.3:')
|
||||
depends_on('sirius@7.2', when='@8.3:8.9')
|
||||
depends_on('sirius@7.3:', when='@9.1')
|
||||
conflicts('~mpi')
|
||||
# sirius support was introduced in 7+
|
||||
conflicts('@:6')
|
||||
@@ -508,6 +514,9 @@ def edit(self, spec, prefix):
|
||||
int(elpa.version[1])))
|
||||
fcflags += ['-I{0}'.format(join_path(elpa_incdir, 'elpa'))]
|
||||
|
||||
if '+cuda' in spec and '+cuda' in elpa:
|
||||
cppflags += ['-D__ELPA_NVIDIA_GPU']
|
||||
|
||||
if spec.satisfies('+sirius'):
|
||||
sirius = spec['sirius']
|
||||
cppflags.append('-D__SIRIUS')
|
||||
@@ -515,14 +524,29 @@ def edit(self, spec, prefix):
|
||||
libs += list(sirius.libs)
|
||||
|
||||
if spec.satisfies('+cuda'):
|
||||
cppflags += ['-D__ACC']
|
||||
libs += ['-lcudart', '-lnvrtc', '-lcuda']
|
||||
libs += [
|
||||
'-L{}'.format(spec['cuda'].libs.directories[0]),
|
||||
'-L{}/stubs'.format(spec['cuda'].libs.directories[0]),
|
||||
'-lcuda', '-lcudart', '-lnvrtc', '-lstdc++']
|
||||
|
||||
if spec.satisfies('+cuda_blas'):
|
||||
cppflags += ['-D__DBCSR_ACC=2']
|
||||
if spec.satisfies('@9:'):
|
||||
acc_compiler_var = 'OFFLOAD_CC'
|
||||
acc_flags_var = 'OFFLOAD_FLAGS'
|
||||
cppflags += [
|
||||
'-D__DBCSR_ACC',
|
||||
'-D__GRID_CUDA',
|
||||
'-DOFFLOAD_TARGET=cuda',
|
||||
]
|
||||
libs += ['-lcublas']
|
||||
else:
|
||||
cppflags += ['-D__DBCSR_ACC']
|
||||
acc_compiler_var = 'NVCC'
|
||||
acc_flags_var = 'NVFLAGS'
|
||||
cppflags += ['-D__ACC']
|
||||
if spec.satisfies('+cuda_blas'):
|
||||
cppflags += ['-D__DBCSR_ACC=2']
|
||||
libs += ['-lcublas']
|
||||
else:
|
||||
cppflags += ['-D__DBCSR_ACC']
|
||||
|
||||
if spec.satisfies('+cuda_fft'):
|
||||
cppflags += ['-D__PW_CUDA']
|
||||
@@ -616,8 +640,9 @@ def edit(self, spec, prefix):
|
||||
mkf.write('CPP = # {0} -E\n'.format(spack_cc))
|
||||
mkf.write('AR = ar -r\n')
|
||||
|
||||
if spec.satisfies('+cuda'):
|
||||
mkf.write('NVCC = {0}\n'.format(
|
||||
if '+cuda' in spec:
|
||||
mkf.write('{0} = {1}\n'.format(
|
||||
acc_compiler_var,
|
||||
join_path(spec['cuda'].prefix, 'bin', 'nvcc')))
|
||||
|
||||
# Write compiler flags to file
|
||||
@@ -631,7 +656,7 @@ def fflags(var, lst):
|
||||
mkf.write(fflags('CPPFLAGS', cppflags))
|
||||
mkf.write(fflags('CFLAGS', cflags))
|
||||
mkf.write(fflags('CXXFLAGS', cxxflags))
|
||||
mkf.write(fflags('NVFLAGS', nvflags))
|
||||
mkf.write(fflags(acc_flags_var, nvflags))
|
||||
mkf.write(fflags('FCFLAGS', fcflags))
|
||||
mkf.write(fflags('LDFLAGS', ldflags))
|
||||
mkf.write(fflags('LIBS', libs))
|
||||
|
||||
@@ -18,11 +18,12 @@ class CrayFftw(Package):
|
||||
need to load cray-mpich before cray-fftw.
|
||||
"""
|
||||
|
||||
homepage = "https://docs.nersc.gov/development/libraries/fftw/"
|
||||
homepage = "https://support.hpe.com/"
|
||||
has_code = False # Skip attempts to fetch source that is not available
|
||||
|
||||
maintainers = ['haampie']
|
||||
maintainers = ['haampie', 'lukebroskop']
|
||||
|
||||
version('3.3.8.12')
|
||||
version('3.3.8.8')
|
||||
version('3.3.8.7')
|
||||
|
||||
|
||||
27
var/spack/repos/builtin/packages/cray-pmi/package.py
Normal file
27
var/spack/repos/builtin/packages/cray-pmi/package.py
Normal file
@@ -0,0 +1,27 @@
|
||||
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
from spack import *
|
||||
|
||||
|
||||
class CrayPmi(Package):
|
||||
"""Cray's Process Management Interface library"""
|
||||
|
||||
homepage = "https://docs.nersc.gov/development/compilers/wrappers/"
|
||||
has_code = False # Skip attempts to fetch source that is not available
|
||||
|
||||
maintainers = ['haampie']
|
||||
|
||||
version('5.0.17')
|
||||
version('5.0.16')
|
||||
version('5.0.11')
|
||||
|
||||
@property
|
||||
def headers(self):
|
||||
return find_headers('pmi', self.prefix.include, recursive=True)
|
||||
|
||||
@property
|
||||
def libs(self):
|
||||
return find_libraries(['libpmi'], root=self.prefix, recursive=True)
|
||||
@@ -25,6 +25,10 @@
|
||||
# format returned by platform.system() and 'arch' by platform.machine()
|
||||
|
||||
_versions = {
|
||||
'11.6.1': {
|
||||
'Linux-aarch64': ('80586b003d58030004d465f5331dc69ee26c95a29516fb2488ff10f034139cb2', 'https://developer.download.nvidia.com/compute/cuda/11.6.1/local_installers/cuda_11.6.1_510.47.03_linux_sbsa.run'),
|
||||
'Linux-x86_64': ('ab219afce00b74200113269866fbff75ead037bcfc23551a8338c2684c984d7e', 'https://developer.download.nvidia.com/compute/cuda/11.6.1/local_installers/cuda_11.6.1_510.47.03_linux.run'),
|
||||
'Linux-ppc64le': ('ef762efbc00b67d572823c6ec338cc2c0cf0c096f41e6bce18e8d4501f260956', 'https://developer.download.nvidia.com/compute/cuda/11.6.1/local_installers/cuda_11.6.1_510.47.03_linux_ppc64le.run')},
|
||||
'11.6.0': {
|
||||
'Linux-aarch64': ('5898579f5e59b708520883cb161089f5e4f3426158d1e9f973c49d224085d1d2', 'https://developer.download.nvidia.com/compute/cuda/11.6.0/local_installers/cuda_11.6.0_510.39.01_linux_sbsa.run'),
|
||||
'Linux-x86_64': ('1783da6d63970786040980b57fa3cb6420142159fc7d0e66f8f05c4905d98c83', 'https://developer.download.nvidia.com/compute/cuda/11.6.0/local_installers/cuda_11.6.0_510.39.01_linux.run'),
|
||||
@@ -37,6 +41,14 @@
|
||||
'Linux-aarch64': ('6ea9d520cc956cc751a5ac54f4acc39109627f4e614dd0b1a82cc86f2aa7d8c4', 'https://developer.download.nvidia.com/compute/cuda/11.5.0/local_installers/cuda_11.5.0_495.29.05_linux_sbsa.run'),
|
||||
'Linux-x86_64': ('ae0a1693d9497cf3d81e6948943e3794636900db71c98d58eefdacaf7f1a1e4c', 'https://developer.download.nvidia.com/compute/cuda/11.5.0/local_installers/cuda_11.5.0_495.29.05_linux.run'),
|
||||
'Linux-ppc64le': ('95baefdc5adf165189407b119861ffb2e9800fd94d7fc81d10fb81ed36dc12db', 'https://developer.download.nvidia.com/compute/cuda/11.5.0/local_installers/cuda_11.5.0_495.29.05_linux_ppc64le.run')},
|
||||
'11.4.4': {
|
||||
'Linux-aarch64': ('c5c08531e48e8fdc2704fa1c1f7195f2c7edd2ee10a466d0e24d05b77d109435', 'https://developer.download.nvidia.com/compute/cuda/11.4.4/local_installers/cuda_11.4.4_470.82.01_linux_sbsa.run'),
|
||||
'Linux-x86_64': ('44545a7abb4b66dfc201dcad787b5e8352e5b7ddf3e3cc5b2e9177af419c25c8', 'https://developer.download.nvidia.com/compute/cuda/11.4.4/local_installers/cuda_11.4.4_470.82.01_linux.run'),
|
||||
'Linux-ppc64le': ('c71cd4e6c05fde11c0485369a73e7f356080e7a18f0e3ad7244e8fc03a9dd3e2', 'https://developer.download.nvidia.com/compute/cuda/11.4.4/local_installers/cuda_11.4.4_470.82.01_linux_ppc64le.run')},
|
||||
'11.4.3': {
|
||||
'Linux-aarch64': ('e02db34a487ea3de3eec9db80efd09f12eb69d55aca686cecaeae96a9747b1d4', 'https://developer.download.nvidia.com/compute/cuda/11.4.3/local_installers/cuda_11.4.3_470.82.01_linux_sbsa.run'),
|
||||
'Linux-x86_64': ('749183821ffc051e123f12ebdeb171b263d55b86f0dd7c8f23611db1802d6c37', 'https://developer.download.nvidia.com/compute/cuda/11.4.3/local_installers/cuda_11.4.3_470.82.01_linux.run'),
|
||||
'Linux-ppc64le': ('08f29cc3ed0b3b82dd9b007186237be2352bb552f99230c450a25e768f5754ee', 'https://developer.download.nvidia.com/compute/cuda/11.4.3/local_installers/cuda_11.4.3_470.82.01_linux_ppc64le.run')},
|
||||
'11.4.2': {
|
||||
'Linux-aarch64': ('f2c4a52e06329606c8dfb7c5ea3f4cb4c0b28f9d3fdffeeb734fcc98daf580d8', 'https://developer.download.nvidia.com/compute/cuda/11.4.2/local_installers/cuda_11.4.2_470.57.02_linux_sbsa.run'),
|
||||
'Linux-x86_64': ('bbd87ca0e913f837454a796367473513cddef555082e4d86ed9a38659cc81f0a', 'https://developer.download.nvidia.com/compute/cuda/11.4.2/local_installers/cuda_11.4.2_470.57.02_linux.run'),
|
||||
|
||||
@@ -19,6 +19,7 @@ class Curl(AutotoolsPackage):
|
||||
|
||||
executables = ['^curl$']
|
||||
|
||||
version('7.81.0', sha256='1e7a38d7018ec060f1f16df839854f0889e94e122c4cfa5d3a37c2dc56f1e258')
|
||||
version('7.80.0', sha256='dd0d150e49cd950aff35e16b628edf04927f0289df42883750cf952bb858189c')
|
||||
version('7.79.1', sha256='de62c4ab9a9316393962e8b94777a570bb9f71feb580fb4475e412f2f9387851')
|
||||
version('7.79.0', sha256='d607a677f473f79f96c964100327125a6204a39d835dc00dab7fc0129b959f42')
|
||||
|
||||
@@ -20,6 +20,8 @@ class Cvs(AutotoolsPackage, GNUMirrorPackage):
|
||||
patch('https://gentoofan.org/gentoo/poly-c_overlay/dev-vcs/cvs/files/cvs-1.12.13.1-fix-gnulib-SEGV-vasnprintf.patch',
|
||||
sha256='e13db2acebad3ca5be5d8e0fa97f149b0f9661e4a9a731965c8226290c6413c0', when='@1.12.13')
|
||||
|
||||
tags = ['build-tools']
|
||||
|
||||
parallel = False
|
||||
executables = [r'^cvs$']
|
||||
|
||||
|
||||
@@ -24,6 +24,7 @@ class Dd4hep(CMakePackage):
|
||||
tags = ['hep']
|
||||
|
||||
version('master', branch='master')
|
||||
version('1.20', sha256='cf6af0c486d5c84e8c8a8e40ea16cec54d4ed78bffcef295a0eeeaedf51cab59')
|
||||
version('1.19', sha256='d2eccf5e8402ba7dab2e1d7236e12ee4db9b1c5e4253c40a140bf35580db1d9b')
|
||||
version('1.18', sha256='1e909a42b969dfd966224fa8ab1eca5aa05136baf3c00a140f2f6d812b497152')
|
||||
version('1.17', sha256='036a9908aaf1e13eaf5f2f43b6f5f4a8bdda8183ddc5befa77a4448dbb485826')
|
||||
|
||||
@@ -12,6 +12,8 @@ class Diffutils(AutotoolsPackage, GNUMirrorPackage):
|
||||
"""GNU Diffutils is a package of several programs related to finding
|
||||
differences between files."""
|
||||
|
||||
tags = ['core-packages']
|
||||
|
||||
executables = [r'^diff$']
|
||||
|
||||
homepage = "https://www.gnu.org/software/diffutils/"
|
||||
|
||||
@@ -45,8 +45,6 @@ class Dihydrogen(CMakePackage, CudaPackage, ROCmPackage):
|
||||
description='Enable ROCm/HIP language features.')
|
||||
variant('shared', default=True,
|
||||
description='Enables the build of shared libraries')
|
||||
variant('docs', default=False,
|
||||
description='Builds with support for building documentation')
|
||||
|
||||
# Variants related to BLAS
|
||||
variant('openmp_blas', default=False,
|
||||
@@ -117,9 +115,6 @@ class Dihydrogen(CMakePackage, CudaPackage, ROCmPackage):
|
||||
depends_on('ninja', type='build')
|
||||
depends_on('cmake@3.17.0:', type='build')
|
||||
|
||||
depends_on('py-breathe', type='build', when='+docs')
|
||||
depends_on('doxygen', type='build', when='+docs')
|
||||
|
||||
depends_on('llvm-openmp', when='%apple-clang +openmp')
|
||||
|
||||
# TODO: Debug linker errors when NVSHMEM is built with UCX
|
||||
|
||||
@@ -12,6 +12,8 @@ class Direnv(Package):
|
||||
homepage = "https://direnv.net/"
|
||||
url = "https://github.com/direnv/direnv/archive/v2.11.3.tar.gz"
|
||||
|
||||
maintainers = ['acastanedam']
|
||||
|
||||
version('2.30.2', sha256='a2ee14ebdbd9274ba8bf0896eeb94e98947a056611058dedd4dbb43167e076f3')
|
||||
version('2.20.0', sha256='cc72525b0a5b3c2ab9a52a3696e95562913cd431f923bcc967591e75b7541bff')
|
||||
version('2.11.3', sha256='2d34103a7f9645059270763a0cfe82085f6d9fe61b2a85aca558689df0e7b006')
|
||||
@@ -19,4 +21,4 @@ class Direnv(Package):
|
||||
depends_on('go', type='build')
|
||||
|
||||
def install(self, spec, prefix):
|
||||
make('install', "DESTDIR=%s" % prefix)
|
||||
make('install', "PREFIX=%s" % prefix)
|
||||
|
||||
@@ -13,7 +13,9 @@ class Dmtcp(AutotoolsPackage):
|
||||
|
||||
homepage = "http://dmtcp.sourceforge.net/"
|
||||
url = "https://sourceforge.net/projects/dmtcp/files/2.6.0/dmtcp-2.6.0.tar.gz/download"
|
||||
git = "https://github.com/dmtcp/dmtcp.git"
|
||||
|
||||
version('master', branch='master')
|
||||
version('2.6.0', sha256='3ed62a86dd0cb9c828b93ee8c7c852d6f9c96a0efa48bcfe867521adf7bced68')
|
||||
version('2.5.2', sha256='0e3e5e15bd401b7b6937f2b678cd7d6a252eab0a143d5740b89cc3bebb4282be')
|
||||
patch('for_aarch64.patch', when='@2.6.0 target=aarch64:')
|
||||
|
||||
74
var/spack/repos/builtin/packages/easi/package.py
Normal file
74
var/spack/repos/builtin/packages/easi/package.py
Normal file
@@ -0,0 +1,74 @@
|
||||
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
|
||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||
#
|
||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||
|
||||
|
||||
import os
|
||||
import shutil
|
||||
|
||||
from spack import *
|
||||
from spack.cmd.pkg import GitExe
|
||||
|
||||
|
||||
class Easi(CMakePackage):
    """easi is a library for the Easy Initialization of models
    in three (or less or more) dimensional domains.
    """

    homepage = "https://easyinit.readthedocs.io"
    git = "https://github.com/SeisSol/easi.git"

    maintainers = ['ThrudPrimrose', 'ravil-mobile', 'krenzland']

    version('develop', branch='master')
    version('1.1.2', tag='v1.1.2')

    variant('asagi', default=True, description='build with ASAGI support')
    # Exactly one JIT backend may be selected (multi=False).
    variant('jit', default='impalajit', description='build with JIT support',
            values=('impalajit', 'impalajit-llvm', 'lua'),
            multi=False)

    depends_on('asagi +mpi +mpi3', when='+asagi')
    depends_on('yaml-cpp@0.6.2')
    depends_on('impalajit-llvm@1.0.0', when='jit=impalajit-llvm')
    depends_on('lua@5.3.2', when='jit=lua')
    # git is needed at build time to fetch the classic ImpalaJIT sources.
    depends_on('git', type='build', when='jit=impalajit')

    # The classic ImpalaJIT backend only supports x86-style targets.
    conflicts('jit=impalajit', when='target=aarch64:')
    conflicts('jit=impalajit', when='target=ppc64:')
    conflicts('jit=impalajit', when='target=ppc64le:')
    conflicts('jit=impalajit', when='target=riscv64:')

    def pre_build(self):
        """Clone, build and install the classic ImpalaJIT backend.

        The classic backend has no Spack package of its own, so it is
        fetched and installed into this package's prefix before easi is
        configured. No-op unless ``jit=impalajit`` was selected.
        """
        if "jit=impalajit" not in self.spec:
            return

        src_dir = join_path(self.stage.source_path, 'impalajit')
        # Start from a pristine checkout if a previous attempt left one.
        if os.path.isdir(src_dir):
            shutil.rmtree(src_dir)

        # NOTE(review): the clone is not pinned to a tag or commit, so
        # this step is not reproducible — confirm whether a fixed
        # revision should be used.
        git_exe = GitExe()
        git_exe('clone', 'https://github.com/uphoffc/ImpalaJIT.git', src_dir)

        with working_dir(join_path(src_dir, 'build'), create=True):
            cmake('..', '-DCMAKE_INSTALL_PREFIX={0}'.format(self.spec.prefix))
            make()
            make('install')

    def cmake_args(self):
        """Assemble easi's CMake options.

        Also triggers the out-of-band ImpalaJIT build first, since the
        classic backend must be installed before configuration.
        """
        self.pre_build()

        spec = self.spec
        args = [self.define_from_variant('ASAGI', 'asagi')]

        # The jit variant is single-valued, so these branches are
        # mutually exclusive.
        if 'jit=lua' in spec:
            args.append(self.define('IMPALAJIT', False))
            args.append(self.define('LUA', True))
        elif 'jit=impalajit' in spec or 'jit=impalajit-llvm' in spec:
            args.append(self.define('IMPALAJIT', True))
            backend = 'llvm' if 'jit=impalajit-llvm' in spec else 'original'
            args.append(self.define('IMPALAJIT_BACKEND', backend))

        return args
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user