Compare commits: `e4s-21.02` ... `per-instan` (364 commits)
Commits (364, by SHA1):

71b877b1d2, 8c3d929d23, 1803070d3d, 1f050993bc, 51d81af750, dbe210522c, fc12fb46a1, 0722f59cb2, c41c7ce638, 95ad5b882c,
a1aec1450b, 45e076dd20, 73311bc6cd, 3d74592be2, 413c422e53, 3d0adf3a8a, 4d02439820, c4b38e5102, 495b9bcb60, 5bc1ac6ca1,
9e316b30d1, dca23f3c37, 0c699d7494, a4e18a9544, 76bfdbfb96, 8899a08263, 6a3bef4bb0, 982d1afd01, 6bfc0504cb, adf19379cc,
f57626a7c4, 138312efab, 364b359c68, 623edc6674, 22425da038, c2a9b7aa08, 2fba3e2bd3, 3a33d45d6d, 20fc160f21, 95880d0dac,
01ea3e3469, 6e3f7781d4, 951c4a6cf2, 62f9de535c, af15801649, 155c378406, 28f2349d1e, c128c58a93, 29e36e7a17, d18de945b7,
2741b69791, d203456790, c9043b6258, 346d880384, d838b8f518, c77267f0a1, d49c0148b8, 8c04354056, e56e72d910, 718e65492e,
fa5ddcaa41, 5e0aa8c2e7, 8eb5f017df, 2b22d855b0, c9ce5a3292, f4b56620e5, df8dcef963, 622b6d761d, 404d4dde48, 3688f7aea0,
bc127ec53e, 26bf91c690, b48fbeed69, 83fde4e849, 4a957509db, 9af0b11b86, 0c4f140d88, 5a30e5795f, 4bdd014c82, b052ff9be0,
1acaaea34e, 48023b2932, 86fad97165, ac15e3ec96, 06cd29ad58, 03e10b3cde, 46469786d8, 17e51b2989, 806f02438d, 44e70f40ce,
c84c0187ea, ec0dc67e73, dfb0da2c85, f8ea3c5285, 222666e400, a4fbaf2f89, f8a17371f6, aa79689c78, 8a9af11403, d4516057e1,
e1abb5cbc8, c055ffc79b, ac6976dee5, 9b5df573c0, c7e481de77, 9d5937725e, ce64a4170f, bba41f16d4, 32f6fdce7c, 6d8f59e6bc,
ece71f1648, 07a9723c63, 4aa24095c0, c4e81b9cdb, cbb64156cb, 381da114f0, ab6c543948, b8815e577b, 187c23d3c5, 0209d15ffd,
4c57c88d9e, 3305a39c55, 629f94b4e1, 245d67ed5c, 020c60649e, 43f4d2da99, 22a93300b4, 396936d241, 818b416742, 4df1f62fd3,
4f1a76a0d1, d07cb59bef, 02c3b23a15, 628f9eadb5, a405811bfe, 196db55855, c6f3f57c31, f901c61e68, 8cf6ad9917, 4c9c5393f1,
15645147ed, 441d09cc27, e5103b6914, e57053bd32, d934363ae5, 251e4282f1, 2d623bab31, 96e394845b, e6c8fa1311, 43dd7b84c0,
9a565e0ec7, bac3ac4fdb, 8d2944bf77, 889e83a9b5, 047bb490cb, 2032c608e8, 9e62eadce0, 364b9e7a27, 857f6392a2, b75b029706,
12cad38aef, 18fbd58fe6, 912606ad9a, c9ba95cc5c, 603331e669, 2aab415f3d, b304b4bdb0, d36de79ba0, 3e570ce694, e7cba04b95,
ada6ecc797, ce7bde24d4, 475c877fc9, 8ffeb96c77, d7f1ff68fb, 195341113e, 4f1d9d6095, f949ae772d, 6ba7632d2b, d166ca91f1,
70c505a0b8, 78d1b28b72, 3c493ae629, 9c7669ed67, 0c8df39fce, 3e4a24c878, 6ff717f395, d22bad13b1, 5b2a54952c, 746081e933,
839af2bd70, f213cf8349, 474e616a5b, cf29ee6b2b, 61baa40160, b2ece3abba, 24b7aff837, 8a8fad8474, 650f24f4d3, 5790ec5359,
a2e9a9076f, 19b163e49d, c6440eb23c, 40147d9955, 21b2d7109a, e13ce390fe, ca4a566443, bb4ccdfa91, 952f76bda3, ab9580c168,
e11f5df7dc, d0610f7a39, 31cf0e9c0d, 821769c95e, f346db83ac, f73182fd98, 7b97fe206b, 43473995ad, d2ce4b565c, f0275e84ad,
05dfd94ed7, c65e660fbe, ed4a468941, df92f73c72, fc15f85986, 81e3978759, f7c234f14c, d2e759e614, 8849312e68, 8de96cde08,
114bc95526, 6da24dee91, 8ff898a95e, 8c613fadf0, 55c1e76408, 1492461b8e, 3006da8197, d001a8945a, efecb22845, 303230fe7f,
4d1d435330, 1fa16c1b9b, 26e73e848d, 2ba00262e5, bf4c4e9b32, 4e185d48d1, d6fbf8ad57, 8395df6b5b, 1659beb220, ccc7ed0af9,
68d5f348e5, 126ab70420, 3d17936866, bc40c3af91, a52beca2cf, 16adb5dd8f, b642871b23, c25a723d74, 90ace2d0ec, 2648fe902d,
39e30520ce, a0b5dcca3c, 8d3272f82d, 7aa5cc241d, e9c399110e, 18ba1b8923, ef9a607c4c, b5916451fd, 89a4c9bb6d, da69f6eda8,
4097a0c93f, ad9db839cf, 098b7b2e50, 71dd8ed265, 3b1b51e90d, ca3171d002, aa8079556a, ea45b95039, daf2ef9682, 3c1b305752,
70cd948852, 290df7a14a, d7c4f7b7e6, d3b10b04c1, 97afb34ac3, dc275e1f83, 346e6b3b77, 5fb0ff3906, 4261de5434, c2925a1704,
03756583c3, c6b632a1f9, c460013a91, e112a26513, 92b7805e40, 8bdd6c6f6d, 10c7831366, 459b991e78, 7f01d1dc40, e9dfc50d1b,
04cd884c95, 71954d9665, d7dd12edab, 49c015e391, 7e143bab6a, f1d0a819e9, 631598014a, 10e9e142b7, 6d54df1ba4, 58d9f5c114,
ac66624196, d343304bdc, 796588b2fd, 25fb753cd2, 0c38c86403, 16c07e6abd, 8e1b62ee68, c7306db061, 136fa9c203, dc5022a9d9,
eeff906ccc, ae19ddbfcb, 1e0d311547, 50ffb4b868, 61e00ac1e1, 71df23406d, 60e64bac0a, 880bb06cbf, fac785914b, 9575531f2a,
83ca1b153f, 671878740c, 88f7a00f2d, b2e0bc1ae7, bd94458552, 36ae5ac6ef, d7595d6703, f1ef260b47, 4d37b384f5, df42c0b4b6,
f5beee76c8, 20fdab7f02, 651df70213, 1ac2cc5081, 81b77873e7, 987b5a5b6f, 1abfebb5e9, 44ed19daa4, 14c1d58b5a, afb7e57d20,
c529ccaab0, 890a93bbcd, 62f8087716, 704eadbda1, d5f50203d1, 5abc7e59d1, edeb6c52f0, e775cbb0c0, ede100bf16, a87aa49fbf,
4f84721dc1, f2f58b70b1, f957f1936c, 5492e549a5
**.github/workflows/linux_build_tests.yaml** (vendored, 12 changed lines)
```diff
@@ -5,6 +5,18 @@ on:
     branches:
       - develop
       - releases/**
+    paths-ignore:
+      # Don't run if we only modified packages in the built-in repository
+      - 'var/spack/repos/builtin/**'
+      - '!var/spack/repos/builtin/packages/lz4/**'
+      - '!var/spack/repos/builtin/packages/mpich/**'
+      - '!var/spack/repos/builtin/packages/tut/**'
+      - '!var/spack/repos/builtin/packages/py-setuptools/**'
+      - '!var/spack/repos/builtin/packages/openjpeg/**'
+      - '!var/spack/repos/builtin/packages/r-rcpp/**'
+      - '!var/spack/repos/builtin/packages/ruby-rake/**'
+      # Don't run if we only modified documentation
+      - 'lib/spack/docs/**'
   pull_request:
     branches:
       - develop
```
**.github/workflows/linux_unit_tests.yaml** (vendored, deleted; 188 lines removed)
```yaml
name: linux tests

on:
  push:
    branches:
      - develop
      - releases/**
  pull_request:
    branches:
      - develop
      - releases/**
jobs:
  unittests:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: [2.7, 3.5, 3.6, 3.7, 3.8, 3.9]

    steps:
    - uses: actions/checkout@v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@v2
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install System packages
      run: |
        sudo apt-get -y update
        # Needed for unit tests
        sudo apt-get install -y coreutils gfortran graphviz gnupg2 mercurial
        sudo apt-get install -y ninja-build patchelf
        # Needed for kcov
        sudo apt-get -y install cmake binutils-dev libcurl4-openssl-dev
        sudo apt-get -y install zlib1g-dev libdw-dev libiberty-dev
    - name: Install Python packages
      run: |
        pip install --upgrade pip six setuptools codecov coverage
    - name: Setup git configuration
      run: |
        # Need this for the git tests to succeed.
        git --version
        . .github/workflows/setup_git.sh
    - name: Install kcov for bash script coverage
      env:
        KCOV_VERSION: 34
      run: |
        KCOV_ROOT=$(mktemp -d)
        wget --output-document=${KCOV_ROOT}/${KCOV_VERSION}.tar.gz https://github.com/SimonKagstrom/kcov/archive/v${KCOV_VERSION}.tar.gz
        tar -C ${KCOV_ROOT} -xzvf ${KCOV_ROOT}/${KCOV_VERSION}.tar.gz
        mkdir -p ${KCOV_ROOT}/build
        cd ${KCOV_ROOT}/build && cmake -Wno-dev ${KCOV_ROOT}/kcov-${KCOV_VERSION} && cd -
        make -C ${KCOV_ROOT}/build && sudo make -C ${KCOV_ROOT}/build install
    - name: Run unit tests
      env:
        COVERAGE: true
      run: |
        share/spack/qa/run-unit-tests
        coverage combine
        coverage xml
    - uses: codecov/codecov-action@v1
      with:
        flags: unittests,linux
  shell:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@v2
      with:
        python-version: 3.9
    - name: Install System packages
      run: |
        sudo apt-get -y update
        # Needed for shell tests
        sudo apt-get install -y coreutils csh zsh tcsh fish dash bash
        # Needed for kcov
        sudo apt-get -y install cmake binutils-dev libcurl4-openssl-dev
        sudo apt-get -y install zlib1g-dev libdw-dev libiberty-dev
    - name: Install Python packages
      run: |
        pip install --upgrade pip six setuptools codecov coverage
    - name: Setup git configuration
      run: |
        # Need this for the git tests to succeed.
        git --version
        . .github/workflows/setup_git.sh
    - name: Install kcov for bash script coverage
      env:
        KCOV_VERSION: 38
      run: |
        KCOV_ROOT=$(mktemp -d)
        wget --output-document=${KCOV_ROOT}/${KCOV_VERSION}.tar.gz https://github.com/SimonKagstrom/kcov/archive/v${KCOV_VERSION}.tar.gz
        tar -C ${KCOV_ROOT} -xzvf ${KCOV_ROOT}/${KCOV_VERSION}.tar.gz
        mkdir -p ${KCOV_ROOT}/build
        cd ${KCOV_ROOT}/build && cmake -Wno-dev ${KCOV_ROOT}/kcov-${KCOV_VERSION} && cd -
        make -C ${KCOV_ROOT}/build && sudo make -C ${KCOV_ROOT}/build install
    - name: Run shell tests
      env:
        COVERAGE: true
      run: |
        share/spack/qa/run-shell-tests
    - uses: codecov/codecov-action@v1
      with:
        flags: shelltests,linux

  centos6:
    # Test for Python2.6 run on Centos 6
    runs-on: ubuntu-latest
    container: spack/github-actions:centos6
    steps:
    - name: Run unit tests
      env:
        HOME: /home/spack-test
      run: |
        whoami && echo $HOME && cd $HOME
        git clone https://github.com/spack/spack.git && cd spack
        git fetch origin ${{ github.ref }}:test-branch
        git checkout test-branch
        share/spack/qa/run-unit-tests

  rhel8-platform-python:
    runs-on: ubuntu-latest
    container: registry.access.redhat.com/ubi8/ubi
    steps:
    - name: Install dependencies
      run: |
        dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
    - uses: actions/checkout@v2
    - name: Setup repo and non-root user
      run: |
        git --version
        git fetch --unshallow
        . .github/workflows/setup_git.sh
        useradd spack-test
        chown -R spack-test .
    - name: Run unit tests
      env:
        SPACK_PYTHON: /usr/libexec/platform-python
      shell: runuser -u spack-test -- bash {0}
      run: |
        source share/spack/setup-env.sh
        spack unit-test -k 'not svn and not hg' -x --verbose

  clingo:
    # Test for the clingo based solver
    runs-on: ubuntu-latest
    container: spack/github-actions:clingo
    steps:
    - name: Run unit tests
      run: |
        whoami && echo PWD=$PWD && echo HOME=$HOME && echo SPACK_TEST_SOLVER=$SPACK_TEST_SOLVER
        which clingo && clingo --version
        git clone https://github.com/spack/spack.git && cd spack
        git fetch origin ${{ github.ref }}:test-branch
        git checkout test-branch
        . share/spack/setup-env.sh
        spack compiler find
        spack solve mpileaks%gcc
        coverage run $(which spack) unit-test -v
        coverage combine
        coverage xml
    - uses: codecov/codecov-action@v1
      with:
        flags: unittests,linux,clingo
  clingo-cffi:
    # Test for the clingo based solver (using clingo-cffi)
    runs-on: ubuntu-latest
    container: spack/github-actions:clingo-cffi
    steps:
    - name: Run unit tests
      run: |
        whoami && echo PWD=$PWD && echo HOME=$HOME && echo SPACK_TEST_SOLVER=$SPACK_TEST_SOLVER
        python3 -c "import clingo; print(hasattr(clingo.Symbol, '_rep'), clingo.__version__)"
        git clone https://github.com/spack/spack.git && cd spack
        git fetch origin ${{ github.ref }}:test-branch
        git checkout test-branch
        . share/spack/setup-env.sh
        spack compiler find
        spack solve mpileaks%gcc
        coverage run $(which spack) unit-test -v
        coverage combine
        coverage xml
    - uses: codecov/codecov-action@v1
      with:
        flags: unittests,linux,clingo
```
**.github/workflows/macos_unit_tests.yaml** (vendored, deleted; 44 lines removed)
```yaml
name: macos tests

on:
  push:
    branches:
      - develop
      - releases/**
  pull_request:
    branches:
      - develop
      - releases/**
jobs:
  build:
    runs-on: macos-latest
    strategy:
      matrix:
        python-version: [3.8]
    steps:
    - uses: actions/checkout@v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@v2
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install Python packages
      run: |
        pip install --upgrade pip six setuptools
        pip install --upgrade codecov coverage
        pip install --upgrade flake8 pep8-naming mypy
    - name: Setup Homebrew packages
      run: |
        brew install dash fish gcc gnupg2 kcov
    - name: Run unit tests
      run: |
        git --version
        . .github/workflows/setup_git.sh
        . share/spack/setup-env.sh
        coverage run $(which spack) unit-test
        coverage combine
        coverage xml
    - uses: codecov/codecov-action@v1
      with:
        file: ./coverage.xml
        flags: unittests,macos
```
**.github/workflows/style_and_docs.yaml** (vendored, deleted; 65 lines removed)
```yaml
name: style and docs

on:
  push:
    branches:
      - develop
      - releases/**
  pull_request:
    branches:
      - develop
      - releases/**
jobs:
  validate:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@v2
    - uses: actions/setup-python@v2
      with:
        python-version: 3.9
    - name: Install Python Packages
      run: |
        pip install --upgrade pip
        pip install --upgrade vermin
    - name: Minimum Version (Spack's Core)
      run: vermin --backport argparse --backport typing -t=2.6- -t=3.5- -v lib/spack/spack/ lib/spack/llnl/ bin/
    - name: Minimum Version (Repositories)
      run: vermin --backport argparse --backport typing -t=2.6- -t=3.5- -v var/spack/repos
  style:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@v2
      with:
        python-version: 3.9
    - name: Install Python packages
      run: |
        pip install --upgrade pip six setuptools flake8 mypy>=0.800 black
    - name: Setup git configuration
      run: |
        # Need this for the git tests to succeed.
        git --version
        . .github/workflows/setup_git.sh
    - name: Run style tests
      run: |
        share/spack/qa/run-style-tests
  documentation:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@v2
    - uses: actions/setup-python@v2
      with:
        python-version: 3.9
    - name: Install System packages
      run: |
        sudo apt-get -y update
        sudo apt-get install -y coreutils ninja-build graphviz
    - name: Install Python packages
      run: |
        pip install --upgrade pip six setuptools
        pip install --upgrade -r lib/spack/docs/requirements.txt
    - name: Build documentation
      run: |
        share/spack/qa/run-doc-tests
```
**.github/workflows/unit_tests.yaml** (vendored, new file; 360 lines added)
```yaml
name: linux tests

on:
  push:
    branches:
      - develop
      - releases/**
  pull_request:
    branches:
      - develop
      - releases/**
jobs:
  # Validate that the code can be run on all the Python versions
  # supported by Spack
  validate:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@v2
    - uses: actions/setup-python@v2
      with:
        python-version: 3.9
    - name: Install Python Packages
      run: |
        pip install --upgrade pip
        pip install --upgrade vermin
    - name: vermin (Spack's Core)
      run: vermin --backport argparse --backport typing -t=2.6- -t=3.5- -v lib/spack/spack/ lib/spack/llnl/ bin/
    - name: vermin (Repositories)
      run: vermin --backport argparse --backport typing -t=2.6- -t=3.5- -v var/spack/repos
  # Run style checks on the files that have been changed
  style:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@v2
      with:
        python-version: 3.9
    - name: Install Python packages
      run: |
        pip install --upgrade pip six setuptools flake8 mypy>=0.800 black
    - name: Setup git configuration
      run: |
        # Need this for the git tests to succeed.
        git --version
        . .github/workflows/setup_git.sh
    - name: Run style tests
      run: |
        share/spack/qa/run-style-tests
  # Build the documentation
  documentation:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@v2
    - uses: actions/setup-python@v2
      with:
        python-version: 3.9
    - name: Install System packages
      run: |
        sudo apt-get -y update
        sudo apt-get install -y coreutils ninja-build graphviz
    - name: Install Python packages
      run: |
        pip install --upgrade pip six setuptools
        pip install --upgrade -r lib/spack/docs/requirements.txt
    - name: Build documentation
      run: |
        share/spack/qa/run-doc-tests

  # Check which files have been updated by the PR
  changes:
    runs-on: ubuntu-latest
    # Set job outputs to values from filter step
    outputs:
      core: ${{ steps.filter.outputs.core }}
      packages: ${{ steps.filter.outputs.packages }}
      with_coverage: ${{ steps.coverage.outputs.with_coverage }}
    steps:
    - uses: actions/checkout@v2
      if: ${{ github.event_name == 'push' }}
      with:
        fetch-depth: 0
    # For pull requests it's not necessary to checkout the code
    - uses: dorny/paths-filter@v2
      id: filter
      with:
        # See https://github.com/dorny/paths-filter/issues/56 for the syntax used below
        filters: |
          core:
          - './!(var/**)/**'
          packages:
          - 'var/**'
    # Some links for easier reference:
    #
    # "github" context: https://docs.github.com/en/actions/reference/context-and-expression-syntax-for-github-actions#github-context
    # job outputs: https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idoutputs
    # setting environment variables from earlier steps: https://docs.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-an-environment-variable
    #
    - id: coverage
      # Run the subsequent jobs with coverage if core has been modified,
      # regardless of whether this is a pull request or a push to a branch
      run: |
        echo Core changes: ${{ steps.filter.outputs.core }}
        echo Event name: ${{ github.event_name }}
        if [ "${{ steps.filter.outputs.core }}" == "true" ]
        then
          echo "::set-output name=with_coverage::true"
        else
          echo "::set-output name=with_coverage::false"
        fi

  # Run unit tests with different configurations on linux
  unittests:
    needs: [ validate, style, documentation, changes ]
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: [2.7, 3.5, 3.6, 3.7, 3.8, 3.9]
        concretizer: ['original', 'clingo']
    steps:
    - uses: actions/checkout@v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@v2
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install System packages
      run: |
        sudo apt-get -y update
        # Needed for unit tests
        sudo apt-get install -y coreutils gfortran graphviz gnupg2 mercurial
        sudo apt-get install -y ninja-build patchelf
        # Needed for kcov
        sudo apt-get -y install cmake binutils-dev libcurl4-openssl-dev
        sudo apt-get -y install zlib1g-dev libdw-dev libiberty-dev
    - name: Install Python packages
      run: |
        pip install --upgrade pip six setuptools codecov coverage
    - name: Setup git configuration
      run: |
        # Need this for the git tests to succeed.
        git --version
        . .github/workflows/setup_git.sh
    - name: Install kcov for bash script coverage
      if: ${{ needs.changes.outputs.with_coverage == 'true' }}
      env:
        KCOV_VERSION: 34
      run: |
        KCOV_ROOT=$(mktemp -d)
        wget --output-document=${KCOV_ROOT}/${KCOV_VERSION}.tar.gz https://github.com/SimonKagstrom/kcov/archive/v${KCOV_VERSION}.tar.gz
        tar -C ${KCOV_ROOT} -xzvf ${KCOV_ROOT}/${KCOV_VERSION}.tar.gz
        mkdir -p ${KCOV_ROOT}/build
        cd ${KCOV_ROOT}/build && cmake -Wno-dev ${KCOV_ROOT}/kcov-${KCOV_VERSION} && cd -
        make -C ${KCOV_ROOT}/build && sudo make -C ${KCOV_ROOT}/build install
    - name: Bootstrap clingo from sources
      if: ${{ matrix.concretizer == 'clingo' }}
      run: |
        . share/spack/setup-env.sh
        spack external find --not-buildable cmake bison
        spack -v solve zlib
    - name: Run unit tests (full suite with coverage)
      if: ${{ needs.changes.outputs.with_coverage == 'true' }}
      env:
        COVERAGE: true
        SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
      run: |
        share/spack/qa/run-unit-tests
        coverage combine
        coverage xml
    - name: Run unit tests (reduced suite without coverage)
      if: ${{ needs.changes.outputs.with_coverage == 'false' }}
      env:
        ONLY_PACKAGES: true
        SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
      run: |
        share/spack/qa/run-unit-tests
    - uses: codecov/codecov-action@v1
      if: ${{ needs.changes.outputs.with_coverage == 'true' }}
      with:
        flags: unittests,linux,${{ matrix.concretizer }}
  # Test shell integration
  shell:
    needs: [ validate, style, documentation, changes ]
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@v2
      with:
        python-version: 3.9
    - name: Install System packages
      run: |
        sudo apt-get -y update
        # Needed for shell tests
        sudo apt-get install -y coreutils csh zsh tcsh fish dash bash
        # Needed for kcov
        sudo apt-get -y install cmake binutils-dev libcurl4-openssl-dev
        sudo apt-get -y install zlib1g-dev libdw-dev libiberty-dev
    - name: Install Python packages
      run: |
        pip install --upgrade pip six setuptools codecov coverage
    - name: Setup git configuration
      run: |
        # Need this for the git tests to succeed.
        git --version
        . .github/workflows/setup_git.sh
    - name: Install kcov for bash script coverage
      if: ${{ needs.changes.outputs.with_coverage == 'true' }}
      env:
        KCOV_VERSION: 38
      run: |
        KCOV_ROOT=$(mktemp -d)
        wget --output-document=${KCOV_ROOT}/${KCOV_VERSION}.tar.gz https://github.com/SimonKagstrom/kcov/archive/v${KCOV_VERSION}.tar.gz
        tar -C ${KCOV_ROOT} -xzvf ${KCOV_ROOT}/${KCOV_VERSION}.tar.gz
        mkdir -p ${KCOV_ROOT}/build
        cd ${KCOV_ROOT}/build && cmake -Wno-dev ${KCOV_ROOT}/kcov-${KCOV_VERSION} && cd -
        make -C ${KCOV_ROOT}/build && sudo make -C ${KCOV_ROOT}/build install
    - name: Run shell tests (without coverage)
      if: ${{ needs.changes.outputs.with_coverage == 'false' }}
      run: |
        share/spack/qa/run-shell-tests
    - name: Run shell tests (with coverage)
      if: ${{ needs.changes.outputs.with_coverage == 'true' }}
      env:
        COVERAGE: true
      run: |
        share/spack/qa/run-shell-tests
    - uses: codecov/codecov-action@v1
      if: ${{ needs.changes.outputs.with_coverage == 'true' }}
      with:
        flags: shelltests,linux
  # Test for Python2.6 run on Centos 6
  centos6:
    needs: [ validate, style, documentation, changes ]
    runs-on: ubuntu-latest
    container: spack/github-actions:centos6
    steps:
    - name: Run unit tests (full test-suite)
      # The CentOS 6 container doesn't run with coverage, but
      # under the same conditions it runs the full test suite
      if: ${{ needs.changes.outputs.with_coverage == 'true' }}
      env:
        HOME: /home/spack-test
      run: |
        whoami && echo $HOME && cd $HOME
        git clone https://github.com/spack/spack.git && cd spack
        git fetch origin ${{ github.ref }}:test-branch
        git checkout test-branch
        share/spack/qa/run-unit-tests
    - name: Run unit tests (only package tests)
      if: ${{ needs.changes.outputs.with_coverage == 'false' }}
      env:
        HOME: /home/spack-test
        ONLY_PACKAGES: true
      run: |
        whoami && echo $HOME && cd $HOME
        git clone https://github.com/spack/spack.git && cd spack
        git fetch origin ${{ github.ref }}:test-branch
        git checkout test-branch
        share/spack/qa/run-unit-tests

  # Test RHEL8 UBI with platform Python. This job is run
  # only on PRs modifying core Spack
  rhel8-platform-python:
    needs: [ validate, style, documentation, changes ]
    runs-on: ubuntu-latest
    if: ${{ needs.changes.outputs.with_coverage == 'true' }}
    container: registry.access.redhat.com/ubi8/ubi
    steps:
    - name: Install dependencies
      run: |
        dnf install -y \
            bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
            make patch tcl unzip which xz
    - uses: actions/checkout@v2
    - name: Setup repo and non-root user
      run: |
        git --version
        git fetch --unshallow
        . .github/workflows/setup_git.sh
        useradd spack-test
        chown -R spack-test .
    - name: Run unit tests
      env:
        SPACK_PYTHON: /usr/libexec/platform-python
      shell: runuser -u spack-test -- bash {0}
      run: |
        source share/spack/setup-env.sh
        spack unit-test -k 'not svn and not hg' -x --verbose
  # Test for the clingo based solver (using clingo-cffi)
  clingo-cffi:
    needs: [ validate, style, documentation, changes ]
    runs-on: ubuntu-latest
    container: spack/github-actions:clingo-cffi
    steps:
    - name: Run unit tests
      run: |
        whoami && echo PWD=$PWD && echo HOME=$HOME && echo SPACK_TEST_SOLVER=$SPACK_TEST_SOLVER
        python3 -c "import clingo; print(hasattr(clingo.Symbol, '_rep'), clingo.__version__)"
        git clone https://github.com/spack/spack.git && cd spack
        git fetch origin ${{ github.ref }}:test-branch
        git checkout test-branch
        . share/spack/setup-env.sh
        spack compiler find
        spack solve mpileaks%gcc
        if [ "${{ needs.changes.outputs.with_coverage }}" == "true" ]
        then
          coverage run $(which spack) unit-test -v -x
          coverage combine
          coverage xml
        else
          $(which spack) unit-test -m "not maybeslow" -k "package_sanity"
        fi
    - uses: codecov/codecov-action@v1
      if: ${{ needs.changes.outputs.with_coverage == 'true' }}
      with:
        flags: unittests,linux,clingo
  # Run unit tests on MacOS
  build:
    needs: [ validate, style, documentation, changes ]
    runs-on: macos-latest
    strategy:
      matrix:
        python-version: [3.8]
    steps:
    - uses: actions/checkout@v2
      with:
        fetch-depth: 0
    - uses: actions/setup-python@v2
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install Python packages
      run: |
        pip install --upgrade pip six setuptools
        pip install --upgrade codecov coverage
        pip install --upgrade flake8 pep8-naming mypy
    - name: Setup Homebrew packages
      run: |
        brew install dash fish gcc gnupg2 kcov
    - name: Run unit tests
      run: |
        git --version
        . .github/workflows/setup_git.sh
        . share/spack/setup-env.sh
        if [ "${{ needs.changes.outputs.with_coverage }}" == "true" ]
        then
          coverage run $(which spack) unit-test -x
          coverage combine
          coverage xml
        else
          echo "ONLY PACKAGE RECIPES CHANGED [skipping coverage]"
          $(which spack) unit-test -x -m "not maybeslow" -k "package_sanity"
        fi
    - uses: codecov/codecov-action@v1
      if: ${{ needs.changes.outputs.with_coverage == 'true' }}
      with:
        file: ./coverage.xml
        flags: unittests,macos
```
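The `changes` job above delegates path classification to `dorny/paths-filter@v2` and then derives `with_coverage` from the `core` output. As a hedged illustration (not part of the diff), the filter rules reduce to a simple split: changed files under `var/` count as `packages`, everything else as `core`, and coverage runs exactly when core changed. The function below is an assumed plain-Python restatement, with `startswith` standing in for the glob patterns:

```python
# Illustrative sketch of the `changes` job's decision logic; the
# function name and string matching are assumptions for clarity, not
# code from this PR (the workflow uses dorny/paths-filter@v2 globs).
def classify(changed_files):
    core = any(not path.startswith("var/") for path in changed_files)      # './!(var/**)/**'
    packages = any(path.startswith("var/") for path in changed_files)      # 'var/**'
    # Coverage (and the RHEL8 job) run only when core files changed.
    return {"core": core, "packages": packages, "with_coverage": core}

# A PR touching only a package recipe gets the reduced, uncovered suite:
print(classify(["var/spack/repos/builtin/packages/lz4/package.py"]))
# {'core': False, 'packages': True, 'with_coverage': False}
```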
**README.md** (badge images were lost in extraction; the removed/added markers below are deduced from the hunk counts)

```diff
@@ -1,7 +1,6 @@
 # <img src="https://cdn.rawgit.com/spack/spack/develop/share/spack/logo/spack-logo.svg" width="64" valign="middle" alt="Spack"/> Spack
 
 [](https://github.com/spack/spack/actions)
 [](https://github.com/spack/spack/actions)
-[](https://github.com/spack/spack/actions)
-[](https://github.com/spack/spack/actions)
+[](https://github.com/spack/spack/actions?query=workflow%3A%22macOS+builds+nightly%22)
 [](https://codecov.io/gh/spack/spack)
```
**bin/spack** (23 changed lines)
```diff
@@ -29,10 +29,15 @@ from __future__ import print_function
 import os
 import sys
 
-if sys.version_info[:2] < (2, 6):
+min_python3 = (3, 5)
+
+if sys.version_info[:2] < (2, 6) or (
+    sys.version_info[:2] >= (3, 0) and sys.version_info[:2] < min_python3
+):
     v_info = sys.version_info[:3]
-    sys.exit("Spack requires Python 2.6 or higher."
-             "This is Python %d.%d.%d." % v_info)
+    msg = "Spack requires Python 2.6, 2.7 or %d.%d or higher " % min_python3
+    msg += "You are running spack with Python %d.%d.%d." % v_info
+    sys.exit(msg)
 
 # Find spack's location and its prefix.
 spack_file = os.path.realpath(os.path.expanduser(__file__))
@@ -46,9 +51,9 @@ sys.path.insert(0, spack_lib_path)
 spack_external_libs = os.path.join(spack_lib_path, "external")
 
 if sys.version_info[:2] <= (2, 7):
-    sys.path.insert(0, os.path.join(spack_external_libs, 'py2'))
+    sys.path.insert(0, os.path.join(spack_external_libs, "py2"))
 if sys.version_info[:2] == (2, 6):
-    sys.path.insert(0, os.path.join(spack_external_libs, 'py26'))
+    sys.path.insert(0, os.path.join(spack_external_libs, "py26"))
 
 sys.path.insert(0, spack_external_libs)
 
@@ -58,11 +63,11 @@ sys.path.insert(0, spack_external_libs)
 # Briefly: ruamel.yaml produces a .pth file when installed with pip that
 # makes the site installed package the preferred one, even though sys.path
 # is modified to point to another version of ruamel.yaml.
-if 'ruamel.yaml' in sys.modules:
-    del sys.modules['ruamel.yaml']
+if "ruamel.yaml" in sys.modules:
+    del sys.modules["ruamel.yaml"]
 
-if 'ruamel' in sys.modules:
-    del sys.modules['ruamel']
+if "ruamel" in sys.modules:
+    del sys.modules["ruamel"]
 
 import spack.main  # noqa
```
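The new guard accepts Python 2.6, 2.7, and 3.5+ while rejecting the 3.0 to 3.4 range that the old check silently allowed. A standalone restatement of the same predicate, with an illustrative `check_version` helper that is not part of the PR:

```python
# Restates the version gate added to bin/spack above as a pure function
# so it can be exercised directly; check_version itself is illustrative.
min_python3 = (3, 5)

def check_version(version_info):
    """Return True if this interpreter version may run Spack."""
    return not (version_info[:2] < (2, 6) or
                ((3, 0) <= version_info[:2] < min_python3))

assert check_version((2, 7, 18))      # legacy Python 2 still supported
assert not check_version((3, 4, 10))  # 3.0 <= 3.4 < 3.5: now rejected
assert check_version((3, 9, 1))       # modern Python 3: accepted
```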
**etc/spack/defaults/config.yaml**

```diff
@@ -40,35 +40,32 @@ config:
     lmod: $spack/share/spack/lmod
 
 
 # Temporary locations Spack can try to use for builds.
 #
-# Recommended options are given below.
+# `build_stage` determines where Spack builds packages.
 #
-# Builds can be faster in temporary directories on some (e.g., HPC) systems.
-# Specifying `$tempdir` will ensure use of the default temporary directory
-# (i.e., ``$TMP` or ``$TMPDIR``).
+# The default build location is `$tempdir/$user/spack-stage/$instance`.
+# `$tempdir` indicates that we should build in a temporary directory
+# (i.e., ``$TMP` or ``$TMPDIR``). On most systems (especially HPC
+# machines), building in a temporary directory is significantly faster
+# than other locations. `$user` ensures that the directory is unique by
+# user, so different users do not fight over Spack's build location.
+# Finally, `$instance` is an 8-digit hash that is unique per instance
+# of Spack. This ensures that different Spack instances do not fight
+# over build locations.
 #
-# Another option that prevents conflicts and potential permission issues is
-# to specify `~/.spack/stage`, which ensures each user builds in their home
-# directory.
+# The second choice, if Spack cannot create the first one for some
+# reason, is `~/.spack/stage/$instance`. This is unique to each user's
+# home directory, and it is also unique to each Spack instance.
 #
-# A more traditional path uses the value of `$spack/var/spack/stage`, which
-# builds directly inside Spack's instance without staging them in a
-# temporary space. Problems with specifying a path inside a Spack instance
-# are that it precludes its use as a system package and its ability to be
-# pip installable.
-#
-# In any case, if the username is not already in the path, Spack will append
-# the value of `$user` in an attempt to avoid potential conflicts between
-# users in shared temporary spaces.
+# These choices both have the username in the path. If the username is
+# NOT in your chosen `build_stage` location, Spack will append it
+# anyway, to avoid conflicts among users in shared temporary spaces.
 #
-# The build stage can be purged with `spack clean`, so it is important
-# to choose a directory that is ONLY used by Spack so that you do not
-# accidentally wipe out files that have nothing to do with Spack.
+# The build stage can be purged with `spack clean --stage` and
+# `spack clean -a`, so it is important that the specified directory uniquely
+# identifies Spack staging to avoid accidentally wiping out non-Spack work.
 build_stage:
-  - $tempdir/$user/spack-stage
-  - ~/.spack/stage
-  # - $spack/var/spack/stage
+  - $tempdir/$user/spack-stage/$instance
+  - ~/.spack/stage/$instance
 
 # Directory in which to run tests and store test results.
 # Tests will be stored in directories named by date/time and package
```
**etc/spack/defaults/packages.yaml**

```diff
@@ -50,6 +50,7 @@ packages:
     yacc: [bison, byacc]
     flame: [libflame, amdlibflame]
     uuid: [util-linux-uuid, libuuid]
+    ziglang: [zig]
   permissions:
     read: world
     write: user
```
**lib/spack/docs/build_systems.rst**

```diff
@@ -59,6 +59,7 @@ on these ideas for each distinct build system that Spack supports:
 
    build_systems/bundlepackage
    build_systems/cudapackage
+   build_systems/inteloneapipackage
    build_systems/intelpackage
    build_systems/rocmpackage
    build_systems/custompackage
```
**lib/spack/docs/build_systems/inteloneapipackage.rst** (new file; 137 lines added)
```rst
.. Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
   Spack Project Developers. See the top-level COPYRIGHT file for details.

   SPDX-License-Identifier: (Apache-2.0 OR MIT)

.. _inteloneapipackage:


====================
IntelOneapiPackage
====================


.. contents::


oneAPI packages in Spack
========================

Spack can install and use the Intel oneAPI products. You may either
use spack to install the oneAPI tools or use the `Intel
installers`_. After installation, you may use the tools directly, or
use Spack to build packages with the tools.

The Spack Python class ``IntelOneapiPackage`` is a base class that is
used by ``IntelOneapiCompilers``, ``IntelOneapiMkl``,
``IntelOneapiTbb`` and other classes to implement the oneAPI
packages. See the :ref:<package-list> for the full list of available
oneAPI packages, or use::

  spack list -d oneAPI

For more information on a specific package, do::

  spack info <package-name>

Intel no longer releases new versions of Parallel Studio, which can be
used in Spack via the :ref:<intelpackage>. All of its components can
now be found in oneAPI.

Example
=======

We start with a simple example that will be sufficient for most
users. Install the oneAPI compilers::

  spack install intel-oneapi-compilers

Add the oneAPI compilers to the set of compilers that Spack can use::

  spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin/intel64
  spack compiler add `spack location -i intel-oneapi-compilers`/compiler/latest/linux/bin

This adds the compilers to your ``compilers.yaml``. Verify that the
compilers are available::

  spack compiler list

The ``intel-oneapi-compilers`` package includes 2 families of
compilers:

* ``intel``: ``icc``, ``icpc``, ``ifort``. Intel's *classic*
  compilers.
* ``oneapi``: ``icx``, ``icpx``, ``ifx``. Intel's new generation of
  compilers based on LLVM.

To build the ``patchelf`` Spack package with ``icc``, do::

  spack install patchelf%intel

To build with ``icx``, do::

  spack install patchelf%oneapi

In addition to compilers, oneAPI contains many libraries. The ``hdf5``
package works with any compatible MPI implementation. To build
``hdf5`` with Intel oneAPI MPI do::

  spack install hdf5 +mpi ^intel-oneapi-mpi

Using an Externally Installed oneAPI
====================================

Spack can also use oneAPI tools that are manually installed with
`Intel Installers`_. The procedures for configuring Spack to use
external compilers and libraries are different.

Compilers
---------

To use the compilers, add some information about the installation to
``compilers.yaml``. For most users, it is sufficient to do::

  spack compiler add /opt/intel/oneapi/compiler/latest/linux/bin/intel64
  spack compiler add /opt/intel/oneapi/compiler/latest/linux/bin

Adapt the paths above if you did not install the tools in the default
location. After adding the compilers, using them in Spack will be
exactly the same as if you had installed the
``intel-oneapi-compilers`` package. Another option is to manually add
the configuration to ``compilers.yaml`` as described in :ref:`Compiler
configuration <compiler-config>`.


Using oneAPI Tools Installed by Spack
=====================================

Spack can be a convenient way to install and configure compilers and
libraries, even if you do not intend to build a Spack package. If you
want to build a Makefile project using Spack-installed oneAPI compilers,
then use spack to configure your environment::

  spack load intel-oneapi-compilers

And then you can build with::

  CXX=icpx make

You can also use Spack-installed libraries. For example::

  spack load intel-oneapi-mkl

will update your environment CPATH, LIBRARY_PATH, and other
environment variables for building an application with MKL.

More information
================

This section describes basic use of oneAPI, especially if it has
changed compared to Parallel Studio. See :ref:<intelpackage> for more
information on :ref:<intel-virtual-packages>,
:ref:<intel-unrelated-packages>,
:ref:<intel-integrating-external-libraries>, and
:ref:<using-mkl-tips>.


.. _`Intel installers`: https://software.intel.com/content/www/us/en/develop/documentation/installation-guide-for-intel-oneapi-toolkits-linux/top.html
```
**lib/spack/docs/build_systems/intelpackage.rst**

```diff
@@ -137,6 +137,7 @@ If you need to save disk space or installation time, you could install the
 ``intel`` compilers-only subset (0.6 GB) and just the library packages you
 need, for example ``intel-mpi`` (0.5 GB) and ``intel-mkl`` (2.5 GB).
 
+.. _intel-unrelated-packages:
 
 """"""""""""""""""""
 Unrelated packages
@@ -358,6 +359,8 @@ affected by an advanced third method:
 Next, visit section `Selecting Intel Compilers`_ to learn how to tell
 Spack to use the newly configured compilers.
 
+.. _intel-integrating-external-libraries:
+
 """"""""""""""""""""""""""""""""""
 Integrating external libraries
 """"""""""""""""""""""""""""""""""
@@ -834,6 +837,7 @@ for example:
       compiler: [ intel@18, intel@17, gcc@4.4.7, gcc@4.9.3, gcc@7.3.0, ]
 
 
+.. _intel-virtual-packages:
 
 """"""""""""""""""""""""""""""""""""""""""""""""
 Selecting libraries to satisfy virtual packages
@@ -907,6 +911,7 @@ With the proper installation as detailed above, no special steps should be
 required when a client package specifically (and thus deliberately) requests an
 Intel package as dependency, this being one of the target use cases for Spack.
 
+.. _using-mkl-tips:
 
 """""""""""""""""""""""""""""""""""""""""""""""
 Tips for configuring client packages to use MKL
```
**lib/spack/docs/build_systems/rpackage.rst**

```diff
@@ -79,12 +79,14 @@ Description
 The first thing you'll need to add to your new package is a description.
 The top of the homepage for ``caret`` lists the following description:
 
-   caret: Classification and Regression Training
+   Classification and Regression Training
 
    Misc functions for training and plotting classification and regression models.
 
-You can either use the short description (first line), long description
-(second line), or both depending on what you feel is most appropriate.
+The first line is a short description (title) and the second line is a long
+description. In this case the description is only one line but often the
+description is several lines. Spack makes use of both short and long
+descriptions and convention is to use both when creating an R package.
 
 ^^^^^^^^
 Homepage
@@ -124,6 +126,67 @@ If you only specify the URL for the latest release, your package will
 no longer be able to fetch that version as soon as a new release comes
 out. To get around this, add the archive directory as a ``list_url``.
 
+^^^^^^^^^^^^^^^^^^^^^
+Bioconductor packages
+^^^^^^^^^^^^^^^^^^^^^
+
+Bioconductor packages are set up in a similar way to CRAN packages, but there
+are some very important distinctions. Bioconductor packages can be found at:
+https://bioconductor.org/. Bioconductor packages are R packages and so follow
+the same packaging scheme as CRAN packages. What is different is that
+Bioconductor itself is versioned and released. This scheme, using the
+Bioconductor package installer, allows further specification of the minimum
+version of R as well as further restrictions on the dependencies between
+packages than what is possible with the native R packaging system. Spack can
+not replicate these extra features and thus Bioconductor packages in Spack need
+to be managed as a group during updates in order to maintain package
+consistency with Bioconductor itself.
+
+Another key difference is that, while previous versions of packages are
+available, they are not available from a site that can be programmatically set,
+thus a ``list_url`` attribute can not be used. However, each package is also
+available in a git repository, with branches corresponding to each Bioconductor
+release. Thus, it is always possible to retrieve the version of any package
+corresponding to a Bioconductor release simply by fetching the branch that
+corresponds to the Bioconductor release of the package repository. For this
+reason, spack Bioconductor R packages use the git repository, with the commit
+of the respective branch used in the ``version()`` attribute of the package.
+
+^^^^^^^^^^^^^^^^^^^^^^^^
+cran and bioc attributes
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+Much like the ``pypi`` attribute for python packages, due to the fact that R
+packages are obtained from specific repositories, it is possible to set up shortcut
+attributes that can be used to set ``homepage``, ``url``, ``list_url``, and
+``git``. For example, the following ``cran`` attribute:
+
+.. code-block:: python
+
+   cran = 'caret'
+
+is equivalent to:
+
+.. code-block:: python
+
+   homepage = 'https://cloud.r-project.org/package=caret'
+   url = 'https://cloud.r-project.org/src/contrib/caret_6.0-86.tar.gz'
+   list_url = 'https://cloud.r-project.org/src/contrib/Archive/caret'
+
+Likewise, the following ``bioc`` attribute:
+
+.. code-block:: python
+
+   bioc = 'BiocVersion'
+
+is equivalent to:
+
+.. code-block:: python
+
+   homepage = 'https://bioconductor.org/packages/BiocVersion/'
+   git = 'https://git.bioconductor.org/packages/BiocVersion'
+
+
 ^^^^^^^^^^^^^^^^^^^^^^^^^
 Build system dependencies
 ^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -156,7 +219,7 @@ R dependencies
 R packages are often small and follow the classic Unix philosophy
 of doing one thing well. They are modular and usually depend on
 several other packages. You may find a single package with over a
-hundred dependencies. Luckily, CRAN packages are well-documented
+hundred dependencies. Luckily, R packages are well-documented
 and list all of their dependencies in the following sections:
 
 * Depends
```
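To make the documented ``cran`` shortcut concrete, here is a hedged sketch of a minimal CRAN package class. ``r-caret`` is a real Spack package, but the body below, the blanked checksum, and the dependency constraint are illustrative rather than copied from the repository:

```python
# Minimal sketch of an R package using the `cran` attribute described
# in the documentation above; placeholder values, not real recipe data.
from spack import *


class RCaret(RPackage):
    """Classification and Regression Training

    Misc functions for training and plotting classification and
    regression models."""

    # Expands to homepage, url, and list_url on cloud.r-project.org.
    cran = 'caret'

    version('6.0-86', sha256='<placeholder-checksum>')  # illustrative

    depends_on('r@3.2.0:', type=('build', 'run'))  # assumed constraint
```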
**lib/spack/docs/configuration.rst**

```diff
@@ -78,6 +78,13 @@ are six configuration scopes. From lowest to highest:
    If multiple scopes are listed on the command line, they are ordered
    from lowest to highest precedence.
 
+#. **environment**: When using Spack :ref:`environments`, Spack reads
+   additional configuration from the environment file. See
+   :ref:`environment-configuration` for further details on these
+   scopes. Environment scopes can be referenced from the command line
+   as ``env:name`` (to reference environment ``foo``, use
+   ``env:foo``).
+
 #. **command line**: Build settings specified on the command line take
    precedence over all other scopes.
 
@@ -192,10 +199,11 @@ with MPICH. You can create different configuration scopes for use with
 Platform-specific Scopes
 ------------------------
 
-For each scope above, there can also be platform-specific settings.
-For example, on most platforms, GCC is the preferred compiler.
-However, on macOS (darwin), Clang often works for more packages,
-and is set as the default compiler. This configuration is set in
+For each scope above (excluding environment scopes), there can also be
+platform-specific settings. For example, on most platforms, GCC is
+the preferred compiler. However, on macOS (darwin), Clang often works
+for more packages, and is set as the default compiler. This
+configuration is set in
 ``$(prefix)/etc/spack/defaults/darwin/packages.yaml``. It will take
 precedence over settings in the ``defaults`` scope, but can still be
 overridden by settings in ``system``, ``system/darwin``, ``site``,
```
**lib/spack/docs/containers.rst**

```diff
@@ -227,7 +227,7 @@ following ``spack.yaml``:
 
    container:
      images:
-       os: centos/7
+       os: centos:7
        spack: 0.15.4
 
 uses ``spack/centos7:0.15.4`` and ``centos:7`` for the stages where the
```
**lib/spack/docs/environments.rst**

```diff
@@ -399,6 +399,12 @@ There are two ways to include configuration information in a Spack Environment:
 
 #. Included in the ``spack.yaml`` file from another file.
 
+Many Spack commands also affect configuration information in files
+automatically. Those commands take a ``--scope`` argument, and the
+environment can be specified by ``env:NAME`` (to affect environment
+``foo``, set ``--scope env:foo``). These commands will automatically
+manipulate configuration inline in the ``spack.yaml`` file.
+
 """""""""""""""""""""
 Inline configurations
 """""""""""""""""""""
```
**lib/spack/docs/getting_started.rst**

```diff
@@ -111,6 +111,53 @@ environment*, especially for ``PATH``. Only software that comes with
 the system, or that you know you wish to use with Spack, should be
 included. This procedure will avoid many strange build errors.
 
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Optional: Bootstrapping clingo
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Spack supports using clingo as an external solver to compute which software
+needs to be installed. If you have a default compiler supporting C++14 Spack
+can automatically bootstrap this tool from sources the first time it is
+needed:
+
+.. code-block:: console
+
+   $ spack solve zlib
+   [+] /usr (external bison-3.0.4-wu5pgjchxzemk5ya2l3ddqug2d7jv6eb)
+   [+] /usr (external cmake-3.19.4-a4kmcfzxxy45mzku4ipmj5kdiiz5a57b)
+   [+] /usr (external python-3.6.9-x4fou4iqqlh5ydwddx3pvfcwznfrqztv)
+   ==> Installing re2c-1.2.1-e3x6nxtk3ahgd63ykgy44mpuva6jhtdt
+   [ ... ]
+   ==> Optimization: [0, 0, 0, 0, 0, 1, 0, 0, 0]
+   zlib@1.2.11%gcc@10.1.0+optimize+pic+shared arch=linux-ubuntu18.04-broadwell
+
+If you want to speed-up bootstrapping, you may try to search for ``cmake`` and ``bison``
+on your system:
+
+.. code-block:: console
+
+   $ spack external find cmake bison
+   ==> The following specs have been detected on this system and added to /home/spack/.spack/packages.yaml
+   bison@3.0.4  cmake@3.19.4
+
+All the tools Spack needs for its own functioning are installed in a separate store, which lives
+under the ``${HOME}/.spack`` directory. The software installed there can be queried with:
+
+.. code-block:: console
+
+   $ spack find --bootstrap
+   ==> Showing internal bootstrap store at "/home/spack/.spack/bootstrap/store"
+   ==> 3 installed packages
+   -- linux-ubuntu18.04-x86_64 / gcc@10.1.0 ------------------------
+   clingo-bootstrap@spack  python@3.6.9  re2c@1.2.1
+
+In case it's needed the bootstrap store can also be cleaned with:
+
+.. code-block:: console
+
+   $ spack clean -b
+   ==> Removing software in "/home/spack/.spack/bootstrap/store"
+
 ^^^^^^^^^^^^^^^^^^^^^^^^^^
 Optional: Alternate Prefix
 ^^^^^^^^^^^^^^^^^^^^^^^^^^
```
**lib/spack/env/cc** (vendored, 6 changed lines; the old and new lines differ only in whitespace)
```diff
@@ -284,7 +284,7 @@ while [ -n "$1" ]; do
     case "$1" in
         -isystem*)
             arg="${1#-isystem}"
-            isystem_was_used=true
+            isystem_was_used=true
             if [ -z "$arg" ]; then shift; arg="$1"; fi
             if system_dir "$arg"; then
                 isystem_system_includes+=("$arg")
@@ -502,9 +502,9 @@ for dir in "${isystem_includes[@]}"; do args+=("-isystem" "$dir"); done
 IFS=':' read -ra spack_include_dirs <<< "$SPACK_INCLUDE_DIRS"
 if [[ $mode == cpp || $mode == cc || $mode == as || $mode == ccld ]]; then
     if [[ "$isystem_was_used" == "true" ]] ; then
-        for dir in "${spack_include_dirs[@]}"; do args+=("-isystem" "$dir"); done
+        for dir in "${spack_include_dirs[@]}"; do args+=("-isystem" "$dir"); done
     else
-        for dir in "${spack_include_dirs[@]}"; do args+=("-I$dir"); done
+        for dir in "${spack_include_dirs[@]}"; do args+=("-I$dir"); done
     fi
 fi
```
**lib/spack/external/__init__.py** (vendored, 2 changed lines)
```diff
@@ -11,7 +11,7 @@
 
 * Homepage: https://pypi.python.org/pypi/archspec
 * Usage: Labeling, comparison and detection of microarchitectures
-* Version: 0.1.2 (commit 068b0ebd641211971acf10f39aa876703a34bae4)
+* Version: 0.1.2 (commit 0389e83e87d3dc5043a7ac08172bd970706524d6)
 
 argparse
 --------
```
**lib/spack/external/archspec/json/cpu/microarchitectures.json** (vendored)

```diff
@@ -841,7 +841,7 @@
       ],
       "intel": [
         {
-          "versions": "19.0:",
+          "versions": "19.0.1:",
           "flags": "-march={name} -mtune={name}"
         }
       ]
@@ -1328,6 +1328,49 @@
         ]
       }
     },
+    "zen3": {
+      "from": ["zen2"],
+      "vendor": "AuthenticAMD",
+      "features": [
+        "bmi1",
+        "bmi2",
+        "f16c",
+        "fma",
+        "fsgsbase",
+        "avx",
+        "avx2",
+        "rdseed",
+        "clzero",
+        "aes",
+        "pclmulqdq",
+        "cx16",
+        "movbe",
+        "mmx",
+        "sse",
+        "sse2",
+        "sse4a",
+        "ssse3",
+        "sse4_1",
+        "sse4_2",
+        "abm",
+        "xsavec",
+        "xsaveopt",
+        "clflushopt",
+        "popcnt",
+        "clwb",
+        "vaes",
+        "vpclmulqdq"
+      ],
+      "compilers": {
+        "aocc": [
+          {
+            "versions": "3.0:",
+            "name": "znver3",
+            "flags": "-march={name} -mtune={name}"
+          }
+        ]
+      }
+    },
     "ppc64": {
       "from": [],
       "vendor": "generic",
```
**lib/spack/spack/bootstrap.py** (new file; 199 lines added, listing truncated below)
```python
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import contextlib
import os
import sys

import llnl.util.filesystem as fs
import llnl.util.tty as tty

import spack.architecture
import spack.config
import spack.paths
import spack.repo
import spack.spec
import spack.store
import spack.user_environment as uenv
import spack.util.executable
from spack.util.environment import EnvironmentModifications


@contextlib.contextmanager
def spack_python_interpreter():
    """Override the current configuration to set the interpreter under
    which Spack is currently running as the only Python external spec
    available.
    """
    python_cls = type(spack.spec.Spec('python').package)
    python_prefix = os.path.dirname(os.path.dirname(sys.executable))
    externals = python_cls.determine_spec_details(
        python_prefix, [os.path.basename(sys.executable)])
    external_python = externals[0]

    entry = {
        'buildable': False,
        'externals': [
            {'prefix': python_prefix, 'spec': str(external_python)}
        ]
    }

    with spack.config.override('packages:python::', entry):
        yield


def make_module_available(module, spec=None, install=False):
    """Ensure module is importable"""
    # If we already can import it, that's great
    try:
        __import__(module)
        return
    except ImportError:
        pass

    # If it's already installed, use it
    # Search by spec
    spec = spack.spec.Spec(spec or module)

    # We have to run as part of this python
    # We can constrain by a shortened version in place of a version range
    # because this spec is only used for querying or as a placeholder to be
    # replaced by an external that already has a concrete version. This syntax
    # is not sufficient when concretizing without an external, as it will
    # concretize to python@X.Y instead of python@X.Y.Z
    spec.constrain('^python@%d.%d' % sys.version_info[:2])
    installed_specs = spack.store.db.query(spec, installed=True)

    for ispec in installed_specs:
        # TODO: make sure run-environment is appropriate
        module_path = os.path.join(ispec.prefix,
                                   ispec['python'].package.site_packages_dir)
        module_path_64 = module_path.replace('/lib/', '/lib64/')
        try:
            sys.path.append(module_path)
            sys.path.append(module_path_64)
            __import__(module)
            return
        except ImportError:
            tty.warn("Spec %s did not provide module %s" % (ispec, module))
            sys.path = sys.path[:-2]

    def _raise_error(module_name, module_spec):
        error_msg = 'cannot import module "{0}"'.format(module_name)
        if module_spec:
            error_msg += ' from spec "{0}'.format(module_spec)
        raise ImportError(error_msg)

    if not install:
        _raise_error(module, spec)

    with spack_python_interpreter():
        # We will install for ourselves, using this python if needed
        # Concretize the spec
        spec.concretize()
    spec.package.do_install()

    module_path = os.path.join(spec.prefix,
                               spec['python'].package.site_packages_dir)
    module_path_64 = module_path.replace('/lib/', '/lib64/')
    try:
        sys.path.append(module_path)
```
|
||||
sys.path.append(module_path_64)
|
||||
__import__(module)
|
||||
return
|
||||
except ImportError:
|
||||
sys.path = sys.path[:-2]
|
||||
_raise_error(module, spec)
|
||||
|
||||
|
||||
def get_executable(exe, spec=None, install=False):
|
||||
"""Find an executable named exe, either in PATH or in Spack
|
||||
|
||||
Args:
|
||||
exe (str): needed executable name
|
||||
spec (Spec or str): spec to search for exe in (default exe)
|
||||
install (bool): install spec if not available
|
||||
|
||||
When ``install`` is True, Spack will use the python used to run Spack as an
|
||||
external. The ``install`` option should only be used with packages that
|
||||
install quickly (when using external python) or are guaranteed by Spack
|
||||
organization to be in a binary mirror (clingo)."""
|
||||
# Search the system first
|
||||
runner = spack.util.executable.which(exe)
|
||||
if runner:
|
||||
return runner
|
||||
|
||||
# Check whether it's already installed
|
||||
spec = spack.spec.Spec(spec or exe)
|
||||
installed_specs = spack.store.db.query(spec, installed=True)
|
||||
for ispec in installed_specs:
|
||||
# filter out directories of the same name as the executable
|
||||
exe_path = [exe_p for exe_p in fs.find(ispec.prefix, exe)
|
||||
if fs.is_exe(exe_p)]
|
||||
if exe_path:
|
||||
ret = spack.util.executable.Executable(exe_path[0])
|
||||
envmod = EnvironmentModifications()
|
||||
for dep in ispec.traverse(root=True, order='post'):
|
||||
envmod.extend(uenv.environment_modifications_for_spec(dep))
|
||||
ret.add_default_envmod(envmod)
|
||||
return ret
|
||||
else:
|
||||
tty.warn('Exe %s not found in prefix %s' % (exe, ispec.prefix))
|
||||
|
||||
def _raise_error(executable, exe_spec):
|
||||
error_msg = 'cannot find the executable "{0}"'.format(executable)
|
||||
if exe_spec:
|
||||
error_msg += ' from spec "{0}'.format(exe_spec)
|
||||
raise RuntimeError(error_msg)
|
||||
|
||||
# If we're not allowed to install this for ourselves, we can't find it
|
||||
if not install:
|
||||
_raise_error(exe, spec)
|
||||
|
||||
with spack_python_interpreter():
|
||||
# We will install for ourselves, using this python if needed
|
||||
# Concretize the spec
|
||||
spec.concretize()
|
||||
|
||||
spec.package.do_install()
|
||||
# filter out directories of the same name as the executable
|
||||
exe_path = [exe_p for exe_p in fs.find(spec.prefix, exe)
|
||||
if fs.is_exe(exe_p)]
|
||||
if exe_path:
|
||||
ret = spack.util.executable.Executable(exe_path[0])
|
||||
envmod = EnvironmentModifications()
|
||||
for dep in spec.traverse(root=True, order='post'):
|
||||
envmod.extend(uenv.environment_modifications_for_spec(dep))
|
||||
ret.add_default_envmod(envmod)
|
||||
return ret
|
||||
|
||||
_raise_error(exe, spec)
|
||||
|
||||
|
||||
def _bootstrap_config_scopes():
|
||||
config_scopes = []
|
||||
for name, path in spack.config.configuration_paths:
|
||||
platform = spack.architecture.platform().name
|
||||
platform_scope = spack.config.ConfigScope(
|
||||
'/'.join([name, platform]), os.path.join(path, platform)
|
||||
)
|
||||
generic_scope = spack.config.ConfigScope(name, path)
|
||||
config_scopes.extend([generic_scope, platform_scope])
|
||||
msg = '[BOOSTRAP CONFIG SCOPE] name={0}, path={1}'
|
||||
tty.debug(msg.format(generic_scope.name, generic_scope.path))
|
||||
tty.debug(msg.format(platform_scope.name, platform_scope.path))
|
||||
return config_scopes
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def ensure_bootstrap_configuration():
|
||||
with spack.architecture.use_platform(spack.architecture.real_platform()):
|
||||
# Default configuration scopes excluding command line and builtin
|
||||
# but accounting for platform specific scopes
|
||||
config_scopes = _bootstrap_config_scopes()
|
||||
with spack.config.use_configuration(*config_scopes):
|
||||
with spack.repo.use_repositories(spack.paths.packages_path):
|
||||
with spack.store.use_store(spack.paths.user_bootstrap_store):
|
||||
with spack_python_interpreter():
|
||||
yield
|
||||
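Taken together, these helpers let the rest of Spack pull in build tools on demand. The following
is a minimal sketch of how a caller might combine them, mirroring the usage that appears in
``lib/spack/spack/solver/asp.py`` later in this diff; the spec strings are illustrative:

.. code-block:: python

   import spack.bootstrap
   import spack.spec

   # Activate the bootstrap store, repositories, and the external python
   with spack.bootstrap.ensure_bootstrap_configuration():
       # Make the clingo module importable, installing it on first use
       clingo_spec = spack.spec.Spec('clingo-bootstrap@spack+python')
       spack.bootstrap.make_module_available(
           'clingo', spec=clingo_spec, install=True)

   # Look up an executable on PATH or in the store, installing if allowed
   patch = spack.bootstrap.get_executable('patch', install=True)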
@@ -7,9 +7,11 @@

"""

-from os.path import dirname, isdir
+from sys import platform
+
+from os.path import basename, dirname, isdir, join

from spack.package import Package
from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable

from llnl.util.filesystem import find_headers, find_libraries

@@ -22,46 +24,50 @@ class IntelOneApiPackage(Package):

    phases = ['install']

-    def component_info(self,
-                       dir_name,
-                       components,
-                       releases,
-                       url_name):
-        self._dir_name = dir_name
-        self._components = components
-        self._releases = releases
-        self._url_name = url_name
+    # oneAPI license does not allow mirroring outside of the
+    # organization (e.g. University/Company).
+    redistribute_source = False

-    def url_for_version(self, version):
-        release = self._release(version)
-        return 'https://registrationcenter-download.intel.com/akdlm/irc_nas/%s/%s' % (
-            release['irc_id'], self._oneapi_file(version, release))
+    @property
+    def component_dir(self):
+        """Subdirectory for this component in the install prefix."""
+        raise NotImplementedError

-    def install(self, spec, prefix):
-        bash = Executable('bash')
-
-        # Installer writes files in ~/intel set HOME so it goes to prefix
-        bash.add_default_env('HOME', prefix)
-
-        version = spec.versions.lowest()
-        release = self._release(version)
-        bash('./%s' % self._oneapi_file(version, release),
-             '-s', '-a', '-s', '--action', 'install',
-             '--eula', 'accept',
-             '--components',
-             self._components,
-             '--install-dir', prefix)
-
-    #
-    # Helper functions
-    #
-
-    def _release(self, version):
-        return self._releases[str(version)]
-
-    def _oneapi_file(self, version, release):
-        return 'l_%s_p_%s.%s_offline.sh' % (
-            self._url_name, version, release['build'])
+    def install(self, spec, prefix, installer_path=None):
+        """Shared install method for all oneapi packages."""
+
+        # intel-oneapi-compilers overrides the installer_path when
+        # installing fortran, which comes from a spack resource
+        if installer_path is None:
+            installer_path = basename(self.url_for_version(spec.version))
+
+        if platform == 'linux':
+            bash = Executable('bash')
+
+            # Installer writes files in ~/intel set HOME so it goes to prefix
+            bash.add_default_env('HOME', prefix)
+
+            bash(installer_path,
+                 '-s', '-a', '-s', '--action', 'install',
+                 '--eula', 'accept',
+                 '--install-dir', prefix)
+
+        # Some installers have a bug and do not return an error code when failing
+        if not isdir(join(prefix, self.component_dir)):
+            raise RuntimeError('install failed')

    def setup_run_environment(self, env):
        """Adds environment variables to the generated module file.

        These environment variables come from running:

        .. code-block:: console

           $ source {prefix}/setvars.sh --force
        """
        env.extend(EnvironmentModifications.from_sourcing_file(
            join(self.prefix, self.component_dir, 'latest/env/vars.sh')))


class IntelOneApiLibraryPackage(IntelOneApiPackage):

@@ -70,11 +76,11 @@ class IntelOneApiLibraryPackage(IntelOneApiPackage):
    @property
    def headers(self):
        include_path = '%s/%s/latest/include' % (
-            self.prefix, self._dir_name)
+            self.prefix, self.component_dir)
        return find_headers('*', include_path, recursive=True)

    @property
    def libs(self):
-        lib_path = '%s/%s/latest/lib/intel64' % (self.prefix, self._dir_name)
+        lib_path = '%s/%s/latest/lib/intel64' % (self.prefix, self.component_dir)
        lib_path = lib_path if isdir(lib_path) else dirname(lib_path)
        return find_libraries('*', root=lib_path, shared=True, recursive=True)
@@ -243,7 +243,28 @@ def install_args(self, spec, prefix):
        if ('py-setuptools' == spec.name or          # this is setuptools, or
                'py-setuptools' in spec._dependencies and  # it's an immediate dep
                'build' in spec._dependencies['py-setuptools'].deptypes):
-            args += ['--single-version-externally-managed', '--root=/']
+            args += ['--single-version-externally-managed']
+
+        # Get all relative paths since we set the root to `prefix`
+        # We query the python with which these will be used for the lib and inc
+        # directories. This ensures we use `lib`/`lib64` as expected by python.
+        python = spec['python'].package.command
+        command_start = 'print(distutils.sysconfig.'
+        commands = ';'.join([
+            'import distutils.sysconfig',
+            command_start + 'get_python_lib(plat_specific=False, prefix=""))',
+            command_start + 'get_python_lib(plat_specific=True, prefix=""))',
+            command_start + 'get_python_inc(plat_specific=True, prefix=""))'])
+        pure_site_packages_dir, plat_site_packages_dir, inc_dir = python(
+            '-c', commands, output=str, error=str).strip().split('\n')
+
+        args += ['--root=%s' % prefix,
+                 '--install-purelib=%s' % pure_site_packages_dir,
+                 '--install-platlib=%s' % plat_site_packages_dir,
+                 '--install-scripts=bin',
+                 '--install-data=',
+                 '--install-headers=%s' % inc_dir
+                 ]

        return args
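The three ``distutils.sysconfig`` queries above can be reproduced outside of Spack; this
standalone sketch shows what the spawned interpreter computes (the printed paths are examples
and vary with the Python version and platform):

.. code-block:: python

   # With prefix='' the results are relative paths, ready to be joined
   # with the Spack install prefix passed via --root.
   import distutils.sysconfig as sc

   print(sc.get_python_lib(plat_specific=False, prefix=''))  # e.g. lib/python3.8/site-packages
   print(sc.get_python_lib(plat_specific=True, prefix=''))   # e.g. lib64/python3.8/site-packages
   print(sc.get_python_inc(plat_specific=True, prefix=''))   # e.g. include/python3.8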
@@ -25,6 +25,14 @@ class RPackage(PackageBase):
    """
    phases = ['install']

+    # package attributes that can be expanded to set the homepage, url,
+    # list_url, and git values
+    # For CRAN packages
+    cran = None
+
+    # For Bioconductor packages
+    bioc = None
+
    maintainers = ['glennpj']

    #: This attribute is used in UI queries that need to know the build
@@ -33,6 +41,34 @@ class RPackage(PackageBase):

    extends('r')

+    @property
+    def homepage(self):
+        if self.cran:
+            return 'https://cloud.r-project.org/package=' + self.cran
+        elif self.bioc:
+            return 'https://bioconductor.org/packages/' + self.bioc
+
+    @property
+    def url(self):
+        if self.cran:
+            return (
+                'https://cloud.r-project.org/src/contrib/'
+                + self.cran + '_' + str(list(self.versions)[0]) + '.tar.gz'
+            )
+
+    @property
+    def list_url(self):
+        if self.cran:
+            return (
+                'https://cloud.r-project.org/src/contrib/Archive/'
+                + self.cran + '/'
+            )
+
+    @property
+    def git(self):
+        if self.bioc:
+            return 'https://git.bioconductor.org/packages/' + self.bioc
+
    def configure_args(self):
        """Arguments to pass to install via ``--configure-args``."""
        return []
@@ -48,6 +84,7 @@ def install(self, spec, prefix):
        config_vars = self.configure_vars()

        args = [
+            '--vanilla',
            'CMD',
            'INSTALL'
        ]
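With these properties, a package that sets ``cran`` or ``bioc`` no longer needs explicit
``homepage``, ``url``, ``list_url``, or ``git`` values. A hypothetical package using the new
attribute (the version and checksum are placeholders):

.. code-block:: python

   class RJsonlite(RPackage):
       """A JSON parser and generator for R."""

       # homepage, url, and list_url are all derived from this attribute
       cran = 'jsonlite'

       version('1.7.2', sha256='<sha256 placeholder>')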
@@ -1291,7 +1291,9 @@ def push_mirror_contents(env, spec, yaml_path, mirror_url, build_id,
        if 'Access Denied' in err_msg:
            tty.msg('Permission problem writing to {0}'.format(
                mirror_url))
            tty.msg(err_msg)
        else:
            raise inst


def copy_stage_logs_to_artifacts(job_spec, job_log_dir):
@@ -10,11 +10,12 @@
import llnl.util.tty as tty

import spack.caches
-import spack.config
import spack.cmd.test
import spack.cmd.common.arguments as arguments
+import spack.main
import spack.repo
import spack.stage
+import spack.config
from spack.paths import lib_path, var_path

@@ -26,7 +27,7 @@
class AllClean(argparse.Action):
    """Activates flags -s -d -f -m and -p simultaneously"""
    def __call__(self, parser, namespace, values, option_string=None):
-        parser.parse_args(['-sdfmp'], namespace=namespace)
+        parser.parse_args(['-sdfmpb'], namespace=namespace)


def setup_parser(subparser):
@@ -46,7 +47,10 @@ def setup_parser(subparser):
        '-p', '--python-cache', action='store_true',
        help="remove .pyc, .pyo files and __pycache__ folders")
    subparser.add_argument(
-        '-a', '--all', action=AllClean, help="equivalent to -sdfmp", nargs=0
+        '-b', '--bootstrap', action='store_true',
+        help="remove software needed to bootstrap Spack")
+    subparser.add_argument(
+        '-a', '--all', action=AllClean, help="equivalent to -sdfmpb", nargs=0
    )
    arguments.add_common_arguments(subparser, ['specs'])

@@ -54,7 +58,7 @@ def setup_parser(subparser):
def clean(parser, args):
    # If nothing was set, activate the default
    if not any([args.specs, args.stage, args.downloads, args.failures,
-                args.misc_cache, args.python_cache]):
+                args.misc_cache, args.python_cache, args.bootstrap]):
        args.stage = True

    # Then do the cleaning falling through the cases
@@ -96,3 +100,10 @@ def clean(parser, args):
            dname = os.path.join(root, d)
            tty.debug('Removing {0}'.format(dname))
            shutil.rmtree(dname)
+
+    if args.bootstrap:
+        msg = 'Removing software in "{0}"'
+        tty.msg(msg.format(spack.paths.user_bootstrap_store))
+        with spack.store.use_store(spack.paths.user_bootstrap_store):
+            uninstall = spack.main.SpackCommand('uninstall')
+            uninstall('-a', '-y')
@@ -14,11 +14,25 @@ def setup_parser(subparser):
    subparser.add_argument(
        '-f', '--force', action='store_true',
        help="Re-concretize even if already concretized.")
+    subparser.add_argument(
+        '--test', default=None,
+        choices=['root', 'all'],
+        help="""Concretize with test dependencies. When 'root' is chosen, test
+dependencies are only added for the environment's root specs. When 'all' is
+chosen, test dependencies are enabled for all packages in the environment.""")


def concretize(parser, args):
    env = ev.get_env(args, 'concretize', required=True)

+    if args.test == 'all':
+        tests = True
+    elif args.test == 'root':
+        tests = [spec.name for spec in env.user_specs]
+    else:
+        tests = False
+
    with env.write_transaction():
-        concretized_specs = env.concretize(force=args.force)
+        concretized_specs = env.concretize(force=args.force, tests=tests)
        ev.display_specs(concretized_specs)
        env.write()
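For example, to re-concretize an environment with test dependencies enabled only for its root
specs (hypothetical session):

.. code-block:: console

   $ spack env activate myenv
   $ spack concretize --force --test=root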
@@ -200,71 +200,11 @@ def config_add(args):

    scope, section = _get_scope_and_section(args)

    # Updates from file
    if args.file:
-        # Get file as config dict
-        data = spack.config.read_config_file(args.file)
-        if any(k in data for k in spack.schema.env.keys):
-            data = ev.config_dict(data)
-
-        # update all sections from config dict
-        # We have to iterate on keys to keep overrides from the file
-        for section in data.keys():
-            if section in spack.config.section_schemas.keys():
-                # Special handling for compiler scope difference
-                # Has to be handled after we choose a section
-                if scope is None:
-                    scope = spack.config.default_modify_scope(section)
-
-                value = data[section]
-                existing = spack.config.get(section, scope=scope)
-                new = spack.config.merge_yaml(existing, value)
-
-                spack.config.set(section, new, scope)
+        spack.config.add_from_file(args.file, scope=scope)

    if args.path:
-        components = spack.config.process_config_path(args.path)
-
-        has_existing_value = True
-        path = ''
-        override = False
-        for idx, name in enumerate(components[:-1]):
-            # First handle double colons in constructing path
-            colon = '::' if override else ':' if path else ''
-            path += colon + name
-            if getattr(name, 'override', False):
-                override = True
-            else:
-                override = False
-
-            # Test whether there is an existing value at this level
-            existing = spack.config.get(path, scope=scope)
-
-            if existing is None:
-                has_existing_value = False
-                # We've nested further than existing config, so we need the
-                # type information for validation to know how to handle bare
-                # values appended to lists.
-                existing = spack.config.get_valid_type(path)
-
-                # construct value from this point down
-                value = syaml.load_config(components[-1])
-                for component in reversed(components[idx + 1:-1]):
-                    value = {component: value}
-                break
-
-        if has_existing_value:
-            path, _, value = args.path.rpartition(':')
-            value = syaml.load_config(value)
-            existing = spack.config.get(path, scope=scope)
-
-        # append values to lists
-        if isinstance(existing, list) and not isinstance(value, list):
-            value = [value]
-
-        # merge value into existing
-        new = spack.config.merge_yaml(existing, value)
-        spack.config.set(path, new, scope)
+        spack.config.add(args.path, scope=scope)


def config_remove(args):
@@ -328,14 +328,34 @@ def configure_args(self):
        args = []
        return args"""

-    def __init__(self, name, *args, **kwargs):
+    def __init__(self, name, url, *args, **kwargs):
        # If the user provided `--name r-rcpp`, don't rename it r-r-rcpp
        if not name.startswith('r-'):
            # Make it more obvious that we are renaming the package
            tty.msg("Changing package name from {0} to r-{0}".format(name))
            name = 'r-{0}'.format(name)

-        super(RPackageTemplate, self).__init__(name, *args, **kwargs)
+        r_name = parse_name(url)
+
+        cran = re.search(
+            r'(?:r-project)[^/]+/src' + '/([^/]+)' * 2,
+            url
+        )
+
+        if cran:
+            url = r_name
+            self.url_line = '    cran = "{url}"'
+
+        bioc = re.search(
+            r'(?:bioconductor)[^/]+/packages' + '/([^/]+)' * 5,
+            url
+        )
+
+        if bioc:
+            self.url_line = '    url  = "{0}"\n' \
+                            '    bioc = "{1}"'.format(url, r_name)
+
+        super(RPackageTemplate, self).__init__(name, url, *args, **kwargs)


class PerlmakePackageTemplate(PackageTemplate):
@@ -157,6 +157,10 @@ def env_create_setup_parser(subparser):
    subparser.add_argument(
        '-d', '--dir', action='store_true',
        help='create an environment in a specific directory')
+    subparser.add_argument(
+        '--keep-relative', action='store_true',
+        help='copy relative develop paths verbatim into the new environment'
+             ' when initializing from envfile')
    view_opts = subparser.add_mutually_exclusive_group()
    view_opts.add_argument(
        '--without-view', action='store_true',
@@ -184,13 +188,14 @@ def env_create(args):
    if args.envfile:
        with open(args.envfile) as f:
            _env_create(args.create_env, f, args.dir,
-                        with_view=with_view)
+                        with_view=with_view, keep_relative=args.keep_relative)
    else:
        _env_create(args.create_env, None, args.dir,
                    with_view=with_view)


-def _env_create(name_or_path, init_file=None, dir=False, with_view=None):
+def _env_create(name_or_path, init_file=None, dir=False, with_view=None,
+                keep_relative=False):
    """Create a new environment, with an optional yaml description.

    Arguments:
@@ -199,15 +204,18 @@ def _env_create(name_or_path, init_file=None, dir=False, with_view=None):
            spack.yaml or spack.lock
        dir (bool): if True, create an environment in a directory instead
            of a named environment
+        keep_relative (bool): if True, develop paths are copied verbatim into
+            the new environment file, otherwise they may be made absolute if the
+            new environment is in a different location
    """
    if dir:
-        env = ev.Environment(name_or_path, init_file, with_view)
+        env = ev.Environment(name_or_path, init_file, with_view, keep_relative)
        env.write()
        tty.msg("Created environment in %s" % env.path)
        tty.msg("You can activate this environment with:")
        tty.msg("    spack env activate %s" % env.path)
    else:
-        env = ev.create(name_or_path, init_file, with_view)
+        env = ev.create(name_or_path, init_file, with_view, keep_relative)
        env.write()
        tty.msg("Created environment '%s' in %s" % (name_or_path, env.path))
        tty.msg("You can activate this environment with:")
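A hypothetical invocation of the new flag; without ``--keep-relative``, relative ``develop``
paths in the source ``spack.yaml`` are rewritten to absolute paths in the new environment:

.. code-block:: console

   $ spack env create --keep-relative myenv /path/to/spack.yaml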
@@ -109,6 +109,10 @@ def setup_parser(subparser):
    subparser.add_argument(
        '--end-date', help='latest date of installation [YYYY-MM-DD]'
    )
+    subparser.add_argument(
+        '-b', '--bootstrap', action='store_true',
+        help='show software in the internal bootstrap store'
+    )

    arguments.add_common_arguments(subparser, ['constraint'])

@@ -201,7 +205,14 @@ def display_env(env, args, decorator):

def find(parser, args):
    q_args = query_arguments(args)
-    results = args.specs(**q_args)
+    # Query the current store or the internal bootstrap store if required
+    if args.bootstrap:
+        msg = 'Showing internal bootstrap store at "{0}"'
+        tty.msg(msg.format(spack.paths.user_bootstrap_store))
+        with spack.store.use_store(spack.paths.user_bootstrap_store):
+            results = args.specs(**q_args)
+    else:
+        results = args.specs(**q_args)

    decorator = lambda s, f: f
    added = set()
@@ -241,14 +241,28 @@ def install(parser, args, **kwargs):
    if args.log_file:
        reporter.filename = args.log_file

+    if args.run_tests:
+        tty.warn("Deprecated option: --run-tests: use --test=all instead")
+
+    def get_tests(specs):
+        if args.test == 'all' or args.run_tests:
+            return True
+        elif args.test == 'root':
+            return [spec.name for spec in specs]
+        else:
+            return False
+
    if not args.spec and not args.specfiles:
        # if there are no args but an active environment
        # then install the packages from it.
        env = ev.get_env(args, 'install')
        if env:
+            tests = get_tests(env.user_specs)
+            kwargs['tests'] = tests
+
            if not args.only_concrete:
                with env.write_transaction():
-                    concretized_specs = env.concretize()
+                    concretized_specs = env.concretize(tests=tests)
                    ev.display_specs(concretized_specs)

            # save view regeneration for later, so that we only do it
@@ -295,16 +309,9 @@ def install(parser, args, **kwargs):
    # that will be passed to the package installer
    update_kwargs_from_args(args, kwargs)

-    if args.run_tests:
-        tty.warn("Deprecated option: --run-tests: use --test=all instead")
-
    # 1. Abstract specs from cli
    abstract_specs = spack.cmd.parse_specs(args.spec)
-    tests = False
-    if args.test == 'all' or args.run_tests:
-        tests = True
-    elif args.test == 'root':
-        tests = [spec.name for spec in abstract_specs]
+    tests = get_tests(abstract_specs)
    kwargs['tests'] = tests

    try:
@@ -109,4 +109,16 @@ def location(parser, args):
            tty.die("Build directory does not exist yet. "
                    "Run this to create it:",
                    "spack stage " + " ".join(args.spec))
-        print(pkg.stage.source_path)
+
+        # Out of source builds have build_directory defined
+        if hasattr(pkg, 'build_directory'):
+            # build_directory can be either absolute or relative
+            # to the stage path in either case os.path.join makes it
+            # absolute
+            print(os.path.normpath(os.path.join(
+                pkg.stage.path,
+                pkg.build_directory
+            )))
+        else:
+            # Otherwise assume in-source builds
+            return print(pkg.stage.source_path)
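With this change, ``spack location --build-dir`` reports the actual build directory for packages
that build out of source, while in-source builds still print the stage source path. A
hypothetical session (path abbreviated):

.. code-block:: console

   $ spack location --build-dir zlib
   /tmp/user/spack-stage/spack-stage-zlib-1.2.11-abc123/spack-src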
@@ -33,6 +33,9 @@ def setup_parser(subparser):
    subparser.add_argument(
        '-m', dest='module', action='store',
        help='run library module as a script')
+    subparser.add_argument(
+        '--path', action='store_true', dest='show_path',
+        help='show path to python interpreter that spack uses')
    subparser.add_argument(
        'python_args', nargs=argparse.REMAINDER,
        help="file to run plus arguments")
@@ -43,6 +46,10 @@ def python(parser, args, unknown_args):
        print('Python', platform.python_version())
        return

+    if args.show_path:
+        print(sys.executable)
+        return
+
    if args.module:
        sys.argv = ['spack-python'] + unknown_args + args.python_args
        runpy.run_module(args.module, run_name="__main__", alter_sys=True)
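The new ``--path`` flag makes it easy to see which interpreter Spack itself runs under; the
output below is illustrative and varies by installation:

.. code-block:: console

   $ spack python --path
   /usr/bin/python3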
@@ -7,18 +7,22 @@
import os
import argparse
import textwrap
+import inspect
import fnmatch
import re
import shutil
import sys

import llnl.util.tty as tty
import llnl.util.tty.colify as colify

import spack.install_test
import spack.environment as ev
import spack.cmd
import spack.cmd.common.arguments as arguments
-import spack.report
import spack.package
import spack.repo
+import spack.report

description = "run spack's tests for an install"
section = "admin"
@@ -78,8 +82,11 @@ def setup_parser(subparser):
    arguments.add_common_arguments(run_parser, ['installed_specs'])

    # List
-    sp.add_parser('list', description=test_list.__doc__,
-                  help=first_line(test_list.__doc__))
+    list_parser = sp.add_parser('list', description=test_list.__doc__,
+                                help=first_line(test_list.__doc__))
+    list_parser.add_argument(
+        "-a", "--all", action="store_true", dest="list_all",
+        help="list all packages with tests (not just installed)")

    # Find
    find_parser = sp.add_parser('find', description=test_find.__doc__,
@@ -188,18 +195,36 @@ def test_run(args):


def has_test_method(pkg):
-    return pkg.test.__func__ != spack.package.PackageBase.test
+    if not inspect.isclass(pkg):
+        tty.die('{0}: is not a class, it is {1}'.format(pkg, type(pkg)))
+
+    pkg_base = spack.package.PackageBase
+    return (
+        (issubclass(pkg, pkg_base) and pkg.test != pkg_base.test) or
+        (isinstance(pkg, pkg_base) and pkg.test.__func__ != pkg_base.test)
+    )


def test_list(args):
-    """List all installed packages with available tests."""
+    """List installed packages with available tests."""
+    if args.list_all:
+        all_packages_with_tests = [
+            pkg_class.name
+            for pkg_class in spack.repo.path.all_package_classes()
+            if has_test_method(pkg_class)
+        ]
+        if sys.stdout.isatty():
+            tty.msg("%d packages with tests." % len(all_packages_with_tests))
+        colify.colify(all_packages_with_tests)
+        return

    # TODO: This can be extended to have all of the output formatting options
    # from `spack find`.
    env = ev.get_env(args, 'test')
    hashes = env.all_hashes() if env else None

    specs = spack.store.db.query(hashes=hashes)
-    specs = list(filter(lambda s: has_test_method(s.package), specs))
+    specs = list(filter(lambda s: has_test_method(s.package_class), specs))

    spack.cmd.display_specs(specs, long=True)
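The new ``--all`` flag lists every package that defines a ``test`` method, whether or not it is
installed. A hypothetical session (package names and count are illustrative):

.. code-block:: console

   $ spack test list --all
   ==> 120 packages with tests.
   hdf5  libsigsegv  m4  openmpi  zlib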
@@ -9,6 +9,7 @@
import llnl.util.lang

from spack.compiler import Compiler
+from spack.version import ver


class Aocc(Compiler):
@@ -118,3 +119,22 @@ def f77_version(cls, f77):
    @property
    def stdcxx_libs(self):
        return ('-lstdc++', )

+    @property
+    def cflags(self):
+        return self._handle_default_flag_additions()
+
+    @property
+    def cxxflags(self):
+        return self._handle_default_flag_additions()
+
+    @property
+    def fflags(self):
+        return self._handle_default_flag_additions()
+
+    def _handle_default_flag_additions(self):
+        # This is a known issue for AOCC 3.0 see:
+        # https://developer.amd.com/wp-content/resources/AOCC-3.0-Install-Guide.pdf
+        if self.real_version == ver('3.0.0'):
+            return ("-Wno-unused-command-line-argument "
+                    "-mllvm -eliminate-similar-expr=false")
+        # no extra default flags for other versions
+        return ''
@@ -112,3 +112,9 @@ def fc_pic_flag(self):
        if self.is_clang_based:
            return "-fPIC"
        return "-h PIC"

+    @property
+    def stdcxx_libs(self):
+        # Cray compiler wrappers link to the standard C++ library
+        # without additional flags.
+        return ()
@@ -88,3 +88,7 @@ def c11_flag(self):
                                      'the C11 standard',
                                      'c11_flag',
                                      '< 15.3')

+    @property
+    def stdcxx_libs(self):
+        return ('-pgc++libs',)
@@ -73,9 +73,7 @@ def concretize_develop(self, spec):
        if not dev_info:
            return False

-        path = dev_info['path']
-        path = path if os.path.isabs(path) else os.path.join(
-            env.path, path)
+        path = os.path.normpath(os.path.join(env.path, dev_info['path']))

        if 'dev_path' in spec.variants:
            assert spec.variants['dev_path'].value == path
@@ -714,10 +712,12 @@ def _compiler_concretization_failure(compiler_spec, arch):
    raise UnavailableCompilerVersionError(compiler_spec, arch)


-def concretize_specs_together(*abstract_specs):
+def concretize_specs_together(*abstract_specs, **kwargs):
    """Given a number of specs as input, tries to concretize them together.

    Args:
+        tests (bool or list or set): False to run no tests, True to test
+            all packages, or a list of package names to run tests for some
        *abstract_specs: abstract specs to be concretized, given either
            as Specs or strings

@@ -757,7 +757,7 @@ def make_concretization_repository(abstract_specs):
    with spack.repo.additional_repository(concretization_repository):
        # Spec from a helper package that depends on all the abstract_specs
        concretization_root = spack.spec.Spec('concretizationroot')
-        concretization_root.concretize()
+        concretization_root.concretize(tests=kwargs.get('tests', False))
        # Retrieve the direct dependencies
        concrete_specs = [
            concretization_root[spec.name].copy() for spec in abstract_specs
@@ -806,6 +806,81 @@ def _config():
config = llnl.util.lang.Singleton(_config)


def add_from_file(filename, scope=None):
    """Add updates to a config from a filename
    """
    import spack.environment as ev

    # Get file as config dict
    data = read_config_file(filename)
    if any(k in data for k in spack.schema.env.keys):
        data = ev.config_dict(data)

    # update all sections from config dict
    # We have to iterate on keys to keep overrides from the file
    for section in data.keys():
        if section in section_schemas.keys():
            # Special handling for compiler scope difference
            # Has to be handled after we choose a section
            if scope is None:
                scope = default_modify_scope(section)

            value = data[section]
            existing = get(section, scope=scope)
            new = merge_yaml(existing, value)

            # We cannot call config.set directly (set is a type)
            config.set(section, new, scope)


def add(fullpath, scope=None):
    """Add the given configuration to the specified config scope.
    Add accepts a path. If you want to add from a filename, use add_from_file"""

    components = process_config_path(fullpath)

    has_existing_value = True
    path = ''
    override = False
    for idx, name in enumerate(components[:-1]):
        # First handle double colons in constructing path
        colon = '::' if override else ':' if path else ''
        path += colon + name
        if getattr(name, 'override', False):
            override = True
        else:
            override = False

        # Test whether there is an existing value at this level
        existing = get(path, scope=scope)

        if existing is None:
            has_existing_value = False
            # We've nested further than existing config, so we need the
            # type information for validation to know how to handle bare
            # values appended to lists.
            existing = get_valid_type(path)

            # construct value from this point down
            value = syaml.load_config(components[-1])
            for component in reversed(components[idx + 1:-1]):
                value = {component: value}
            break

    if has_existing_value:
        path, _, value = fullpath.rpartition(':')
        value = syaml.load_config(value)
        existing = get(path, scope=scope)

    # append values to lists
    if isinstance(existing, list) and not isinstance(value, list):
        value = [value]

    # merge value into existing
    new = merge_yaml(existing, value)
    config.set(path, new, scope)


def get(path, default=None, scope=None):
    """Module-level wrapper for ``Configuration.get()``."""
    return config.get(path, default, scope)
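A short sketch of the two new module-level entry points in use; the paths, values, and scope
names here are illustrative:

.. code-block:: python

   import spack.config

   # Equivalent of `spack config add config:debug:true` on the CLI
   spack.config.add('config:debug:true')

   # Merge every recognized section from a YAML file into a scope
   spack.config.add_from_file('/tmp/extra.yaml', scope='user')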
@@ -429,8 +429,8 @@ def add_extension(self, spec, ext_spec):
    def check_extension_conflict(self, spec, ext_spec):
        exts = self._extension_map(spec)
        if ext_spec.name in exts:
-            installed_spec = exts[ext_spec.name]
-            if ext_spec == installed_spec:
+            installed_spec = exts[ext_spec.name].copy(deps=('link', 'run'))
+            if ext_spec.copy(deps=('link', 'run')) == installed_spec:
                raise ExtensionAlreadyInstalledError(spec, ext_spec)
            else:
                raise ExtensionConflictError(spec, ext_spec, installed_spec)
@@ -132,7 +132,7 @@ def activate(
    if use_env_repo:
        spack.repo.path.put_first(_active_environment.repo)

-    tty.debug("Using environmennt '%s'" % _active_environment.name)
+    tty.debug("Using environment '%s'" % _active_environment.name)

    # Construct the commands to run
    cmds = ''
@@ -393,12 +393,12 @@ def read(name):
    return Environment(root(name))


-def create(name, init_file=None, with_view=None):
+def create(name, init_file=None, with_view=None, keep_relative=False):
    """Create a named environment in Spack."""
    validate_env_name(name)
    if exists(name):
        raise SpackEnvironmentError("'%s': environment already exists" % name)
-    return Environment(root(name), init_file, with_view)
+    return Environment(root(name), init_file, with_view, keep_relative)


def config_dict(yaml_data):
@@ -587,7 +587,7 @@ def regenerate(self, all_specs, roots):


class Environment(object):
-    def __init__(self, path, init_file=None, with_view=None):
+    def __init__(self, path, init_file=None, with_view=None, keep_relative=False):
        """Create a new environment.

        The environment can be optionally initialized with either a
@@ -600,6 +600,10 @@ def __init__(self, path, init_file=None, with_view=None):
            with_view (str or bool): whether a view should be maintained for
                the environment. If the value is a string, it specifies the
                path to the view.
+            keep_relative (bool): if True, develop paths are copied verbatim
+                into the new environment file, otherwise they are made absolute
+                when the environment path is different from init_file's
+                directory.
        """
        self.path = os.path.abspath(path)

@@ -621,6 +625,13 @@ def __init__(self, path, init_file=None, with_view=None):
                    self._set_user_specs_from_lockfile()
                else:
                    self._read_manifest(f, raw_yaml=default_manifest_yaml)

+                # Rewrite relative develop paths when initializing a new
+                # environment in a different location from the spack.yaml file.
+                if not keep_relative and hasattr(f, 'name') and \
+                        f.name.endswith('.yaml'):
+                    init_file_dir = os.path.abspath(os.path.dirname(f.name))
+                    self._rewrite_relative_paths_on_relocation(init_file_dir)
        else:
            with lk.ReadTransaction(self.txlock):
                self._read()
@@ -637,6 +648,27 @@ def __init__(self, path, init_file=None, with_view=None):
        # If with_view is None, then defer to the view settings determined by
        # the manifest file

+    def _rewrite_relative_paths_on_relocation(self, init_file_dir):
+        """When initializing the environment from a manifest file and we plan
+        to store the environment in a different directory, we have to rewrite
+        relative paths to absolute ones."""
+        if init_file_dir == self.path:
+            return
+
+        for name, entry in self.dev_specs.items():
+            dev_path = entry['path']
+            expanded_path = os.path.normpath(os.path.join(
+                init_file_dir, entry['path']))
+
+            # Skip if the expanded path is the same (e.g. when absolute)
+            if dev_path == expanded_path:
+                continue
+
+            tty.debug("Expanding develop path for {0} to {1}".format(
+                name, expanded_path))
+
+            self.dev_specs[name]['path'] = expanded_path
+
    def _re_read(self):
        """Reinitialize the environment object if it has been written (this
        may not be true if the environment was just created in this running
@@ -1044,8 +1076,7 @@ def develop(self, spec, path, clone=False):

        if clone:
            # "steal" the source code via staging API
-            abspath = path if os.path.isabs(path) else os.path.join(
-                self.path, path)
+            abspath = os.path.normpath(os.path.join(self.path, path))

            stage = spec.package.stage
            stage.steal_source(abspath)
@@ -1064,7 +1095,7 @@ def undevelop(self, spec):
            return True
        return False

-    def concretize(self, force=False):
+    def concretize(self, force=False, tests=False):
        """Concretize user_specs in this environment.

        Only concretizes specs that haven't been concretized yet unless
@@ -1076,6 +1107,8 @@ def concretize(self, force=False):
        Arguments:
            force (bool): re-concretize ALL specs, even those that were
                already concretized
+            tests (bool or list or set): False to run no tests, True to test
+                all packages, or a list of package names to run tests for some

        Returns:
            List of specs that have been concretized. Each entry is a tuple of
@@ -1089,14 +1122,14 @@ def concretize(self, force=False):

        # Pick the right concretization strategy
        if self.concretization == 'together':
-            return self._concretize_together()
+            return self._concretize_together(tests=tests)
        if self.concretization == 'separately':
-            return self._concretize_separately()
+            return self._concretize_separately(tests=tests)

        msg = 'concretization strategy not implemented [{0}]'
        raise SpackEnvironmentError(msg.format(self.concretization))

-    def _concretize_together(self):
+    def _concretize_together(self, tests=False):
        """Concretization strategy that concretizes all the specs
        in the same DAG.
        """
@@ -1129,14 +1162,13 @@ def _concretize_together(self):
        self.specs_by_hash = {}

        concrete_specs = spack.concretize.concretize_specs_together(
-            *self.user_specs
-        )
+            *self.user_specs, tests=tests)
        concretized_specs = [x for x in zip(self.user_specs, concrete_specs)]
        for abstract, concrete in concretized_specs:
            self._add_concrete_spec(abstract, concrete)
        return concretized_specs

-    def _concretize_separately(self):
+    def _concretize_separately(self, tests=False):
        """Concretization strategy that concretizes separately one
        user spec after the other.
        """
@@ -1159,12 +1191,12 @@ def _concretize_separately(self):
        for uspec, uspec_constraints in zip(
                self.user_specs, self.user_specs.specs_as_constraints):
            if uspec not in old_concretized_user_specs:
-                concrete = _concretize_from_constraints(uspec_constraints)
+                concrete = _concretize_from_constraints(uspec_constraints, tests=tests)
                self._add_concrete_spec(uspec, concrete)
                concretized_specs.append((uspec, concrete))
        return concretized_specs

-    def concretize_and_add(self, user_spec, concrete_spec=None):
+    def concretize_and_add(self, user_spec, concrete_spec=None, tests=False):
        """Concretize and add a single spec to the environment.

        Concretize the provided ``user_spec`` and add it along with the
@@ -1187,7 +1219,7 @@ def concretize_and_add(self, user_spec, concrete_spec=None):
        spec = Spec(user_spec)

        if self.add(spec):
-            concrete = concrete_spec or spec.concretized()
+            concrete = concrete_spec or spec.concretized(tests=tests)
            self._add_concrete_spec(spec, concrete)
        else:
            # spec might be in the user_specs, but not installed.
@@ -1197,7 +1229,7 @@ def concretize_and_add(self, user_spec, concrete_spec=None):
            )
            concrete = self.specs_by_hash.get(spec.build_hash())
            if not concrete:
-                concrete = spec.concretized()
+                concrete = spec.concretized(tests=tests)
                self._add_concrete_spec(spec, concrete)

        return concrete
@@ -1904,7 +1936,7 @@ def _tree_to_display(spec):
        print('')


-def _concretize_from_constraints(spec_constraints):
+def _concretize_from_constraints(spec_constraints, tests=False):
    # Accept only valid constraints from list and concretize spec
    # Get the named spec even if out of order
    root_spec = [s for s in spec_constraints if s.name]
@@ -1923,7 +1955,7 @@ def _concretize_from_constraints(spec_constraints):
        if c not in invalid_constraints:
            s.constrain(c)
    try:
-        return s.concretized()
+        return s.concretized(tests=tests)
    except spack.spec.InvalidDependencyError as e:
        invalid_deps_string = ['^' + d for d in e.invalid_deps]
        invalid_deps = [c for c in spec_constraints
@@ -62,17 +62,16 @@ def topological_sort(spec, reverse=False, deptype='all'):
    """
    deptype = canonical_deptype(deptype)

-    if not reverse:
-        parents = lambda s: s.dependents()
-        children = lambda s: s.dependencies()
-    else:
-        parents = lambda s: s.dependencies()
-        children = lambda s: s.dependents()
-
    # Work on a copy so this is nondestructive.
    spec = spec.copy(deps=deptype)
    nodes = spec.index(deptype=deptype)

+    parents = lambda s: [p for p in s.dependents() if p.name in nodes]
+    children = lambda s: s.dependencies()
+
+    if reverse:
+        parents, children = children, parents
+
    topo_order = []
    par = dict((name, parents(nodes[name])) for name in nodes.keys())
    remaining = [name for name in nodes.keys() if not parents(nodes[name])]
@@ -1609,15 +1609,22 @@ def install(self):
        self._cleanup_all_tasks()

        # Ensure we properly report if one or more explicit specs failed
-        if exists_errors or failed_explicits:
+        # or were not installed when should have been.
+        missing = [request.pkg_id for request in self.build_requests if
+                   request.install_args.get('install_package') and
+                   request.pkg_id not in self.installed]
+        if exists_errors or failed_explicits or missing:
            for pkg_id, err in exists_errors:
                tty.error('{0}: {1}'.format(pkg_id, err))

            for pkg_id, err in failed_explicits:
                tty.error('{0}: {1}'.format(pkg_id, err))

+            for pkg_id in missing:
+                tty.error('{0}: Package was not installed'.format(pkg_id))
+
            raise InstallError('Installation request failed. Refer to '
-                               'recent errors for specific package(s).')
+                               'reported errors for failing package(s).')


def build_process(pkg, kwargs):
@@ -40,7 +40,6 @@
import spack.util.executable as exe
from spack.error import SpackError

#: names of profile statistics
stat_names = pstats.Stats.sort_arg_dict_default

@@ -358,6 +357,9 @@ def make_argument_parser(**kwargs):
        '--color', action='store', default='auto',
        choices=('always', 'never', 'auto'),
        help="when to colorize output (default: auto)")
+    parser.add_argument(
+        '-c', '--config', default=None, action="append", dest="config_vars",
+        help="add one or more custom, one-off config settings.")
    parser.add_argument(
        '-C', '--config-scope', dest='config_scopes', action='append',
        metavar='DIR', help="add a custom configuration scope")
@@ -463,6 +465,10 @@ def setup_main_options(args):
        tty.warn("You asked for --insecure. Will NOT check SSL certificates.")
        spack.config.set('config:verify_ssl', False, scope='command_line')

+    # Use the spack config command to handle parsing the config strings
+    for config_var in (args.config_vars or []):
+        spack.config.add(fullpath=config_var, scope="command_line")
+
    # when to use color (takes always, auto, or never)
    color.set_color_when(args.color)
|
||||
|
||||
#: User configuration location
|
||||
user_config_path = os.path.expanduser('~/.spack')
|
||||
|
||||
user_bootstrap_path = os.path.join(user_config_path, 'bootstrap')
|
||||
user_bootstrap_store = os.path.join(user_bootstrap_path, 'store')
|
||||
|
||||
opt_path = os.path.join(prefix, "opt")
|
||||
etc_path = os.path.join(prefix, "etc")
|
||||
|
||||
@@ -22,6 +22,7 @@
    clingo_cffi = hasattr(clingo.Symbol, '_rep')
except ImportError:
    clingo = None  # type: ignore
+    clingo_cffi = False

import llnl.util.lang
import llnl.util.tty as tty
@@ -38,6 +39,7 @@
import spack.package
import spack.package_prefs
import spack.repo
+import spack.bootstrap
import spack.variant
import spack.version

@@ -246,7 +248,20 @@ def __init__(self, cores=True, asp=None):
            asp (file-like): optional stream to write a text-based ASP program
                for debugging or verification.
        """
-        assert clingo, "PyclingoDriver requires clingo with Python support"
+        global clingo
+        if not clingo:
+            # TODO: Find a way to vendor the concrete spec
+            # in a cross-platform way
+            with spack.bootstrap.ensure_bootstrap_configuration():
+                generic_target = archspec.cpu.host().family
+                spec_str = 'clingo-bootstrap@spack+python target={0}'.format(
+                    str(generic_target)
+                )
+                clingo_spec = spack.spec.Spec(spec_str)
+                clingo_spec._old_concretize()
+                spack.bootstrap.make_module_available(
+                    'clingo', spec=clingo_spec, install=True)
+                import clingo
        self.out = asp or llnl.util.lang.Devnull()
        self.cores = cores
|
||||
|
||||
def conflict_rules(self, pkg):
|
||||
for trigger, constraints in pkg.conflicts.items():
|
||||
trigger_id = self.condition(spack.spec.Spec(trigger), name=pkg.name)
|
||||
self.gen.fact(fn.conflict_trigger(trigger_id))
|
||||
|
||||
for constraint, _ in constraints:
|
||||
constraint_body = spack.spec.Spec(pkg.name)
|
||||
constraint_body.constrain(constraint)
|
||||
constraint_body.constrain(trigger)
|
||||
|
||||
clauses = []
|
||||
for s in constraint_body.traverse():
|
||||
clauses += self.spec_clauses(s, body=True)
|
||||
|
||||
# TODO: find a better way to generate clauses for integrity
|
||||
# TODO: constraints, instead of generating them for the body
|
||||
# TODO: of a rule and filter unwanted functions.
|
||||
to_be_filtered = ['node_compiler_hard']
|
||||
clauses = [x for x in clauses if x.name not in to_be_filtered]
|
||||
|
||||
# Emit facts based on clauses
|
||||
condition_id = next(self._condition_id_counter)
|
||||
self.gen.fact(fn.conflict(condition_id, pkg.name))
|
||||
for clause in clauses:
|
||||
self.gen.fact(fn.conflict_condition(
|
||||
condition_id, clause.name, *clause.args
|
||||
))
|
||||
constraint_id = self.condition(constraint, name=pkg.name)
|
||||
self.gen.fact(fn.conflict(pkg.name, trigger_id, constraint_id))
|
||||
self.gen.newline()
|
||||
|
||||
def available_compilers(self):
|
||||
@@ -627,50 +626,44 @@ def pkg_rules(self, pkg, tests):
            )
        )

-    def _condition_facts(
-            self, pkg_name, cond_spec, dep_spec,
-            cond_fn, require_fn, impose_fn
-    ):
+    def condition(self, required_spec, imposed_spec=None, name=None):
        """Generate facts for a dependency or virtual provider condition.

        Arguments:
-            pkg_name (str): name of the package that triggers the
-                condition (e.g., the dependent or the provider)
-            cond_spec (Spec): the dependency spec representing the
-                condition that needs to be True (can be anonymous)
-            dep_spec (Spec): the spec of the dependency or provider
-                to be depended on/provided if the condition holds.
-            cond_fn (AspFunction): function to use to declare the condition;
-                will be called with the cond id, pkg_name, and dep_spec.name
-            require_fn (AspFunction): function to use to declare the conditions
-                required of the dependent/provider to trigger
-            impose_fn (AspFunction): function to use for constraints imposed
-                on the dependency/virtual
+            required_spec (Spec): the spec that triggers this condition
+            imposed_spec (optional, Spec): the spec with constraints that
+                are imposed when this condition is triggered
+            name (optional, str): name for `required_spec` (required if
+                required_spec is anonymous, ignored if not)

        Returns:
            (int): id of the condition created by this function
        """
+        named_cond = required_spec.copy()
+        named_cond.name = named_cond.name or name
+        assert named_cond.name, "must provide name for anonymous conditions!"
+
        condition_id = next(self._condition_id_counter)
-        named_cond = cond_spec.copy()
-        named_cond.name = named_cond.name or pkg_name
+        self.gen.fact(fn.condition(condition_id))

-        self.gen.fact(cond_fn(condition_id, pkg_name, dep_spec.name))
-
-        # conditions that trigger the condition
-        conditions = self.checked_spec_clauses(
-            named_cond, body=True, required_from=pkg_name
-        )
-        for pred in conditions:
-            self.gen.fact(require_fn(condition_id, pred.name, *pred.args))
-
-        imposed_constraints = self.checked_spec_clauses(
-            dep_spec, required_from=pkg_name
-        )
-        for pred in imposed_constraints:
-            # imposed "node"-like conditions are no-ops
-            if pred.name in ("node", "virtual_node"):
-                continue
-            self.gen.fact(impose_fn(condition_id, pred.name, *pred.args))
+        # requirements trigger the condition
+        requirements = self.checked_spec_clauses(
+            named_cond, body=True, required_from=name)
+        for pred in requirements:
+            self.gen.fact(
+                fn.condition_requirement(condition_id, pred.name, *pred.args)
+            )
+
+        if imposed_spec:
+            imposed_constraints = self.checked_spec_clauses(
+                imposed_spec, body=False, required_from=name)
+            for pred in imposed_constraints:
+                # imposed "node"-like conditions are no-ops
+                if pred.name in ("node", "virtual_node"):
+                    continue
+                self.gen.fact(
+                    fn.imposed_constraint(condition_id, pred.name, *pred.args)
+                )

        return condition_id
@@ -680,36 +673,36 @@ def package_provider_rules(self, pkg):
|
||||
|
||||
for provided, whens in pkg.provided.items():
|
||||
for when in whens:
|
||||
self._condition_facts(
|
||||
pkg.name, when, provided,
|
||||
fn.provider_condition,
|
||||
fn.required_provider_condition,
|
||||
fn.imposed_dependency_condition
|
||||
)
|
||||
|
||||
condition_id = self.condition(when, provided, pkg.name)
|
||||
self.gen.fact(fn.provider_condition(
|
||||
condition_id, when.name, provided.name
|
||||
))
|
||||
self.gen.newline()
|
||||
|
||||
def package_dependencies_rules(self, pkg, tests):
|
||||
"""Translate 'depends_on' directives into ASP logic."""
|
||||
for _, conditions in sorted(pkg.dependencies.items()):
|
||||
for cond, dep in sorted(conditions.items()):
|
||||
condition_id = self._condition_facts(
|
||||
pkg.name, cond, dep.spec,
|
||||
fn.dependency_condition,
|
||||
fn.required_dependency_condition,
|
||||
fn.imposed_dependency_condition
|
||||
)
|
||||
deptypes = dep.type.copy()
|
||||
# Skip test dependencies if they're not requested
|
||||
if not tests:
|
||||
deptypes.discard("test")
|
||||
|
||||
for t in sorted(dep.type):
|
||||
# Skip test dependencies if they're not requested at all
|
||||
if t == 'test' and not tests:
|
||||
continue
|
||||
# ... or if they are requested only for certain packages
|
||||
if not isinstance(tests, bool) and pkg.name not in tests:
|
||||
deptypes.discard("test")
|
||||
|
||||
# ... or if they are requested only for certain packages
|
||||
if t == 'test' and (not isinstance(tests, bool)
|
||||
and pkg.name not in tests):
|
||||
continue
|
||||
# if there are no dependency types to be considered
|
||||
# anymore, don't generate the dependency
|
||||
if not deptypes:
|
||||
continue
|
||||
|
||||
condition_id = self.condition(cond, dep.spec, pkg.name)
|
||||
self.gen.fact(fn.dependency_condition(
|
||||
condition_id, pkg.name, dep.spec.name
|
||||
))
|
||||
|
||||
for t in sorted(deptypes):
|
||||
# there is a declared dependency of type t
|
||||
self.gen.fact(fn.dependency_type(condition_id, t))
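The `tests` argument above is either a boolean (test dependencies everywhere or nowhere) or a collection of package names (test dependencies only for those roots). A minimal, self-contained sketch of just that filter, with hypothetical inputs, mirroring the logic shown above:

    def keep_deptypes(deptypes, pkg_name, tests):
        """Drop 'test' from deptypes unless tests are requested for pkg_name."""
        deptypes = set(deptypes)
        requested = tests if isinstance(tests, bool) else pkg_name in tests
        if not requested:
            deptypes.discard('test')
        return deptypes

    # keep_deptypes({'build', 'test'}, 'depb', tests=True)       -> {'build', 'test'}
    # keep_deptypes({'build', 'test'}, 'depb', tests=['depb'])   -> {'build', 'test'}
    # keep_deptypes({'build', 'test'}, 'depb', tests=['other'])  -> {'build'}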
@@ -779,24 +772,13 @@ def external_packages(self):
                    pkg_name, str(version), weight, id
                ))

            # Declare external conditions with a local index into packages.yaml
            for local_idx, spec in enumerate(external_specs):
                condition_id = next(self._condition_id_counter)

                # Declare the global ID associated with this external spec
                self.gen.fact(fn.external_spec(condition_id, pkg_name))

                # Local index into packages.yaml
                condition_id = self.condition(spec)
                self.gen.fact(
                    fn.external_spec_index(condition_id, pkg_name, local_idx))

                # Add conditions to be satisfied for this external
                self.gen.fact(
                    fn.possible_external(condition_id, pkg_name, local_idx)
                )
                self.possible_versions[spec.name].add(spec.version)
                clauses = self.spec_clauses(spec, body=True)
                for clause in clauses:
                    self.gen.fact(
                        fn.external_spec_condition(
                            condition_id, clause.name, *clause.args)
                    )
                self.gen.newline()

    def preferred_variants(self, pkg_name):
@@ -951,16 +933,17 @@ class Body(object):
            if value == '*':
                continue

            # validate variant value
            reserved_names = spack.directives.reserved_names
            if not spec.virtual and vname not in reserved_names:
                try:
                    variant_def = spec.package.variants[vname]
                except KeyError:
                    msg = 'variant "{0}" not found in package "{1}"'
                    raise RuntimeError(msg.format(vname, spec.name))
                else:
                    variant_def.validate_or_raise(variant, spec.package)
            # validate variant value only if spec not concrete
            if not spec.concrete:
                reserved_names = spack.directives.reserved_names
                if not spec.virtual and vname not in reserved_names:
                    try:
                        variant_def = spec.package.variants[vname]
                    except KeyError:
                        msg = 'variant "{0}" not found in package "{1}"'
                        raise RuntimeError(msg.format(vname, spec.name))
                    else:
                        variant_def.validate_or_raise(variant, spec.package)

            clauses.append(f.variant_value(spec.name, vname, value))

@@ -1459,7 +1442,7 @@ def node_flag_source(self, pkg, source):
    def no_flags(self, pkg, flag_type):
        self._specs[pkg].compiler_flags[flag_type] = []

    def external_spec_selected(self, condition_id, pkg, idx):
    def external_spec_selected(self, pkg, idx):
        """This means that the external spec and index idx
        has been selected for this package.
        """
@@ -1599,8 +1582,7 @@ def _develop_specs_from_env(spec, env):
    if not dev_info:
        return

    path = dev_info['path']
    path = path if os.path.isabs(path) else os.path.join(env.path, path)
    path = os.path.normpath(os.path.join(env.path, dev_info['path']))

    if 'dev_path' in spec.variants:
        assert spec.variants['dev_path'].value == path
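The single normpath(join(...)) line works because `os.path.join` discards earlier components when a later one is absolute, so absolute and relative `path` entries both come out right. A quick illustration with hypothetical paths:

    import os.path

    env_path = '/home/me/envs/test'    # assumed environment root
    os.path.normpath(os.path.join(env_path, '../build'))        # '/home/me/envs/build'
    os.path.normpath(os.path.join(env_path, '/some/abs/path'))  # '/some/abs/path'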
@@ -36,18 +36,66 @@ version_satisfies(Package, Constraint)
#defined preferred_version_declared/3.
#defined version_satisfies/3.

%-----------------------------------------------------------------------------
% Spec conditions and imposed constraints
%
% Given Spack directives like these:
%   depends_on("foo@1.0+bar", when="@2.0+variant")
%   provides("mpi@2:", when="@1.9:")
%
% The conditions are `@2.0+variant` and `@1.9:`, and the imposed constraints
% are `@1.0+bar` on `foo` and `@2:` on `mpi`.
%-----------------------------------------------------------------------------
% conditions are specified with `condition_requirement` and hold when
% corresponding spec attributes hold.
condition_holds(ID) :-
  condition(ID);
  attr(Name, A1)         : condition_requirement(ID, Name, A1);
  attr(Name, A1, A2)     : condition_requirement(ID, Name, A1, A2);
  attr(Name, A1, A2, A3) : condition_requirement(ID, Name, A1, A2, A3).

% condition_holds(ID) implies all imposed_constraints, unless do_not_impose(ID)
% is derived. This allows imposed constraints to be canceled in special cases.
impose(ID) :- condition_holds(ID), not do_not_impose(ID).

% conditions that hold impose constraints on other specs
attr(Name, A1)         :- impose(ID), imposed_constraint(ID, Name, A1).
attr(Name, A1, A2)     :- impose(ID), imposed_constraint(ID, Name, A1, A2).
attr(Name, A1, A2, A3) :- impose(ID), imposed_constraint(ID, Name, A1, A2, A3).

#defined condition/1.
#defined condition_requirement/3.
#defined condition_requirement/4.
#defined condition_requirement/5.
#defined imposed_constraint/3.
#defined imposed_constraint/4.
#defined imposed_constraint/5.
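To make the encoding concrete, here is a rough sketch of the facts the Python setup code might emit for the depends_on("foo@1.0+bar", when="@2.0+variant") example in a package "pkg", written as it would appear inside the generator class above. The condition id and the exact attribute names are illustrative, not taken from a real run:

    self.gen.fact(fn.condition(42))                           # fresh condition id
    self.gen.fact(fn.dependency_condition(42, "pkg", "foo"))  # tie condition to the edge

    # requirements: the condition holds when these attrs hold on "pkg"
    self.gen.fact(fn.condition_requirement(42, "node", "pkg"))
    self.gen.fact(fn.condition_requirement(42, "version_satisfies", "pkg", "2.0"))
    self.gen.fact(fn.condition_requirement(42, "variant_value", "pkg", "variant", "True"))

    # constraints imposed on "foo" when the condition holds
    self.gen.fact(fn.imposed_constraint(42, "version_satisfies", "foo", "1.0"))
    self.gen.fact(fn.imposed_constraint(42, "variant_value", "foo", "bar", "True"))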

%-----------------------------------------------------------------------------
% Dependency semantics
%-----------------------------------------------------------------------------
% Dependencies of any type imply that one package "depends on" another
depends_on(Package, Dependency) :- depends_on(Package, Dependency, _).

% a dependency holds if its condition holds
dependency_holds(Package, Dependency, Type) :-
  dependency_condition(ID, Package, Dependency),
  dependency_type(ID, Type),
  condition_holds(ID),
  not external(Package).

% We cut off dependencies of externals (as we don't really know them).
% Don't impose constraints on dependencies that don't exist.
do_not_impose(ID) :-
  not dependency_holds(Package, Dependency, _),
  dependency_condition(ID, Package, Dependency).

% declared dependencies are real if they're not virtual AND
% the package is not an external
% the package is not an external.
% They're only triggered if the associated dependency condition holds.
depends_on(Package, Dependency, Type)
  :- dependency_conditions(Package, Dependency, Type),
     not virtual(Dependency),
     not external(Package).
  :- dependency_holds(Package, Dependency, Type),
     not virtual(Dependency).

% every root must be a node
node(Package) :- root(Package).
@@ -69,74 +117,19 @@ path(Parent, Child) :- depends_on(Parent, Child).
path(Parent, Descendant) :- path(Parent, A), depends_on(A, Descendant).
:- path(A, B), path(B, A).

%-----------------------------------------------------------------------------
% Conditional dependencies
%
% This takes care of `when=SPEC` in `depends_on("foo@1.0+bar", when="SPEC")`.
%-----------------------------------------------------------------------------
% if any individual condition below is true, trigger the dependency.
dependency_conditions(Package, Dependency, Type) :-
  dependency_conditions_hold(ID, Package, Dependency),
  dependency_type(ID, Type).

#defined dependency_type/2.

% collect all the dependency conditions into a single conditional rule
% distinguishing between Parent and Package (Arg1) is needed to account
% for conditions like:
%
%     depends_on('patchelf@0.9', when='@1.0:1.1 ^python@:2')
%
% that include dependencies
dependency_conditions_hold(ID, Parent, Dependency) :-
  attr(Name, Arg1)             : required_dependency_condition(ID, Name, Arg1);
  attr(Name, Arg1, Arg2)       : required_dependency_condition(ID, Name, Arg1, Arg2);
  attr(Name, Arg1, Arg2, Arg3) : required_dependency_condition(ID, Name, Arg1, Arg2, Arg3);
  dependency_condition(ID, Parent, Dependency);
  % There must be at least one dependency type declared,
  % otherwise the dependency doesn't hold
  dependency_type(ID, _);
  node(Parent);
  not external(Parent).

#defined dependency_condition/3.
#defined required_dependency_condition/3.
#defined required_dependency_condition/4.
#defined required_dependency_condition/5.

%-----------------------------------------------------------------------------
% Imposed constraints on dependencies
%
% This handles the `@1.0+bar` in `depends_on("foo@1.0+bar", when="SPEC")`, or
% the `mpi@2:` in `provides("mpi@2:", when="@1.9:")`.
%-----------------------------------------------------------------------------
% NOTE: `attr(Name, Arg1)` is omitted here b/c the only single-arg attribute is
% NOTE: `node()`, which is handled above under "Dependency Semantics"

attr(Name, Arg1, Arg2) :-
  dependency_conditions_hold(ID, Package, Dependency),
  imposed_dependency_condition(ID, Name, Arg1, Arg2).

attr(Name, Arg1, Arg2, Arg3) :-
  dependency_conditions_hold(ID, Package, Dependency),
  imposed_dependency_condition(ID, Name, Arg1, Arg2, Arg3).

#defined imposed_dependency_condition/4.
#defined imposed_dependency_condition/5.

%-----------------------------------------------------------------------------
% Conflicts
%-----------------------------------------------------------------------------
:- not external(Package) : conflict_condition(ID, "node", Package);
   attr(Name, Arg1)             : conflict_condition(ID, Name, Arg1);
   attr(Name, Arg1, Arg2)       : conflict_condition(ID, Name, Arg1, Arg2);
   attr(Name, Arg1, Arg2, Arg3) : conflict_condition(ID, Name, Arg1, Arg2, Arg3);
   conflict(ID, Package).
:- node(Package),
   not external(Package),
   conflict(Package, TriggerID, ConstraintID),
   condition_holds(TriggerID),
   condition_holds(ConstraintID).

#defined conflict/2.
#defined conflict_condition/3.
#defined conflict_condition/4.
#defined conflict_condition/5.
#defined conflict/3.

%-----------------------------------------------------------------------------
% Virtual dependencies
@@ -145,10 +138,15 @@ attr(Name, Arg1, Arg2, Arg3) :-
% if a package depends on a virtual, is not external, and we have a
% provider for that virtual, then the package depends on the provider
depends_on(Package, Provider, Type)
  :- dependency_conditions(Package, Virtual, Type),
  :- dependency_holds(Package, Virtual, Type),
     provides_virtual(Provider, Virtual),
     not external(Package).

% dependencies on virtuals also imply that the virtual is a virtual node
virtual_node(Virtual)
  :- dependency_holds(Package, Virtual, Type),
     virtual(Virtual), not external(Package).

% if there's a virtual node, we must select one provider
1 { provides_virtual(Package, Virtual) : possible_provider(Package, Virtual) } 1
  :- virtual_node(Virtual).
@@ -158,32 +156,21 @@ virtual_node(Virtual) :- virtual_root(Virtual).
1 { root(Package) : provides_virtual(Package, Virtual) } 1
  :- virtual_root(Virtual).

% all virtual providers come from provider conditions like this
dependency_conditions_hold(ID, Provider, Virtual) :-
  attr(Name, Arg1)             : required_provider_condition(ID, Name, Arg1);
  attr(Name, Arg1, Arg2)       : required_provider_condition(ID, Name, Arg1, Arg2);
  attr(Name, Arg1, Arg2, Arg3) : required_provider_condition(ID, Name, Arg1, Arg2, Arg3);
  virtual(Virtual);
  provider_condition(ID, Provider, Virtual).

% The provider provides the virtual if some provider condition holds.
provides_virtual(Provider, Virtual) :-
   provider_condition(ID, Provider, Virtual),
   dependency_conditions_hold(ID, Provider, Virtual),
   condition_holds(ID),
   virtual(Virtual).

% a node that provides a virtual is a provider
provider(Package, Virtual)
  :- node(Package), provides_virtual(Package, Virtual).

% dependencies on virtuals also imply that the virtual is a virtual node
virtual_node(Virtual)
  :- dependency_conditions(Package, Virtual, Type),
     virtual(Virtual), not external(Package).

% for any virtual, there can be at most one provider in the DAG
0 { node(Package) : provides_virtual(Package, Virtual) } 1 :- virtual(Virtual).

#defined possible_provider/2.

%-----------------------------------------------------------------------------
% Virtual dependency weights
%-----------------------------------------------------------------------------
@@ -308,32 +295,29 @@ external(Package) :- external_only(Package), node(Package).
% a package is a real_node if it is not external
real_node(Package) :- node(Package), not external(Package).

% if an external version is selected, the package is external and
% we are using the corresponding spec
external(Package) :-
  version(Package, Version), version_weight(Package, Weight),
  external_version_declared(Package, Version, Weight, ID).
% a package is external if we are using an external spec for it
external(Package) :- external_spec_selected(Package, _).

% we can't use the weight for an external version if we don't use the
% corresponding external spec.
:- version(Package, Version),
   version_weight(Package, Weight),
   external_version_declared(Package, Version, Weight, ID),
   not external(Package).

% determine if an external spec has been selected
external_spec_selected(ID, Package, LocalIndex) :-
  version(Package, Version), version_weight(Package, Weight),
  external_spec_index(ID, Package, LocalIndex),
  external_version_declared(Package, Version, Weight, LocalIndex),
  external_spec_conditions_hold(ID, Package).
external_spec_selected(Package, LocalIndex) :-
  external_conditions_hold(Package, LocalIndex),
  node(Package).

% determine if all the conditions on an external spec hold. If they do
% the spec can be selected.
external_spec_conditions_hold(ID, Package) :-
  attr(Name, Arg1)             : external_spec_condition(ID, Name, Arg1);
  attr(Name, Arg1, Arg2)       : external_spec_condition(ID, Name, Arg1, Arg2);
  attr(Name, Arg1, Arg2, Arg3) : external_spec_condition(ID, Name, Arg1, Arg2, Arg3);
  external_spec(ID, Package);
  node(Package).
external_conditions_hold(Package, LocalIndex) :-
  possible_external(ID, Package, LocalIndex), condition_holds(ID).

% it cannot happen that a spec is external, but none of the external spec's
% conditions hold.
:- external(Package), not external_spec_conditions_hold(_, Package).
:- external(Package), not external_conditions_hold(Package, _).

#defined possible_external/3.
#defined external_spec_index/3.
#defined external_spec_condition/3.
#defined external_spec_condition/4.
@@ -29,4 +29,4 @@
#show compiler_weight/2.
#show node_target_match/2.
#show node_target_weight/2.
#show external_spec_selected/3.
#show external_spec_selected/2.

@@ -196,6 +196,16 @@ def _store():
    config_dict = spack.config.get('config')
    root, unpadded_root, projections = parse_install_tree(config_dict)
    hash_length = spack.config.get('config:install_hash_length')

    # Check that the user is not trying to install software into the store
    # reserved by Spack to bootstrap its own dependencies, since this would
    # lead to bizarre behaviors (e.g. cleaning the bootstrap area would wipe
    # user installed software)
    if spack.paths.user_bootstrap_store == root:
        msg = ('please change the install tree root "{0}" in your '
               'configuration [path reserved for Spack internal use]')
        raise ValueError(msg.format(root))

    return Store(root=root,
                 unpadded_root=unpadded_root,
                 projections=projections,

@@ -114,6 +114,7 @@ def test_get_concrete_specs(config, mock_packages):
    assert('archive-files' in spec_map)


@pytest.mark.maybeslow
def test_register_cdash_build():
    build_name = 'Some pkg'
    base_url = 'http://cdash.fake.org'

@@ -38,6 +38,9 @@
git = exe.which('git', required=True)


pytestmark = pytest.mark.maybeslow


@pytest.fixture()
def env_deactivate():
    yield

lib/spack/spack/test/cmd/concretize.py (new file, 55 lines)
@@ -0,0 +1,55 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)


import pytest

import spack.environment as ev
from spack.main import SpackCommand


# everything here uses the mock_env_path
pytestmark = pytest.mark.usefixtures(
    'mutable_mock_env_path', 'config', 'mutable_mock_repo')

env = SpackCommand('env')
add = SpackCommand('add')
concretize = SpackCommand('concretize')


@pytest.mark.parametrize('concretization', ['separately', 'together'])
def test_concretize_all_test_dependencies(concretization):
    """Check all test dependencies are concretized."""
    env('create', 'test')

    with ev.read('test') as e:
        e.concretization = concretization
        add('depb')
        concretize('--test', 'all')
        assert e.matching_spec('test-dependency')


@pytest.mark.parametrize('concretization', ['separately', 'together'])
def test_concretize_root_test_dependencies_not_recursive(concretization):
    """Check that test dependencies are not concretized recursively."""
    env('create', 'test')

    with ev.read('test') as e:
        e.concretization = concretization
        add('depb')
        concretize('--test', 'root')
        assert e.matching_spec('test-dependency') is None


@pytest.mark.parametrize('concretization', ['separately', 'together'])
def test_concretize_root_test_dependencies_are_concretized(concretization):
    """Check that root test dependencies are concretized."""
    env('create', 'test')

    with ev.read('test') as e:
        e.concretization = concretization
        add('a')
        add('b')
        concretize('--test', 'root')
        assert e.matching_spec('test-dependency')
@@ -87,6 +87,7 @@ def test_get_config_scope_merged(mock_low_high_config):

def test_config_edit():
    """Ensure `spack config edit` edits the right paths."""

    dms = spack.config.default_modify_scope('compilers')
    dms_path = spack.config.config.scopes[dms].path
    user_path = spack.config.config.scopes['user'].path

@@ -204,20 +205,27 @@ def test_config_add_override_leaf(mutable_empty_config):


def test_config_add_update_dict(mutable_empty_config):
    config('add', 'packages:all:compiler:[gcc]')
    config('add', 'packages:all:version:1.0.0')
    config('add', 'packages:all:version:[1.0.0]')
    output = config('get', 'packages')

    expected = """packages:
  all:
    compiler: [gcc]
    version:
    - 1.0.0
"""

    expected = 'packages:\n  all:\n    version: [1.0.0]\n'
    assert output == expected


def test_config_with_c_argument(mutable_empty_config):

    # I don't know how to add a spack argument to a Spack Command, so we test this way
    config_file = 'config:install_root:root:/path/to/config.yaml'
    parser = spack.main.make_argument_parser()
    args = parser.parse_args(['-c', config_file])
    assert config_file in args.config_vars

    # Add the path to the config
    config("add", args.config_vars[0], scope='command_line')
    output = config("get", 'config')
    assert "config:\n  install_root:\n  - root: /path/to/config.yaml" in output


def test_config_add_ordered_dict(mutable_empty_config):
    config('add', 'mirrors:first:/path/to/first')
    config('add', 'mirrors:second:/path/to/second')

@@ -89,7 +89,7 @@ def test_dev_build_until_last_phase(tmpdir, mock_packages, install_mockery):
    assert os.path.exists(str(tmpdir))


def test_dev_build_before_until(tmpdir, mock_packages, install_mockery):
def test_dev_build_before_until(tmpdir, mock_packages, install_mockery, capsys):
    spec = spack.spec.Spec('dev-build-test-install@0.0.0 dev_path=%s' % tmpdir)
    spec.concretize()

@@ -103,13 +103,18 @@ def test_dev_build_before_until(tmpdir, mock_packages, install_mockery):

    bad_phase = 'phase_that_does_not_exist'
    not_allowed = 'is not a valid phase'
    out = dev_build('-u', bad_phase, 'dev-build-test-install@0.0.0')
    not_installed = 'was not installed'
    out = dev_build('-u', bad_phase, 'dev-build-test-install@0.0.0',
                    fail_on_error=False)
    assert bad_phase in out
    assert not_allowed in out
    assert not_installed in out

    out = dev_build('-b', bad_phase, 'dev-build-test-install@0.0.0')
    out = dev_build('-b', bad_phase, 'dev-build-test-install@0.0.0',
                    fail_on_error=False)
    assert bad_phase in out
    assert not_allowed in out
    assert not_installed in out


def print_spack_cc(*args):
@@ -197,7 +202,7 @@ def test_dev_build_env(tmpdir, mock_packages, install_mockery,
      dev-build-test-install:
        spec: dev-build-test-install@0.0.0
        path: %s
""" % build_dir)
""" % os.path.relpath(str(build_dir), start=str(envdir)))

    env('create', 'test', './spack.yaml')
    with ev.read('test'):
@@ -323,7 +328,7 @@ def test_dev_build_env_dependency(tmpdir, mock_packages, install_mockery,
      dev-build-test-install:
        spec: dev-build-test-install@0.0.0
        path: %s
""" % build_dir)
""" % os.path.relpath(str(build_dir), start=str(envdir)))

    env('create', 'test', './spack.yaml')
    with ev.read('test'):
@@ -338,7 +343,7 @@ def test_dev_build_env_dependency(tmpdir, mock_packages, install_mockery,
    assert dep_spec.package.filename in os.listdir(dep_spec.prefix)
    assert os.path.exists(spec.prefix)

    # Ensure variants set properly
    # Ensure variants set properly; ensure build_dir is absolute and normalized
    for dep in (dep_spec, spec['dev-build-test-install']):
        assert dep.satisfies('dev_path=%s' % build_dir)
    assert spec.satisfies('^dev_path=*')
@@ -25,8 +25,10 @@


# everything here uses the mock_env_path
pytestmark = pytest.mark.usefixtures(
    'mutable_mock_env_path', 'config', 'mutable_mock_repo')
pytestmark = [
    pytest.mark.usefixtures('mutable_mock_env_path', 'config', 'mutable_mock_repo'),
    pytest.mark.maybeslow
]

env = SpackCommand('env')
install = SpackCommand('install')
@@ -2113,7 +2115,11 @@ def test_env_activate_default_view_root_unconditional(env_deactivate,
    viewdir = e.default_view.root

    out = env('activate', '--sh', 'test')
    assert 'PATH=%s' % os.path.join(viewdir, 'bin') in out
    viewdir_bin = os.path.join(viewdir, 'bin')

    assert "export PATH={0}".format(viewdir_bin) in out or \
        "export PATH='{0}".format(viewdir_bin) in out or \
        'export PATH="{0}'.format(viewdir_bin) in out


def test_concretize_user_specs_together():
@@ -2369,3 +2375,82 @@ def _write_helper_raise(self, x, y):
        e.clear()
        e.write()
    assert os.path.exists(str(spack_lock))


def _setup_develop_packages(tmpdir):
    """Sets up a structure ./init_env/spack.yaml, ./build_folder, ./dest_env
    where spack.yaml has a relative develop path to build_folder"""
    init_env = tmpdir.join('init_env')
    build_folder = tmpdir.join('build_folder')
    dest_env = tmpdir.join('dest_env')

    fs.mkdirp(str(init_env))
    fs.mkdirp(str(build_folder))
    fs.mkdirp(str(dest_env))

    raw_yaml = """
spack:
  specs: ['mypkg1', 'mypkg2']
  develop:
    mypkg1:
      path: ../build_folder
      spec: mypkg@main
    mypkg2:
      path: /some/other/path
      spec: mypkg@main
"""
    spack_yaml = init_env.join('spack.yaml')
    spack_yaml.write(raw_yaml)

    return init_env, build_folder, dest_env, spack_yaml


def test_rewrite_rel_dev_path_new_dir(tmpdir):
    """Relative develop paths should be rewritten for new environments in
    a different directory from the original manifest file"""
    _, build_folder, dest_env, spack_yaml = _setup_develop_packages(tmpdir)

    env('create', '-d', str(dest_env), str(spack_yaml))
    with ev.Environment(str(dest_env)) as e:
        assert e.dev_specs['mypkg1']['path'] == str(build_folder)
        assert e.dev_specs['mypkg2']['path'] == '/some/other/path'


def test_rewrite_rel_dev_path_named_env(tmpdir):
    """Relative develop paths should by default be rewritten for a new named
    environment"""
    _, build_folder, _, spack_yaml = _setup_develop_packages(tmpdir)
    env('create', 'named_env', str(spack_yaml))
    with ev.read('named_env') as e:
        assert e.dev_specs['mypkg1']['path'] == str(build_folder)
        assert e.dev_specs['mypkg2']['path'] == '/some/other/path'


def test_rewrite_rel_dev_path_original_dir(tmpdir):
    """Relative develop paths should not be rewritten when initializing an
    environment with root path set to the same directory"""
    init_env, _, _, spack_yaml = _setup_develop_packages(tmpdir)
    with ev.Environment(str(init_env), str(spack_yaml)) as e:
        assert e.dev_specs['mypkg1']['path'] == '../build_folder'
        assert e.dev_specs['mypkg2']['path'] == '/some/other/path'


def test_rewrite_rel_dev_path_create_original_dir(tmpdir):
    """Relative develop paths should not be rewritten when creating an
    environment in the original directory"""
    init_env, _, _, spack_yaml = _setup_develop_packages(tmpdir)
    env('create', '-d', str(init_env), str(spack_yaml))
    with ev.Environment(str(init_env)) as e:
        assert e.dev_specs['mypkg1']['path'] == '../build_folder'
        assert e.dev_specs['mypkg2']['path'] == '/some/other/path'


def test_does_not_rewrite_rel_dev_path_when_keep_relative_is_set(tmpdir):
    """Relative develop paths should not be rewritten when --keep-relative is
    passed to create"""
    _, _, _, spack_yaml = _setup_develop_packages(tmpdir)
    env('create', '--keep-relative', 'named_env', str(spack_yaml))
    with ev.read('named_env') as e:
        print(e.dev_specs)
        assert e.dev_specs['mypkg1']['path'] == '../build_folder'
        assert e.dev_specs['mypkg2']['path'] == '/some/other/path'
@@ -629,10 +629,11 @@ def test_cache_only_fails(tmpdir, mock_fetch, install_mockery, capfd):
    # libelf from cache fails to install, which automatically removes the
    # libdwarf build task
    with capfd.disabled():
        out = install('--cache-only', 'libdwarf')
        out = install('--cache-only', 'libdwarf', fail_on_error=False)

    assert 'Failed to install libelf' in out
    assert 'Skipping build of libdwarf' in out
    assert 'was not installed' in out

    # Check that failure prefix locks are still cached
    failure_lock_prefixes = ','.join(spack.store.db._prefix_failures.keys())
@@ -884,3 +885,23 @@ def test_cache_install_full_hash_match(

    uninstall('-y', s.name)
    mirror('rm', 'test-mirror')


def test_install_env_with_tests_all(tmpdir, mock_packages, mock_fetch,
                                    install_mockery, mutable_mock_env_path):
    env('create', 'test')
    with ev.read('test'):
        test_dep = Spec('test-dependency').concretized()
        add('depb')
        install('--test', 'all')
        assert os.path.exists(test_dep.prefix)


def test_install_env_with_tests_root(tmpdir, mock_packages, mock_fetch,
                                     install_mockery, mutable_mock_env_path):
    env('create', 'test')
    with ev.read('test'):
        test_dep = Spec('test-dependency').concretized()
        add('depb')
        install('--test', 'root')
        assert not os.path.exists(test_dep.prefix)

@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import platform
import sys

import pytest

@@ -18,6 +19,11 @@ def test_python():
    assert out.strip() == spack.spack_version


def test_python_interpreter_path():
    out = python('--path')
    assert out.strip() == sys.executable


def test_python_version():
    out = python('-V')
    assert platform.python_version() in out

@@ -11,6 +11,8 @@
import spack.config
import spack.package
import spack.cmd.install

from spack.cmd.test import has_test_method
from spack.main import SpackCommand

install = SpackCommand('install')
@@ -181,3 +183,32 @@ def test_test_help_cdash(mock_test_stage):
    """Make sure `spack test --help-cdash` describes CDash arguments"""
    out = spack_test('run', '--help-cdash')
    assert 'CDash URL' in out


def test_test_list_all(mock_packages):
    """make sure `spack test list --all` returns all packages with tests"""
    pkgs = spack_test("list", "--all").strip().split()
    assert set(pkgs) == set([
        "printing-package",
        "py-extension1",
        "py-extension2",
        "test-error",
        "test-fail",
    ])


def test_test_list(
        mock_packages, mock_archive, mock_fetch, install_mockery_mutable_config
):
    pkg_with_tests = 'printing-package'
    install(pkg_with_tests)
    output = spack_test("list")
    assert pkg_with_tests in output


def test_has_test_method_fails(capsys):
    with pytest.raises(SystemExit):
        has_test_method('printing-package')

    captured = capsys.readouterr()[1]
    assert 'is not a class' in captured
@@ -71,7 +71,7 @@ def test_url_with_no_version_fails():
    url('parse', 'http://www.netlib.org/voronoi/triangle.zip')


@pytest.mark.network
@pytest.mark.maybeslow
@pytest.mark.skipif(
    sys.version_info < (2, 7),
    reason="Python 2.6 tests are run in a container, where "
@@ -106,7 +106,7 @@ def test_url_list():
    assert 0 < correct_version_urls < total_urls


@pytest.mark.network
@pytest.mark.maybeslow
@pytest.mark.skipif(
    sys.version_info < (2, 7),
    reason="Python 2.6 tests are run in a container, where "

@@ -14,7 +14,6 @@ def test_safe_only_versions():
    """Only test the safe versions of a package.
    (Using the deprecated command line argument)
    """

    versions('--safe-only', 'zlib')


@@ -24,21 +23,21 @@ def test_safe_versions():
    versions('--safe', 'zlib')


@pytest.mark.network
@pytest.mark.maybeslow
def test_remote_versions():
    """Test a package for which remote versions should be available."""

    versions('zlib')


@pytest.mark.network
@pytest.mark.maybeslow
def test_remote_versions_only():
    """Test a package for which remote versions should be available."""

    versions('--remote', 'zlib')


@pytest.mark.network
@pytest.mark.maybeslow
@pytest.mark.usefixtures('mock_packages')
def test_new_versions_only():
    """Test a package for which new versions should be available."""
@@ -46,28 +45,28 @@ def test_new_versions_only():
    versions('--new', 'brillig')


@pytest.mark.network
@pytest.mark.maybeslow
def test_no_versions():
    """Test a package for which no remote versions are available."""

    versions('converge')


@pytest.mark.network
@pytest.mark.maybeslow
def test_no_unchecksummed_versions():
    """Test a package for which no unchecksummed versions are available."""

    versions('bzip2')


@pytest.mark.network
@pytest.mark.maybeslow
def test_versions_no_url():
    """Test a package with versions but without a ``url`` attribute."""

    versions('graphviz')


@pytest.mark.network
@pytest.mark.maybeslow
def test_no_versions_no_url():
    """Test a package without versions or a ``url`` attribute."""

@@ -389,6 +389,7 @@ def test_cce_flags():
    supported_flag_test("cxx_pic_flag", "-fPIC", "cce@9.1.0")
    supported_flag_test("f77_pic_flag", "-fPIC", "cce@9.1.0")
    supported_flag_test("fc_pic_flag", "-fPIC", "cce@9.1.0")
    supported_flag_test("stdcxx_libs", (), "cce@1.0")
    supported_flag_test("debug_flags", ['-g', '-G0', '-G1', '-G2', '-Gfast'],
                        'cce@1.0')

@@ -466,6 +467,10 @@ def test_aocc_flags():
    supported_flag_test("f77_pic_flag", "-fPIC", "aocc@2.2.0")
    supported_flag_test("fc_pic_flag", "-fPIC", "aocc@2.2.0")
    supported_flag_test("version_argument", "--version", "aocc@2.2.0")
    flg = "-Wno-unused-command-line-argument -mllvm -eliminate-similar-expr=false"
    supported_flag_test("cflags", flg, "aocc@3.0.0")
    supported_flag_test("cxxflags", flg, "aocc@3.0.0")
    supported_flag_test("fflags", flg, "aocc@3.0.0")


def test_fj_flags():
@@ -608,6 +613,7 @@ def test_pgi_flags():
    supported_flag_test("cxx_pic_flag", "-fpic", "pgi@1.0")
    supported_flag_test("f77_pic_flag", "-fpic", "pgi@1.0")
    supported_flag_test("fc_pic_flag", "-fpic", "pgi@1.0")
    supported_flag_test("stdcxx_libs", ("-pgc++libs",), "pgi@1.0")
    supported_flag_test("debug_flags", ['-g', '-gopt'], 'pgi@1.0')
    supported_flag_test("opt_flags", ['-O', '-O0', '-O1', '-O2', '-O3', '-O4'],
                        'pgi@1.0')
@@ -321,6 +321,11 @@ def _module(cmd, *args):

@pytest.mark.parametrize('version_str,expected_version', [
    # This applies to the C, C++ and Fortran compilers
    ('AMD clang version 12.0.0 (CLANG: AOCC_3.0.0-Build#78 2020_12_10)'
     '(based on LLVM Mirror.Version.12.0.0)\n'
     'Target: x86_64-unknown-linux-gnu\n'
     'Thread model: posix\n', '3.0.0'
     ),
    ('AMD clang version 11.0.0 (CLANG: AOCC_2.3.0-Build#85 2020_11_10)'
     '(based on LLVM Mirror.Version.11.0.0)\n'
     'Target: x86_64-unknown-linux-gnu\n'

@@ -660,8 +660,7 @@ def test_simultaneous_concretization_of_specs(self, abstract_specs):

    abstract_specs = [Spec(x) for x in abstract_specs]
    concrete_specs = spack.concretize.concretize_specs_together(
        *abstract_specs
    )
        *abstract_specs)

    # Check there's only one configuration of each package in the DAG
    names = set(

@@ -6,6 +6,7 @@
import os
import collections
import getpass
import re
import tempfile
from six import StringIO

@@ -16,6 +17,7 @@
import spack.paths
import spack.config
import spack.main
import spack.environment
import spack.schema.compilers
import spack.schema.config
import spack.schema.env
@@ -257,6 +259,37 @@ def test_write_to_same_priority_file(mock_low_high_config, compiler_specs):
repos_low = {'repos': ["/some/path"]}
repos_high = {'repos': ["/some/other/path"]}

# Test setting config values via path in filename


def test_add_config_path():

    # Try setting a new install tree root
    path = "config:install_tree:root:/path/to/config.yaml"
    spack.config.add(path, scope="command_line")
    set_value = spack.config.get('config')['install_tree']['root']
    assert set_value == '/path/to/config.yaml'

    # Now a packages:all setting
    path = "packages:all:compiler:[gcc]"
    spack.config.add(path, scope="command_line")
    compilers = spack.config.get('packages')['all']['compiler']
    assert "gcc" in compilers


def test_add_config_filename(mock_low_high_config, tmpdir):

    config_yaml = tmpdir.join('config-filename.yaml')
    config_yaml.ensure()
    with config_yaml.open('w') as f:
        syaml.dump_config(config_low, f)

    spack.config.add_from_file(str(config_yaml), scope="low")
    assert "build_stage" in spack.config.get('config')
    build_stages = spack.config.get('config')['build_stage']
    for stage in config_low['config']['build_stage']:
        assert stage in build_stages


# repos
def test_write_list_in_memory(mock_low_high_config):
@@ -267,7 +300,12 @@ def test_write_list_in_memory(mock_low_high_config):
    assert config == repos_high['repos'] + repos_low['repos']


def test_substitute_config_variables(mock_low_high_config):
class MockEnv(object):
    def __init__(self, path):
        self.path = path


def test_substitute_config_variables(mock_low_high_config, monkeypatch):
    prefix = spack.paths.prefix.lstrip('/')

    assert os.path.join(
@@ -298,6 +336,50 @@ def test_substitute_config_variables(mock_low_high_config):
        '/foo/bar/baz', prefix, 'foo/bar/baz'
    ) != spack_path.canonicalize_path('/foo/bar/baz/${spack/foo/bar/baz/')

    # $env replacement is a no-op when no environment is active
    assert spack_path.canonicalize_path(
        '/foo/bar/baz/$env'
    ) == '/foo/bar/baz/$env'

    # Fake an active environment and $env is replaced properly
    fake_env_path = '/quux/quuux'
    monkeypatch.setattr(spack.environment, 'get_env',
                        lambda x, y: MockEnv(fake_env_path))
    assert spack_path.canonicalize_path(
        '$env/foo/bar/baz'
    ) == os.path.join(fake_env_path, 'foo/bar/baz')

    # relative paths without source information are relative to cwd
    assert spack_path.canonicalize_path(
        'foo/bar/baz'
    ) == os.path.abspath('foo/bar/baz')

    # relative paths with source information are relative to the file
    spack.config.set(
        'config:module_roots', {'lmod': 'foo/bar/baz'}, scope='low')
    spack.config.config.clear_caches()
    path = spack.config.get('config:module_roots:lmod')
    assert spack_path.canonicalize_path(path) == os.path.normpath(
        os.path.join(mock_low_high_config.scopes['low'].path,
                     'foo/bar/baz'))

    # The 'instance' substitution allows us to make a unique hash for the
    # spack instance. Test that they're all the right length, right
    # chars, and all different.
    path = "/foo/$instance/bar/baz"
    prefixes = [spack.paths.prefix, "/foo/bar/baz", "/foo/bar", "/blah/blah"]
    paths = []
    for prefix in prefixes:
        monkeypatch.setattr(spack.paths, "prefix", prefix)
        canonical = spack_path.canonicalize_path(path)

        # all hashed paths are 8-character hashes
        assert re.match("/foo/[0-9a-z]{8}/bar/baz", canonical)
        paths.append(canonical)

    # ensure all hashed paths are different
    assert len(set(paths)) == len(prefixes)


packages_merge_low = {
    'packages': {

@@ -120,7 +120,7 @@ def test_read_and_write_spec(temporary_store, config, mock_packages):
    # TODO: fix this when we can concretize more loosely based on
    # TODO: what is installed. We currently omit these to
    # TODO: increase reuse of build dependencies.
    stored_deptypes = ('link', 'run')
    stored_deptypes = spack.hash_types.full_hash
    expected = spec.copy(deps=stored_deptypes)
    expected._mark_concrete()

@@ -122,3 +122,12 @@ def test_ascii_graph_mpileaks(mock_packages):
    |/
    o  libelf
    '''


def test_topo_sort_filtered(mock_packages):
    """Test topo sort gives correct order when filtering link deps."""
    s = Spec('both-link-and-build-dep-a').normalized()

    topo = topological_sort(s, deptype=('link',))

    assert topo == ['both-link-and-build-dep-a', 'both-link-and-build-dep-c']

@@ -879,7 +879,8 @@ def test_install_failed(install_mockery, monkeypatch, capsys):
    # Make sure the package is identified as failed
    monkeypatch.setattr(spack.database.Database, 'prefix_failed', _true)

    installer.install()
    with pytest.raises(inst.InstallError, match='request failed'):
        installer.install()

    out = str(capsys.readouterr())
    assert installer.build_requests[0].pkg_id in out
@@ -894,7 +895,8 @@ def test_install_failed_not_fast(install_mockery, monkeypatch, capsys):
    # Make sure the package is identified as failed
    monkeypatch.setattr(spack.database.Database, 'prefix_failed', _true)

    installer.install()
    with pytest.raises(inst.InstallError, match='request failed'):
        installer.install()

    out = str(capsys.readouterr())
    assert 'failed to install' in out
@@ -1046,7 +1048,9 @@ def _requeued(installer, task):
    # Ensure we don't continually requeue the task
    monkeypatch.setattr(inst.PackageInstaller, '_requeue_task', _requeued)

    installer.install()
    with pytest.raises(inst.InstallError, match='request failed'):
        installer.install()

    out = capfd.readouterr()[0]
    expected = ['write locked', 'read locked', 'requeued']
    for exp, ln in zip(expected, out.split('\n')):
@@ -1077,7 +1081,9 @@ def _requeued(installer, task):
    # Ensure we don't continually requeue the task
    monkeypatch.setattr(inst.PackageInstaller, '_requeue_task', _requeued)

    installer.install()
    with pytest.raises(inst.InstallError, match='request failed'):
        installer.install()

    assert b_pkg_id not in installer.installed

    out = capfd.readouterr()[0]
@@ -1113,7 +1119,9 @@ def _requeued(installer, task):
    const_arg = installer_args(['b'], {})
    installer = create_installer(const_arg)

    installer.install()
    with pytest.raises(inst.InstallError, match='request failed'):
        installer.install()

    assert 'b' not in installer.installed

    out = capfd.readouterr()[0]

@@ -124,9 +124,9 @@ def test_patch_order(mock_packages, config):
    spec = Spec('dep-diamond-patch-top')
    spec.concretize()

    mid2_sha256 = 'mid21234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234'  # noqa: E501
    mid1_sha256 = '0b62284961dab49887e31319843431ee5b037382ac02c4fe436955abef11f094'  # noqa: E501
    top_sha256 = 'f7de2947c64cb6435e15fb2bef359d1ed5f6356b2aebb7b20535e3772904e6db'  # noqa: E501
    mid2_sha256 = 'mid21234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234'
    mid1_sha256 = '0b62284961dab49887e31319843431ee5b037382ac02c4fe436955abef11f094'
    top_sha256 = 'f7de2947c64cb6435e15fb2bef359d1ed5f6356b2aebb7b20535e3772904e6db'

    dep = spec['patch']
    patch_order = dep.variants['patches']._patches_in_order_of_appearance
@@ -328,9 +328,25 @@ def test_write_and_read_sub_dags_with_patched_deps(mock_packages, config):
        spec.package.package_dir)


def test_file_patch_no_file():
def test_patch_no_file():
    # Give it the attributes we need to construct the error message
    FakePackage = collections.namedtuple(
        'FakePackage', ['name', 'namespace', 'fullname'])
    fp = FakePackage('fake-package', 'test', 'fake-package')
    with pytest.raises(ValueError, match='FilePatch:'):
        spack.patch.FilePatch(fp, 'nonexistent_file', 0, '')

    patch = spack.patch.Patch(fp, 'nonexistent_file', 0, '')
    patch.path = 'test'
    with pytest.raises(spack.patch.NoSuchPatchError, match='No such patch:'):
        patch.apply('')


@pytest.mark.parametrize('level', [-1, 0.0, '1'])
def test_invalid_level(level):
    # Give it the attributes we need to construct the error message
    FakePackage = collections.namedtuple('FakePackage', ['name', 'namespace'])
    fp = FakePackage('fake-package', 'test')
    with pytest.raises(ValueError, match=r'FilePatch:.*'):
        spack.patch.FilePatch(fp, 'nonexistent_file', 0, '')
    with pytest.raises(ValueError,
                       match='Patch level needs to be a non-negative integer.'):
        spack.patch.Patch(fp, 'nonexistent_file', level, '')

@@ -65,6 +65,7 @@ def test_parse_gpg_output_case_three():

@pytest.mark.skipif(not spack.util.gpg.GpgConstants.user_run_dir,
                    reason='This test requires /var/run/user/$(id -u)')
@pytest.mark.requires_executables('gpg2')
def test_really_long_gnupg_home_dir(tmpdir):
    N = 960

@@ -528,13 +528,18 @@ def reversed(self):

        return rev

    def apply_modifications(self):
    def apply_modifications(self, env=None):
        """Applies the modifications and clears the list."""
        # Use os.environ if not specified
        # Do not copy, we want to modify it in place
        if env is None:
            env = os.environ

        modifications = self.group_by_name()
        # Apply modifications one variable at a time
        for name, actions in sorted(modifications.items()):
            for x in actions:
                x.execute(os.environ)
                x.execute(env)

    def shell_modifications(self, shell='sh'):
        """Return shell code to apply the modifications and clears the list."""
@@ -22,6 +22,8 @@ class Executable(object):
    def __init__(self, name):
        self.exe = shlex.split(str(name))
        self.default_env = {}
        from spack.util.environment import EnvironmentModifications  # no cycle
        self.default_envmod = EnvironmentModifications()
        self.returncode = None

        if not self.exe:
@@ -40,6 +42,10 @@ def add_default_env(self, key, value):
        """
        self.default_env[key] = value

    def add_default_envmod(self, envmod):
        """Set an EnvironmentModifications to use when the command is run."""
        self.default_envmod.extend(envmod)

    @property
    def command(self):
        """The command-line string.
@@ -76,9 +82,10 @@ def __call__(self, *args, **kwargs):
        Keyword Arguments:
            _dump_env (dict): Dict to be set to the environment actually
                used (envisaged for testing purposes only)
            env (dict): The environment to run the executable with
            extra_env (dict): Extra items to add to the environment
                (neither requires nor precludes env)
            env (dict or EnvironmentModifications): The environment with which
                to run the executable
            extra_env (dict or EnvironmentModifications): Extra items to add to
                the environment (neither requires nor precludes env)
            fail_on_error (bool): Raise an exception if the subprocess returns
                an error. Default is True. The return code is available as
                ``exe.returncode``
@@ -107,13 +114,26 @@ def __call__(self, *args, **kwargs):
        """
        # Environment
        env_arg = kwargs.get('env', None)
        if env_arg is None:
            env = os.environ.copy()
            env.update(self.default_env)
        else:
            env = self.default_env.copy()

        # Setup default environment
        env = os.environ.copy() if env_arg is None else {}
        self.default_envmod.apply_modifications(env)
        env.update(self.default_env)

        from spack.util.environment import EnvironmentModifications  # no cycle
        # Apply env argument
        if isinstance(env_arg, EnvironmentModifications):
            env_arg.apply_modifications(env)
        elif env_arg:
            env.update(env_arg)
        env.update(kwargs.get('extra_env', {}))

        # Apply extra env
        extra_env = kwargs.get('extra_env', {})
        if isinstance(extra_env, EnvironmentModifications):
            extra_env.apply_modifications(env)
        else:
            env.update(extra_env)

        if '_dump_env' in kwargs:
            kwargs['_dump_env'].clear()
            kwargs['_dump_env'].update(env)
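A short sketch of the extended interface from the caller's side; the command and variable names are illustrative:

    import os
    from spack.util.environment import EnvironmentModifications
    from spack.util.executable import Executable

    printenv = Executable('/usr/bin/printenv')

    mods = EnvironmentModifications()
    mods.set('MY_FLAG', '1')

    # env and extra_env now accept EnvironmentModifications as well as dicts
    out = printenv(env=mods, output=str)

    # _dump_env captures the environment actually used (a testing hook)
    dump = {}
    printenv(env={'PATH': os.environ['PATH']}, extra_env=mods,
             _dump_env=dump, output=str)
    assert dump['MY_FLAG'] == '1'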
@@ -7,9 +7,11 @@

TODO: this is really part of spack.config. Consolidate it.
"""
import base64
import getpass
import hashlib
import os
import re
import getpass
import subprocess
import tempfile

@@ -17,19 +19,13 @@
from llnl.util.lang import memoized

import spack.paths

import spack.util.spack_yaml as syaml

__all__ = [
    'substitute_config_variables',
    'substitute_path_variables',
    'canonicalize_path']

# Substitutions to perform
replacements = {
    'spack': spack.paths.prefix,
    'user': getpass.getuser(),
    'tempdir': tempfile.gettempdir(),
}

# This is intended to be longer than the part of the install path
# spack generates from the root path we give it. Included in the
@@ -69,18 +65,54 @@ def substitute_config_variables(path):

    Spack allows paths in configs to have some placeholders, as follows:

    - $spack     The Spack instance's prefix
    - $user      The current user's username
    - $tempdir   Default temporary directory returned by tempfile.gettempdir()
    ``$spack``
        The Spack instance's prefix.

    ``$user``
        The current user's username.

    ``$tempdir``
        Default temporary directory returned by ``tempfile.gettempdir()``.

    ``$env``
        The active Spack environment.

    ``$instance``
        Hash of the spack prefix, for creating paths unique to a spack
        instance outside of that instance (e.g., in $tempdir).

    These are substituted case-insensitively into the path, and users can
    use either ``$var`` or ``${var}`` syntax for the variables.
    use either ``$var`` or ``${var}`` syntax for the variables. $env is only
    replaced if there is an active environment, and should only be used in
    environment yaml files.

    """
    # Look up replacements for re.sub in the replacements dict.
    # Possible replacements
    def repl(match):
        m = match.group(0).strip('${}')
        return replacements.get(m.lower(), match.group(0))
        raw_match = match.group(0)
        name = raw_match.strip('${}').lower()

        if name == "spack":
            return spack.paths.prefix

        elif name == "user":
            return getpass.getuser()

        elif name == "tempdir":
            return tempfile.gettempdir()

        elif name == "env":
            import spack.environment as ev  # break circular
            env = ev.get_env({}, '')
            if env:
                return env.path

        elif name == "instance":
            sha = hashlib.sha1(spack.paths.prefix.encode("utf-8"))
            b32_hash = base64.b32encode(sha.digest()).lower()
            return b32_hash[:8].decode("utf-8")

        return raw_match

    # Replace $var or ${var}.
    return re.sub(r'(\$\w+\b|\$\{\w+\})', repl, path)
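A few hedged examples of the substitutions (actual prefixes, usernames, and hash values are machine-dependent; the module alias follows the tests above):

    import spack.util.path as spack_path

    spack_path.substitute_config_variables('$spack/var/cache')      # '<prefix>/var/cache'
    spack_path.substitute_config_variables('$tempdir/$user/stage')  # e.g. '/tmp/alice/stage'
    spack_path.substitute_config_variables('${TEMPDIR}/stage')      # case-insensitive, same expansion
    spack_path.substitute_config_variables('/scratch/$instance/x')  # '/scratch/<8-char base32 hash>/x'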
@@ -132,7 +164,19 @@ def add_padding(path, length):

def canonicalize_path(path):
    """Same as substitute_path_variables, but also take absolute path."""
    path = substitute_path_variables(path)
    path = os.path.abspath(path)
    # Get file in which path was written in case we need to make it absolute
    # relative to that path.
    filename = None
    if isinstance(path, syaml.syaml_str):
        filename = os.path.dirname(path._start_mark.name)
        assert path._start_mark.name == path._end_mark.name

    return path
    path = substitute_path_variables(path)
    if not os.path.isabs(path):
        if filename:
            path = os.path.join(filename, path)
        else:
            path = os.path.abspath(path)
            tty.debug("Using current working directory as base for abspath")

    return os.path.normpath(path)
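The behavioral change, sketched: a relative path given as a plain Python string has no source information and resolves against the current directory, while a relative path parsed from a config file (a syaml_str, which remembers its file of origin) resolves against that file's directory. Paths shown are hypothetical:

    import os
    import spack.util.path as spack_path

    # plain string: no source info, resolved against the cwd
    assert spack_path.canonicalize_path('stage') == os.path.abspath('stage')

    # a value read by spack's YAML loader from /etc/spack/config.yaml carries
    # its origin, so a relative 'build/stage' written there would canonicalize
    # to /etc/spack/build/stage rather than against the cwd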
@@ -282,8 +282,21 @@ def _value_setter(self, value):
        # to a set
        self._value = tuple(sorted(set(value)))

    def _cmp_value(self):
        """Returns a tuple of strings containing the values stored in
        the variant.

        Returns:
            tuple of str: values stored in the variant
        """
        value = self._value
        if not isinstance(value, tuple):
            value = (value,)
        stringified = tuple(str(x) for x in value)
        return stringified

    def _cmp_key(self):
        return self.name, self.value
        return self.name, self._cmp_value()

    def copy(self):
        """Returns an instance of a variant equivalent to self
@@ -1,10 +1,9 @@
# content of pytest.ini
[pytest]
addopts = --durations=20 -ra
addopts = --durations=30 -ra
testpaths = lib/spack/spack/test
python_files = *.py
markers =
    db: tests that require creating a DB
    network: tests that require access to the network
    maybeslow: tests that may be slow (e.g. access the filesystem a lot, etc.)
    regression: tests that fix a reported bug
||||
|
||||
@@ -55,7 +55,7 @@ _bash_completion_spack() {
|
||||
# For our purposes, flags should not affect tab completion. For instance,
|
||||
# `spack install []` and `spack -d install --jobs 8 []` should both give the same
|
||||
# possible completions. Therefore, we need to ignore any flags in COMP_WORDS.
|
||||
local COMP_WORDS_NO_FLAGS=()
|
||||
local -a COMP_WORDS_NO_FLAGS
|
||||
local index=0
|
||||
while [[ "$index" -lt "$COMP_CWORD" ]]
|
||||
do
|
||||
|
||||
@@ -12,7 +12,7 @@ stages: [generate, build]
|
||||
artifacts:
|
||||
paths:
|
||||
- "${CI_PROJECT_DIR}/jobs_scratch_dir/e4s_pipeline.yml"
|
||||
tags: ["spack", "public", "medium"]
|
||||
tags: ["spack", "public", "medium", "x86_64"]
|
||||
interruptible: true
|
||||
|
||||
e4s-pr-generate:
|
||||
|
||||
@@ -35,78 +35,78 @@ spack:

  definitions:
  - e4s:
    - adios
    - adios2
    - aml
    - amrex
    - arborx
    # - adios
    # - adios2
    # - aml
    # - amrex
    # - arborx
    - argobots
    - ascent
    - axom
    - bolt
    - caliper
    - darshan-runtime
    # - ascent
    # - axom
    # - bolt
    # - caliper
    # - darshan-runtime
    - darshan-util
    - dyninst
    - faodel
    - flecsi+cinch
    - flit
    - gasnet
    - ginkgo
    - globalarrays
    - gotcha
    - hdf5
    - hpctoolkit
    - hpx
    - hypre
    - kokkos-kernels+openmp
    - kokkos+openmp
    - legion
    - libnrm
    - libquo
    - magma cuda_arch=70 ^cuda@10.2.89
    - mercury
    - mfem
    - mpifileutils@develop~xattr
    - ninja
    - omega-h
    - openmpi
    - openpmd-api
    - papi
    - papyrus@1.0.1
    - parallel-netcdf
    - pdt
    - petsc
    - phist
    - plasma
    - precice
    - pumi
    - py-jupyterhub
    - py-libensemble
    - py-petsc4py
    - qthreads scheduler=distrib
    - raja
    - rempi
    - scr
    - slate ^openblas@0.3.6 threads=openmp ^cuda@10.2.89
    - slepc
    - stc
    - strumpack ~slate ^openblas@0.3.6 threads=openmp
    - sundials
    - superlu
    - superlu-dist
    # - dyninst
    # - faodel
    # - flecsi+cinch
    # - flit
    # - gasnet
    # - ginkgo
    # - globalarrays
    # - gotcha
    # - hdf5
    # - hpctoolkit
    # - hpx
    # - hypre
    # - kokkos-kernels+openmp
    # - kokkos+openmp
    # - legion
    # - libnrm
    # - libquo
    # - magma cuda_arch=70 ^cuda@10.2.89
    # - mercury
    # - mfem
    # - mpifileutils@develop~xattr
    # - ninja
    # - omega-h
    # - openmpi
    # - openpmd-api
    # - papi
    # - papyrus@1.0.1
    # - parallel-netcdf
    # - pdt
    # - petsc
    # - phist
    # - plasma
    # - precice
    # - pumi
    # - py-jupyterhub
    # - py-libensemble
    # - py-petsc4py
    # - qthreads scheduler=distrib
    # - raja
    # - rempi
    # - scr
    # - slate ^openblas@0.3.6 threads=openmp ^cuda@10.2.89
    # - slepc
    # - stc
    # - strumpack ~slate ^openblas@0.3.6 threads=openmp
    # - sundials
    # - superlu
    # - superlu-dist
    - swig
    - sz
    - tasmanian
    - tau
    - trilinos
    - turbine
    - umap
    - umpire
    - unifyfs
    - upcxx
    - veloc
    - zfp
    # - sz
    # - tasmanian
    # - tau
    # - trilinos
    # - turbine
    # - umap
    # - umpire
    # - unifyfs
    # - upcxx
    # - veloc
    # - zfp
  - arch:
    - '%gcc@7.5.0 arch=linux-ubuntu18.04-x86_64'
@@ -128,11 +128,11 @@ spack:
    - match: [cuda, dyninst, hpx, precice, strumpack, sundials, trilinos, vtk-h, vtk-m]
      runner-attributes:
        image: { "name": "ghcr.io/scottwittenburg/ecpe4s-ubuntu18.04-runner-x86_64:2020-09-01", "entrypoint": [""] }
        tags: ["spack", "public", "xlarge"]
        tags: ["spack", "public", "xlarge", "x86_64"]
    - match: ['os=ubuntu18.04']
      runner-attributes:
        image: { "name": "ghcr.io/scottwittenburg/ecpe4s-ubuntu18.04-runner-x86_64:2020-09-01", "entrypoint": [""] }
        tags: ["spack", "public", "large"]
        tags: ["spack", "public", "large", "x86_64"]
    temporary-storage-url-prefix: "s3://spack-binaries-prs/pipeline-storage"
    service-job-attributes:
      before_script:
@@ -141,7 +141,7 @@ spack:
        - cd share/spack/gitlab/cloud_e4s_pipelines/stacks/e4s
        - spack env activate --without-view .
      image: { "name": "ghcr.io/scottwittenburg/ecpe4s-ubuntu18.04-runner-x86_64:2020-09-01", "entrypoint": [""] }
      tags: ["spack", "public", "medium"]
      tags: ["spack", "public", "medium", "x86_64"]

  cdash:
    build-group: New PR testing workflow
@@ -18,7 +18,11 @@
ORIGINAL_PATH="$PATH"

. "$(dirname $0)/setup.sh"
check_dependencies $coverage kcov git hg svn
if [ "$COVERAGE" = true ]; then
    check_dependencies $coverage kcov git hg svn
else
    echo "COVERAGE not set to 'true' [skipping coverage]"
fi

# Clean the environment by removing Spack from the path and getting rid of
# the spack shell function
@@ -42,4 +42,9 @@ spack -p --lines 20 spec mpileaks%gcc ^elfutils@0.170
#-----------------------------------------------------------
# Run unit tests with code coverage
#-----------------------------------------------------------
if [[ "$ONLY_PACKAGES" == "true" ]]; then
    echo "ONLY PACKAGE RECIPES CHANGED [skipping slow unit tests]"
    export PYTEST_ADDOPTS='-k "package_sanity" -m "not maybeslow"'
fi

$coverage_run $(which spack) unit-test -x --verbose
@@ -303,12 +303,12 @@ _spack_pathadd PATH "${_sp_prefix%/}/bin"
# Check whether a function of the given name is defined
#
_spack_fn_exists() {
    LANG= type $1 2>&1 | grep -q 'function'
    LANG= type $1 2>&1 | grep -q 'function'
}

need_module="no"
if ! _spack_fn_exists use && ! _spack_fn_exists module; then
    need_module="yes"
    need_module="yes"
fi;

# Define the spack shell function with some informative no-ops, so when users
@@ -95,16 +95,16 @@ INSTANCEID=$(curl http://169.254.169.254/latest/meta-data//instance-id)

# Get the ID of the Amazon EBS volume associated with the instance.
VOLUMEID=$(aws ec2 describe-instances \
  --instance-id $INSTANCEID \
  --query "Reservations[0].Instances[0].BlockDeviceMappings[0].Ebs.VolumeId" \
  --output text)
  --instance-id $INSTANCEID \
  --query "Reservations[0].Instances[0].BlockDeviceMappings[0].Ebs.VolumeId" \
  --output text)

# Resize the EBS volume.
aws ec2 modify-volume --volume-id $VOLUMEID --size $SIZE

# Wait for the resize to finish.
while [ \
  "$(aws ec2 describe-volumes-modifications \
  "$(aws ec2 describe-volumes-modifications \
    --volume-id $VOLUMEID \
    --filters Name=modification-state,Values="optimizing","completed" \
    --query "length(VolumesModifications)"\
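The same resize flow, sketched with boto3 instead of the AWS CLI (assumes configured credentials and region; the instance id is a placeholder and would come from the metadata service as above):

import time
import boto3

ec2 = boto3.client("ec2")
instance_id = "i-0123456789abcdef0"  # placeholder

# ID of the EBS volume behind the first block device mapping.
res = ec2.describe_instances(InstanceIds=[instance_id])
volume_id = (res["Reservations"][0]["Instances"][0]
             ["BlockDeviceMappings"][0]["Ebs"]["VolumeId"])

ec2.modify_volume(VolumeId=volume_id, Size=200)  # size in GiB

# Wait until the modification reaches 'optimizing' or 'completed'.
while True:
    mods = ec2.describe_volumes_modifications(VolumeIds=[volume_id])
    if mods["VolumesModifications"][0]["ModificationState"] in (
            "optimizing", "completed"):
        break
    time.sleep(5)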
@@ -55,7 +55,7 @@ _bash_completion_spack() {
    # For our purposes, flags should not affect tab completion. For instance,
    # `spack install []` and `spack -d install --jobs 8 []` should both give the same
    # possible completions. Therefore, we need to ignore any flags in COMP_WORDS.
    local COMP_WORDS_NO_FLAGS=()
    local -a COMP_WORDS_NO_FLAGS
    local index=0
    while [[ "$index" -lt "$COMP_CWORD" ]]
    do
@@ -331,7 +331,7 @@ _spacktivate() {
_spack() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help -H --all-help --color -C --config-scope -d --debug --timestamp --pdb -e --env -D --env-dir -E --no-env --use-env-repo -k --insecure -l --enable-locks -L --disable-locks -m --mock -p --profile --sorted-profile --lines -v --verbose --stacktrace -V --version --print-shell-vars"
        SPACK_COMPREPLY="-h --help -H --all-help --color -c --config -C --config-scope -d --debug --timestamp --pdb -e --env -D --env-dir -E --no-env --use-env-repo -k --insecure -l --enable-locks -L --disable-locks -m --mock -p --profile --sorted-profile --lines -v --verbose --stacktrace -V --version --print-shell-vars"
    else
        SPACK_COMPREPLY="activate add arch blame build-env buildcache cd checksum ci clean clone commands compiler compilers concretize config containerize create deactivate debug dependencies dependents deprecate dev-build develop docs edit env extensions external fetch find flake8 gc gpg graph help info install license list load location log-parse maintainers mark mirror module patch pkg providers pydoc python reindex remove rm repo resource restage solve spec stage style test test-env tutorial undevelop uninstall unit-test unload url verify versions view"
    fi
@@ -492,7 +492,7 @@ _spack_ci_rebuild_index() {
_spack_clean() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help -s --stage -d --downloads -f --failures -m --misc-cache -p --python-cache -a --all"
        SPACK_COMPREPLY="-h --help -s --stage -d --downloads -f --failures -m --misc-cache -p --python-cache -b --bootstrap -a --all"
    else
        _all_packages
    fi
@@ -579,7 +579,7 @@ _spack_compilers() {
}

_spack_concretize() {
    SPACK_COMPREPLY="-h --help -f --force"
    SPACK_COMPREPLY="-h --help -f --force --test"
}

_spack_config() {
@@ -793,7 +793,7 @@ _spack_env_deactivate() {
_spack_env_create() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help -d --dir --without-view --with-view"
        SPACK_COMPREPLY="-h --help -d --dir --keep-relative --without-view --with-view"
    else
        _environments
    fi
@@ -912,7 +912,7 @@ _spack_fetch() {
_spack_find() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help --format --json -d --deps -p --paths --groups --no-groups -l --long -L --very-long -t --tag -c --show-concretized -f --show-flags --show-full-compiler -x --explicit -X --implicit -u --unknown -m --missing -v --variants --loaded -M --only-missing --deprecated --only-deprecated -N --namespace --start-date --end-date"
        SPACK_COMPREPLY="-h --help --format --json -d --deps -p --paths --groups --no-groups -l --long -L --very-long -t --tag -c --show-concretized -f --show-flags --show-full-compiler -x --explicit -X --implicit -u --unknown -m --missing -v --variants --loaded -M --only-missing --deprecated --only-deprecated -N --namespace --start-date --end-date -b --bootstrap"
    else
        _installed_packages
    fi
@@ -1385,7 +1385,7 @@ _spack_pydoc() {
_spack_python() {
    if $list_options
    then
        SPACK_COMPREPLY="-h --help -V --version -c -i -m"
        SPACK_COMPREPLY="-h --help -V --version -c -i -m --path"
    else
        SPACK_COMPREPLY=""
    fi
@@ -1548,7 +1548,7 @@ _spack_test_run() {
}

_spack_test_list() {
    SPACK_COMPREPLY="-h --help"
    SPACK_COMPREPLY="-h --help -a --all"
}

_spack_test_find() {
@@ -0,0 +1,24 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class BothLinkAndBuildDepA(Package):
    """
    Structure where c occurs as a build dep down the line and as a direct
    link dep. Useful for testing situations where you copy the parent spec
    just with link deps, and you want to make sure b is not part of that.
    a <--build-- b <-link-- c
    a <--link--- c
    """

    homepage = "http://www.example.com"
    url = "http://www.example.com/1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    depends_on('both-link-and-build-dep-b', type='build')
    depends_on('both-link-and-build-dep-c', type='link')
@@ -0,0 +1,23 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class BothLinkAndBuildDepB(Package):
    """
    Structure where c occurs as a build dep down the line and as a direct
    link dep. Useful for testing situations where you copy the parent spec
    just with link deps, and you want to make sure b is not part of that.
    a <--build-- b <-link-- c
    a <--link--- c
    """

    homepage = "http://www.example.com"
    url = "http://www.example.com/1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')

    depends_on('both-link-and-build-dep-c', type='link')
@@ -0,0 +1,21 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class BothLinkAndBuildDepC(Package):
    """
    Structure where c occurs as a build dep down the line and as a direct
    link dep. Useful for testing situations where you copy the parent spec
    just with link deps, and you want to make sure b is not part of that.
    a <--build-- b <-link-- c
    a <--link--- c
    """

    homepage = "http://www.example.com"
    url = "http://www.example.com/1.0.tar.gz"

    version('1.0', '0123456789abcdef0123456789abcdef')
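The scenario these three mock packages encode, as a hedged sketch (it runs only against Spack's mock package repository, and assumes Spec.copy accepting a deps tuple, as Spack did at the time):

import spack.spec

# 'a' needs 'b' only at build time, and links against 'c' directly.
spec = spack.spec.Spec('both-link-and-build-dep-a').concretized()

# Copying only link/run deps must keep 'c' and drop 'b'.
pruned = spec.copy(deps=('link', 'run'))
assert 'both-link-and-build-dep-c' in pruned
assert 'both-link-and-build-dep-b' not in pruned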
@@ -1,6 +1,143 @@
diff -uprN src/43_wvl_wrappers/m_abi2big.F90 src/43_wvl_wrappers/m_abi2big.F90
--- src/43_wvl_wrappers/m_abi2big.F90 2019-01-17 20:49:35.000000000 +0900
+++ src/43_wvl_wrappers/m_abi2big.F90 2019-05-21 15:05:23.000000000 +0900
diff -uprN spack-src.org/configure spack-src/configure
--- spack-src.org/configure 2021-03-11 13:52:52.000000000 +0900
+++ spack-src/configure 2021-03-11 14:14:36.000000000 +0900
@@ -14183,7 +14183,10 @@ $as_echo "#define FC_OPEN64 1" >>confdef


fc_info_string=`${FC} -V 2>/dev/null | head -n 1`
- abi_result=`echo "${fc_info_string}" | grep '^Fujitsu Fortran'`
+ if test "${fc_info_string}" = ""; then
+ fc_info_string=`${FC} -V 2>&1 | head -n 1`
+ fi
+ abi_result=`echo "${fc_info_string}" | grep 'Fujitsu Fortran'`
if test "${abi_result}" = ""; then
abi_result="no"
fc_info_string=""
@@ -14194,9 +14197,17 @@ $as_echo "#define FC_OPEN64 1" >>confdef
$as_echo "#define FC_FUJITSU 1" >>confdefs.h

abi_fc_vendor="fujitsu"
- abi_fc_version=`echo "${abi_result}" | sed -e 's/.*Driver //;s/ .*//'`
+ abi_fc_version=`echo "${abi_result}" | sed -e 's/.*Driver //'`
if test "${abi_fc_version}" = "${abi_result}"; then
abi_fc_version="unknown"
+ else
+ abi_fc_version=`echo "${abi_result}" | sed -e 's/.*Driver //;s/ .*//'`
+ fi
+ if test "${abi_fc_version}" = "unknown"; then
+ abi_fc_version=`echo "${abi_result}" | sed -e 's/.*Compiler //;s/ .*//'`
+ if test "${abi_fc_version}" = "${abi_result}"; then
+ abi_fc_version="unknown"
+ fi
fi
abi_result="yes"
fi
@@ -14655,6 +14666,7 @@ $as_echo "$as_me:${as_lineno-$LINENO}: $
# gfortran 4.3 outputs lines setting COLLECT_GCC_OPTIONS, COMPILER_PATH,
# LIBRARY_PATH; skip all such settings.
ac_fc_v_output=`eval $ac_link 5>&1 2>&1 |
+ sed -r "s/(\-L)(\/[^ ]+)+(\/bin\/\.\.\/lib64\/nofjobj)//g" |
sed '/^Driving:/d; /^Configured with:/d;
'"/^[_$as_cr_Letters][_$as_cr_alnum]*=/d"`
$as_echo "$ac_fc_v_output" >&5
@@ -16392,15 +16404,29 @@ $as_echo_n "checking which fc hints to a
;;
fujitsu)
abi_fc_vendor_hnt="fujitsu"
- abi_fc_version_hnt="default"
abi_sys_spec_hnt="default"
- FCFLAGS_FIXEDFORM='-Fixed -X7'
- FCFLAGS_FREEFORM='-Free -X9'
- FCFLAGS_MODDIR='-M $(abinit_moddir)'
- FCFLAGS_HINTS='-Am -Ee -Ep'
- abi_fc_wrap='yes'
- FCFLAGS_OPENMP='--openmp'
- FCFLAGS_PIC='-K PIC'
+ case "${abi_fc_version}" in
+ 4.[0-9]*)
+ abi_fc_version_hnt="4.x"
+ FCFLAGS_FIXEDFORM='-Fixed -X7'
+ FCFLAGS_FREEFORM='-Free -X9'
+ FCFLAGS_MODDIR='-M $(abinit_moddir)'
+ FCFLAGS_HINTS=''
+ abi_fc_wrap='no'
+ FCFLAGS_OPENMP='-Kopenmp'
+ FCFLAGS_PIC='-K PIC'
+ ;;
+ *)
+ abi_fc_version_hnt="default"
+ FCFLAGS_FIXEDFORM='-Fixed -X7'
+ FCFLAGS_FREEFORM='-Free -X9'
+ FCFLAGS_MODDIR='-M $(abinit_moddir)'
+ FCFLAGS_HINTS='-Am -Ee -Ep'
+ abi_fc_wrap='yes'
+ FCFLAGS_OPENMP='--openmp'
+ FCFLAGS_PIC='-K PIC'
+ ;;
+ esac # [case: abi_fc_version, indent: 2, item: True]
;;
pathscale)
abi_fc_vendor_hnt="pathscale"
@@ -18085,22 +18111,43 @@ $as_echo_n "checking which fc optimizati
;;
fujitsu)
abi_fc_vendor_opt="fujitsu"
- abi_fc_version_opt="default"
abi_cpu_spec_opt="default"
- case "${enable_optim}" in
- aggressive)
- enable_optim_opt="aggressive"
- FCFLAGS_OPTIM="-Of -X9 -Ps -Wv,-md"
- ;;
- safe)
- enable_optim_opt="safe"
- FCFLAGS_OPTIM="-Of -X9 -Ps -Wv,-md"
+ case "${abi_fc_version}" in
+ 4.[0-9]*)
+ abi_fc_version_opt="4.x"
+ case "${enable_optim}" in
+ aggressive)
+ enable_optim_opt="aggressive"
+ FCFLAGS_OPTIM="-Kfast -Koptmsg=2 -Nlst=t"
+ ;;
+ safe)
+ enable_optim_opt="safe"
+ FCFLAGS_OPTIM="-O2 -Koptmsg=2 -Nlst=t"
+ ;;
+ standard)
+ enable_optim_opt="standard"
+ FCFLAGS_OPTIM="-O2 -Koptmsg=2 -Nlst=t"
+ ;;
+ esac # [case: enable_optim, indent: 4, item: True]
;;
- standard)
- enable_optim_opt="standard"
- FCFLAGS_OPTIM="-Of -X9 -Ps -Wv,-md"
+ *)
+ abi_fc_version_opt="default"
+ case "${enable_optim}" in
+ aggressive)
+ enable_optim_opt="aggressive"
+ FCFLAGS_OPTIM="-Of -X9 -Ps -Wv,-md"
+ ;;
+ safe)
+ enable_optim_opt="safe"
+ FCFLAGS_OPTIM="-Of -X9 -Ps -Wv,-md"
+ ;;
+ standard)
+ enable_optim_opt="standard"
+ FCFLAGS_OPTIM="-Of -X9 -Ps -Wv,-md"
+ ;;
+ esac # [case: enable_optim, indent: 4, item: True]
;;
- esac # [case: enable_optim, indent: 2, item: True]
+ esac # [case: abi_fc_version, indent: 2, item: True]
;;
pathscale)
abi_fc_vendor_opt="pathscale"
diff -uprN spack-src.org/src/43_wvl_wrappers/m_abi2big.F90 spack-src/src/43_wvl_wrappers/m_abi2big.F90
--- spack-src.org/src/43_wvl_wrappers/m_abi2big.F90 2021-03-11 13:52:54.000000000 +0900
+++ spack-src/src/43_wvl_wrappers/m_abi2big.F90 2021-03-11 14:06:53.000000000 +0900
@@ -1333,10 +1333,10 @@ subroutine wvl_setngfft(me_wvl, mgfft, n

!Arguments ------------------------------------
@@ -14,9 +151,9 @@ diff -uprN src/43_wvl_wrappers/m_abi2big.F90 src/43_wvl_wrappers/m_abi2big.F90

!Local variables-------------------------------
!scalars
diff -uprN src/43_wvl_wrappers/m_wvl_denspot.F90 src/43_wvl_wrappers/m_wvl_denspot.F90
--- src/43_wvl_wrappers/m_wvl_denspot.F90 2019-01-17 20:49:32.000000000 +0900
+++ src/43_wvl_wrappers/m_wvl_denspot.F90 2019-05-21 15:06:21.000000000 +0900
diff -uprN spack-src.org/src/43_wvl_wrappers/m_wvl_denspot.F90 spack-src/src/43_wvl_wrappers/m_wvl_denspot.F90
--- spack-src.org/src/43_wvl_wrappers/m_wvl_denspot.F90 2021-03-11 13:52:54.000000000 +0900
+++ spack-src/src/43_wvl_wrappers/m_wvl_denspot.F90 2021-03-11 14:06:53.000000000 +0900
@@ -96,7 +96,7 @@ subroutine wvl_denspot_set(den,gth_param
real(dp), intent(in) :: rprimd(3, 3)
real(dp), intent(in) :: wvl_frmult,wvl_crmult
@@ -26,9 +163,9 @@ diff -uprN src/43_wvl_wrappers/m_wvl_denspot.F90 src/43_wvl_wrappers/m_wvl_densp
type(wvl_internal_type),intent(in) :: wvl
type(pseudopotential_gth_type),intent(in)::gth_params

diff -uprN src/43_wvl_wrappers/m_wvl_wfs.F90 src/43_wvl_wrappers/m_wvl_wfs.F90
--- src/43_wvl_wrappers/m_wvl_wfs.F90 2019-01-17 20:49:33.000000000 +0900
+++ src/43_wvl_wrappers/m_wvl_wfs.F90 2019-05-21 15:07:08.000000000 +0900
diff -uprN spack-src.org/src/43_wvl_wrappers/m_wvl_wfs.F90 spack-src/src/43_wvl_wrappers/m_wvl_wfs.F90
--- spack-src.org/src/43_wvl_wrappers/m_wvl_wfs.F90 2021-03-11 13:52:54.000000000 +0900
+++ spack-src/src/43_wvl_wrappers/m_wvl_wfs.F90 2021-03-11 14:06:53.000000000 +0900
@@ -103,7 +103,7 @@ subroutine wvl_wfs_set(alphadiis, spinma
integer, intent(in) :: natom, nkpt, nsppol, nspinor, nband, nwfshist,me,nproc
real(dp), intent(in) :: spinmagntarget, wvl_crmult, wvl_frmult, alphadiis
@@ -38,9 +175,9 @@ diff -uprN src/43_wvl_wrappers/m_wvl_wfs.F90 src/43_wvl_wrappers/m_wvl_wfs.F90
type(wvl_internal_type), intent(in) :: wvl
!arrays
real(dp), intent(in) :: kpt(3,nkpt)
diff -uprN src/52_fft_mpi_noabirule/m_fftw3.F90 src/52_fft_mpi_noabirule/m_fftw3.F90
--- src/52_fft_mpi_noabirule/m_fftw3.F90 2019-01-17 20:49:35.000000000 +0900
+++ src/52_fft_mpi_noabirule/m_fftw3.F90 2019-05-21 15:14:52.000000000 +0900
diff -uprN spack-src.org/src/52_fft_mpi_noabirule/m_fftw3.F90 spack-src/src/52_fft_mpi_noabirule/m_fftw3.F90
--- spack-src.org/src/52_fft_mpi_noabirule/m_fftw3.F90 2021-03-11 13:52:52.000000000 +0900
+++ spack-src/src/52_fft_mpi_noabirule/m_fftw3.F90 2021-03-11 14:06:53.000000000 +0900
@@ -4588,7 +4588,7 @@ subroutine fftw3_mpiback_wf(cplexwf,ndat
integer,intent(in) :: cplexwf,ndat,n1,n2,n3,nd1,nd2,nd3proc
integer,intent(in) :: max1,max2,max3,m1,m2,m3,md1,md2proc,md3,comm_fft
@@ -95,9 +232,9 @@ diff -uprN src/52_fft_mpi_noabirule/m_fftw3.F90 src/52_fft_mpi_noabirule/m_fftw3

!Local variables-------------------------------
!scalars
diff -uprN src/62_poisson/m_psolver.F90 src/62_poisson/m_psolver.F90
--- src/62_poisson/m_psolver.F90 2019-01-17 20:49:26.000000000 +0900
+++ src/62_poisson/m_psolver.F90 2019-05-21 15:09:11.000000000 +0900
diff -uprN spack-src.org/src/62_poisson/m_psolver.F90 spack-src/src/62_poisson/m_psolver.F90
--- spack-src.org/src/62_poisson/m_psolver.F90 2021-03-11 13:52:55.000000000 +0900
+++ spack-src/src/62_poisson/m_psolver.F90 2021-03-11 14:06:53.000000000 +0900
@@ -118,7 +118,7 @@ subroutine psolver_rhohxc(enhartr, enxc,
integer,intent(in) :: usexcnhat,usepaw,xclevel
real(dp),intent(in) :: rprimd(3,3)
@@ -133,9 +270,9 @@ diff -uprN src/62_poisson/m_psolver.F90 src/62_poisson/m_psolver.F90

!Local variables-------------------------------
#if defined HAVE_BIGDFT
diff -uprN src/62_wvl_wfs/m_wvl_psi.F90 src/62_wvl_wfs/m_wvl_psi.F90
--- src/62_wvl_wfs/m_wvl_psi.F90 2019-01-17 20:49:14.000000000 +0900
+++ src/62_wvl_wfs/m_wvl_psi.F90 2019-05-21 15:10:51.000000000 +0900
diff -uprN spack-src.org/src/62_wvl_wfs/m_wvl_psi.F90 spack-src/src/62_wvl_wfs/m_wvl_psi.F90
--- spack-src.org/src/62_wvl_wfs/m_wvl_psi.F90 2021-03-11 13:52:54.000000000 +0900
+++ spack-src/src/62_wvl_wfs/m_wvl_psi.F90 2021-03-11 14:06:53.000000000 +0900
@@ -248,16 +248,16 @@ subroutine wvl_psitohpsi(alphamix,eexctX
!scalars
integer, intent(in) :: me, nproc, itrp, iter, iscf, natom, nfft, nspden
@@ -166,9 +303,9 @@ diff -uprN src/62_wvl_wfs/m_wvl_psi.F90 src/62_wvl_wfs/m_wvl_psi.F90
type(MPI_type),intent(in) :: mpi_enreg
type(dataset_type),intent(in) :: dtset
type(energies_type),intent(inout) :: energies
diff -uprN src/67_common/m_mklocl_realspace.F90 src/67_common/m_mklocl_realspace.F90
--- src/67_common/m_mklocl_realspace.F90 2019-01-17 20:49:35.000000000 +0900
+++ src/67_common/m_mklocl_realspace.F90 2019-05-21 15:12:07.000000000 +0900
diff -uprN spack-src.org/src/67_common/m_mklocl_realspace.F90 spack-src/src/67_common/m_mklocl_realspace.F90
--- spack-src.org/src/67_common/m_mklocl_realspace.F90 2021-03-11 13:52:53.000000000 +0900
+++ spack-src/src/67_common/m_mklocl_realspace.F90 2021-03-11 14:06:53.000000000 +0900
@@ -1703,7 +1703,7 @@ subroutine local_forces_wvl(iproc,natom,
!arrays
real(dp),intent(in) :: rxyz(3,natom)
@@ -178,9 +315,9 @@ diff -uprN src/67_common/m_mklocl_realspace.F90 src/67_common/m_mklocl_realspace

!Local variables -------------------------
#if defined HAVE_BIGDFT
diff -uprN src/67_common/mkcore_wvl.F90 src/67_common/mkcore_wvl.F90
--- src/67_common/mkcore_wvl.F90 2019-01-17 20:49:30.000000000 +0900
+++ src/67_common/mkcore_wvl.F90 2019-05-21 15:13:04.000000000 +0900
diff -uprN spack-src.org/src/67_common/mkcore_wvl.F90 spack-src/src/67_common/mkcore_wvl.F90
--- spack-src.org/src/67_common/mkcore_wvl.F90 2021-03-11 13:52:53.000000000 +0900
+++ spack-src/src/67_common/mkcore_wvl.F90 2021-03-11 14:06:53.000000000 +0900
@@ -138,7 +138,7 @@ subroutine mkcore_wvl(atindx1,corstr,grx
integer,intent(in) :: atindx1(natom),nattyp(ntypat)
real(dp),intent(in) :: rprimd(3,3),xccc1d(n1xccc,6,ntypat),xcccrc(ntypat),xred(3,natom)
@@ -200,10 +337,10 @@ diff -uprN src/67_common/mkcore_wvl.F90 src/67_common/mkcore_wvl.F90
+ real(dp),intent(inout) :: corstr(6),dyfrx2(3,3,natom),grxc(3,natom)
type(pawtab_type),intent(in) :: pawtab(ntypat)
type(pawrad_type),intent(in) :: pawrad(ntypat)

diff -uprN src/98_main/abinit.F90 src/98_main/abinit.F90
--- src/98_main/abinit.F90 2019-01-17 20:49:35.000000000 +0900
+++ src/98_main/abinit.F90 2019-08-07 08:29:17.000000000 +0900

diff -uprN spack-src.org/src/98_main/abinit.F90 spack-src/src/98_main/abinit.F90
--- spack-src.org/src/98_main/abinit.F90 2021-03-11 13:52:55.000000000 +0900
+++ spack-src/src/98_main/abinit.F90 2021-03-11 14:06:53.000000000 +0900
@@ -261,7 +261,7 @@ program abinit
open(unit=ab_out,file=filnam(2),form='formatted',status='new', action="write", iomsg=message, iostat=ios)
#endif
@@ -95,7 +95,11 @@ class Abinit(AutotoolsPackage):
    # conflicts('+elpa', when='+scalapack')

    patch('rm_march_settings.patch')
    patch('fix_for_fujitsu.patch', level=0, when='%fj')

    # Fix detection of Fujitsu compiler
    # Fix configure not to collect the option that causes an error
    # Fix intent(out) and unnecessary rewind to avoid compile error
    patch('fix_for_fujitsu.patch', when='%fj')

    def configure_args(self):
@@ -37,6 +37,7 @@ class Acts(CMakePackage, CudaPackage):

    # Supported Acts versions
    version('master', branch='master')
    version('6.00.0', commit='a5cf04acd4b1a2c625e0826189109472a3392558')
    version('5.00.0', commit='df77b91a7d37b8db6ed028a4d737014b5ad86bb7')
    version('4.01.0', commit='c383bf434ef69939b47e840e0eac0ba632e6af9f')
    version('4.00.0', commit='ed64b4b88d366b63adc4a8d1afe5bc97aa5751eb')
@@ -95,12 +96,14 @@ class Acts(CMakePackage, CudaPackage):
    variant('examples', default=False, description='Build the examples')
    variant('integration_tests', default=False, description='Build the integration tests')
    variant('unit_tests', default=False, description='Build the unit tests')
    variant('log_failure_threshold', default='MAX', description='Log level above which examples should auto-crash')

    # Variants that enable / disable Acts plugins
    variant('autodiff', default=False, description='Build the auto-differentiation plugin')
    variant('dd4hep', default=False, description='Build the DD4hep plugin')
    variant('digitization', default=False, description='Build the geometric digitization plugin')
    variant('fatras', default=False, description='Build the FAst TRAcking Simulation package')
    variant('fatras_geant4', default=False, description='Build Geant4 Fatras package')
    variant('identification', default=False, description='Build the Identification plugin')
    variant('json', default=False, description='Build the Json plugin')
    variant('legacy', default=False, description='Build the Legacy package')
@@ -121,11 +124,12 @@ class Acts(CMakePackage, CudaPackage):
    depends_on('dd4hep @1.11:', when='+dd4hep')
    depends_on('dd4hep @1.11: +geant4', when='+dd4hep +geant4')
    depends_on('eigen @3.3.7:', type='build')
    depends_on('geant4', when='+fatras_geant4')
    depends_on('geant4', when='+geant4')
    depends_on('hepmc3 @3.2.1:', when='+hepmc3')
    depends_on('heppdt', when='+hepmc3 @:4.0')
    depends_on('intel-tbb @2020.1:', when='+examples')
    depends_on('nlohmann-json @3.2.0:', when='@0.14: +json')
    depends_on('nlohmann-json @3.9.1:', when='@0.14: +json')
    depends_on('pythia8', when='+pythia8')
    depends_on('root @6.10: cxxstd=14', when='+tgeo @:0.8.0')
    depends_on('root @6.20: cxxstd=17', when='+tgeo @0.8.1:')
@@ -191,6 +195,7 @@ def plugin_cmake_variant(plugin_name, spack_variant):
            example_cmake_variant("HEPMC3", "hepmc3"),
            example_cmake_variant("PYTHIA8", "pythia8"),
            cmake_variant("FATRAS", "fatras"),
            cmake_variant("FATRAS_GEANT4", "fatras_geant4"),
            plugin_cmake_variant("IDENTIFICATION", "identification"),
            cmake_variant(integration_tests_label, "integration_tests"),
            plugin_cmake_variant("JSON", "json"),
@@ -199,6 +204,9 @@ def plugin_cmake_variant(plugin_name, spack_variant):
            plugin_cmake_variant("TGEO", "tgeo")
        ]

        log_failure_threshold = spec.variants['log_failure_threshold'].value
        args.append("-DACTS_LOG_FAILURE_THRESHOLD={0}".format(log_failure_threshold))

        cuda_arch = spec.variants['cuda_arch'].value
        if cuda_arch != 'none':
            args.append('-DCUDA_FLAGS=-arch=sm_{0}'.format(cuda_arch[0]))
@@ -12,10 +12,17 @@ class Adiak(CMakePackage):

    homepage = "https://github.com/LLNL/Adiak"
    url = "https://github.com/LLNL/Adiak/releases/download/v0.1/adiak-v0.1.1.tar.gz"
    git = "https://github.com/LLNL/Adiak"

    maintainers = ["daboehme", "mplegendre"]

    variant('mpi', default=True, description='Build with MPI support')
    variant('shared', default=True, description='Build dynamic libraries')

    version('0.3.0-alpha', commit='054d2693a977ed0e1f16c665b4966bb90924779e',
            submodules=True)
    version('0.2.1', commit='950e3bfb91519ecb7b7ee7fa3063bfab23c0e2c9',
            submodules=True, preferred=True)
    version('0.1.1', sha256='438e4652e15e206cd0019423d829fd4f2329323ff0c8861d9586bae051d9624b')

    depends_on('mpi', when='+mpi')
@@ -14,6 +14,9 @@ class AdwaitaIconTheme(AutotoolsPackage):

    version('3.38.0', sha256='6683a1aaf2430ccd9ea638dd4bfe1002bc92b412050c3dba20e480f979faaf97')

    depends_on("gdk-pixbuf", type="build")
    depends_on("librsvg", type="build")

    def setup_dependent_build_environment(self, env, dependent_spec):
        env.prepend_path('XDG_DATA_DIRS', self.prefix.share)
@@ -33,6 +33,10 @@ class Alps(CMakePackage):
    depends_on('py-scipy', type=('build', 'run'))
    depends_on('py-matplotlib', type=('build', 'run'))

    # use depends_on to help with dependency resolution
    depends_on('py-numpy@:1.19', when='^python@:3.6.99')
    depends_on('py-scipy@:1.5', when='^python@:3.6.99')

    # fix for gcc@7:
    patch('alps_newgcc.patch', when='%gcc@7:')

@@ -54,3 +58,30 @@ def cmake_args(self):
        args.append('Boost_ROOT_DIR=' + self.spec['boost'].prefix)
        args.append("-DCMAKE_CXX_FLAGS={0}".format(self.compiler.cxx98_flag))
        return args

    def _single_test(self, target, exename, dataname, opts=[]):
        troot = self.prefix.tutorials
        copy_tree(join_path(troot, target), target)

        if target == 'dmrg-01-dmrg':
            test_dir = self.test_suite.current_test_data_dir
            copy(join_path(test_dir, dataname), target)

        self.run_test('parameter2xml',
                      options=[dataname, 'SEED=123456'],
                      work_dir=target
                      )
        options = []
        options.extend(opts)
        options.extend(['--write-xml', '{0}.in.xml'.format(dataname)])
        self.run_test(exename,
                      options=options,
                      expected=['Finished with everything.'],
                      work_dir=target
                      )

    def test(self):
        self._single_test('mc-02-susceptibilities', 'spinmc', 'parm2a',
                          ['--Tmin', '10'])
        self._single_test('ed-01-sparsediag', 'sparsediag', 'parm1a')
        self._single_test('dmrg-01-dmrg', 'dmrg', 'spin_one_half')
var/spack/repos/builtin/packages/alps/test/spin_one_half (new file, 9 lines)
@@ -0,0 +1,9 @@
LATTICE="open chain lattice"
MODEL="spin"
CONSERVED_QUANTUMNUMBERS="N,Sz"
Sz_total=0
J=1
SWEEPS=4
NUMBER_EIGENVALUES=1
L=32
{MAXSTATES=100}
@@ -22,6 +22,7 @@ class Aluminum(CMakePackage, CudaPackage, ROCmPackage):
    maintainers = ['bvanessen']

    version('master', branch='master')
    version('1.0.0', sha256='028d12e271817214db5c07c77b0528f88862139c3e442e1b12f58717290f414a')
    version('0.7.0', sha256='bbb73d2847c56efbe6f99e46b41d837763938483f2e2d1982ccf8350d1148caa')
    version('0.6.0', sha256='6ca329951f4c7ea52670e46e5020e7e7879d9b56fed5ff8c5df6e624b313e925')
    version('0.5.0', sha256='dc365a5849eaba925355a8efb27005c5f22bcd1dca94aaed8d0d29c265c064c1')
@@ -12,13 +12,14 @@ class Amber(Package, CudaPackage):
    """Amber is a suite of biomolecular simulation programs together
    with Amber tools.

    Note: The version number is composed of the Amber version (major)
    and the tools version (minor). A manual download is required for
    both Amber and Amber tools.
    Spack will search your current directory for the download files.
    Alternatively, add the files to a mirror so that Spack can find them.
    For instructions on how to set up a mirror, see
    http://spack.readthedocs.io/en/latest/mirrors.html"""
    A manual download is required for Ambers. Spack will search your current
    directory for the download files. Alternatively, add the files to a mirror
    so that Spack can find them. For instructions on how to set up a mirror, see
    http://spack.readthedocs.io/en/latest/mirrors.html

    Note: Only certain versions of ambertools are compatible with amber.
    Only the latter version of ambertools for each amber version is supported.
    """

    homepage = "http://ambermd.org/"
    url = "file://{0}/Amber18.tar.bz2".format(os.getcwd())
@@ -26,35 +27,40 @@ class Amber(Package, CudaPackage):

    maintainers = ['hseara']

    def url_for_version(self, version):
        url = "file://{0}/Amber{1}.tar.bz2".format(
            os.getcwd(), version.up_to(1))
        return url
    version(
        '20', sha256='a4c53639441c8cc85adee397933d07856cc4a723c82c6bea585cd76c197ead75')
    version(
        '18', sha256='2060897c0b11576082d523fb63a51ba701bc7519ff7be3d299d5ec56e8e6e277')
    version(
        '16', sha256='3b7ef281fd3c46282a51b6a6deed9ed174a1f6d468002649d84bfc8a2577ae5d',
        deprecated=True)

    version(
        '18.20', sha256='2060897c0b11576082d523fb63a51ba701bc7519ff7be3d299d5ec56e8e6e277')
    version(
        '18.19', sha256='2060897c0b11576082d523fb63a51ba701bc7519ff7be3d299d5ec56e8e6e277')
    version(
        '16.16', sha256='3b7ef281fd3c46282a51b6a6deed9ed174a1f6d468002649d84bfc8a2577ae5d')

    resources = [
    resources = {
        # [version amber, version ambertools , sha256sum]
        ('18', '20', 'b1e1f8f277c54e88abc9f590e788bbb2f7a49bcff5e8d8a6eacfaf332a4890f9'),
        ('18', '19', '0c86937904854b64e4831e047851f504ec45b42e593db4ded92c1bee5973e699'),
        ('16', '16', '7b876afe566e9dd7eb6a5aa952a955649044360f15c1f5d4d91ba7f41f3105fa'),
    ]
    for ver, ambertools_ver, checksum in resources:
        resource(when='@{0}.{1}'.format(ver, ambertools_ver),
        '20': ('20', 'b1e1f8f277c54e88abc9f590e788bbb2f7a49bcff5e8d8a6eacfaf332a4890f9'),
        '18': ('19', '0c86937904854b64e4831e047851f504ec45b42e593db4ded92c1bee5973e699'),
        '16': ('16', '7b876afe566e9dd7eb6a5aa952a955649044360f15c1f5d4d91ba7f41f3105fa'),
    }
    for ver, (ambertools_ver, ambertools_checksum) in resources.items():
        resource(when='@{0}'.format(ver),
                 name='AmberTools',
                 url='file://{0}/AmberTools{1}.tar.bz2'.format(os.getcwd(),
                                                               ambertools_ver),
                 sha256=checksum,
                 url='http://ambermd.org/downloads/AmberTools{0}.tar.bz2'.format(
                     ambertools_ver),
                 sha256=ambertools_checksum,
                 destination='',
                 placement='ambertools_tmpdir',
                 )

    patches = [
        ('20', '1', '10780cb91a022b49ffdd7b1e2bf4a572fa4edb7745f0fc4e5d93b158d6168e42'),
        ('20', '2', '9c973e3f8f33a271d60787e8862901e8f69e94e7d80cda1695f7fad7bc396093'),
        ('20', '3', 'acb359dc9b1bcff7e0f1965baa9f3f3dc18eeae99c49f1103c1e2986c0bbeed8'),
        ('20', '4', 'fd93c74f5ec80689023648cdd12b2c5fb21a3898c81ebc3fa256ef244932562a'),
        ('20', '5', '8e46d5be28c002f560050a71f4851b01ef45a3eb66ac90d7e23553fae1370e68'),
        ('20', '6', '8cf9707b3d08ad9242326f02d1861831ad782c9bfb0c46e7b1f0d4640571d5c1'),
        ('20', '7', '143b6a09f774aeae8b002afffb00839212020139a11873a3a1a34d4a63fa995d'),
        ('20', '8', 'a6fc6d5c8ba0aad3a8afe44d1539cc299ef78ab53721e28244198fd5425d14ad'),
        ('20', '9', '5ce6b534bab869b1e9bfefa353d7f578750e54fa72c8c9d74ddf129d993e78cf'),
        ('18', '1', '3cefac9a24ece99176d5d2d58fea2722de3e235be5138a128428b9260fe922ad'),
        ('18', '2', '3a0707a9a59dcbffa765dcf87b68001450095c51b96ec39d21260ba548a2f66a'),
        ('18', '3', '24c2e06f71ae553a408caa3f722254db2cbf1ca4db274542302184e3d6ca7015'),
@@ -94,17 +100,17 @@ def url_for_version(self, version):
              sha256=checksum, level=0, when='@{0}'.format(ver))

    # Patch to add ppc64le in config.guess
    patch('ppc64le.patch', when='@18.20')
    patch('ppc64le.patch', when='@18: target=ppc64le')

    # Patch to add aarch64 in config.guess
    patch('aarch64.patch', when='@18.20')
    patch('aarch64.patch', when='@18: target=aarch64')

    # Workaround to modify the AmberTools script when using the NVIDIA
    # compilers
    patch('nvhpc.patch', when='@18.20 %nvhpc')
    patch('nvhpc.patch', when='@18: %nvhpc')

    # Workaround to use NVIDIA compilers to build the bundled Boost
    patch('nvhpc-boost.patch', when='@18.20 %nvhpc')
    patch('nvhpc-boost.patch', when='@18: %nvhpc')

    variant('mpi', description='Build MPI executables',
            default=True)
@@ -17,9 +17,22 @@ class Amdblis(BlisBase):

    _name = 'amdblis'
    homepage = "https://developer.amd.com/amd-aocl/blas-library/"
    url = "https://github.com/amd/blis/archive/2.2.tar.gz"
    url = "https://github.com/amd/blis/archive/3.0.tar.gz"
    git = "https://github.com/amd/blis.git"

    maintainers = ['amd-toolchain-support']

    version('3.0', sha256='ac848c040cd6c3550fe49148dbdf109216cad72d3235763ee7ee8134e1528517')
    version('2.2', sha256='e1feb60ac919cf6d233c43c424f6a8a11eab2c62c2c6e3f2652c15ee9063c0c9')

    def configure_args(self):
        spec = self.spec
        args = super(Amdblis, self).configure_args()

        if spec.satisfies('@3.0 %aocc'):
            """ To enabled Fortran to C calling convention for
            complex types when compiling with aocc flang"""
            args.append('CFLAGS={0}'.format("-DAOCL_F2C"))
            args.append('CXXFLAGS={0}'.format("-DAOCL_F2C"))

        return args
@@ -25,11 +25,12 @@ class Amdfftw(FftwBase):

    _name = 'amdfftw'
    homepage = "https://developer.amd.com/amd-aocl/fftw/"
    url = "https://github.com/amd/amd-fftw/archive/2.2.tar.gz"
    url = "https://github.com/amd/amd-fftw/archive/3.0.tar.gz"
    git = "https://github.com/amd/amd-fftw.git"

    maintainers = ['amd-toolchain-support']

    version('3.0', sha256='a69deaf45478a59a69f77c4f7e9872967f1cfe996592dd12beb6318f18ea0bcd')
    version('2.2', sha256='de9d777236fb290c335860b458131678f75aa0799c641490c644c843f0e246f8')

    variant('shared', default=True, description='Builds a shared version of the library')
@@ -31,11 +31,12 @@ class Amdlibflame(LibflameBase):

    _name = 'amdlibflame'
    homepage = "http://developer.amd.com/amd-cpu-libraries/blas-library/#libflame"
    url = "https://github.com/amd/libflame/archive/2.2.tar.gz"
    url = "https://github.com/amd/libflame/archive/3.0.tar.gz"
    git = "https://github.com/amd/libflame.git"

    maintainers = ['amd-toolchain-support']

    version('3.0', sha256='d94e08b688539748571e6d4c1ec1ce42732eac18bd75de989234983c33f01ced')
    version('2.2', sha256='12b9c1f92d2c2fa637305aaa15cf706652406f210eaa5cbc17aaea9fcfa576dc')

    patch('aocc-2.2.0.patch', when="@:2.999", level=1)
@@ -54,6 +55,12 @@ def configure_args(self):
        """configure_args function"""
        args = super(Amdlibflame, self).configure_args()
        args.append("--enable-external-lapack-interfaces")

        """To enabled Fortran to C calling convention for
        complex types when compiling with aocc flang"""
        if "@3.0: %aocc" in self.spec:
            args.append("--enable-f2c-dotc")

        return args

    def install(self, spec, prefix):
@@ -24,10 +24,8 @@ class Amdlibm(SConsPackage):
    # of master branch.
    # To install amdlibm from latest master branch:
    # spack install amdlibm ^amdlibm@master
    version("master", branch="master")
    version("20201104",
            commit="4033e022da428125747e118ccd6fdd9cee21c470",
            preferred=True)
    version("3.0", branch="aocl-3.0")
    version("2.2", commit="4033e022da428125747e118ccd6fdd9cee21c470")

    variant("verbose", default=False,
            description="Building with verbosity")
@@ -37,8 +35,8 @@ class Amdlibm(SConsPackage):
    depends_on("scons@3.1.2:", type=("build"))
    depends_on("mpfr", type=("link"))

    patch('0001-libm-ose-Scripts-cleanup-pyc-files.patch')
    patch('0002-libm-ose-prevent-log-v3.c-from-building.patch')
    patch("0001-libm-ose-Scripts-cleanup-pyc-files.patch", when="@2.2")
    patch("0002-libm-ose-prevent-log-v3.c-from-building.patch", when="@2.2")

    conflicts("%gcc@:9.1.999", msg="Minimum required GCC version is 9.2.0")
@@ -20,11 +20,12 @@ class Amdscalapack(ScalapackBase):

    _name = 'amdscalapack'
    homepage = "https://developer.amd.com/amd-aocl/scalapack/"
    url = "https://github.com/amd/scalapack/archive/2.2.tar.gz"
    url = "https://github.com/amd/scalapack/archive/3.0.tar.gz"
    git = "https://github.com/amd/scalapack.git"

    maintainers = ['amd-toolchain-support']

    version('3.0', sha256='6e6f3578f44a8e64518d276e7580530599ecfa8729f568303ed2590688e7096f')
    version('2.2', sha256='2d64926864fc6d12157b86e3f88eb1a5205e7fc157bf67e7577d0f18b9a7484c')

    variant(
@@ -38,11 +39,13 @@ def cmake_args(self):
        args = super(Amdscalapack, self).cmake_args()
        spec = self.spec

        args.extend([
            "-DUSE_DOTC_WRAPPER:BOOL=%s" % (
                'ON' if '%aocc ^amdblis' in spec else 'OFF'
            )
        ])
        if spec.satisfies('%gcc@10:'):
            args.extend(['-DCMAKE_Fortran_FLAGS={0}'.format(
                "-fallow-argument-mismatch")])

        if spec.satisfies('@2.2'):
            args.extend(['-DUSE_DOTC_WRAPPER:BOOL=%s' % (
                'ON' if spec.satisfies('%aocc ^amdblis') else 'OFF')])

        args.extend([
            '-DUSE_F2C=ON',
Some files were not shown because too many files have changed in this diff.