Compare commits: cws/config...features/c (291 commits)
Commit list (SHA1 only): 291 commits, 540c37cb06 through d95f14084e.
.github/workflows/bootstrap-test.sh (vendored): 2 changed lines

@@ -1,7 +1,7 @@
 #!/bin/bash
 set -ex
 source share/spack/setup-env.sh
-$PYTHON bin/spack bootstrap untrust spack-install
+$PYTHON bin/spack bootstrap disable spack-install
 $PYTHON bin/spack -d solve zlib
 tree $BOOTSTRAP/store
 exit 0
.github/workflows/bootstrap.yml (vendored): 22 changed lines

@@ -42,7 +42,8 @@ jobs:
 shell: runuser -u spack-test -- bash {0}
 run: |
 source share/spack/setup-env.sh
-spack bootstrap untrust github-actions-v0.2
+spack bootstrap disable github-actions-v0.4
+spack bootstrap disable github-actions-v0.3
 spack external find cmake bison
 spack -d solve zlib
 tree ~/.spack/bootstrap/store/
@@ -79,7 +80,8 @@ jobs:
 shell: runuser -u spack-test -- bash {0}
 run: |
 source share/spack/setup-env.sh
-spack bootstrap untrust github-actions-v0.2
+spack bootstrap disable github-actions-v0.4
+spack bootstrap disable github-actions-v0.3
 spack external find cmake bison
 spack -d solve zlib
 tree ~/.spack/bootstrap/store/
@@ -143,7 +145,8 @@ jobs:
 - name: Bootstrap clingo
 run: |
 source share/spack/setup-env.sh
-spack bootstrap untrust github-actions-v0.2
+spack bootstrap disable github-actions-v0.4
+spack bootstrap disable github-actions-v0.3
 spack external find cmake bison
 spack -d solve zlib
 tree ~/.spack/bootstrap/store/
@@ -160,7 +163,8 @@ jobs:
 run: |
 source share/spack/setup-env.sh
 export PATH=/usr/local/opt/bison@2.7/bin:$PATH
-spack bootstrap untrust github-actions-v0.2
+spack bootstrap disable github-actions-v0.4
+spack bootstrap disable github-actions-v0.3
 spack external find --not-buildable cmake bison
 spack -d solve zlib
 tree ~/.spack/bootstrap/store/
@@ -261,7 +265,7 @@ jobs:
 shell: runuser -u spack-test -- bash {0}
 run: |
 source share/spack/setup-env.sh
-spack bootstrap untrust spack-install
+spack bootstrap disable spack-install
 spack -d gpg list
 tree ~/.spack/bootstrap/store/
 
@@ -298,7 +302,8 @@ jobs:
 run: |
 source share/spack/setup-env.sh
 spack solve zlib
-spack bootstrap untrust github-actions-v0.2
+spack bootstrap disable github-actions-v0.4
+spack bootstrap disable github-actions-v0.3
 spack -d gpg list
 tree ~/.spack/bootstrap/store/
 
@@ -315,7 +320,7 @@ jobs:
 - name: Bootstrap GnuPG
 run: |
 source share/spack/setup-env.sh
-spack bootstrap untrust spack-install
+spack bootstrap disable spack-install
 spack -d gpg list
 tree ~/.spack/bootstrap/store/
 
@@ -333,7 +338,8 @@ jobs:
 run: |
 source share/spack/setup-env.sh
 spack solve zlib
-spack bootstrap untrust github-actions-v0.2
+spack bootstrap disable github-actions-v0.4
+spack bootstrap disable github-actions-v0.3
 spack -d gpg list
 tree ~/.spack/bootstrap/store/
 
.github/workflows/build-containers.yml (vendored): 2 changed lines

@@ -13,7 +13,7 @@ on:
 paths:
 - '.github/workflows/build-containers.yml'
 - 'share/spack/docker/*'
-- 'share/templates/container/*'
+- 'share/spack/templates/container/*'
 - 'lib/spack/spack/container/*'
 # Let's also build & tag Spack containers on releases.
 release:
.github/workflows/setup_git.ps1 (vendored): 4 changed lines

@@ -6,6 +6,10 @@ git config --global user.email "spack@example.com"
 git config --global user.name "Test User"
 git config --global core.longpaths true
 
+# See https://github.com/git/git/security/advisories/GHSA-3wp6-j8xr-qw85 (CVE-2022-39253)
+# This is needed to let some fixture in our unit-test suite run
+git config --global protocol.file.allow always
+
 if ($(git branch --show-current) -ne "develop")
 {
 git branch develop origin/develop
.github/workflows/setup_git.sh (vendored): 4 changed lines

@@ -2,6 +2,10 @@
 git config --global user.email "spack@example.com"
 git config --global user.name "Test User"
 
+# See https://github.com/git/git/security/advisories/GHSA-3wp6-j8xr-qw85 (CVE-2022-39253)
+# This is needed to let some fixture in our unit-test suite run
+git config --global protocol.file.allow always
+
 # create a local pr base branch
 if [[ -n $GITHUB_BASE_REF ]]; then
 git fetch origin "${GITHUB_BASE_REF}:${GITHUB_BASE_REF}"
.github/workflows/unit_tests.yaml (vendored): 28 changed lines

@@ -52,7 +52,12 @@ jobs:
 patchelf cmake bison libbison-dev kcov
 - name: Install Python packages
 run: |
-pip install --upgrade pip six setuptools pytest codecov[toml] pytest-cov pytest-xdist
+pip install --upgrade pip six setuptools pytest codecov[toml] pytest-xdist
+# Install pytest-cov only on recent Python, to avoid stalling on Python 2.7 due
+# to bugs on an unmaintained version of the package when used with xdist.
+if [[ ${{ matrix.python-version }} != "2.7" ]]; then
+pip install --upgrade pytest-cov
+fi
 # ensure style checks are not skipped in unit tests for python >= 3.6
 # note that true/false (i.e., 1/0) are opposite in conditions in python and bash
 if python -c 'import sys; sys.exit(not sys.version_info >= (3, 6))'; then
@@ -61,7 +66,7 @@ jobs:
 - name: Pin pathlib for Python 2.7
 if: ${{ matrix.python-version == 2.7 }}
 run: |
-pip install -U pathlib2==2.3.6
+pip install -U pathlib2==2.3.6 toml
 - name: Setup git configuration
 run: |
 # Need this for the git tests to succeed.
@@ -73,7 +78,7 @@ jobs:
 SPACK_PYTHON: python
 run: |
 . share/spack/setup-env.sh
-spack bootstrap untrust spack-install
+spack bootstrap disable spack-install
 spack -v solve zlib
 - name: Run unit tests
 env:
@@ -81,11 +86,9 @@ jobs:
 SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
 SPACK_TEST_PARALLEL: 2
 COVERAGE: true
-UNIT_TEST_COVERAGE: ${{ (matrix.concretizer == 'original' && matrix.python-version == '2.7') || (matrix.python-version == '3.10') }}
+UNIT_TEST_COVERAGE: ${{ (matrix.python-version == '3.10') }}
 run: |
 share/spack/qa/run-unit-tests
-coverage combine -a
-coverage xml
 - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
 with:
 flags: unittests,linux,${{ matrix.concretizer }}
@@ -177,8 +180,6 @@ jobs:
 SPACK_TEST_SOLVER: clingo
 run: |
 share/spack/qa/run-unit-tests
-coverage combine -a
-coverage xml
 - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70 # @v2.1.0
 with:
 flags: unittests,linux,clingo
@@ -187,7 +188,7 @@ jobs:
 runs-on: macos-latest
 strategy:
 matrix:
-python-version: [3.8]
+python-version: ["3.10"]
 steps:
 - uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # @v2
 with:
@@ -210,15 +211,10 @@ jobs:
 git --version
 . .github/workflows/setup_git.sh
 . share/spack/setup-env.sh
-$(which spack) bootstrap untrust spack-install
+$(which spack) bootstrap disable spack-install
 $(which spack) solve zlib
 common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
-$(which spack) unit-test --cov --cov-config=pyproject.toml "${common_args[@]}"
+$(which spack) unit-test --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
-coverage combine -a
-coverage xml
-# Delete the symlink going from ./lib/spack/docs/_spack_root back to
-# the initial directory, since it causes ELOOP errors with codecov/actions@2
-rm lib/spack/docs/_spack_root
 - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
 with:
 flags: unittests,macos
.github/workflows/windows_python.yml (vendored): 12 changed lines

@@ -23,7 +23,7 @@ jobs:
 python-version: 3.9
 - name: Install Python packages
 run: |
-python -m pip install --upgrade pip six pywin32 setuptools codecov pytest-cov
+python -m pip install --upgrade pip six pywin32 setuptools codecov pytest-cov clingo
 - name: Create local develop
 run: |
 .\spack\.github\workflows\setup_git.ps1
@@ -32,8 +32,7 @@ jobs:
 echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
 cd spack
 dir
-(Get-Item '.\lib\spack\docs\_spack_root').Delete()
-spack unit-test --verbose --cov --cov-config=pyproject.toml --ignore=lib/spack/spack/test/cmd
+spack unit-test -x --verbose --cov --cov-config=pyproject.toml --ignore=lib/spack/spack/test/cmd
 coverage combine -a
 coverage xml
 - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
@@ -50,7 +49,7 @@ jobs:
 python-version: 3.9
 - name: Install Python packages
 run: |
-python -m pip install --upgrade pip six pywin32 setuptools codecov coverage pytest-cov
+python -m pip install --upgrade pip six pywin32 setuptools codecov coverage pytest-cov clingo
 - name: Create local develop
 run: |
 .\spack\.github\workflows\setup_git.ps1
@@ -58,8 +57,7 @@ jobs:
 run: |
 echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
 cd spack
-(Get-Item '.\lib\spack\docs\_spack_root').Delete()
-spack unit-test --verbose --cov --cov-config=pyproject.toml lib/spack/spack/test/cmd
+spack unit-test -x --verbose --cov --cov-config=pyproject.toml lib/spack/spack/test/cmd
 coverage combine -a
 coverage xml
 - uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
@@ -83,7 +81,7 @@ jobs:
 echo F|xcopy .\spack\share\spack\qa\configuration\windows_config.yaml $env:USERPROFILE\.spack\windows\config.yaml
 spack external find cmake
 spack external find ninja
-spack install abseil-cpp
+spack -d install abseil-cpp
 make-installer:
 runs-on: windows-latest
 steps:
@@ -9,16 +9,15 @@ bootstrap:
 # may not be able to bootstrap all the software that Spack needs,
 # depending on its type.
 sources:
+- name: 'github-actions-v0.4'
+  metadata: $spack/share/spack/bootstrap/github-actions-v0.4
 - name: 'github-actions-v0.3'
   metadata: $spack/share/spack/bootstrap/github-actions-v0.3
-- name: 'github-actions-v0.2'
-  metadata: $spack/share/spack/bootstrap/github-actions-v0.2
-- name: 'github-actions-v0.1'
-  metadata: $spack/share/spack/bootstrap/github-actions-v0.1
 - name: 'spack-install'
   metadata: $spack/share/spack/bootstrap/spack-install
 trusted:
 # By default we trust bootstrapping from sources and from binaries
 # produced on Github via the workflow
+github-actions-v0.4: true
 github-actions-v0.3: true
 spack-install: true
@@ -187,10 +187,20 @@ config:
 package_lock_timeout: null
 
 
-# Control whether Spack embeds RPATH or RUNPATH attributes in ELF binaries.
-# Has no effect on macOS. DO NOT MIX these within the same install tree.
-# See the Spack documentation for details.
-shared_linking: 'rpath'
+# Control how shared libraries are located at runtime on Linux. See the
+# the Spack documentation for details.
+shared_linking:
+# Spack automatically embeds runtime search paths in ELF binaries for their
+# dependencies. Their type can either be "rpath" or "runpath". For glibc, rpath is
+# inherited and has precedence over LD_LIBRARY_PATH; runpath is not inherited
+# and of lower precedence. DO NOT MIX these within the same install tree.
+type: rpath
+
+
+# (Experimental) Embed absolute paths of dependent libraries directly in ELF
+# binaries to avoid runtime search. This can improve startup time of
+# executables with many dependencies, in particular on slow filesystems.
+bind: false
 
 
 # Set to 'false' to allow installation on filesystems that doesn't allow setgid bit
@@ -201,3 +211,7 @@ config:
 # building and installing packages. This gives information about Spack's
 # current progress as well as the current and total number of packages.
 terminal_title: false
+
+# Number of seconds a buildcache's index.json is cached locally before probing
+# for updates, within a single Spack invocation. Defaults to 10 minutes.
+binary_index_ttl: 600
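Put together, the two hunks above give a new shape for these settings in the configuration defaults. The sketch below is assembled from the changed lines; the enclosing `config:` key and the indentation are inferred from the hunk context rather than shown verbatim in the diff:

```yaml
config:
  # Runtime linking behaviour for ELF binaries (Linux only)
  shared_linking:
    # "rpath" or "runpath"; do not mix the two within one install tree
    type: rpath
    # experimental: embed absolute paths of dependent libraries directly
    bind: false

  # seconds a buildcache's index.json is cached before probing for updates
  binary_index_ttl: 600
```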
@@ -27,7 +27,8 @@ packages:
 fuse: [libfuse]
 gl: [glx, osmesa]
 glu: [mesa-glu, openglu]
-golang: [gcc]
+golang: [go, gcc]
+go-external-or-gccgo-bootstrap: [go-bootstrap, gcc]
 iconv: [libiconv]
 ipp: [intel-ipp]
 java: [openjdk, jdk, ibm-java]
@@ -1,5 +1,5 @@
 config:
 locks: false
-concretizer: original
+concretizer: clingo
 build_stage::
 - '$spack/.staging'
@@ -1 +0,0 @@
-../../..
@@ -85,7 +85,7 @@ All packages whose names or descriptions contain documentation:
 To get more information on a particular package from `spack list`, use
 `spack info`. Just supply the name of a package:
 
-.. command-output:: spack info mpich
+.. command-output:: spack info --all mpich
 
 Most of the information is self-explanatory. The *safe versions* are
 versions that Spack knows the checksum for, and it will use the
@@ -998,11 +998,15 @@ More formally, a spec consists of the following pieces:
 * ``%`` Optional compiler specifier, with an optional compiler version
 (``gcc`` or ``gcc@4.7.3``)
 * ``+`` or ``-`` or ``~`` Optional variant specifiers (``+debug``,
-``-qt``, or ``~qt``) for boolean variants
+``-qt``, or ``~qt``) for boolean variants. Use ``++`` or ``--`` or
+``~~`` to propagate variants through the dependencies (``++debug``,
+``--qt``, or ``~~qt``).
 * ``name=<value>`` Optional variant specifiers that are not restricted to
-boolean variants
+boolean variants. Use ``name==<value>`` to propagate variant through the
+dependencies.
 * ``name=<value>`` Optional compiler flag specifiers. Valid flag names are
 ``cflags``, ``cxxflags``, ``fflags``, ``cppflags``, ``ldflags``, and ``ldlibs``.
+Use ``name==<value>`` to propagate compiler flags through the dependencies.
 * ``target=<value> os=<value>`` Optional architecture specifier
 (``target=haswell os=CNL10``)
 * ``^`` Dependency specs (``^callpath@1.1``)
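For orientation, the pieces listed in this hunk combine into a single spec string on the command line. The following is an illustrative, hypothetical spec built only from the elements named above, including the new propagation forms:

```console
$ spack install mpileaks ++debug cppflags=="-O3" %gcc@4.7.3 target=haswell ^callpath@1.1
```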
@@ -1226,6 +1230,23 @@ variants using the backwards compatibility syntax and uses only ``~``
 for disabled boolean variants. The ``-`` and spaces on the command
 line are provided for convenience and legibility.
 
+Spack allows variants to propagate their value to the package's
+dependency by using ``++``, ``--``, and ``~~`` for boolean variants.
+For example, for a ``debug`` variant:
+
+.. code-block:: sh
+
+   mpileaks ++debug   # enabled debug will be propagated to dependencies
+   mpileaks +debug    # only mpileaks will have debug enabled
+
+To propagate the value of non-boolean variants Spack uses ``name==value``.
+For example, for the ``stackstart`` variant:
+
+.. code-block:: sh
+
+   mpileaks stackstart=4   # variant will be propagated to dependencies
+   mpileaks stackstart==4  # only mpileaks will have this variant value
+
 ^^^^^^^^^^^^^^
 Compiler Flags
 ^^^^^^^^^^^^^^
@@ -1233,10 +1254,15 @@ Compiler Flags
 Compiler flags are specified using the same syntax as non-boolean variants,
 but fulfill a different purpose. While the function of a variant is set by
 the package, compiler flags are used by the compiler wrappers to inject
-flags into the compile line of the build. Additionally, compiler flags are
-inherited by dependencies. ``spack install libdwarf cppflags="-g"`` will
-install both libdwarf and libelf with the ``-g`` flag injected into their
-compile line.
+flags into the compile line of the build. Additionally, compiler flags can
+be inherited by dependencies by using ``==``.
+``spack install libdwarf cppflags=="-g"`` will install both libdwarf and
+libelf with the ``-g`` flag injected into their compile line.
 
+.. note::
+
+   versions of spack prior to 0.19.0 will propagate compiler flags using
+   the ``=`` syntax.
+
 Notice that the value of the compiler flags must be quoted if it
 contains any spaces. Any of ``cppflags=-O3``, ``cppflags="-O3"``,
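A minimal before/after sketch of the ``==`` behaviour described above, assuming the usual libdwarf to libelf dependency used in these docs:

```console
$ spack install libdwarf cppflags="-g"    # -g is applied to libdwarf only
$ spack install libdwarf cppflags=="-g"   # -g is also propagated to libelf
```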
@@ -1438,7 +1464,7 @@ built.
 You can see what virtual packages a particular package provides by
 getting info on it:
 
-.. command-output:: spack info mpich
+.. command-output:: spack info --virtuals mpich
 
 Spack is unique in that its virtual packages can be versioned, just
 like regular packages. A particular version of a package may provide
@@ -15,15 +15,13 @@ is an entire command dedicated to the management of every aspect of bootstrapping:
 
 .. command-output:: spack bootstrap --help
 
-The first thing to know to understand bootstrapping in Spack is that each of
-Spack's dependencies is bootstrapped lazily; i.e. the first time it is needed and
-can't be found. You can readily check if any prerequisite for using Spack
-is missing by running:
+Spack is configured to bootstrap its dependencies lazily by default; i.e. the first time they are needed and
+can't be found. You can readily check if any prerequisite for using Spack is missing by running:
 
 .. code-block:: console
 
 % spack bootstrap status
-Spack v0.17.1 - python@3.8
+Spack v0.19.0 - python@3.8
 
 [FAIL] Core Functionalities
 [B] MISSING "clingo": required to concretize specs
@@ -48,6 +46,21 @@ they can be bootstrapped. Running a command that concretize a spec, like:
 
 triggers the bootstrapping of clingo from pre-built binaries as expected.
 
+Users can also bootstrap all the dependencies needed by Spack in a single command, which
+might be useful to setup containers or other similar environments:
+
+.. code-block:: console
+
+   $ spack bootstrap now
+   ==> Bootstrapping clingo from pre-built binaries
+   ==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.3/build_cache/linux-centos7-x86_64-gcc-10.2.1-clingo-bootstrap-spack-shqedxgvjnhiwdcdrvjhbd73jaevv7wt.spec.json
+   ==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.3/build_cache/linux-centos7-x86_64/gcc-10.2.1/clingo-bootstrap-spack/linux-centos7-x86_64-gcc-10.2.1-clingo-bootstrap-spack-shqedxgvjnhiwdcdrvjhbd73jaevv7wt.spack
+   ==> Installing "clingo-bootstrap@spack%gcc@10.2.1~docs~ipo+python+static_libstdcpp build_type=Release arch=linux-centos7-x86_64" from a buildcache
+   ==> Bootstrapping patchelf from pre-built binaries
+   ==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.3/build_cache/linux-centos7-x86_64-gcc-10.2.1-patchelf-0.15.0-htk62k7efo2z22kh6kmhaselru7bfkuc.spec.json
+   ==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.3/build_cache/linux-centos7-x86_64/gcc-10.2.1/patchelf-0.15.0/linux-centos7-x86_64-gcc-10.2.1-patchelf-0.15.0-htk62k7efo2z22kh6kmhaselru7bfkuc.spack
+   ==> Installing "patchelf@0.15.0%gcc@10.2.1 ldflags="-static-libstdc++ -static-libgcc" arch=linux-centos7-x86_64" from a buildcache
+
 -----------------------
 The Bootstrapping store
 -----------------------
@@ -107,19 +120,19 @@ If need be, you can disable bootstrapping altogether by running:
 
 in which case it's your responsibility to ensure Spack runs in an
 environment where all its prerequisites are installed. You can
-also configure Spack to skip certain bootstrapping methods by *untrusting*
-them. For instance:
+also configure Spack to skip certain bootstrapping methods by disabling
+them specifically:
 
 .. code-block:: console
 
-   % spack bootstrap untrust github-actions
-   ==> "github-actions" is now untrusted and will not be used for bootstrapping
+   % spack bootstrap disable github-actions
+   ==> "github-actions" is now disabled and will not be used for bootstrapping
 
 tells Spack to skip trying to bootstrap from binaries. To add the "github-actions" method back you can:
 
 .. code-block:: console
 
-   % spack bootstrap trust github-actions
+   % spack bootstrap enable github-actions
 
 There is also an option to reset the bootstrapping configuration to Spack's defaults:
 
@@ -65,7 +65,6 @@ on these ideas for each distinct build system that Spack supports:
 build_systems/custompackage
 build_systems/inteloneapipackage
 build_systems/intelpackage
-build_systems/multiplepackage
 build_systems/rocmpackage
 build_systems/sourceforgepackage
 
@@ -5,9 +5,9 @@
 
 .. _autotoolspackage:
 
-----------------
-AutotoolsPackage
-----------------
+---------
+Autotools
+---------
 
 Autotools is a GNU build system that provides a build-script generator.
 By running the platform-independent ``./configure`` script that comes
@@ -17,7 +17,7 @@ with the package, you can generate a platform-dependent Makefile.
 Phases
 ^^^^^^
 
-The ``AutotoolsPackage`` base class comes with the following phases:
+The ``AutotoolsBuilder`` and ``AutotoolsPackage`` base classes come with the following phases:
 
 #. ``autoreconf`` - generate the configure script
 #. ``configure`` - generate the Makefiles
@@ -5,9 +5,9 @@
 
 .. _bundlepackage:
 
--------------
-BundlePackage
--------------
+------
+Bundle
+------
 
 ``BundlePackage`` represents a set of packages that are expected to work well
 together, such as a collection of commonly used software libraries. The
@@ -5,9 +5,9 @@
 
 .. _cmakepackage:
 
-------------
-CMakePackage
-------------
+-----
+CMake
+-----
 
 Like Autotools, CMake is a widely-used build-script generator. Designed
 by Kitware, CMake is the most popular build system for new C, C++, and
@@ -21,7 +21,7 @@ whereas Autotools is Unix-only.
 Phases
 ^^^^^^
 
-The ``CMakePackage`` base class comes with the following phases:
+The ``CMakeBuilder`` and ``CMakePackage`` base classes come with the following phases:
 
 #. ``cmake`` - generate the Makefile
 #. ``build`` - build the package
@@ -130,8 +130,8 @@ Adding flags to cmake
 To add additional flags to the ``cmake`` call, simply override the
 ``cmake_args`` function. The following example defines values for the flags
 ``WHATEVER``, ``ENABLE_BROKEN_FEATURE``, ``DETECT_HDF5``, and ``THREADS`` with
-and without the :meth:`~spack.build_systems.cmake.CMakePackage.define` and
-:meth:`~spack.build_systems.cmake.CMakePackage.define_from_variant` helper functions:
+and without the :meth:`~spack.build_systems.cmake.CMakeBuilder.define` and
+:meth:`~spack.build_systems.cmake.CMakeBuilder.define_from_variant` helper functions:
 
 .. code-block:: python
 
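The example code referred to in the last hunk is not part of this excerpt. A minimal sketch of a ``cmake_args`` body using the two helpers, with a made-up package name, URL, and variants, could look like:

```python
# Hypothetical recipe illustrating define() and define_from_variant().
from spack.package import *


class Example(CMakePackage):
    """Made-up package; only the cmake_args() body matters here."""

    homepage = "https://example.com"
    url = "https://example.com/example-1.0.tar.gz"

    version("1.0", sha256="...")

    variant("hdf5", default=True, description="Detect and use HDF5")
    variant("threads", default=True, description="Enable threading")

    def cmake_args(self):
        return [
            # a literal key/value pair, e.g. -DWHATEVER:STRING=somevalue
            self.define("WHATEVER", "somevalue"),
            # booleans are rendered as ON/OFF
            self.define("ENABLE_BROKEN_FEATURE", False),
            # take the value from the corresponding variant on the spec
            self.define_from_variant("DETECT_HDF5", "hdf5"),
            self.define_from_variant("THREADS", "threads"),
        ]
```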
@@ -32,7 +32,7 @@ oneAPI packages or use::
 
 For more information on a specific package, do::
 
-   spack info <package-name>
+   spack info --all <package-name>
 
 Intel no longer releases new versions of Parallel Studio, which can be
 used in Spack via the :ref:`intelpackage`. All of its components can
@@ -5,11 +5,11 @@
 
 .. _luapackage:
 
-------------
-LuaPackage
-------------
+---
+Lua
+---
 
-LuaPackage is a helper for the common case of Lua packages that provide
+The ``Lua`` build-system is a helper for the common case of Lua packages that provide
 a rockspec file. This is not meant to take a rock archive, but to build
 a source archive or repository that provides a rockspec, which should cover
 most lua packages. In the case a Lua package builds by Make rather than
@@ -19,7 +19,7 @@ luarocks, prefer MakefilePackage.
 Phases
 ^^^^^^
 
-The ``LuaPackage`` base class comes with the following phases:
+The ``LuaBuilder`` and `LuaPackage`` base classes come with the following phases:
 
 #. ``unpack`` - if using a rock, unpacks the rock and moves into the source directory
 #. ``preprocess`` - adjust sources or rockspec to fix build
@@ -5,9 +5,9 @@
 
 .. _makefilepackage:
 
----------------
-MakefilePackage
----------------
+--------
+Makefile
+--------
 
 The most primitive build system a package can use is a plain Makefile.
 Makefiles are simple to write for small projects, but they usually
@@ -18,7 +18,7 @@ variables.
 Phases
 ^^^^^^
 
-The ``MakefilePackage`` base class comes with 3 phases:
+The ``MakefileBuilder`` and ``MakefilePackage`` base classes come with 3 phases:
 
 #. ``edit`` - edit the Makefile
 #. ``build`` - build the project
@@ -5,9 +5,9 @@
 
 .. _mavenpackage:
 
-------------
-MavenPackage
-------------
+-----
+Maven
+-----
 
 Apache Maven is a general-purpose build system that does not rely
 on Makefiles to build software. It is designed for building and
@@ -17,7 +17,7 @@ managing and Java-based project.
 Phases
 ^^^^^^
 
-The ``MavenPackage`` base class comes with the following phases:
+The ``MavenBuilder`` and ``MavenPackage`` base classes come with the following phases:
 
 #. ``build`` - compile code and package into a JAR file
 #. ``install`` - copy to installation prefix
@@ -5,9 +5,9 @@
 
 .. _mesonpackage:
 
------------- 
-MesonPackage
-------------
+-----
+Meson
+-----
 
 Much like Autotools and CMake, Meson is a build system. But it is
 meant to be both fast and as user friendly as possible. GNOME's goal
@@ -17,7 +17,7 @@ is to port modules to use the Meson build system.
 Phases
 ^^^^^^
 
-The ``MesonPackage`` base class comes with the following phases:
+The ``MesonBuilder`` and ``MesonPackage`` base classes come with the following phases:
 
 #. ``meson`` - generate ninja files
 #. ``build`` - build the project
@@ -1,350 +0,0 @@
|
|||||||
.. Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
|
||||||
Spack Project Developers. See the top-level COPYRIGHT file for details.
|
|
||||||
|
|
||||||
SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
|
||||||
|
|
||||||
.. _multiplepackage:
|
|
||||||
|
|
||||||
----------------------
|
|
||||||
Multiple Build Systems
|
|
||||||
----------------------
|
|
||||||
|
|
||||||
Quite frequently, a package will change build systems from one version to the
|
|
||||||
next. For example, a small project that once used a single Makefile to build
|
|
||||||
may now require Autotools to handle the increased number of files that need to
|
|
||||||
be compiled. Or, a package that once used Autotools may switch to CMake for
|
|
||||||
Windows support. In this case, it becomes a bit more challenging to write a
|
|
||||||
single build recipe for this package in Spack.
|
|
||||||
|
|
||||||
There are several ways that this can be handled in Spack:
|
|
||||||
|
|
||||||
#. Subclass the new build system, and override phases as needed (preferred)
|
|
||||||
#. Subclass ``Package`` and implement ``install`` as needed
|
|
||||||
#. Create separate ``*-cmake``, ``*-autotools``, etc. packages for each build system
|
|
||||||
#. Rename the old package to ``*-legacy`` and create a new package
|
|
||||||
#. Move the old package to a ``legacy`` repository and create a new package
|
|
||||||
#. Drop older versions that only support the older build system
|
|
||||||
|
|
||||||
Of these options, 1 is preferred, and will be demonstrated in this
|
|
||||||
documentation. Options 3-5 have issues with concretization, so shouldn't be
|
|
||||||
used. Options 4-5 also don't support more than two build systems. Option 6 only
|
|
||||||
works if the old versions are no longer needed. Option 1 is preferred over 2
|
|
||||||
because it makes it easier to drop the old build system entirely.
|
|
||||||
|
|
||||||
The exact syntax of the package depends on which build systems you need to
|
|
||||||
support. Below are a couple of common examples.
|
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
Makefile -> Autotools
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
Let's say we have the following package:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
class Foo(MakefilePackage):
|
|
||||||
version("1.2.0", sha256="...")
|
|
||||||
|
|
||||||
def edit(self, spec, prefix):
|
|
||||||
filter_file("CC=", "CC=" + spack_cc, "Makefile")
|
|
||||||
|
|
||||||
def install(self, spec, prefix):
|
|
||||||
install_tree(".", prefix)
|
|
||||||
|
|
||||||
|
|
||||||
The package subclasses from :ref:`makefilepackage`, which has three phases:
|
|
||||||
|
|
||||||
#. ``edit`` (does nothing by default)
|
|
||||||
#. ``build`` (runs ``make`` by default)
|
|
||||||
#. ``install`` (runs ``make install`` by default)
|
|
||||||
|
|
||||||
In this case, the ``install`` phase needed to be overridden because the
|
|
||||||
Makefile did not have an install target. We also modify the Makefile to use
|
|
||||||
Spack's compiler wrappers. The default ``build`` phase is not changed.
|
|
||||||
|
|
||||||
Starting with version 1.3.0, we want to use Autotools to build instead.
|
|
||||||
:ref:`autotoolspackage` has four phases:
|
|
||||||
|
|
||||||
#. ``autoreconf`` (does not if a configure script already exists)
|
|
||||||
#. ``configure`` (runs ``./configure --prefix=...`` by default)
|
|
||||||
#. ``build`` (runs ``make`` by default)
|
|
||||||
#. ``install`` (runs ``make install`` by default)
|
|
||||||
|
|
||||||
If the only version we need to support is 1.3.0, the package would look as
|
|
||||||
simple as:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
class Foo(AutotoolsPackage):
|
|
||||||
version("1.3.0", sha256="...")
|
|
||||||
|
|
||||||
def configure_args(self):
|
|
||||||
return ["--enable-shared"]
|
|
||||||
|
|
||||||
|
|
||||||
In this case, we use the default methods for each phase and only override
|
|
||||||
``configure_args`` to specify additional flags to pass to ``./configure``.
|
|
||||||
|
|
||||||
If we wanted to write a single package that supports both versions 1.2.0 and
|
|
||||||
1.3.0, it would look something like:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
class Foo(AutotoolsPackage):
|
|
||||||
version("1.3.0", sha256="...")
|
|
||||||
version("1.2.0", sha256="...", deprecated=True)
|
|
||||||
|
|
||||||
def configure_args(self):
|
|
||||||
return ["--enable-shared"]
|
|
||||||
|
|
||||||
# Remove the following once version 1.2.0 is dropped
|
|
||||||
@when("@:1.2")
|
|
||||||
def patch(self):
|
|
||||||
filter_file("CC=", "CC=" + spack_cc, "Makefile")
|
|
||||||
|
|
||||||
@when("@:1.2")
|
|
||||||
def autoreconf(self, spec, prefix):
|
|
||||||
pass
|
|
||||||
|
|
||||||
@when("@:1.2")
|
|
||||||
def configure(self, spec, prefix):
|
|
||||||
pass
|
|
||||||
|
|
||||||
@when("@:1.2")
|
|
||||||
def install(self, spec, prefix):
|
|
||||||
install_tree(".", prefix)
|
|
||||||
|
|
||||||
|
|
||||||
There are a few interesting things to note here:
|
|
||||||
|
|
||||||
* We added ``deprecated=True`` to version 1.2.0. This signifies that version
|
|
||||||
1.2.0 is deprecated and shouldn't be used. However, if a user still relies
|
|
||||||
on version 1.2.0, it's still there and builds just fine.
|
|
||||||
* We moved the contents of the ``edit`` phase to the ``patch`` function. Since
|
|
||||||
``AutotoolsPackage`` doesn't have an ``edit`` phase, the only way for this
|
|
||||||
step to be executed is to move it to the ``patch`` function, which always
|
|
||||||
gets run.
|
|
||||||
* The ``autoreconf`` and ``configure`` phases become no-ops. Since the old
|
|
||||||
Makefile-based build system doesn't use these, we ignore these phases when
|
|
||||||
building ``foo@1.2.0``.
|
|
||||||
* The ``@when`` decorator is used to override these phases only for older
|
|
||||||
versions. The default methods are used for ``foo@1.3:``.
|
|
||||||
|
|
||||||
Once a new Spack release comes out, version 1.2.0 and everything below the
|
|
||||||
comment can be safely deleted. The result is the same as if we had written a
|
|
||||||
package for version 1.3.0 from scratch.
|
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^^
|
|
||||||
Autotools -> CMake
|
|
||||||
^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
Let's say we have the following package:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
class Bar(AutotoolsPackage):
|
|
||||||
version("1.2.0", sha256="...")
|
|
||||||
|
|
||||||
def configure_args(self):
|
|
||||||
return ["--enable-shared"]
|
|
||||||
|
|
||||||
|
|
||||||
The package subclasses from :ref:`autotoolspackage`, which has four phases:
|
|
||||||
|
|
||||||
#. ``autoreconf`` (does not if a configure script already exists)
|
|
||||||
#. ``configure`` (runs ``./configure --prefix=...`` by default)
|
|
||||||
#. ``build`` (runs ``make`` by default)
|
|
||||||
#. ``install`` (runs ``make install`` by default)
|
|
||||||
|
|
||||||
In this case, we use the default methods for each phase and only override
|
|
||||||
``configure_args`` to specify additional flags to pass to ``./configure``.
|
|
||||||
|
|
||||||
Starting with version 1.3.0, we want to use CMake to build instead.
|
|
||||||
:ref:`cmakepackage` has three phases:
|
|
||||||
|
|
||||||
#. ``cmake`` (runs ``cmake ...`` by default)
|
|
||||||
#. ``build`` (runs ``make`` by default)
|
|
||||||
#. ``install`` (runs ``make install`` by default)
|
|
||||||
|
|
||||||
If the only version we need to support is 1.3.0, the package would look as
|
|
||||||
simple as:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
class Bar(CMakePackage):
|
|
||||||
version("1.3.0", sha256="...")
|
|
||||||
|
|
||||||
def cmake_args(self):
|
|
||||||
return [self.define("BUILD_SHARED_LIBS", True)]
|
|
||||||
|
|
||||||
|
|
||||||
In this case, we use the default methods for each phase and only override
|
|
||||||
``cmake_args`` to specify additional flags to pass to ``cmake``.
|
|
||||||
|
|
||||||
If we wanted to write a single package that supports both versions 1.2.0 and
|
|
||||||
1.3.0, it would look something like:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
class Bar(CMakePackage):
|
|
||||||
version("1.3.0", sha256="...")
|
|
||||||
version("1.2.0", sha256="...", deprecated=True)
|
|
||||||
|
|
||||||
def cmake_args(self):
|
|
||||||
return [self.define("BUILD_SHARED_LIBS", True)]
|
|
||||||
|
|
||||||
# Remove the following once version 1.2.0 is dropped
|
|
||||||
def configure_args(self):
|
|
||||||
return ["--enable-shared"]
|
|
||||||
|
|
||||||
@when("@:1.2")
|
|
||||||
def cmake(self, spec, prefix):
|
|
||||||
configure("--prefix=" + prefix, *self.configure_args())
|
|
||||||
|
|
||||||
|
|
||||||
There are a few interesting things to note here:
|
|
||||||
|
|
||||||
* We added ``deprecated=True`` to version 1.2.0. This signifies that version
|
|
||||||
1.2.0 is deprecated and shouldn't be used. However, if a user still relies
|
|
||||||
on version 1.2.0, it's still there and builds just fine.
|
|
||||||
* Since CMake and Autotools are so similar, we only need to override the
|
|
||||||
``cmake`` phase, we can use the default ``build`` and ``install`` phases.
|
|
||||||
* We override ``cmake`` to run ``./configure`` for older versions.
|
|
||||||
``configure_args`` remains the same.
|
|
||||||
* The ``@when`` decorator is used to override these phases only for older
|
|
||||||
versions. The default methods are used for ``bar@1.3:``.
|
|
||||||
|
|
||||||
Once a new Spack release comes out, version 1.2.0 and everything below the
|
|
||||||
comment can be safely deleted. The result is the same as if we had written a
|
|
||||||
package for version 1.3.0 from scratch.
|
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
Multiple build systems for the same version
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
During the transition from one build system to another, developers often
|
|
||||||
support multiple build systems at the same time. Spack can only use a single
|
|
||||||
build system for a single version. To decide which build system to use for a
|
|
||||||
particular version, take the following things into account:
|
|
||||||
|
|
||||||
1. If the developers explicitly state that one build system is preferred over
|
|
||||||
another, use that one.
|
|
||||||
2. If one build system is considered "experimental" while another is considered
|
|
||||||
"stable", use the stable build system.
|
|
||||||
3. Otherwise, use the newer build system.
|
|
||||||
|
|
||||||
The developer preference for which build system to use can change over time as
|
|
||||||
a newer build system becomes stable/recommended.

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Dropping support for old build systems
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

When older versions of a package don't support a newer build system, it can be
tempting to simply delete them from a package. This significantly reduces
package complexity and makes the build recipe much easier to maintain. However,
other packages or Spack users may rely on these older versions. The recommended
approach is to first support both build systems (as demonstrated above),
:ref:`deprecate <deprecate>` versions that rely on the old build system, and
remove those versions and any phases that needed to be overridden in the next
Spack release.
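
As a sketch of the intermediate step (the version numbers are illustrative),
the versions tied to the old build system simply gain ``deprecated=True``,
while the overridden phases stay in place until the following release:

.. code-block:: python

   # Kept for one more Spack release, then removed together with the
   # overridden phases that support them.
   version("1.2.0", sha256="...", deprecated=True)
   version("1.1.0", sha256="...", deprecated=True)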

^^^^^^^^^^^^^^^^^^^^^^^^^^^
Three or more build systems
^^^^^^^^^^^^^^^^^^^^^^^^^^^

In rare cases, a package may change build systems multiple times. For example,
a package may start with Makefiles, then switch to Autotools, then switch to
CMake. The same logic used above can be extended to any number of build systems.
For example:

.. code-block:: python

   class Baz(CMakePackage):
       version("1.4.0", sha256="...")  # CMake
       version("1.3.0", sha256="...")  # Autotools
       version("1.2.0", sha256="...")  # Makefile

       def cmake_args(self):
           return [self.define("BUILD_SHARED_LIBS", True)]

       # Remove the following once version 1.3.0 is dropped
       def configure_args(self):
           return ["--enable-shared"]

       @when("@1.3")
       def cmake(self, spec, prefix):
           configure("--prefix=" + prefix, *self.configure_args())

       # Remove the following once version 1.2.0 is dropped
       @when("@:1.2")
       def patch(self):
           filter_file("CC=", "CC=" + spack_cc, "Makefile")

       @when("@:1.2")
       def cmake(self, spec, prefix):
           pass

       @when("@:1.2")
       def install(self, spec, prefix):
           install_tree(".", prefix)

^^^^^^^^^^^^^^^^^^^
Additional examples
^^^^^^^^^^^^^^^^^^^

When writing new packages, it often helps to see examples of existing packages.
Here is an incomplete list of existing Spack packages that have changed build
systems before:

================ ===================== ================
Package          Previous Build System New Build System
================ ===================== ================
amber            custom                CMake
arpack-ng        Autotools             CMake
atk              Autotools             Meson
blast            None                  Autotools
dyninst          Autotools             CMake
evtgen           Autotools             CMake
fish             Autotools             CMake
gdk-pixbuf       Autotools             Meson
glib             Autotools             Meson
glog             Autotools             CMake
gmt              Autotools             CMake
gtkplus          Autotools             Meson
hpl              Makefile              Autotools
interproscan     Perl                  Maven
jasper           Autotools             CMake
kahip            SCons                 CMake
kokkos           Makefile              CMake
kokkos-kernels   Makefile              CMake
leveldb          Makefile              CMake
libdrm           Autotools             Meson
libjpeg-turbo    Autotools             CMake
mesa             Autotools             Meson
metis            None                  CMake
mpifileutils     Autotools             CMake
muparser         Autotools             CMake
mxnet            Makefile              CMake
nest             Autotools             CMake
neuron           Autotools             CMake
nsimd            CMake                 nsconfig
opennurbs        Makefile              CMake
optional-lite    None                  CMake
plasma           Makefile              CMake
preseq           Makefile              Autotools
protobuf         Autotools             CMake
py-pygobject     Autotools             Python
singularity      Autotools             Makefile
span-lite        None                  CMake
ssht             Makefile              CMake
string-view-lite None                  CMake
superlu          Makefile              CMake
superlu-dist     Makefile              CMake
uncrustify       Autotools             CMake
================ ===================== ================

Packages that support multiple build systems can be a bit confusing to write.
Don't hesitate to open an issue or draft pull request and ask for advice from
other Spack developers!

@@ -5,9 +5,9 @@
|
|||||||
|
|
||||||
.. _octavepackage:
|
.. _octavepackage:
|
||||||
|
|
||||||
-------------
|
------
|
||||||
OctavePackage
|
Octave
|
||||||
-------------
|
------
|
||||||
|
|
||||||
Octave has its own build system for installing packages.
|
Octave has its own build system for installing packages.
|
||||||
|
|
||||||
@@ -15,7 +15,7 @@ Octave has its own build system for installing packages.
|
|||||||
Phases
|
Phases
|
||||||
^^^^^^
|
^^^^^^
|
||||||
|
|
||||||
The ``OctavePackage`` base class has a single phase:
|
The ``OctaveBuilder`` and ``OctavePackage`` base classes have a single phase:
|
||||||
|
|
||||||
#. ``install`` - install the package
|
#. ``install`` - install the package
|
||||||
|
|
||||||
|
|||||||
@@ -5,9 +5,9 @@
|
|||||||
|
|
||||||
.. _perlpackage:
|
.. _perlpackage:
|
||||||
|
|
||||||
-----------
|
----
|
||||||
PerlPackage
|
Perl
|
||||||
-----------
|
----
|
||||||
|
|
||||||
Much like Octave, Perl has its own language-specific
|
Much like Octave, Perl has its own language-specific
|
||||||
build system.
|
build system.
|
||||||
@@ -16,7 +16,7 @@ build system.
|
|||||||
Phases
|
Phases
|
||||||
^^^^^^
|
^^^^^^
|
||||||
|
|
||||||
The ``PerlPackage`` base class comes with 3 phases that can be overridden:
|
The ``PerlBuilder`` and ``PerlPackage`` base classes come with 3 phases that can be overridden:
|
||||||
|
|
||||||
#. ``configure`` - configure the package
|
#. ``configure`` - configure the package
|
||||||
#. ``build`` - build the package
|
#. ``build`` - build the package
|
||||||
|
|||||||
@@ -5,9 +5,9 @@
|
|||||||
|
|
||||||
.. _qmakepackage:
|
.. _qmakepackage:
|
||||||
|
|
||||||
------------
|
-----
|
||||||
QMakePackage
|
QMake
|
||||||
------------
|
-----
|
||||||
|
|
||||||
Much like Autotools and CMake, QMake is a build-script generator
|
Much like Autotools and CMake, QMake is a build-script generator
|
||||||
designed by the developers of Qt. In its simplest form, Spack's
|
designed by the developers of Qt. In its simplest form, Spack's
|
||||||
@@ -29,7 +29,7 @@ variables or edit ``*.pro`` files to get things working properly.
|
|||||||
Phases
|
Phases
|
||||||
^^^^^^
|
^^^^^^
|
||||||
|
|
||||||
The ``QMakePackage`` base class comes with the following phases:
|
The ``QMakeBuilder`` and ``QMakePackage`` base classes come with the following phases:
|
||||||
|
|
||||||
#. ``qmake`` - generate Makefiles
|
#. ``qmake`` - generate Makefiles
|
||||||
#. ``build`` - build the project
|
#. ``build`` - build the project
|
||||||
|
|||||||
@@ -5,9 +5,9 @@
|
|||||||
|
|
||||||
.. _racketpackage:
|
.. _racketpackage:
|
||||||
|
|
||||||
-------------
|
------
|
||||||
RacketPackage
|
Racket
|
||||||
-------------
|
------
|
||||||
|
|
||||||
Much like Python, Racket packages and modules have their own special build system.
|
Much like Python, Racket packages and modules have their own special build system.
|
||||||
To learn more about the specifics of Racket package system, please refer to the
|
To learn more about the specifics of Racket package system, please refer to the
|
||||||
@@ -17,7 +17,7 @@ To learn more about the specifics of Racket package system, please refer to the
|
|||||||
Phases
|
Phases
|
||||||
^^^^^^
|
^^^^^^
|
||||||
|
|
||||||
The ``RacketPackage`` base class provides an ``install`` phase that
|
The ``RacketBuilder`` and ``RacketPackage`` base classes provide an ``install`` phase that
|
||||||
can be overridden, corresponding to the use of:
|
can be overridden, corresponding to the use of:
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|||||||
@@ -19,7 +19,7 @@ new Spack packages for.
|
|||||||
Phases
|
Phases
|
||||||
^^^^^^
|
^^^^^^
|
||||||
|
|
||||||
The ``RPackage`` base class has a single phase:
|
The ``RBuilder`` and ``RPackage`` base classes have a single phase:
|
||||||
|
|
||||||
#. ``install`` - install the package
|
#. ``install`` - install the package
|
||||||
|
|
||||||
|
|||||||
@@ -5,9 +5,9 @@
|
|||||||
|
|
||||||
.. _rubypackage:
|
.. _rubypackage:
|
||||||
|
|
||||||
-----------
|
----
|
||||||
RubyPackage
|
Ruby
|
||||||
-----------
|
----
|
||||||
|
|
||||||
Like Perl, Python, and R, Ruby has its own build system for
|
Like Perl, Python, and R, Ruby has its own build system for
|
||||||
installing Ruby gems.
|
installing Ruby gems.
|
||||||
@@ -16,7 +16,7 @@ installing Ruby gems.
|
|||||||
Phases
|
Phases
|
||||||
^^^^^^
|
^^^^^^
|
||||||
|
|
||||||
The ``RubyPackage`` base class provides the following phases that
|
The ``RubyBuilder`` and ``RubyPackage`` base classes provide the following phases that
|
||||||
can be overridden:
|
can be overridden:
|
||||||
|
|
||||||
#. ``build`` - build everything needed to install
|
#. ``build`` - build everything needed to install
|
||||||
|
|||||||
@@ -5,9 +5,9 @@
|
|||||||
|
|
||||||
.. _sconspackage:
|
.. _sconspackage:
|
||||||
|
|
||||||
------------
|
-----
|
||||||
SConsPackage
|
SCons
|
||||||
------------
|
-----
|
||||||
|
|
||||||
SCons is a general-purpose build system that does not rely on
|
SCons is a general-purpose build system that does not rely on
|
||||||
Makefiles to build software. SCons is written in Python, and handles
|
Makefiles to build software. SCons is written in Python, and handles
|
||||||
@@ -42,7 +42,7 @@ As previously mentioned, SCons allows developers to add subcommands like
|
|||||||
$ scons install
|
$ scons install
|
||||||
|
|
||||||
|
|
||||||
To facilitate this, the ``SConsPackage`` base class provides the
|
To facilitate this, the ``SConsBuilder`` and ``SConsPackage`` base classes provide the
|
||||||
following phases:
|
following phases:
|
||||||
|
|
||||||
#. ``build`` - build the package
|
#. ``build`` - build the package
|
||||||
|
|||||||
@@ -5,9 +5,9 @@
|
|||||||
|
|
||||||
.. _sippackage:
|
.. _sippackage:
|
||||||
|
|
||||||
----------
|
---
|
||||||
SIPPackage
|
SIP
|
||||||
----------
|
---
|
||||||
|
|
||||||
SIP is a tool that makes it very easy to create Python bindings for C and C++
|
SIP is a tool that makes it very easy to create Python bindings for C and C++
|
||||||
libraries. It was originally developed to create PyQt, the Python bindings for
|
libraries. It was originally developed to create PyQt, the Python bindings for
|
||||||
@@ -22,7 +22,7 @@ provides support functions to the automatically generated code.
|
|||||||
Phases
|
Phases
|
||||||
^^^^^^
|
^^^^^^
|
||||||
|
|
||||||
The ``SIPPackage`` base class comes with the following phases:
|
The ``SIPBuilder`` and ``SIPPackage`` base classes come with the following phases:
|
||||||
|
|
||||||
#. ``configure`` - configure the package
|
#. ``configure`` - configure the package
|
||||||
#. ``build`` - build the package
|
#. ``build`` - build the package
|
||||||
|
|||||||
@@ -5,9 +5,9 @@
|
|||||||
|
|
||||||
.. _wafpackage:
|
.. _wafpackage:
|
||||||
|
|
||||||
----------
|
---
|
||||||
WafPackage
|
Waf
|
||||||
----------
|
---
|
||||||
|
|
||||||
Like SCons, Waf is a general-purpose build system that does not rely
|
Like SCons, Waf is a general-purpose build system that does not rely
|
||||||
on Makefiles to build software.
|
on Makefiles to build software.
|
||||||
@@ -16,7 +16,7 @@ on Makefiles to build software.
|
|||||||
Phases
|
Phases
|
||||||
^^^^^^
|
^^^^^^
|
||||||
|
|
||||||
The ``WafPackage`` base class comes with the following phases:
|
The ``WafBuilder`` and ``WafPackage`` base classes come with the following phases:
|
||||||
|
|
||||||
#. ``configure`` - configure the project
|
#. ``configure`` - configure the project
|
||||||
#. ``build`` - build the project
|
#. ``build`` - build the project
|
||||||
|
|||||||
@@ -32,6 +32,9 @@
|
|||||||
# If extensions (or modules to document with autodoc) are in another directory,
|
# If extensions (or modules to document with autodoc) are in another directory,
|
||||||
# add these directories to sys.path here. If the directory is relative to the
|
# add these directories to sys.path here. If the directory is relative to the
|
||||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||||
|
link_name = os.path.abspath("_spack_root")
|
||||||
|
if not os.path.exists(link_name):
|
||||||
|
os.symlink(os.path.abspath("../../.."), link_name, target_is_directory=True)
|
||||||
sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external"))
|
sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external"))
|
||||||
sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/pytest-fallback"))
|
sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/pytest-fallback"))
|
||||||
|
|
||||||
@@ -206,6 +209,9 @@ def setup(sphinx):
|
|||||||
# Spack classes that are private and we don't want to expose
|
# Spack classes that are private and we don't want to expose
|
||||||
("py:class", "spack.provider_index._IndexBase"),
|
("py:class", "spack.provider_index._IndexBase"),
|
||||||
("py:class", "spack.repo._PrependFileLoader"),
|
("py:class", "spack.repo._PrependFileLoader"),
|
||||||
|
("py:class", "spack.build_systems._checks.BaseBuilder"),
|
||||||
|
# Spack classes that intersphinx is unable to resolve
|
||||||
|
("py:class", "spack.version.VersionBase"),
|
||||||
]
|
]
|
||||||
|
|
||||||
# The reST default role (used for this markup: `text`) to use for all documents.
|
# The reST default role (used for this markup: `text`) to use for all documents.
|
||||||
|
|||||||
@@ -224,9 +224,9 @@ them). Please note that we currently disable ccache's ``hash_dir``
|
|||||||
feature to avoid an issue with the stage directory (see
|
feature to avoid an issue with the stage directory (see
|
||||||
https://github.com/LLNL/spack/pull/3761#issuecomment-294352232).
|
https://github.com/LLNL/spack/pull/3761#issuecomment-294352232).
|
||||||
|
|
||||||
------------------
|
-----------------------
|
||||||
``shared_linking``
|
``shared_linking:type``
|
||||||
------------------
|
-----------------------
|
||||||
|
|
||||||
Control whether Spack embeds ``RPATH`` or ``RUNPATH`` attributes in ELF binaries
|
Control whether Spack embeds ``RPATH`` or ``RUNPATH`` attributes in ELF binaries
|
||||||
so that they can find their dependencies. Has no effect on macOS.
|
so that they can find their dependencies. Has no effect on macOS.
|
||||||
@@ -245,6 +245,52 @@ the loading object.
|
|||||||
|
|
||||||
DO NOT MIX the two options within the same install tree.
|
DO NOT MIX the two options within the same install tree.
|
||||||
|
|
||||||
|
-----------------------
|
||||||
|
``shared_linking:bind``
|
||||||
|
-----------------------
|
||||||
|
|
||||||
|
This is an *experimental option* that controls whether Spack embeds absolute paths
|
||||||
|
to needed shared libraries in ELF executables and shared libraries on Linux. Setting
|
||||||
|
this option to ``true`` has two advantages:
|
||||||
|
|
||||||
|
1. **Improved startup time**: when running an executable, the dynamic loader does not
|
||||||
|
have to perform a search for needed libraries, they are loaded directly.
|
||||||
|
2. **Reliability**: libraries loaded at runtime are those that were linked to. This
|
||||||
|
minimizes the risk of accidentally picking up system libraries.
|
||||||
|
|
||||||
|
In the current implementation, Spack sets the soname (shared object name) of
|
||||||
|
libraries to their install path upon installation. This has two implications:
|
||||||
|
|
||||||
|
1. binding does not apply to libraries installed *before* the option was enabled;
|
||||||
|
2. toggling the option off does *not* prevent binding of libraries installed when
|
||||||
|
the option was still enabled.
|
||||||
|
|
||||||
|
It is also worth noting that:
|
||||||
|
|
||||||
|
1. Applications relying on ``dlopen(3)`` will continue to work, even when they open
|
||||||
|
a library by name. This is because ``RPATH``\s are also retained in binaries
|
||||||
|
when ``bind`` is enabled.
|
||||||
|
2. ``LD_PRELOAD`` continues to work for the typical use case of overriding
|
||||||
|
symbols, such as preloading a library with a more efficient ``malloc``.
|
||||||
|
However, the preloaded library will be loaded *in addition to*, instead of
|
||||||
|
*in place of* another library with the same name --- this can be problematic
|
||||||
|
in very rare cases where libraries rely on a particular ``init`` or ``fini``
|
||||||
|
order.
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
In some cases packages provide *stub libraries* that only contain an interface
|
||||||
|
for linking, but lack an implementation for runtime. An example of this is
|
||||||
|
``libcuda.so``, provided by the CUDA toolkit; it can be used to link against,
|
||||||
|
but the library needed at runtime is the one installed with the CUDA driver.
|
||||||
|
To avoid binding those libraries, they can be marked as non-bindable using
|
||||||
|
a property in the package:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
class Example(Package):
|
||||||
|
non_bindable_shared_objects = ["libinterface.so"]
|
||||||
|
|
||||||
----------------------
|
----------------------
|
||||||
``terminal_title``
|
``terminal_title``
|
||||||
----------------------
|
----------------------
|
||||||
|
|||||||
@@ -549,7 +549,7 @@ down the problem:
|
|||||||
|
|
||||||
You can see above that the ``build_jobs`` and ``debug`` settings are
|
You can see above that the ``build_jobs`` and ``debug`` settings are
|
||||||
built in and are not overridden by a configuration file. The
|
built in and are not overridden by a configuration file. The
|
||||||
``verify_ssl`` setting comes from the ``--insceure`` option on the
|
``verify_ssl`` setting comes from the ``--insecure`` option on the
|
||||||
command line. ``dirty`` and ``install_tree`` come from the custom
|
command line. ``dirty`` and ``install_tree`` come from the custom
|
||||||
scopes ``./my-scope`` and ``./my-scope-2``, and all other configuration
|
scopes ``./my-scope`` and ``./my-scope-2``, and all other configuration
|
||||||
options come from the default configuration files that ship with Spack.
|
options come from the default configuration files that ship with Spack.
|
||||||
|
|||||||
@@ -149,11 +149,9 @@ grouped by functionality.
|
|||||||
Package-related modules
|
Package-related modules
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
:mod:`spack.package`
|
:mod:`spack.package_base`
|
||||||
Contains the :class:`~spack.package_base.Package` class, which
|
Contains the :class:`~spack.package_base.PackageBase` class, which
|
||||||
is the superclass for all packages in Spack. Methods on ``Package``
|
is the superclass for all packages in Spack.
|
||||||
implement all phases of the :ref:`package lifecycle
|
|
||||||
<package-lifecycle>` and manage the build process.
|
|
||||||
|
|
||||||
:mod:`spack.util.naming`
|
:mod:`spack.util.naming`
|
||||||
Contains functions for mapping between Spack package names,
|
Contains functions for mapping between Spack package names,
|
||||||
|
|||||||
@@ -98,40 +98,42 @@ For example, this command:
|
|||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ spack create http://www.mr511.de/software/libelf-0.8.13.tar.gz
|
$ spack create https://ftp.osuosl.org/pub/blfs/conglomeration/libelf/libelf-0.8.13.tar.gz
|
||||||
|
|
||||||
creates a simple python file:
|
creates a simple python file:
|
||||||
|
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
|
|
||||||
from spack import *
|
from spack.package import *
|
||||||
|
|
||||||
|
|
||||||
class Libelf(Package):
|
class Libelf(AutotoolsPackage):
|
||||||
"""FIXME: Put a proper description of your package here."""
|
"""FIXME: Put a proper description of your package here."""
|
||||||
|
|
||||||
# FIXME: Add a proper url for your package's homepage here.
|
# FIXME: Add a proper url for your package's homepage here.
|
||||||
homepage = "http://www.example.com"
|
homepage = "https://www.example.com"
|
||||||
url = "http://www.mr511.de/software/libelf-0.8.13.tar.gz"
|
url = "https://ftp.osuosl.org/pub/blfs/conglomeration/libelf/libelf-0.8.13.tar.gz"
|
||||||
|
|
||||||
version('0.8.13', '4136d7b4c04df68b686570afa26988ac')
|
# FIXME: Add a list of GitHub accounts to
|
||||||
|
# notify when the package is updated.
|
||||||
|
# maintainers = ["github_user1", "github_user2"]
|
||||||
|
|
||||||
|
version("0.8.13", sha256="591a9b4ec81c1f2042a97aa60564e0cb79d041c52faa7416acb38bc95bd2c76d")
|
||||||
|
|
||||||
# FIXME: Add dependencies if required.
|
# FIXME: Add dependencies if required.
|
||||||
# depends_on('foo')
|
# depends_on("foo")
|
||||||
|
|
||||||
def install(self, spec, prefix):
|
def configure_args(self):
|
||||||
# FIXME: Modify the configure line to suit your build system here.
|
# FIXME: Add arguments other than --prefix
|
||||||
configure('--prefix={0}'.format(prefix))
|
# FIXME: If not needed delete this function
|
||||||
|
args = []
|
||||||
# FIXME: Add logic to build and install here.
|
return args
|
||||||
make()
|
|
||||||
make('install')
|
|
||||||
|
|
||||||
It doesn't take much python coding to get from there to a working
|
It doesn't take much python coding to get from there to a working
|
||||||
package:
|
package:
|
||||||
|
|
||||||
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/libelf/package.py
|
.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/libelf/package.py
|
||||||
:lines: 6-
|
:lines: 5-
|
||||||
|
|
||||||
Spack also provides wrapper functions around common commands like
|
Spack also provides wrapper functions around common commands like
|
||||||
``configure``, ``make``, and ``cmake`` to make writing packages
|
``configure``, ``make``, and ``cmake`` to make writing packages
|
||||||
|
|||||||
@@ -124,88 +124,41 @@ Spack provides two ways of bootstrapping ``clingo``: from pre-built binaries
|
|||||||
(default), or from sources. The fastest way to get started is to bootstrap from
|
(default), or from sources. The fastest way to get started is to bootstrap from
|
||||||
pre-built binaries.
|
pre-built binaries.
|
||||||
|
|
||||||
.. note::
|
The first time you concretize a spec, Spack will bootstrap automatically:
|
||||||
|
|
||||||
When bootstrapping from pre-built binaries, Spack currently requires
|
|
||||||
``patchelf`` on Linux and ``otool`` on macOS. If ``patchelf`` is not in the
|
|
||||||
``PATH``, Spack will build it from sources, and a C++ compiler is required.
|
|
||||||
|
|
||||||
The first time you concretize a spec, Spack will bootstrap in the background:
|
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ time spack spec zlib
|
$ spack spec zlib
|
||||||
|
==> Bootstrapping clingo from pre-built binaries
|
||||||
|
==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.4/build_cache/linux-centos7-x86_64-gcc-10.2.1-clingo-bootstrap-spack-ba5ijauisd3uuixtmactc36vps7yfsrl.spec.json
|
||||||
|
==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.4/build_cache/linux-centos7-x86_64/gcc-10.2.1/clingo-bootstrap-spack/linux-centos7-x86_64-gcc-10.2.1-clingo-bootstrap-spack-ba5ijauisd3uuixtmactc36vps7yfsrl.spack
|
||||||
|
==> Installing "clingo-bootstrap@spack%gcc@10.2.1~docs~ipo+python+static_libstdcpp build_type=Release arch=linux-centos7-x86_64" from a buildcache
|
||||||
|
==> Bootstrapping patchelf from pre-built binaries
|
||||||
|
==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.4/build_cache/linux-centos7-x86_64-gcc-10.2.1-patchelf-0.16.1-p72zyan5wrzuabtmzq7isa5mzyh6ahdp.spec.json
|
||||||
|
==> Fetching https://mirror.spack.io/bootstrap/github-actions/v0.4/build_cache/linux-centos7-x86_64/gcc-10.2.1/patchelf-0.16.1/linux-centos7-x86_64-gcc-10.2.1-patchelf-0.16.1-p72zyan5wrzuabtmzq7isa5mzyh6ahdp.spack
|
||||||
|
==> Installing "patchelf@0.16.1%gcc@10.2.1 ldflags="-static-libstdc++ -static-libgcc" build_system=autotools arch=linux-centos7-x86_64" from a buildcache
|
||||||
Input spec
|
Input spec
|
||||||
--------------------------------
|
--------------------------------
|
||||||
zlib
|
zlib
|
||||||
|
|
||||||
Concretized
|
Concretized
|
||||||
--------------------------------
|
--------------------------------
|
||||||
zlib@1.2.11%gcc@7.5.0+optimize+pic+shared arch=linux-ubuntu18.04-zen
|
zlib@1.2.13%gcc@9.4.0+optimize+pic+shared build_system=makefile arch=linux-ubuntu20.04-icelake
|
||||||
|
|
||||||
real 0m20.023s
|
|
||||||
user 0m18.351s
|
|
||||||
sys 0m0.784s
|
|
||||||
|
|
||||||
After this command you'll see that ``clingo`` has been installed for Spack's own use:
|
|
||||||
|
|
||||||
.. code-block:: console
|
|
||||||
|
|
||||||
$ spack find -b
|
|
||||||
==> Showing internal bootstrap store at "/root/.spack/bootstrap/store"
|
|
||||||
==> 3 installed packages
|
|
||||||
-- linux-rhel5-x86_64 / gcc@9.3.0 -------------------------------
|
|
||||||
clingo-bootstrap@spack python@3.6
|
|
||||||
|
|
||||||
-- linux-ubuntu18.04-zen / gcc@7.5.0 ----------------------------
|
|
||||||
patchelf@0.13
|
|
||||||
|
|
||||||
Subsequent calls to the concretizer will then be much faster:
|
|
||||||
|
|
||||||
.. code-block:: console
|
|
||||||
|
|
||||||
$ time spack spec zlib
|
|
||||||
[ ... ]
|
|
||||||
real 0m0.490s
|
|
||||||
user 0m0.431s
|
|
||||||
sys 0m0.041s
|
|
||||||
|
|
||||||
|
|
||||||
If for security concerns you cannot bootstrap ``clingo`` from pre-built
|
If for security concerns you cannot bootstrap ``clingo`` from pre-built
|
||||||
binaries, you have to mark this bootstrapping method as untrusted. This makes
|
binaries, you have to disable fetching the binaries we generated with Github Actions.
|
||||||
Spack fall back to bootstrapping from sources:
|
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ spack bootstrap untrust github-actions-v0.2
|
$ spack bootstrap disable github-actions-v0.4
|
||||||
==> "github-actions-v0.2" is now untrusted and will not be used for bootstrapping
|
==> "github-actions-v0.4" is now disabled and will not be used for bootstrapping
|
||||||
|
$ spack bootstrap disable github-actions-v0.3
|
||||||
|
==> "github-actions-v0.3" is now disabled and will not be used for bootstrapping
|
||||||
|
|
||||||
You can verify that the new settings are effective with:
|
You can verify that the new settings are effective with:
|
||||||
|
|
||||||
.. code-block:: console
|
.. command-output:: spack bootstrap list
|
||||||
|
|
||||||
$ spack bootstrap list
|
|
||||||
Name: github-actions-v0.2 UNTRUSTED
|
|
||||||
|
|
||||||
Type: buildcache
|
|
||||||
|
|
||||||
Info:
|
|
||||||
url: https://mirror.spack.io/bootstrap/github-actions/v0.2
|
|
||||||
homepage: https://github.com/spack/spack-bootstrap-mirrors
|
|
||||||
releases: https://github.com/spack/spack-bootstrap-mirrors/releases
|
|
||||||
|
|
||||||
Description:
|
|
||||||
Buildcache generated from a public workflow using Github Actions.
|
|
||||||
The sha256 checksum of binaries is checked before installation.
|
|
||||||
|
|
||||||
[ ... ]
|
|
||||||
|
|
||||||
Name: spack-install TRUSTED
|
|
||||||
|
|
||||||
Type: install
|
|
||||||
|
|
||||||
Description:
|
|
||||||
Specs built from sources by Spack. May take a long time.
|
|
||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
@@ -235,9 +188,7 @@ under the ``${HOME}/.spack`` directory. The software installed there can be quer
|
|||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ spack find --bootstrap
|
$ spack -b find
|
||||||
==> Showing internal bootstrap store at "/home/spack/.spack/bootstrap/store"
|
|
||||||
==> 3 installed packages
|
|
||||||
-- linux-ubuntu18.04-x86_64 / gcc@10.1.0 ------------------------
|
-- linux-ubuntu18.04-x86_64 / gcc@10.1.0 ------------------------
|
||||||
clingo-bootstrap@spack python@3.6.9 re2c@1.2.1
|
clingo-bootstrap@spack python@3.6.9 re2c@1.2.1
|
||||||
|
|
||||||
@@ -246,7 +197,7 @@ In case it's needed the bootstrap store can also be cleaned with:
|
|||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ spack clean -b
|
$ spack clean -b
|
||||||
==> Removing software in "/home/spack/.spack/bootstrap/store"
|
==> Removing bootstrapped software and configuration in "/home/spack/.spack/bootstrap"
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^
|
||||||
Check Installation
|
Check Installation
|
||||||
|
|||||||
BIN   lib/spack/docs/images/adapter.png (new file, 658 KiB)
BIN   lib/spack/docs/images/builder_package_architecture.png (new file, 449 KiB)
BIN   lib/spack/docs/images/builder_phases.png (new file, 128 KiB)
BIN   lib/spack/docs/images/installation_pipeline.png (new file, 126 KiB)
BIN   lib/spack/docs/images/original_package_architecture.png (new file, 35 KiB)
3092  lib/spack/docs/images/packaging.excalidrawlib (new file, diff too large to show)
@@ -34,24 +34,155 @@ ubiquitous in the scientific software community. Second, it's a modern
|
|||||||
language and has many powerful features to help make package writing
|
language and has many powerful features to help make package writing
|
||||||
easy.
|
easy.
|
||||||
|
|
||||||
---------------------------
|
|
||||||
Creating & editing packages
|
.. _installation_procedure:
|
||||||
---------------------------
|
|
||||||
|
--------------------------------------
|
||||||
|
Overview of the installation procedure
|
||||||
|
--------------------------------------
|
||||||
|
|
||||||
|
Whenever Spack installs software, it goes through a series of predefined steps:
|
||||||
|
|
||||||
|
.. image:: images/installation_pipeline.png
|
||||||
|
:scale: 60 %
|
||||||
|
:align: center
|
||||||
|
|
||||||
|
All these steps are influenced by the metadata in each ``package.py`` and
|
||||||
|
by the current Spack configuration.
|
||||||
|
Since build systems are different from one another, the execution of the
|
||||||
|
last block in the figure is further expanded in a build-system-specific way.
|
||||||
|
An example for ``CMake`` is:
|
||||||
|
|
||||||
|
.. image:: images/builder_phases.png
|
||||||
|
:align: center
|
||||||
|
:scale: 60 %
|
||||||
|
|
||||||
|
The predefined steps for each build system are called "phases".
|
||||||
|
In general, the name and order in which the phases will be executed can be
|
||||||
|
obtained by either reading the API docs at :py:mod:`~.spack.build_systems`, or
|
||||||
|
using the ``spack info`` command:
|
||||||
|
|
||||||
|
.. code-block:: console
|
||||||
|
:emphasize-lines: 13,14
|
||||||
|
|
||||||
|
$ spack info --phases m4
|
||||||
|
AutotoolsPackage: m4
|
||||||
|
Homepage: https://www.gnu.org/software/m4/m4.html
|
||||||
|
|
||||||
|
Safe versions:
|
||||||
|
1.4.17 ftp://ftp.gnu.org/gnu/m4/m4-1.4.17.tar.gz
|
||||||
|
|
||||||
|
Variants:
|
||||||
|
Name Default Description
|
||||||
|
|
||||||
|
sigsegv on Build the libsigsegv dependency
|
||||||
|
|
||||||
|
Installation Phases:
|
||||||
|
autoreconf configure build install
|
||||||
|
|
||||||
|
Build Dependencies:
|
||||||
|
libsigsegv
|
||||||
|
|
||||||
|
...
|
||||||
|
|
||||||
|
An extensive list of available build systems and phases is provided in :ref:`installation_process`.
|
||||||
|
|
||||||
|
|
||||||
|
------------------------
|
||||||
|
Writing a package recipe
|
||||||
|
------------------------
|
||||||
|
|
||||||
|
Since v0.19, Spack supports two ways of writing a package recipe. The most commonly used is to encode both the metadata
|
||||||
|
(directives, etc.) and the build behavior in a single class, as shown in the following example:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
class Openjpeg(CMakePackage):
|
||||||
|
"""OpenJPEG is an open-source JPEG 2000 codec written in C language"""
|
||||||
|
|
||||||
|
homepage = "https://github.com/uclouvain/openjpeg"
|
||||||
|
url = "https://github.com/uclouvain/openjpeg/archive/v2.3.1.tar.gz"
|
||||||
|
|
||||||
|
version("2.4.0", sha256="8702ba68b442657f11aaeb2b338443ca8d5fb95b0d845757968a7be31ef7f16d")
|
||||||
|
|
||||||
|
variant("codec", default=False, description="Build the CODEC executables")
|
||||||
|
depends_on("libpng", when="+codec")
|
||||||
|
|
||||||
|
def url_for_version(self, version):
|
||||||
|
if version >= Version("2.1.1"):
|
||||||
|
return super(Openjpeg, self).url_for_version(version)
|
||||||
|
url_fmt = "https://github.com/uclouvain/openjpeg/archive/version.{0}.tar.gz"
|
||||||
|
return url_fmt.format(version)
|
||||||
|
|
||||||
|
def cmake_args(self):
|
||||||
|
args = [
|
||||||
|
self.define_from_variant("BUILD_CODEC", "codec"),
|
||||||
|
self.define("BUILD_MJ2", False),
|
||||||
|
self.define("BUILD_THIRDPARTY", False),
|
||||||
|
]
|
||||||
|
return args
|
||||||
|
|
||||||
|
A package encoded with a single class is backward compatible with versions of Spack
|
||||||
|
lower than v0.19, and so are custom repositories containing only recipes of this kind.
|
||||||
|
The downside is that *this format doesn't allow packagers to use more than one build system in a single recipe*.
|
||||||
|
|
||||||
|
To do that, we have to resort to the second way Spack has of writing packages, which involves writing a
|
||||||
|
builder class explicitly. Using the same example as above, this reads:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
class Openjpeg(CMakePackage):
|
||||||
|
"""OpenJPEG is an open-source JPEG 2000 codec written in C language"""
|
||||||
|
|
||||||
|
homepage = "https://github.com/uclouvain/openjpeg"
|
||||||
|
url = "https://github.com/uclouvain/openjpeg/archive/v2.3.1.tar.gz"
|
||||||
|
|
||||||
|
version("2.4.0", sha256="8702ba68b442657f11aaeb2b338443ca8d5fb95b0d845757968a7be31ef7f16d")
|
||||||
|
|
||||||
|
variant("codec", default=False, description="Build the CODEC executables")
|
||||||
|
depends_on("libpng", when="+codec")
|
||||||
|
|
||||||
|
def url_for_version(self, version):
|
||||||
|
if version >= Version("2.1.1"):
|
||||||
|
return super(Openjpeg, self).url_for_version(version)
|
||||||
|
url_fmt = "https://github.com/uclouvain/openjpeg/archive/version.{0}.tar.gz"
|
||||||
|
return url_fmt.format(version)
|
||||||
|
|
||||||
|
class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
|
||||||
|
def cmake_args(self):
|
||||||
|
args = [
|
||||||
|
self.define_from_variant("BUILD_CODEC", "codec"),
|
||||||
|
self.define("BUILD_MJ2", False),
|
||||||
|
self.define("BUILD_THIRDPARTY", False),
|
||||||
|
]
|
||||||
|
return args
|
||||||
|
|
||||||
|
This way of writing packages allows extending the recipe to support multiple build systems,
|
||||||
|
see :ref:`multiple_build_systems` for more details. The downside is that recipes of this kind
|
||||||
|
are only understood by Spack v0.19 and later. More information on the internal architecture of
|
||||||
|
Spack can be found at :ref:`package_class_structure`.
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
|
||||||
|
If a builder is implemented in ``package.py``, all build-specific methods must be moved
|
||||||
|
to the builder. This means that if you have a package like
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
class Foo(CMakePackage):
|
||||||
|
def cmake_args(self):
|
||||||
|
...
|
||||||
|
|
||||||
|
and you add a builder to the ``package.py``, you must move ``cmake_args`` to the builder.
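
A minimal sketch of that move (method bodies elided) looks like:

.. code-block:: python

   class Foo(CMakePackage):
       # directives and other metadata stay on the package class
       ...


   class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
       # build-specific methods such as cmake_args move to the builder
       def cmake_args(self):
           ...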
|
||||||
|
|
||||||
.. _cmd-spack-create:
|
.. _cmd-spack-create:
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^
|
---------------------
|
||||||
``spack create``
|
Creating new packages
|
||||||
^^^^^^^^^^^^^^^^
|
---------------------
|
||||||
|
|
||||||
The ``spack create`` command creates a directory with the package name and
|
To help create a new package, Spack provides a command that generates a ``package.py``
|
||||||
generates a ``package.py`` file with a boilerplate package template. If given
|
file in an existing repository, with a boilerplate package template. Here's an example:
|
||||||
a URL pointing to a tarball or other software archive, ``spack create`` is
|
|
||||||
smart enough to determine basic information about the package, including its name
|
|
||||||
and build system. In most cases, ``spack create`` plus a few modifications is
|
|
||||||
all you need to get a package working.
|
|
||||||
|
|
||||||
Here's an example:
|
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
@@ -87,23 +218,6 @@ You do not *have* to download all of the versions up front. You can
|
|||||||
always choose to download just one tarball initially, and run
|
always choose to download just one tarball initially, and run
|
||||||
:ref:`cmd-spack-checksum` later if you need more versions.
|
:ref:`cmd-spack-checksum` later if you need more versions.
|
||||||
|
|
||||||
Let's say you download 3 tarballs:
|
|
||||||
|
|
||||||
.. code-block:: console
|
|
||||||
|
|
||||||
How many would you like to checksum? (default is 1, q to abort) 3
|
|
||||||
==> Downloading...
|
|
||||||
==> Fetching https://gmplib.org/download/gmp/gmp-6.1.2.tar.bz2
|
|
||||||
######################################################################## 100.0%
|
|
||||||
==> Fetching https://gmplib.org/download/gmp/gmp-6.1.1.tar.bz2
|
|
||||||
######################################################################## 100.0%
|
|
||||||
==> Fetching https://gmplib.org/download/gmp/gmp-6.1.0.tar.bz2
|
|
||||||
######################################################################## 100.0%
|
|
||||||
==> Checksummed 3 versions of gmp:
|
|
||||||
==> This package looks like it uses the autotools build system
|
|
||||||
==> Created template for gmp package
|
|
||||||
==> Created package file: /Users/Adam/spack/var/spack/repos/builtin/packages/gmp/package.py
|
|
||||||
|
|
||||||
Spack automatically creates a directory in the appropriate repository,
|
Spack automatically creates a directory in the appropriate repository,
|
||||||
generates a boilerplate template for your package, and opens up the new
|
generates a boilerplate template for your package, and opens up the new
|
||||||
``package.py`` in your favorite ``$EDITOR``:
|
``package.py`` in your favorite ``$EDITOR``:
|
||||||
@@ -111,6 +225,14 @@ generates a boilerplate template for your package, and opens up the new
|
|||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
:linenos:
|
:linenos:
|
||||||
|
|
||||||
|
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||||
|
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
|
||||||
|
# ----------------------------------------------------------------------------
|
||||||
|
# If you submit this package back to Spack as a pull request,
|
||||||
|
# please first remove this boilerplate and all FIXME comments.
|
||||||
#
|
#
|
||||||
# This is a template package file for Spack. We've put "FIXME"
|
# This is a template package file for Spack. We've put "FIXME"
|
||||||
# next to all the things you'll want to change. Once you've handled
|
# next to all the things you'll want to change. Once you've handled
|
||||||
@@ -123,9 +245,8 @@ generates a boilerplate template for your package, and opens up the new
|
|||||||
# spack edit gmp
|
# spack edit gmp
|
||||||
#
|
#
|
||||||
# See the Spack documentation for more information on packaging.
|
# See the Spack documentation for more information on packaging.
|
||||||
# If you submit this package back to Spack as a pull request,
|
# ----------------------------------------------------------------------------
|
||||||
# please first remove this boilerplate and all FIXME comments.
|
import spack.build_systems.autotools
|
||||||
#
|
|
||||||
from spack.package import *
|
from spack.package import *
|
||||||
|
|
||||||
|
|
||||||
@@ -133,19 +254,17 @@ generates a boilerplate template for your package, and opens up the new
|
|||||||
"""FIXME: Put a proper description of your package here."""
|
"""FIXME: Put a proper description of your package here."""
|
||||||
|
|
||||||
# FIXME: Add a proper url for your package's homepage here.
|
# FIXME: Add a proper url for your package's homepage here.
|
||||||
homepage = "http://www.example.com"
|
homepage = "https://www.example.com"
|
||||||
url = "https://gmplib.org/download/gmp/gmp-6.1.2.tar.bz2"
|
url = "https://gmplib.org/download/gmp/gmp-6.1.2.tar.bz2"
|
||||||
|
|
||||||
# FIXME: Add a list of GitHub accounts to
|
# FIXME: Add a list of GitHub accounts to
|
||||||
# notify when the package is updated.
|
# notify when the package is updated.
|
||||||
# maintainers = ['github_user1', 'github_user2']
|
# maintainers = ["github_user1", "github_user2"]
|
||||||
|
|
||||||
version('6.1.2', '8ddbb26dc3bd4e2302984debba1406a5')
|
version("6.2.1", sha256="eae9326beb4158c386e39a356818031bd28f3124cf915f8c5b1dc4c7a36b4d7c")
|
||||||
version('6.1.1', '4c175f86e11eb32d8bf9872ca3a8e11d')
|
|
||||||
version('6.1.0', '86ee6e54ebfc4a90b643a65e402c4048')
|
|
||||||
|
|
||||||
# FIXME: Add dependencies if required.
|
# FIXME: Add dependencies if required.
|
||||||
# depends_on('foo')
|
# depends_on("foo")
|
||||||
|
|
||||||
def configure_args(self):
|
def configure_args(self):
|
||||||
# FIXME: Add arguments other than --prefix
|
# FIXME: Add arguments other than --prefix
|
||||||
@@ -154,15 +273,16 @@ generates a boilerplate template for your package, and opens up the new
|
|||||||
return args
|
return args
|
||||||
|
|
||||||
The tedious stuff (creating the class, checksumming archives) has been
|
The tedious stuff (creating the class, checksumming archives) has been
|
||||||
done for you. You'll notice that ``spack create`` correctly detected that
|
done for you. Spack correctly detected that ``gmp`` uses the ``autotools``
|
||||||
``gmp`` uses the Autotools build system. It created a new ``Gmp`` package
|
build system, so it created a new ``Gmp`` package that subclasses the
|
||||||
that subclasses the ``AutotoolsPackage`` base class. This base class
|
``AutotoolsPackage`` base class.
|
||||||
provides basic installation methods common to all Autotools packages:
|
|
||||||
|
The default installation procedure for a package subclassing the ``AutotoolsPackage``
|
||||||
|
is to go through the typical process of:
|
||||||
|
|
||||||
.. code-block:: bash
|
.. code-block:: bash
|
||||||
|
|
||||||
./configure --prefix=/path/to/installation/directory
|
./configure --prefix=/path/to/installation/directory
|
||||||
|
|
||||||
make
|
make
|
||||||
make check
|
make check
|
||||||
make install
|
make install
|
||||||
@@ -209,12 +329,14 @@ The rest of the tasks you need to do are as follows:
|
|||||||
Your new package may require specific flags during ``configure``.
|
Your new package may require specific flags during ``configure``.
|
||||||
These can be added via ``configure_args``. Specifics will differ
|
These can be added via ``configure_args``. Specifics will differ
|
||||||
depending on the package and its build system.
|
depending on the package and its build system.
|
||||||
:ref:`Implementing the install method <install-method>` is
|
:ref:`installation_process` is
|
||||||
covered in detail later.
|
covered in detail later.
|
||||||
|
|
||||||
Passing a URL to ``spack create`` is a convenient and easy way to get
|
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
a basic package template, but what if your software is licensed and
|
Non-downloadable software
|
||||||
cannot be downloaded from a URL? You can still create a boilerplate
|
^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
If your software cannot be downloaded from a URL you can still create a boilerplate
|
||||||
``package.py`` by telling ``spack create`` what name you want to use:
|
``package.py`` by telling ``spack create`` what name you want to use:
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
@@ -223,40 +345,23 @@ cannot be downloaded from a URL? You can still create a boilerplate
|
|||||||
|
|
||||||
This will create a simple ``intel`` package with an ``install()``
|
This will create a simple ``intel`` package with an ``install()``
|
||||||
method that you can craft to install your package.
|
method that you can craft to install your package.
|
||||||
|
Likewise, you can force the build system to be used with ``--template`` and,
|
||||||
What if ``spack create <url>`` guessed the wrong name or build system?
|
if needed, you can overwrite a package already in the repository
|
||||||
For example, if your package uses the Autotools build system but does
|
with ``--force``:
|
||||||
not come with a ``configure`` script, Spack won't realize it uses
|
|
||||||
Autotools. You can overwrite the old package with ``--force`` and specify
|
|
||||||
a name with ``--name`` or a build system template to use with ``--template``:
|
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ spack create --name gmp https://gmplib.org/download/gmp/gmp-6.1.2.tar.bz2
|
$ spack create --name gmp https://gmplib.org/download/gmp/gmp-6.1.2.tar.bz2
|
||||||
$ spack create --force --template autotools https://gmplib.org/download/gmp/gmp-6.1.2.tar.bz2
|
$ spack create --force --template autotools https://gmplib.org/download/gmp/gmp-6.1.2.tar.bz2
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
If you are creating a package that uses the Autotools build system
|
|
||||||
but does not come with a ``configure`` script, you'll need to add an
|
|
||||||
``autoreconf`` method to your package that explains how to generate
|
|
||||||
the ``configure`` script. You may also need the following dependencies:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
depends_on('autoconf', type='build')
|
|
||||||
depends_on('automake', type='build')
|
|
||||||
depends_on('libtool', type='build')
|
|
||||||
depends_on('m4', type='build')
|
|
||||||
|
|
||||||
A complete list of available build system templates can be found by running
|
A complete list of available build system templates can be found by running
|
||||||
``spack create --help``.
|
``spack create --help``.
|
||||||
|
|
||||||
.. _cmd-spack-edit:
|
.. _cmd-spack-edit:
|
||||||
|
|
||||||
^^^^^^^^^^^^^^
|
-------------------------
|
||||||
``spack edit``
|
Editing existing packages
|
||||||
^^^^^^^^^^^^^^
|
-------------------------
|
||||||
|
|
||||||
One of the easiest ways to learn how to write packages is to look at
|
One of the easiest ways to learn how to write packages is to look at
|
||||||
existing ones. You can edit a package file by name with the ``spack
|
existing ones. You can edit a package file by name with the ``spack
|
||||||
@@ -266,10 +371,15 @@ edit`` command:
|
|||||||
|
|
||||||
$ spack edit gmp
|
$ spack edit gmp
|
||||||
|
|
||||||
So, if you used ``spack create`` to create a package, then saved and
|
If you used ``spack create`` to create a package, you can get back to
|
||||||
closed the resulting file, you can get back to it with ``spack edit``.
|
it later with ``spack edit``. For instance, the ``gmp`` package actually
|
||||||
The ``gmp`` package actually lives in
|
lives in:
|
||||||
``$SPACK_ROOT/var/spack/repos/builtin/packages/gmp/package.py``,
|
|
||||||
|
.. code-block:: console
|
||||||
|
|
||||||
|
$ spack location -p gmp
|
||||||
|
${SPACK_ROOT}/var/spack/repos/builtin/packages/gmp/package.py
|
||||||
|
|
||||||
but ``spack edit`` provides a much simpler shortcut and saves you the
|
but ``spack edit`` provides a much simpler shortcut and saves you the
|
||||||
trouble of typing the full path.
|
trouble of typing the full path.
|
||||||
|
|
||||||
@@ -2422,7 +2532,7 @@ Spack provides a mechanism for dependencies to influence the
|
|||||||
environment of their dependents by overriding the
|
environment of their dependents by overriding the
|
||||||
:meth:`setup_dependent_run_environment <spack.package_base.PackageBase.setup_dependent_run_environment>`
|
:meth:`setup_dependent_run_environment <spack.package_base.PackageBase.setup_dependent_run_environment>`
|
||||||
or the
|
or the
|
||||||
:meth:`setup_dependent_build_environment <spack.package_base.PackageBase.setup_dependent_build_environment>`
|
:meth:`setup_dependent_build_environment <spack.builder.Builder.setup_dependent_build_environment>`
|
||||||
methods.
|
methods.
|
||||||
The Qt package, for instance, uses this call:
|
The Qt package, for instance, uses this call:
|
||||||
|
|
||||||
@@ -3280,67 +3390,91 @@ the Python extensions provided by them: once for ``+python`` and once
|
|||||||
for ``~python``. Other than using a little extra disk space, that
|
for ``~python``. Other than using a little extra disk space, that
|
||||||
solution has no serious problems.
|
solution has no serious problems.
|
||||||
|
|
||||||
.. _installation_procedure:
|
.. _installation_process:
|
||||||
|
|
||||||
---------------------------------------
|
--------------------------------
|
||||||
Implementing the installation procedure
|
Overriding build system defaults
|
||||||
---------------------------------------
|
--------------------------------
|
||||||
|
|
||||||
The last element of a package is its **installation procedure**. This is
|
.. note::
|
||||||
where the real work of installation happens, and it's the main part of
|
|
||||||
the package you'll need to customize for each piece of software.
|
|
||||||
|
|
||||||
Defining an installation procedure means overriding a set of methods or attributes
|
If you code a single class in ``package.py``, all the functions shown in the table below
|
||||||
that will be called at some point during the installation of the package.
|
can be implemented with the same signature on the ``*Package`` instead of the corresponding builder.
|
||||||
The package base class, usually specialized for a given build system, determines the
|
|
||||||
actual set of entities available for overriding.
|
|
||||||
The classes that are currently provided by Spack are:
|
Most of the time the default implementation of methods or attributes in build system base classes
|
||||||
|
is what a packager needs, and just a very few entities need to be overwritten. Typically we just
|
||||||
|
need to override methods like ``configure_args``:
|
||||||
|
|
||||||
|
.. code-block:: python
|
||||||
|
|
||||||
|
def configure_args(self):
|
||||||
|
args = ["--enable-cxx"] + self.enable_or_disable("libs")
|
||||||
|
if "libs=static" in self.spec:
|
||||||
|
args.append("--with-pic")
|
||||||
|
return args
|
||||||
|
|
||||||
|
The actual set of entities available for overriding in ``package.py`` depend on
|
||||||
|
the build system. The build systems currently supported by Spack are:
|
||||||
|
|
||||||
+----------------------------------------------------------+----------------------------------+
|
+----------------------------------------------------------+----------------------------------+
|
||||||
| **Base Class** | **Purpose** |
|
| **API docs** | **Description** |
|
||||||
+==========================================================+==================================+
|
+==========================================================+==================================+
|
||||||
| :class:`~spack.package_base.Package` | General base class not |
|
| :class:`~spack.build_systems.generic` | Generic build system without any |
|
||||||
| | specialized for any build system |
|
| | base implementation |
|
||||||
+----------------------------------------------------------+----------------------------------+
|
+----------------------------------------------------------+----------------------------------+
|
||||||
| :class:`~spack.build_systems.makefile.MakefilePackage` | Specialized class for packages |
|
| :class:`~spack.build_systems.makefile` | Specialized build system for |
|
||||||
| | built invoking |
|
| | software built invoking |
|
||||||
| | hand-written Makefiles |
|
| | hand-written Makefiles |
|
||||||
+----------------------------------------------------------+----------------------------------+
|
+----------------------------------------------------------+----------------------------------+
|
||||||
| :class:`~spack.build_systems.autotools.AutotoolsPackage` | Specialized class for packages |
|
| :class:`~spack.build_systems.autotools` | Specialized build system for |
|
||||||
| | built using GNU Autotools |
|
| | software built using |
|
||||||
|
| | GNU Autotools |
|
||||||
+----------------------------------------------------------+----------------------------------+
|
+----------------------------------------------------------+----------------------------------+
|
||||||
| :class:`~spack.build_systems.cmake.CMakePackage` | Specialized class for packages |
|
| :class:`~spack.build_systems.cmake` | Specialized build system for |
| | software built using CMake |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.maven` | Specialized build system for |
| | software built using Maven |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.meson` | Specialized build system for |
| | software built using Meson |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.nmake` | Specialized build system for |
| | software built using NMake |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.qmake` | Specialized build system for |
| | software built using QMake |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.scons` | Specialized build system for |
| | software built using SCons |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.waf` | Specialized build system for |
| | software built using Waf |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.r` | Specialized build system for |
| | R extensions |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.octave` | Specialized build system for |
| | Octave packages |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.python` | Specialized build system for |
| | Python extensions |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.perl` | Specialized build system for |
| | Perl extensions |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.ruby` | Specialized build system for |
| | Ruby extensions |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.intel` | Specialized build system for |
| | licensed Intel software |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.oneapi` | Specialized build system for |
| | Intel oneAPI software |
+----------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.aspell_dict` | Specialized build system for |
| | Aspell dictionaries |
+----------------------------------------------------------+----------------------------------+

@@ -3353,52 +3487,17 @@ The classes that are currently provided by Spack are:
For example, a Python extension installed with CMake would ``extends('python')`` and
subclass from :class:`~spack.build_systems.cmake.CMakePackage`.

^^^^^^^^^^^^^^^^^^^^^^^^^^
Overriding builder methods
^^^^^^^^^^^^^^^^^^^^^^^^^^

Build-system "phases" have default implementations that fit most of the common cases:

.. literalinclude:: _spack_root/lib/spack/spack/build_systems/autotools.py
   :pyobject: AutotoolsBuilder.configure
   :linenos:

It is usually sufficient for a packager to override a few
build system specific helper methods or attributes to provide, for instance,
configure arguments:
@@ -3406,31 +3505,31 @@ configure arguments:
   :pyobject: M4.configure_args
   :linenos:

Each specific build system has a list of attributes and methods that can be overridden to
fine-tune the installation of a package without overriding an entire phase. For
more information on them, the place to go is the API docs of the :py:mod:`~.spack.build_systems`
module.

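For illustration, a minimal sketch of such an override (the package name, variant, and
flag below are hypothetical, not taken from a real recipe) could look like:

.. code-block:: python

   class Libfoo(AutotoolsPackage):
       """Hypothetical Autotools-based package, shown only to illustrate the pattern."""

       variant("shared", default=True, description="Build shared libraries")

       def configure_args(self):
           # Extra arguments appended to ./configure by the default configure phase;
           # enable_or_disable() maps the variant onto --enable-shared/--disable-shared.
           return self.enable_or_disable("shared")
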
^^^^^^^^^^^^^^^^^^^^^^^^^^
Overriding an entire phase
^^^^^^^^^^^^^^^^^^^^^^^^^^

Sometimes it is necessary to override an entire phase. If the ``package.py`` contains
a single class recipe (see :ref:`package_class_structure`), then the signature for a
phase is:

.. code-block:: python

   class Openjpeg(CMakePackage):
       def install(self, spec, prefix):
           ...

regardless of the build system. The arguments for the phase are:

``self``
    This is the package object, which extends ``CMakePackage``.
    For API docs on Package objects, see
    :py:class:`Package <spack.package_base.PackageBase>`.

``spec``
    This is the concrete spec object created by Spack from an
@@ -3445,12 +3544,111 @@ for the install phase is:
The arguments ``spec`` and ``prefix`` are passed only for convenience, as they always
correspond to ``self.spec`` and ``self.spec.prefix`` respectively.

If the ``package.py`` encodes builders explicitly, the signature for a phase changes slightly:

.. code-block:: python

   class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
       def install(self, pkg, spec, prefix):
           ...

In this case the package is passed as the second argument, and ``self`` is the builder instance.

.. _multiple_build_systems:

^^^^^^^^^^^^^^^^^^^^^^
Multiple build systems
^^^^^^^^^^^^^^^^^^^^^^

There are cases where a piece of software actively supports two build systems, or changes build systems
as it evolves, or needs different build systems on different platforms. Spack allows dealing with
these cases natively, if a recipe is written using builders explicitly.

For instance, software that supports two build systems unconditionally should derive from
both ``*Package`` base classes, and declare the possible use of multiple build systems using
a directive:

.. code-block:: python

   class ArpackNg(CMakePackage, AutotoolsPackage):

       build_system("cmake", "autotools", default="cmake")

In this case the software can be built with both ``autotools`` and ``cmake``. Since the package
supports multiple build systems, it is necessary to declare which one is the default. The ``package.py``
will likely contain some overriding of default builder methods:

.. code-block:: python

   class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
       def cmake_args(self):
           pass


   class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder):
       def configure_args(self):
           pass

In more complex cases it might happen that the build system changes according to certain conditions,
for instance across versions. That can be expressed with conditional variant values:

.. code-block:: python

   class ArpackNg(CMakePackage, AutotoolsPackage):

       build_system(
           conditional("cmake", when="@0.64:"),
           conditional("autotools", when="@:0.63"),
           default="cmake",
       )

In the example the directive imposes a change from ``Autotools`` to ``CMake`` going
from ``v0.63`` to ``v0.64``.

^^^^^^^^^^^^^^^^^^
Mixin base classes
^^^^^^^^^^^^^^^^^^

Besides build systems, there are other cases where common metadata and behavior can be extracted
and reused by many packages. For instance, packages that depend on ``Cuda`` or ``Rocm`` share
common dependencies and constraints. To factor these attributes into a single place, Spack provides
a few mixin classes in the ``spack.build_systems`` module:

+---------------------------------------------------------------+----------------------------------+
| **API docs** | **Description** |
+===============================================================+==================================+
| :class:`~spack.build_systems.cuda.CudaPackage` | A helper class for packages that |
| | use CUDA |
+---------------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.rocm.ROCmPackage` | A helper class for packages that |
| | use ROCm |
+---------------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.gnu.GNUMirrorPackage` | A helper class for GNU packages |
+---------------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.python.PythonExtension` | A helper class for Python |
| | extensions |
+---------------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.sourceforge.SourceforgePackage` | A helper class for packages |
| | from sourceforge.org |
+---------------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.sourceware.SourcewarePackage` | A helper class for packages |
| | from sourceware.org |
+---------------------------------------------------------------+----------------------------------+
| :class:`~spack.build_systems.xorg.XorgPackage` | A helper class for x.org |
| | packages |
+---------------------------------------------------------------+----------------------------------+

These classes should be used by adding them to the inheritance tree of the package that needs them,
for instance:

.. code-block:: python

   class Cp2k(MakefilePackage, CudaPackage):
       """CP2K is a quantum chemistry and solid state physics software package
       that can perform atomistic simulations of solid state, liquid, molecular,
       periodic, material, crystal, and biological systems
       """

In the example above ``Cp2k`` inherits all the conflicts and variants that ``CudaPackage`` defines.

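As a hedged sketch of what the mixin provides (``+cuda`` and ``cuda_arch`` come from
``CudaPackage``; the ``edit`` logic below is illustrative, not the real recipe):

.. code-block:: python

   class Cp2k(MakefilePackage, CudaPackage):
       def edit(self, spec, prefix):
           # The "+cuda" variant and "cuda_arch" values are defined by the
           # CudaPackage mixin; using them here is only an illustration.
           if spec.satisfies("+cuda"):
               cuda_arch = spec.variants["cuda_arch"].value[0]
               ...
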
.. _install-environment:

@@ -5062,6 +5260,16 @@ where each argument has the following meaning:
will run.

The default of ``None`` corresponds to the current directory (``'.'``).
Each call starts with the working directory set to the spec's test stage
directory (i.e., ``self.test_suite.test_dir_for_spec(self.spec)``).

.. warning::

   Use of the package spec's installation directory for building and running
   tests is **strongly** discouraged. Doing so has caused permission errors
   for shared spack instances *and* for facilities that install the software
   in read-only file systems or directories.


"""""""""""""""""""""""""""""""""""""""""
Accessing package- and test-related files
@@ -5069,10 +5277,10 @@ Accessing package- and test-related files

You may need to access files from one or more locations when writing
stand-alone tests. This can happen if the software's repository does not
include test source files or includes files but has no way to build the
executables using the installed headers and libraries. In these cases,
you may need to reference the files relative to one or more root
directories. The properties containing package- (or spec-) and test-related
directory paths are provided in the table below.

.. list-table:: Directory-to-property mapping
@@ -5081,19 +5289,22 @@ directory paths are provided in the table below.
   * - Root Directory
     - Package Property
     - Example(s)
   * - Package (Spec) Installation
     - ``self.prefix``
     - ``self.prefix.include``, ``self.prefix.lib``
   * - Dependency Installation
     - ``self.spec['<dependency-package>'].prefix``
     - ``self.spec['trilinos'].prefix.include``
   * - Test Suite Stage
     - ``self.test_suite.stage``
     - ``join_path(self.test_suite.stage, 'results.txt')``
   * - Spec's Test Stage
     - ``self.test_suite.test_dir_for_spec``
     - ``self.test_suite.test_dir_for_spec(self.spec)``
   * - Current Spec's Build-time Files
     - ``self.test_suite.current_test_cache_dir``
     - ``join_path(self.test_suite.current_test_cache_dir, 'examples', 'foo.c')``
   * - Current Spec's Custom Test Files
     - ``self.test_suite.current_test_data_dir``
     - ``join_path(self.test_suite.current_test_data_dir, 'hello.f90')``

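For orientation, a minimal sketch of a stand-alone test that uses two of these properties
(the data file, compiler, and expected output are assumptions, not from a real package):

.. code-block:: python

   def test(self):
       # Build a shipped example against the installed package, then run it.
       src = join_path(self.test_suite.current_test_data_dir, "hello.f90")
       self.run_test("gfortran", [src, "-o", "hello"], purpose="build the smoke test")
       self.run_test("hello", expected=["Hello"], purpose="run the smoke test")
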
@@ -6099,3 +6310,82 @@ might write:
   DWARF_PREFIX = $(spack location --install-dir libdwarf)
   CXXFLAGS += -I$DWARF_PREFIX/include
   CXXFLAGS += -L$DWARF_PREFIX/lib


.. _package_class_structure:

--------------------------
Package class architecture
--------------------------

.. note::

   This section aims to provide a high-level view of how the package class architecture evolved
   in Spack, and provides some insights on the current design.

Packages in Spack were originally designed to support only a single build system. The overall
class structure for a package looked like:

.. image:: images/original_package_architecture.png
   :scale: 60 %
   :align: center

In this architecture the base class ``AutotoolsPackage`` was responsible for both the metadata
related to the ``autotools`` build system (e.g. dependencies or variants common to all packages
using it), and for encoding the default installation procedure.

In reality, a non-negligible number of packages either change their build system as the
project evolves, or use different build systems on different platforms. An architecture based on a single class
requires hacks or other workarounds to deal with these cases.

To support a model closer to reality, Spack v0.19 changed its internal design by extracting
the attributes and methods related to building the software into a separate hierarchy:

.. image:: images/builder_package_architecture.png
   :scale: 60 %
   :align: center

In this new format each ``package.py`` contains one ``*Package`` class that gathers all the metadata,
and one or more ``*Builder`` classes that encode the installation procedure. A specific builder object
is created just before the software is built, at a time when Spack knows which build system needs
to be used for the current installation, and receives a ``package`` object during initialization.

^^^^^^^^^^^^^^^^^^^^^^^^
``build_system`` variant
^^^^^^^^^^^^^^^^^^^^^^^^

To allow imposing conditions based on the build system, each package must have a ``build_system`` variant,
which is usually inherited from base classes. This variant allows for writing metadata that is conditional
on the build system:

.. code-block:: python

   with when("build_system=cmake"):
       depends_on("cmake", type="build")

and also for selecting a specific build system from a spec literal, like in the following command:

.. code-block:: console

   $ spack install arpack-ng build_system=autotools

^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Compatibility with single-class format
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Internally, Spack always uses builders to perform operations related to the installation of a specific package.
The builders are created in the ``spack.builder.create`` function:

.. literalinclude:: _spack_root/lib/spack/spack/builder.py
   :pyobject: create

To achieve backward compatibility with the single-class format, Spack creates in this function a special
"adapter builder" if no custom builder is detected in the recipe:

.. image:: images/adapter.png
   :scale: 60 %
   :align: center

Overall the role of the adapter is to route access to attributes and methods first through the ``*Package``
hierarchy, and then back to the base class builder. This is schematically shown in the diagram above, where
the adapter's role is to "emulate" a method resolution order like the one represented by the red arrows.
lib/spack/env/cc (vendored)
@@ -241,28 +241,28 @@ case "$command" in
        mode=cpp
        debug_flags="-g"
        ;;
    cc|c89|c99|gcc|clang|armclang|icc|icx|pgcc|nvc|xlc|xlc_r|fcc|amdclang|cl.exe|craycc)
        command="$SPACK_CC"
        language="C"
        comp="CC"
        lang_flags=C
        debug_flags="-g"
        ;;
    c++|CC|g++|clang++|armclang++|icpc|icpx|dpcpp|pgc++|nvc++|xlc++|xlc++_r|FCC|amdclang++|crayCC)
        command="$SPACK_CXX"
        language="C++"
        comp="CXX"
        lang_flags=CXX
        debug_flags="-g"
        ;;
    ftn|f90|fc|f95|gfortran|flang|armflang|ifort|ifx|pgfortran|nvfortran|xlf90|xlf90_r|nagfor|frt|amdflang|crayftn)
        command="$SPACK_FC"
        language="Fortran 90"
        comp="FC"
        lang_flags=F
        debug_flags="-g"
        ;;
    f77|xlf|xlf_r|pgf77)
        command="$SPACK_F77"
        language="Fortran 77"
        comp="F77"
|||||||
lib/spack/env/cce/case-insensitive/crayCC (vendored symbolic link)
@@ -0,0 +1 @@
../../cc
lib/spack/env/cce/craycc (vendored symbolic link)
@@ -0,0 +1 @@
../cc
lib/spack/env/cce/crayftn (vendored symbolic link)
@@ -0,0 +1 @@
../cc
||||||
@@ -505,8 +505,15 @@ def group_ids(uid=None):
    if uid is None:
        uid = getuid()

    pwd_entry = pwd.getpwuid(uid)
    user = pwd_entry.pw_name

    # user's primary group id may not be listed in grp (i.e. /etc/group)
    # you have to check pwd for that, so start the list with that
    gids = [pwd_entry.pw_gid]

    return sorted(set(gids + [g.gr_gid for g in grp.getgrall() if user in g.gr_mem]))


@system_path_filter(arg_slice=slice(1))
||||||
@@ -1083,7 +1090,11 @@ def temp_cwd():
        with working_dir(tmp_dir):
            yield tmp_dir
    finally:
        kwargs = {}
        if is_windows:
            kwargs["ignore_errors"] = False
            kwargs["onerror"] = readonly_file_handler(ignore_errors=True)
        shutil.rmtree(tmp_dir, **kwargs)


@contextmanager
||||||
@@ -2095,7 +2106,7 @@ def find_system_libraries(libraries, shared=True):
    return libraries_found


def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
    """Returns an iterable of full paths to libraries found in a root dir.

    Accepts any glob characters accepted by fnmatch:
||||||
@@ -2116,6 +2127,10 @@ def find_libraries(libraries, root, shared=True, recursive=False):
            otherwise for static. Defaults to True.
        recursive (bool): if False search only root folder,
            if True descends top-down from the root. Defaults to False.
        runtime (bool): Windows only option, no-op elsewhere. If true,
            search for runtime shared libs (.DLL), otherwise, search
            for .Lib files. If shared is false, this has no meaning.
            Defaults to True.

    Returns:
        LibraryList: The libraries that have been found
||||||
@@ -2130,7 +2145,9 @@ def find_libraries(libraries, root, shared=True, recursive=False):
    if is_windows:
        static_ext = "lib"
        # For linking (runtime=False) you need the .lib files regardless of
        # whether you are doing a shared or static link
        shared_ext = "dll" if runtime else "lib"
    else:
        # Used on both Linux and macOS
        static_ext = "a"
||||||
@@ -2174,13 +2191,13 @@ def find_libraries(libraries, root, shared=True, recursive=False):
    return LibraryList(found_libs)


def find_all_shared_libraries(root, recursive=False, runtime=True):
    """Convenience function that returns the list of all shared libraries found
    in the directory passed as argument.

    See documentation for `llnl.util.filesystem.find_libraries` for more information
    """
    return find_libraries("*", root=root, shared=True, recursive=recursive, runtime=runtime)


def find_all_static_libraries(root, recursive=False):
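# Illustration (not part of the diff): with the new ``runtime`` flag a caller on
# Windows can ask for import libraries (.lib) instead of runtime DLLs, e.g.
#   find_libraries("libfoo*", root=some_prefix, shared=True, recursive=True, runtime=False)
# where "libfoo*" and some_prefix are hypothetical.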
||||||
@@ -2226,48 +2243,36 @@ def __init__(self, package, link_install_prefix=True):
        self.pkg = package
        self._addl_rpaths = set()
        self.link_install_prefix = link_install_prefix
        self._additional_library_dependents = set()

    @property
    def library_dependents(self):
        """
        Set of directories where package binaries/libraries are located.
        """
        return set([self.pkg.prefix.bin]) | self._additional_library_dependents

    def add_library_dependent(self, *dest):
        """
        Add paths to directories or libraries/binaries to set of
        common paths that need to link against other libraries

        Specified paths should fall outside of a package's common
        link paths, i.e. the bin
        directories.
        """
        for pth in dest:
            if os.path.isfile(pth):
                self._additional_library_dependents.add(os.path.dirname(pth))
            else:
                self._additional_library_dependents.add(pth)

    @property
    def rpaths(self):
        """
        Set of libraries this package needs to link against during runtime
        These packages will each be symlinked into the packages lib and binary dir
        """

        dependent_libs = []
        for path in self.pkg.rpath:
            dependent_libs.extend(list(find_all_shared_libraries(path, recursive=True)))
||||||
@@ -2275,18 +2280,43 @@ def link_targets(self):
            dependent_libs.extend(list(find_all_shared_libraries(extra_path, recursive=True)))
        return set(dependent_libs)

    def add_rpath(self, *paths):
        """
        Add libraries found at the root of provided paths to runtime linking

        These are libraries found outside of the typical scope of rpath linking
        that require manual inclusion in a runtime linking scheme.
        These links are unidirectional, and are only
        intended to bring outside dependencies into this package

        Args:
            *paths (str): arbitrary number of paths to be added to runtime linking
        """
        self._addl_rpaths = self._addl_rpaths | set(paths)

    def _link(self, path, dest):
        file_name = os.path.basename(path)
        dest_file = os.path.join(dest, file_name)
        if os.path.exists(dest):
            try:
                symlink(path, dest_file)
            # For py2 compatibility, we have to catch the specific Windows error code
            # associated with trying to create a file that already exists (winerror 183)
            except OSError as e:
                if e.winerror == 183:
                    # We have either already symlinked or we are encountering a naming clash
                    # either way, we don't want to overwrite existing libraries
                    already_linked = islink(dest_file)
                    tty.debug(
                        "Linking library %s to %s failed, " % (path, dest_file) + "already linked."
                        if already_linked
                        else "library with name %s already exists at location %s."
                        % (file_name, dest)
                    )
                    pass
                else:
                    raise e

    def establish_link(self):
        """
        (sym)link packages to runtime dependencies based on RPath configuration for
||||||
@@ -2298,29 +2328,8 @@ def establish_link(self):

        # for each binary install dir in self.pkg (i.e. pkg.prefix.bin, pkg.prefix.lib)
        # install a symlink to each dependent library
        for library, lib_dir in itertools.product(self.rpaths, self.library_dependents):
            self._link(library, lib_dir)


@system_path_filter
|||||||
@@ -749,6 +749,26 @@ def _n_xxx_ago(x):
    raise ValueError(msg)


def pretty_seconds(seconds):
    """Seconds to string with appropriate units

    Arguments:
        seconds (float): Number of seconds

    Returns:
        str: Time string with units
    """
    if seconds >= 1:
        value, unit = seconds, "s"
    elif seconds >= 1e-3:
        value, unit = seconds * 1e3, "ms"
    elif seconds >= 1e-6:
        value, unit = seconds * 1e6, "us"
    else:
        value, unit = seconds * 1e9, "ns"
    return "%.3f%s" % (value, unit)


class RequiredAttributeError(ValueError):
    def __init__(self, message):
        super(RequiredAttributeError, self).__init__(message)
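# Illustration (not part of the diff): pretty_seconds picks the largest unit
# that keeps the value at or above 1, e.g.
#   pretty_seconds(90.5)   -> "90.500s"
#   pretty_seconds(0.004)  -> "4.000ms"
#   pretty_seconds(2.5e-7) -> "250.000ns"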
||||||
@@ -1002,6 +1022,14 @@ def stable_partition(
    return true_items, false_items


def ensure_last(lst, *elements):
    """Performs a stable partition of lst, ensuring that ``elements``
    occur at the end of ``lst`` in specified order. Mutates ``lst``.
    Raises ``ValueError`` if any ``elements`` are not already in ``lst``."""
    for elt in elements:
        lst.append(lst.pop(lst.index(elt)))


class TypedMutableSequence(MutableSequence):
    """Base class that behaves like a list, just with a different type.
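# Illustration (not part of the diff): ensure_last mutates the list in place, e.g.
#   lst = ["a", "b", "c", "d"]
#   ensure_last(lst, "b", "a")  # lst is now ["c", "d", "b", "a"]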
|||||||
@@ -12,6 +12,7 @@
from typing import Dict, Tuple  # novm

import llnl.util.tty as tty
from llnl.util.lang import pretty_seconds

import spack.util.string
||||||
@@ -166,7 +167,7 @@ def _attempts_str(wait_time, nattempts):
        return ""

    attempts = spack.util.string.plural(nattempts, "attempt")
    return " after {} and {}".format(pretty_seconds(wait_time), attempts)


class LockType(object):
||||||
@@ -318,8 +319,8 @@ def _lock(self, op, timeout=None):
            raise LockROFileError(self.path)

        self._log_debug(
            "{} locking [{}:{}]: timeout {}".format(
                op_str.lower(), self._start, self._length, pretty_seconds(timeout or 0)
            )
        )
||||||
@@ -340,7 +341,8 @@ def _lock(self, op, timeout=None):
                total_wait_time = time.time() - start_time
                return total_wait_time, num_attempts

        total_wait_time = time.time() - start_time
        raise LockTimeoutError(op_str.lower(), self.path, total_wait_time, num_attempts)

    def _poll_lock(self, op):
        """Attempt to acquire the lock in a non-blocking manner. Return whether
||||||
@@ -780,6 +782,18 @@ class LockLimitError(LockError):
class LockTimeoutError(LockError):
    """Raised when an attempt to acquire a lock times out."""

    def __init__(self, lock_type, path, time, attempts):
        fmt = "Timed out waiting for a {} lock after {}.\n Made {} {} on file: {}"
        super(LockTimeoutError, self).__init__(
            fmt.format(
                lock_type,
                pretty_seconds(time),
                attempts,
                "attempt" if attempts == 1 else "attempts",
                path,
            )
        )


class LockUpgradeError(LockError):
    """Raised when unable to upgrade from a read to a write lock."""
|
|||||||
@@ -503,6 +503,33 @@ def invalid_sha256_digest(fetcher):
    return errors


@package_properties
def _ensure_env_methods_are_ported_to_builders(pkgs, error_cls):
    """Ensure that methods modifying the build environment are ported to builder classes."""
    errors = []
    for pkg_name in pkgs:
        pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
        buildsystem_variant, _ = pkg_cls.variants["build_system"]
        buildsystem_names = [getattr(x, "value", x) for x in buildsystem_variant.values]
        builder_cls_names = [spack.builder.BUILDER_CLS[x].__name__ for x in buildsystem_names]
        module = pkg_cls.module
        has_builders_in_package_py = any(
            getattr(module, name, False) for name in builder_cls_names
        )
        if not has_builders_in_package_py:
            continue

        for method_name in ("setup_build_environment", "setup_dependent_build_environment"):
            if hasattr(pkg_cls, method_name):
                msg = (
                    "Package '{}' needs to move the '{}' method from the package class to the"
                    " appropriate builder class".format(pkg_name, method_name)
                )
                errors.append(error_cls(msg, []))

    return errors


@package_https_directives
def _linting_package_file(pkgs, error_cls):
    """Check for correctness of links"""
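# Illustration (not part of the diff): the ported form this audit asks for.
# When package.py defines its own builder, environment hooks move onto it;
# the variable name below is hypothetical.
#
#   class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder):
#       def setup_build_environment(self, env):
#           env.set("MY_BUILD_FLAG", "1")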
||||||
@@ -660,7 +687,13 @@ def _ensure_variant_defaults_are_parsable(pkgs, error_cls):
            errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))
            continue

        try:
            vspec = variant.make_default()
        except spack.variant.MultipleValuesInExclusiveVariantError:
            error_msg = "Cannot create a default value for the variant '{}' in package '{}'"
            errors.append(error_cls(error_msg.format(variant_name, pkg_name), []))
            continue

        try:
            variant.validate_or_raise(vspec, pkg_cls=pkg_cls)
        except spack.variant.InvalidVariantValueError:
|
|||||||
@@ -12,6 +12,7 @@
import sys
import tarfile
import tempfile
import time
import traceback
import warnings
from contextlib import closing
||||||
@@ -22,7 +23,7 @@
import llnl.util.filesystem as fsys
import llnl.util.lang
import llnl.util.tty as tty
from llnl.util.filesystem import BaseDirectoryVisitor, mkdirp, visit_directory_tree

import spack.cmd
import spack.config as config
||||||
@@ -41,6 +42,7 @@
import spack.util.url as url_util
import spack.util.web as web_util
from spack.caches import misc_cache_location
from spack.relocate import utf8_paths_to_single_binary_regex
from spack.spec import Spec
from spack.stage import Stage
||||||
@@ -105,6 +107,10 @@ def __init__(self, cache_root):
        # cache (_mirrors_for_spec)
        self._specs_already_associated = set()

        # mapping from mirror urls to the time.time() of the last index fetch and a bool indicating
        # whether the fetch succeeded or not.
        self._last_fetch_times = {}

        # _mirrors_for_spec is a dictionary mapping DAG hashes to lists of
        # entries indicating mirrors where that concrete spec can be found.
        # Each entry is a dictionary consisting of:
||||||
@@ -137,6 +143,7 @@ def clear(self):
        self._index_file_cache = None
        self._local_index_cache = None
        self._specs_already_associated = set()
        self._last_fetch_times = {}
        self._mirrors_for_spec = {}

    def _write_local_index_cache(self):
||||||
@@ -242,7 +249,6 @@ def find_built_spec(self, spec, mirrors_to_check=None):
                }
            ]
        """
        return self.find_by_hash(spec.dag_hash(), mirrors_to_check=mirrors_to_check)

    def find_by_hash(self, find_hash, mirrors_to_check=None):
||||||
@@ -253,6 +259,9 @@ def find_by_hash(self, find_hash, mirrors_to_check=None):
            mirrors_to_check: Optional mapping containing mirrors to check. If
                None, just assumes all configured mirrors.
        """
        if find_hash not in self._mirrors_for_spec:
            # Not found in the cached index, pull the latest from the server.
            self.update(with_cooldown=True)
        if find_hash not in self._mirrors_for_spec:
            return None
        results = self._mirrors_for_spec[find_hash]
||||||
@@ -283,7 +292,7 @@ def update_spec(self, spec, found_list):
                "spec": new_entry["spec"],
            }

    def update(self, with_cooldown=False):
        """Make sure local cache of buildcache index files is up to date.
        If the same mirrors are configured as the last time this was called
        and none of the remote buildcache indices have changed, calling this
||||||
@@ -325,24 +334,41 @@ def update(self):

        fetch_errors = []
        all_methods_failed = True
        ttl = spack.config.get("config:binary_index_ttl", 600)
        now = time.time()

        for cached_mirror_url in self._local_index_cache:
            cache_entry = self._local_index_cache[cached_mirror_url]
            cached_index_hash = cache_entry["index_hash"]
            cached_index_path = cache_entry["index_path"]
            if cached_mirror_url in configured_mirror_urls:
                # Only do a fetch if the last fetch was longer than TTL ago
                if (
                    with_cooldown
                    and ttl > 0
                    and cached_mirror_url in self._last_fetch_times
                    and now - self._last_fetch_times[cached_mirror_url][0] < ttl
                ):
                    # We're in the cooldown period, don't try to fetch again
                    # If the fetch succeeded last time, consider this update a success, otherwise
                    # re-report the error here
                    if self._last_fetch_times[cached_mirror_url][1]:
                        all_methods_failed = False
                else:
                    # May need to fetch the index and update the local caches
                    try:
                        needs_regen = self._fetch_and_cache_index(
                            cached_mirror_url, expect_hash=cached_index_hash
                        )
                        self._last_fetch_times[cached_mirror_url] = (now, True)
                        all_methods_failed = False
                    except FetchCacheError as fetch_error:
                        needs_regen = False
                        fetch_errors.extend(fetch_error.errors)
                        self._last_fetch_times[cached_mirror_url] = (now, False)
                    # The need to regenerate implies a need to clear as well.
                    spec_cache_clear_needed |= needs_regen
                    spec_cache_regenerate_needed |= needs_regen
            else:
                # No longer have this mirror, cached index should be removed
                items_to_remove.append(
||||||
@@ -351,6 +377,8 @@ def update(self):
                        "cache_key": os.path.join(self._index_cache_root, cached_index_path),
                    }
                )
                if cached_mirror_url in self._last_fetch_times:
                    del self._last_fetch_times[cached_mirror_url]
                spec_cache_clear_needed = True
                spec_cache_regenerate_needed = True
||||||
@@ -369,10 +397,12 @@ def update(self):
                # Need to fetch the index and update the local caches
                try:
                    needs_regen = self._fetch_and_cache_index(mirror_url)
                    self._last_fetch_times[mirror_url] = (now, True)
                    all_methods_failed = False
                except FetchCacheError as fetch_error:
                    fetch_errors.extend(fetch_error.errors)
                    needs_regen = False
                    self._last_fetch_times[mirror_url] = (now, False)
                # Generally speaking, a new mirror wouldn't imply the need to
                # clear the spec cache, so leave it as is.
                if needs_regen:
||||||
@@ -619,6 +649,57 @@ def read_buildinfo_file(prefix):
    return buildinfo


class BuildManifestVisitor(BaseDirectoryVisitor):
    """Visitor that collects a list of files and symlinks
    that can be checked for need of relocation. It knows how
    to dedupe hardlinks and deal with symlinks to files and
    directories."""

    def __init__(self):
        # Save unique identifiers of files to avoid
        # relocating hardlink files for each path.
        self.visited = set()

        # Lists of files we will check
        self.files = []
        self.symlinks = []

    def seen_before(self, root, rel_path):
        stat_result = os.lstat(os.path.join(root, rel_path))
        identifier = (stat_result.st_dev, stat_result.st_ino)
        if identifier in self.visited:
            return True
        else:
            self.visited.add(identifier)
            return False

    def visit_file(self, root, rel_path, depth):
        if self.seen_before(root, rel_path):
            return
        self.files.append(rel_path)

    def visit_symlinked_file(self, root, rel_path, depth):
        # Note: symlinks *can* be hardlinked, but it is unclear if
        # symlinks can be relinked in-place (preserving inode).
        # Therefore, we do *not* de-dupe hardlinked symlinks.
        self.symlinks.append(rel_path)

    def before_visit_dir(self, root, rel_path, depth):
        return os.path.basename(rel_path) not in (".spack", "man")

    def before_visit_symlinked_dir(self, root, rel_path, depth):
        # Treat symlinked directories simply as symlinks.
        self.visit_symlinked_file(root, rel_path, depth)
        # Never recurse into symlinked directories.
        return False


def file_matches(path, regex):
    with open(path, "rb") as f:
        contents = f.read()
    return bool(regex.search(contents))


def get_buildfile_manifest(spec):
    """
    Return a data structure with information about a build, including
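# Illustration (not part of the diff): the visitor is driven through
# visit_directory_tree, as get_buildfile_manifest below does; hardlinked copies
# of the same file appear only once in visitor.files.
#
#   visitor = BuildManifestVisitor()
#   visit_directory_tree(spec.prefix, visitor)
#   relocation_candidates = visitor.files + visitor.symlinks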
||||||
@@ -634,57 +715,61 @@ def get_buildfile_manifest(spec):
        "link_to_relocate": [],
        "other": [],
        "binary_to_relocate_fullpath": [],
        "hardlinks_deduped": True,
    }

    # Guard against filesystem footguns of hardlinks and symlinks by using
    # a visitor to retrieve a list of files and symlinks, so we don't have
    # to worry about hardlinks of symlinked dirs and what not.
    visitor = BuildManifestVisitor()
    root = spec.prefix
    visit_directory_tree(root, visitor)

    # Collect a list of prefixes for this package and its dependencies, Spack will
    # look for them to decide if text file needs to be relocated or not
    prefixes = [d.prefix for d in spec.traverse(root=True, deptype="all") if not d.external]
    prefixes.append(spack.hooks.sbang.sbang_install_path())
    prefixes.append(str(spack.store.layout.root))

    # Create a giant regex that matches all prefixes
    regex = utf8_paths_to_single_binary_regex(prefixes)

    # Symlinks.

    # Obvious bugs:
    # 1. relative links are not relocated.
    # 2. paths are used as strings.
    for rel_path in visitor.symlinks:
        abs_path = os.path.join(root, rel_path)
        link = os.readlink(abs_path)
        if os.path.isabs(link) and link.startswith(spack.store.layout.root):
            data["link_to_relocate"].append(rel_path)

    # Non-symlinks.
    for rel_path in visitor.files:
        abs_path = os.path.join(root, rel_path)
        m_type, m_subtype = fsys.mime_type(abs_path)

        if relocate.needs_binary_relocation(m_type, m_subtype):
            # Why is this branch not part of needs_binary_relocation? :(
            if (
                (
                    m_subtype in ("x-executable", "x-sharedlib", "x-pie-executable")
                    and sys.platform != "darwin"
                )
                or (m_subtype in ("x-mach-binary") and sys.platform == "darwin")
                or (not rel_path.endswith(".o"))
            ):
                data["binary_to_relocate"].append(rel_path)
                data["binary_to_relocate_fullpath"].append(abs_path)
                continue

        elif relocate.needs_text_relocation(m_type, m_subtype) and file_matches(abs_path, regex):
            data["text_to_relocate"].append(rel_path)
            continue

        data["other"].append(abs_path)

    return data
||||||
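For context, the prefix-matching step in the new code boils down to compiling one alternation regex over the byte-encoded prefixes and searching each candidate text file once. Below is a minimal sketch of that idea using only the standard re module; the helper names here (paths_to_binary_regex, file_mentions_prefix) are illustrative stand-ins for Spack's utf8_paths_to_single_binary_regex and file_matches, whose exact escaping and encoding handling may differ.

import re

def paths_to_binary_regex(prefixes):
    # Join all prefixes into a single escaped alternation, UTF-8 encoded,
    # so one regex.search() call covers every install prefix at once.
    pattern = b"(%s)" % b"|".join(re.escape(p.encode("utf-8")) for p in prefixes)
    return re.compile(pattern)

def file_mentions_prefix(path, regex):
    # Read the file as bytes and report whether any prefix occurs in it.
    with open(path, "rb") as f:
        return bool(regex.search(f.read()))

# Example: decide whether an installed text file needs relocation.
regex = paths_to_binary_regex(["/opt/spack/store", "/opt/spack/bin/sbang"])
# file_mentions_prefix("some/installed/script.sh", regex) -> True/False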
@@ -698,7 +783,7 @@ def write_buildinfo_file(spec, workdir, rel=False):
     prefix_to_hash = dict()
     prefix_to_hash[str(spec.package.prefix)] = spec.dag_hash()
     deps = spack.build_environment.get_rpath_deps(spec.package)
-    for d in deps:
+    for d in deps + spec.dependencies(deptype="run"):
         prefix_to_hash[str(d.prefix)] = d.dag_hash()

     # Create buildinfo data and write it to disk
@@ -711,6 +796,7 @@ def write_buildinfo_file(spec, workdir, rel=False):
     buildinfo["relocate_textfiles"] = manifest["text_to_relocate"]
     buildinfo["relocate_binaries"] = manifest["binary_to_relocate"]
     buildinfo["relocate_links"] = manifest["link_to_relocate"]
+    buildinfo["hardlinks_deduped"] = manifest["hardlinks_deduped"]
    buildinfo["prefix_to_hash"] = prefix_to_hash
     filename = buildinfo_file_name(workdir)
     with open(filename, "w") as outfile:
@@ -864,13 +950,13 @@ def generate_package_index(cache_prefix):
     .json) under cache_prefix.
     """
     try:
-        file_list = (
+        file_list = [
             entry
             for entry in web_util.list_url(cache_prefix)
             if entry.endswith(".yaml")
             or entry.endswith("spec.json")
             or entry.endswith("spec.json.sig")
-        )
+        ]
     except KeyError as inst:
         msg = "No packages at {0}: {1}".format(cache_prefix, inst)
         tty.warn(msg)
@@ -883,6 +969,14 @@ def generate_package_index(cache_prefix):
         tty.warn(msg)
         return

+    if any(x.endswith(".yaml") for x in file_list):
+        msg = (
+            "The mirror in '{}' contains specs in the deprecated YAML format.\n\n\tSupport for "
+            "this format will be removed in v0.20, please regenerate the build cache with a "
+            "recent Spack\n"
+        ).format(cache_prefix)
+        warnings.warn(msg)
+
     tty.debug("Retrieving spec descriptor files from {0} to build index".format(cache_prefix))

     tmpdir = tempfile.mkdtemp()
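One detail worth noting in the hunk above: file_list changes from a generator expression to a list comprehension. The list matters because the result is now iterated more than once (the new any(...) YAML check plus the later fetch loop), and a generator would be exhausted after the first pass. A minimal illustration, using nothing beyond the standard library:

entries = (e for e in ["a.yaml", "b.spec.json"])     # generator: single use
print(any(e.endswith(".yaml") for e in entries))     # True, but consumes it
print(list(entries))                                 # [] -- nothing left to fetch

entries = [e for e in ["a.yaml", "b.spec.json"]]     # list: reusable
print(any(e.endswith(".yaml") for e in entries))     # True
print(list(entries))                                 # ['a.yaml', 'b.spec.json']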
@@ -1071,7 +1165,11 @@ def _build_tarball(
         tty.die(e)

     # create gzip compressed tarball of the install prefix
-    with closing(tarfile.open(tarfile_path, "w:gz")) as tar:
+    # On AMD Ryzen 3700X and an SSD disk, we have the following on compression speed:
+    # compresslevel=6 gzip default: llvm takes 4mins, roughly 2.1GB
+    # compresslevel=9 python default: llvm takes 12mins, roughly 2.1GB
+    # So we follow gzip.
+    with closing(tarfile.open(tarfile_path, "w:gz", compresslevel=6)) as tar:
         tar.add(name="%s" % workdir, arcname="%s" % os.path.basename(spec.prefix))
     # remove copy of install directory
     shutil.rmtree(workdir)
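The comment in the hunk motivates the explicit compresslevel=6: Python's tarfile uses gzip level 9 by default for "w:gz", while the gzip tool defaults to 6, which (per the numbers above) is roughly 3x faster on a large prefix for essentially the same archive size. A self-contained sketch, assuming only the standard library (function name and paths are placeholders):

import os
import tarfile
from contextlib import closing


def create_prefix_tarball(prefix, tarball_path):
    # compresslevel=6 mirrors the gzip CLI default; tarfile's own default is 9.
    with closing(tarfile.open(tarball_path, "w:gz", compresslevel=6)) as tar:
        tar.add(name=prefix, arcname=os.path.basename(prefix))


# create_prefix_tarball("/tmp/example-prefix", "/tmp/example-prefix.tar.gz")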
@@ -1346,6 +1444,13 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
             # the remaining mirrors, looking for one we can use.
             tarball_stage = try_fetch(spackfile_url)
             if tarball_stage:
+                if ext == "yaml":
+                    msg = (
+                        "Reading {} from mirror.\n\n\tThe YAML format for buildcaches is "
+                        "deprecated and will be removed in v0.20\n"
+                    ).format(spackfile_url)
+                    warnings.warn(msg)
+
                 return {
                     "tarball_stage": tarball_stage,
                     "specfile_stage": local_specfile_stage,
@@ -1418,6 +1523,38 @@ def check_package_relocatable(workdir, spec, allow_root):
     relocate.raise_if_not_relocatable(cur_path_names, allow_root)


+def dedupe_hardlinks_if_necessary(root, buildinfo):
+    """Updates a buildinfo dict for old archives that did
+    not dedupe hardlinks. De-duping hardlinks is necessary
+    when relocating files in parallel and in-place. This
+    means we must preserve inodes when relocating."""
+
+    # New archives don't need this.
+    if buildinfo.get("hardlinks_deduped", False):
+        return
+
+    # Clearly we can assume that an inode is either in the
+    # textfile or binary group, but let's just stick to
+    # a single set of visited nodes.
+    visited = set()
+
+    # Note: we do *not* dedupe hardlinked symlinks, since
+    # it seems difficult or even impossible to relink
+    # symlinks while preserving inode.
+    for key in ("relocate_textfiles", "relocate_binaries"):
+        if key not in buildinfo:
+            continue
+        new_list = []
+        for rel_path in buildinfo[key]:
+            stat_result = os.lstat(os.path.join(root, rel_path))
+            identifier = (stat_result.st_dev, stat_result.st_ino)
+            if identifier in visited:
+                continue
+            visited.add(identifier)
+            new_list.append(rel_path)
+        buildinfo[key] = new_list
+
+
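The dedup key in the new function is the (st_dev, st_ino) pair from lstat: two paths with the same pair are hardlinks to one underlying file, so relocating one of them in place (inode preserved) also rewrites the other. A small standard-library demonstration of that invariant:

import os
import tempfile

d = tempfile.mkdtemp()
a = os.path.join(d, "a")
b = os.path.join(d, "b")
with open(a, "w") as f:
    f.write("payload")
os.link(a, b)  # b is a hardlink to a

sa, sb = os.lstat(a), os.lstat(b)
assert (sa.st_dev, sa.st_ino) == (sb.st_dev, sb.st_ino)
# Relocating `a` in place therefore also updates `b`, which is why each
# inode only needs to appear once in the relocation lists.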
 def relocate_package(spec, allow_root):
     """
     Relocate the given package

@@ -1451,7 +1588,7 @@ def relocate_package(spec, allow_root):
     hash_to_prefix = dict()
     hash_to_prefix[spec.format("{hash}")] = str(spec.package.prefix)
     new_deps = spack.build_environment.get_rpath_deps(spec.package)
-    for d in new_deps:
+    for d in new_deps + spec.dependencies(deptype="run"):
         hash_to_prefix[d.format("{hash}")] = str(d.prefix)
     # Spurious replacements (e.g. sbang) will cause issues with binaries
     # For example, the new sbang can be longer than the old one.
@@ -1480,6 +1617,9 @@ def relocate_package(spec, allow_root):

     tty.debug("Relocating package from", "%s to %s." % (old_layout_root, new_layout_root))

+    # Old archives maybe have hardlinks repeated.
+    dedupe_hardlinks_if_necessary(workdir, buildinfo)
+
     def is_backup_file(file):
         return file.endswith("~")

@@ -1509,7 +1649,11 @@ def is_backup_file(file):
                 old_prefix,
                 new_prefix,
             )
-        if "elf" in platform.binary_formats:
+        elif "elf" in platform.binary_formats and not rel:
+            # The new ELF dynamic section relocation logic only handles absolute to
+            # absolute relocation.
+            relocate.new_relocate_elf_binaries(files_to_relocate, prefix_to_prefix_bin)
+        elif "elf" in platform.binary_formats and rel:
             relocate.relocate_elf_binaries(
                 files_to_relocate,
                 old_layout_root,
@@ -1519,35 +1663,23 @@ def is_backup_file(file):
                 old_prefix,
                 new_prefix,
             )
-        # Relocate links to the new install prefix
-        links = [link for link in buildinfo.get("relocate_links", [])]
-        relocate.relocate_links(links, old_layout_root, old_prefix, new_prefix)
+
+        # Relocate links to the new install prefix
+        links = [os.path.join(workdir, f) for f in buildinfo.get("relocate_links", [])]
+        relocate.relocate_links(links, prefix_to_prefix_bin)

         # For all buildcaches
         # relocate the install prefixes in text files including dependencies
-        relocate.relocate_text(text_names, prefix_to_prefix_text)
-
-        paths_to_relocate = [old_prefix, old_layout_root]
-        paths_to_relocate.extend(prefix_to_hash.keys())
-        files_to_relocate = list(
-            filter(
-                lambda pathname: not relocate.file_is_relocatable(
-                    pathname, paths_to_relocate=paths_to_relocate
-                ),
-                map(
-                    lambda filename: os.path.join(workdir, filename),
-                    buildinfo["relocate_binaries"],
-                ),
-            )
-        )
+        relocate.unsafe_relocate_text(text_names, prefix_to_prefix_text)
+
         # relocate the install prefixes in binary files including dependencies
-        relocate.relocate_text_bin(files_to_relocate, prefix_to_prefix_bin)
+        relocate.unsafe_relocate_text_bin(files_to_relocate, prefix_to_prefix_bin)

     # If we are installing back to the same location
     # relocate the sbang location if the spack directory changed
     else:
         if old_spack_prefix != new_spack_prefix:
-            relocate.relocate_text(text_names, prefix_to_prefix_text)
+            relocate.unsafe_relocate_text(text_names, prefix_to_prefix_text)


 def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum):
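Conceptually, relocating a symlink means reading its target, swapping the old prefix for the new one, and recreating the link. The real relocate.relocate_links handles more cases than this; the following is only a minimal sketch of the core idea using the standard library, with a hypothetical helper name:

import os


def relocate_symlink(link_path, prefix_to_prefix):
    """Rewrite an absolute symlink whose target starts with a known old prefix."""
    target = os.readlink(link_path)
    for old, new in prefix_to_prefix.items():
        if target.startswith(old):
            os.unlink(link_path)
            os.symlink(new + target[len(old):], link_path)
            return True
    return False  # target does not point into any known prefix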
@@ -1878,8 +2010,8 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):

     results = binary_index.find_built_spec(spec, mirrors_to_check=mirrors_to_check)

-    # Maybe we just didn't have the latest information from the mirror, so
-    # try to fetch directly, unless we are only considering the indices.
+    # The index may be out-of-date. If we aren't only considering indices, try
+    # to fetch directly since we know where the file should be.
     if not results and not index_only:
         results = try_direct_fetch(spec, mirrors=mirrors_to_check)
         # We found a spec by the direct fetch approach, we might as well
@@ -91,6 +91,14 @@ def _try_import_from_store(module, query_spec, query_info=None):
             os.path.join(candidate_spec.prefix, pkg.platlib),
         ]  # type: list[str]
         path_before = list(sys.path)

+        # Python 3.8+ on Windows does not search dependent DLLs in PATH,
+        # so we need to manually add it using os.add_dll_directory
+        # https://docs.python.org/3/whatsnew/3.8.html#bpo-36085-whatsnew
+        if sys.version_info[:2] >= (3, 8) and sys.platform == "win32":
+            if os.path.isdir(candidate_spec.prefix.bin):
+                os.add_dll_directory(candidate_spec.prefix.bin)  # novermin
+
         # NOTE: try module_paths first and last, last allows an existing version in path
         # to be picked up and used, possibly depending on something in the store, first
         # allows the bootstrap version to work when an incompatible version is in
@@ -667,6 +675,11 @@ def _add_externals_if_missing():
 _REF_COUNT = 0


+def is_bootstrapping():
+    global _REF_COUNT
+    return _REF_COUNT > 0
+
+
 @contextlib.contextmanager
 def ensure_bootstrap_configuration():
     # The context manager is reference counted to ensure we don't swap multiple
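The Windows branch exists because, since Python 3.8, extension modules on Windows no longer resolve their dependent DLLs via PATH; directories must be registered explicitly. A minimal illustration of the pattern, with a hypothetical helper and path:

import os
import sys


def import_with_dll_dir(module_name, dll_dir):
    # On Windows + Python 3.8+, register the directory that holds the
    # dependent DLLs before importing the extension module.
    if sys.platform == "win32" and sys.version_info[:2] >= (3, 8) and os.path.isdir(dll_dir):
        os.add_dll_directory(dll_dir)
    return __import__(module_name)


# e.g. import_with_dll_dir("clingo", r"C:\spack\store\clingo\bin")  # hypothetical path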
@@ -52,6 +52,7 @@

 import spack.build_systems.cmake
 import spack.build_systems.meson
+import spack.builder
 import spack.config
 import spack.install_test
 import spack.main
@@ -120,18 +121,18 @@
 stat_suffix = "lib" if sys.platform == "win32" else "a"


-def should_set_parallel_jobs(jobserver_support=False):
-    """Returns true in general, except when:
-    - The env variable SPACK_NO_PARALLEL_MAKE=1 is set
-    - jobserver_support is enabled, and a jobserver was found.
-    """
-    if (
-        jobserver_support
-        and "MAKEFLAGS" in os.environ
-        and "--jobserver" in os.environ["MAKEFLAGS"]
-    ):
-        return False
-    return not env_flag(SPACK_NO_PARALLEL_MAKE)
+def jobserver_enabled():
+    """Returns true if a posix jobserver (make) is detected."""
+    return "MAKEFLAGS" in os.environ and "--jobserver" in os.environ["MAKEFLAGS"]
+
+
+def get_effective_jobs(jobs, parallel=True, supports_jobserver=False):
+    """Return the number of jobs, or None if supports_jobserver and a jobserver is detected."""
+    if not parallel or jobs <= 1 or env_flag(SPACK_NO_PARALLEL_MAKE):
+        return 1
+    if supports_jobserver and jobserver_enabled():
+        return None
+    return jobs


 class MakeExecutable(Executable):
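For background, when a recursive make runs under a parent `make -jN`, the parent advertises its jobserver through MAKEFLAGS (typically `--jobserver-auth=R,W` on newer GNU make, `--jobserver-fds=R,W` on older versions), which is what the substring check above detects. A small way to see the behaviour; the MAKEFLAGS value below is simulated for the example:

import os

# A child of `make -j8` typically sees something like:
#   MAKEFLAGS=" -j8 --jobserver-auth=3,4"
os.environ["MAKEFLAGS"] = " -j8 --jobserver-auth=3,4"   # simulated

def jobserver_enabled():
    return "MAKEFLAGS" in os.environ and "--jobserver" in os.environ["MAKEFLAGS"]

print(jobserver_enabled())  # True: omit -j and let the parent's jobserver coordinate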
@@ -146,26 +147,33 @@ class MakeExecutable(Executable):
     """

     def __init__(self, name, jobs, **kwargs):
+        supports_jobserver = kwargs.pop("supports_jobserver", True)
         super(MakeExecutable, self).__init__(name, **kwargs)
+        self.supports_jobserver = supports_jobserver
         self.jobs = jobs

     def __call__(self, *args, **kwargs):
         """parallel, and jobs_env from kwargs are swallowed and used here;
         remaining arguments are passed through to the superclass.
         """
-        # TODO: figure out how to check if we are using a jobserver-supporting ninja,
-        # the two split ninja packages make this very difficult right now
-        parallel = should_set_parallel_jobs(jobserver_support=True) and kwargs.pop(
-            "parallel", self.jobs > 1
-        )
-
-        if parallel:
-            args = ("-j{0}".format(self.jobs),) + args
-            jobs_env = kwargs.pop("jobs_env", None)
-            if jobs_env:
-                # Caller wants us to set an environment variable to
-                # control the parallelism.
-                kwargs["extra_env"] = {jobs_env: str(self.jobs)}
+        parallel = kwargs.pop("parallel", True)
+        jobs_env = kwargs.pop("jobs_env", None)
+        jobs_env_supports_jobserver = kwargs.pop("jobs_env_supports_jobserver", False)
+
+        jobs = get_effective_jobs(
+            self.jobs, parallel=parallel, supports_jobserver=self.supports_jobserver
+        )
+        if jobs is not None:
+            args = ("-j{0}".format(jobs),) + args
+
+        if jobs_env:
+            # Caller wants us to set an environment variable to
+            # control the parallelism.
+            jobs_env_jobs = get_effective_jobs(
+                self.jobs, parallel=parallel, supports_jobserver=jobs_env_supports_jobserver
+            )
+            if jobs_env_jobs is not None:
+                kwargs["extra_env"] = {jobs_env: str(jobs_env_jobs)}

         return super(MakeExecutable, self).__call__(*args, **kwargs)
@@ -316,7 +324,7 @@ def set_compiler_environment_variables(pkg, env):
     env.set("SPACK_LINKER_ARG", compiler.linker_arg)

     # Check whether we want to force RPATH or RUNPATH
-    if spack.config.get("config:shared_linking") == "rpath":
+    if spack.config.get("config:shared_linking:type") == "rpath":
         env.set("SPACK_DTAGS_TO_STRIP", compiler.enable_new_dtags)
         env.set("SPACK_DTAGS_TO_ADD", compiler.disable_new_dtags)
     else:
@@ -324,7 +332,11 @@ def set_compiler_environment_variables(pkg, env):
         env.set("SPACK_DTAGS_TO_ADD", compiler.enable_new_dtags)

     # Set the target parameters that the compiler will add
-    isa_arg = spec.architecture.target.optimization_flags(compiler)
+    # Don't set on cray platform because the targeting module handles this
+    if spec.satisfies("platform=cray"):
+        isa_arg = ""
+    else:
+        isa_arg = spec.architecture.target.optimization_flags(compiler)
     env.set("SPACK_TARGET_ARGS", isa_arg)

     # Trap spack-tracked compiler flags as appropriate.
@@ -345,7 +357,7 @@ def set_compiler_environment_variables(pkg, env):
             handler = pkg.flag_handler.__func__
         else:
             handler = pkg.flag_handler.im_func
-        injf, envf, bsf = handler(pkg, flag, spec.compiler_flags[flag])
+        injf, envf, bsf = handler(pkg, flag, spec.compiler_flags[flag][:])
         inject_flags[flag] = injf or []
         env_flags[flag] = envf or []
         build_system_flags[flag] = bsf or []
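The `[:]` copy in the last hunk guards against a flag handler mutating the spec's own flag list; only the copy sees the change. A compact illustration with a hypothetical handler:

def handler(pkg, flag, flags):
    flags.append("-O2")      # a handler that modifies what it receives
    return flags, [], []

spec_flags = {"cflags": ["-g"]}
handler(None, "cflags", spec_flags["cflags"])      # mutates the spec's list
print(spec_flags["cflags"])                        # ['-g', '-O2']  <- side effect

spec_flags = {"cflags": ["-g"]}
handler(None, "cflags", spec_flags["cflags"][:])   # handler gets a copy
print(spec_flags["cflags"])                        # ['-g']         <- unchanged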
@@ -546,7 +558,7 @@ def _set_variables_for_single_module(pkg, module):
     # TODO: make these build deps that can be installed if not found.
     m.make = MakeExecutable("make", jobs)
     m.gmake = MakeExecutable("gmake", jobs)
-    m.ninja = MakeExecutable("ninja", jobs)
+    m.ninja = MakeExecutable("ninja", jobs, supports_jobserver=False)

     # easy shortcut to os.environ
     m.env = os.environ
@@ -558,9 +570,9 @@ def _set_variables_for_single_module(pkg, module):
     if sys.platform == "win32":
         m.nmake = Executable("nmake")
     # Standard CMake arguments
-    m.std_cmake_args = spack.build_systems.cmake.CMakePackage._std_args(pkg)
-    m.std_meson_args = spack.build_systems.meson.MesonPackage._std_args(pkg)
-    m.std_pip_args = spack.build_systems.python.PythonPackage._std_args(pkg)
+    m.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
+    m.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
+    m.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)

     # Put spack compiler paths in module scope.
     link_dir = spack.paths.build_env_path
@@ -727,38 +739,6 @@ def get_rpaths(pkg):
     return list(dedupe(filter_system_paths(rpaths)))


-def get_std_cmake_args(pkg):
-    """List of standard arguments used if a package is a CMakePackage.
-
-    Returns:
-        list: standard arguments that would be used if this
-        package were a CMakePackage instance.
-
-    Args:
-        pkg (spack.package_base.PackageBase): package under consideration
-
-    Returns:
-        list: arguments for cmake
-    """
-    return spack.build_systems.cmake.CMakePackage._std_args(pkg)
-
-
-def get_std_meson_args(pkg):
-    """List of standard arguments used if a package is a MesonPackage.
-
-    Returns:
-        list: standard arguments that would be used if this
-        package were a MesonPackage instance.
-
-    Args:
-        pkg (spack.package_base.PackageBase): package under consideration
-
-    Returns:
-        list: arguments for meson
-    """
-    return spack.build_systems.meson.MesonPackage._std_args(pkg)
-
-
 def parent_class_modules(cls):
     """
     Get list of superclass modules that descend from spack.package_base.PackageBase
@@ -819,7 +799,8 @@ def setup_package(pkg, dirty, context="build"):
     platform.setup_platform_environment(pkg, env_mods)

     if context == "build":
-        pkg.setup_build_environment(env_mods)
+        builder = spack.builder.create(pkg)
+        builder.setup_build_environment(env_mods)

         if (not dirty) and (not env_mods.is_unset("CPATH")):
             tty.debug(
@@ -1015,7 +996,8 @@ def add_modifications_for_dep(dep):
         module.__dict__.update(changes.__dict__)

     if context == "build":
-        dpkg.setup_dependent_build_environment(env, spec)
+        builder = spack.builder.create(dpkg)
+        builder.setup_dependent_build_environment(env, spec)
     else:
         dpkg.setup_dependent_run_environment(env, spec)
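These two hunks route environment setup through a builder object obtained from the package instead of calling the package directly. Stripped of Spack specifics, the dispatch idea is essentially a registry keyed by build-system name; everything below (names, classes, attributes) is illustrative, not Spack's actual API:

_BUILDERS = {}

def builder(name):
    """Class decorator that registers a builder under a build-system name."""
    def register(cls):
        _BUILDERS[name] = cls
        return cls
    return register

def create(pkg):
    """Instantiate the builder registered for the package's build system."""
    return _BUILDERS[pkg.build_system](pkg)

@builder("autotools")
class AutotoolsLikeBuilder:
    def __init__(self, pkg):
        self.pkg = pkg

    def setup_build_environment(self, env):
        env["FORCE_UNSAFE_CONFIGURE"] = "1"

class FakePkg:
    build_system = "autotools"

env = {}
create(FakePkg()).setup_build_environment(env)
print(env)  # {'FORCE_UNSAFE_CONFIGURE': '1'}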
@@ -1117,8 +1099,20 @@ def _setup_pkg_and_run(
                 pkg.test_suite.stage, spack.install_test.TestSuite.test_log_name(pkg.spec)
             )

+        error_msg = str(exc)
+        if isinstance(exc, (spack.multimethod.NoSuchMethodError, AttributeError)):
+            error_msg = (
+                "The '{}' package cannot find an attribute while trying to build "
+                "from sources. This might be due to a change in Spack's package format "
+                "to support multiple build-systems for a single package. You can fix this "
+                "by updating the build recipe, and you can also report the issue as a bug. "
+                "More information at https://spack.readthedocs.io/en/latest/packaging_guide.html#installation-procedure"
+            ).format(pkg.name)
+            error_msg = colorize("@*R{{{}}}".format(error_msg))
+            error_msg = "{}\n\n{}".format(str(exc), error_msg)
+
         # make a pickleable exception to send to parent.
-        msg = "%s: %s" % (exc_type.__name__, str(exc))
+        msg = "%s: %s" % (exc_type.__name__, error_msg)

         ce = ChildError(
             msg,
 lib/spack/spack/build_systems/_checks.py  (new file, 124 lines)
@@ -0,0 +1,124 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os

import six

import llnl.util.lang

import spack.builder
import spack.installer
import spack.relocate
import spack.store


def sanity_check_prefix(builder):
    """Check that specific directories and files are created after installation.

    The files to be checked are in the ``sanity_check_is_file`` attribute of the
    package object, while the directories are in the ``sanity_check_is_dir``.

    Args:
        builder (spack.builder.Builder): builder that installed the package
    """
    pkg = builder.pkg

    def check_paths(path_list, filetype, predicate):
        if isinstance(path_list, six.string_types):
            path_list = [path_list]

        for path in path_list:
            abs_path = os.path.join(pkg.prefix, path)
            if not predicate(abs_path):
                msg = "Install failed for {0}. No such {1} in prefix: {2}"
                msg = msg.format(pkg.name, filetype, path)
                raise spack.installer.InstallError(msg)

    check_paths(pkg.sanity_check_is_file, "file", os.path.isfile)
    check_paths(pkg.sanity_check_is_dir, "directory", os.path.isdir)

    ignore_file = llnl.util.lang.match_predicate(spack.store.layout.hidden_file_regexes)
    if all(map(ignore_file, os.listdir(pkg.prefix))):
        msg = "Install failed for {0}. Nothing was installed!"
        raise spack.installer.InstallError(msg.format(pkg.name))


def apply_macos_rpath_fixups(builder):
    """On Darwin, make installed libraries more easily relocatable.

    Some build systems (handrolled, autotools, makefiles) can set their own
    rpaths that are duplicated by spack's compiler wrapper. This fixup
    interrogates, and postprocesses if necessary, all libraries installed
    by the code.

    It should be added as a @run_after to packaging systems (or individual
    packages) that do not install relocatable libraries by default.

    Args:
        builder (spack.builder.Builder): builder that installed the package
    """
    spack.relocate.fixup_macos_rpaths(builder.spec)


def ensure_build_dependencies_or_raise(spec, dependencies, error_msg):
    """Ensure that some build dependencies are present in the concrete spec.

    If not, raise a RuntimeError with a helpful error message.

    Args:
        spec (spack.spec.Spec): concrete spec to be checked.
        dependencies (list of spack.spec.Spec): list of abstract specs to be satisfied
        error_msg (str): brief error message to be prepended to a longer description

    Raises:
        RuntimeError: when the required build dependencies are not found
    """
    assert spec.concrete, "Can ensure build dependencies only on concrete specs"
    build_deps = [d.name for d in spec.dependencies(deptype="build")]
    missing_deps = [x for x in dependencies if x not in build_deps]

    if not missing_deps:
        return

    # Raise an exception on missing deps.
    msg = (
        "{0}: missing dependencies: {1}.\n\nPlease add "
        "the following lines to the package:\n\n".format(error_msg, ", ".join(missing_deps))
    )

    for dep in missing_deps:
        msg += "    depends_on('{0}', type='build', when='@{1} {2}')\n".format(
            dep, spec.version, "build_system=autotools"
        )

    msg += "\nUpdate the version (when='@{0}') as needed.".format(spec.version)
    raise RuntimeError(msg)


def execute_build_time_tests(builder):
    """Execute the build-time tests prescribed by builder.

    Args:
        builder (Builder): builder prescribing the test callbacks. The name of the callbacks is
            stored as a list of strings in the ``build_time_test_callbacks`` attribute.
    """
    builder.pkg.run_test_callbacks(builder, builder.build_time_test_callbacks, "build")


def execute_install_time_tests(builder):
    """Execute the install-time tests prescribed by builder.

    Args:
        builder (Builder): builder prescribing the test callbacks. The name of the callbacks is
            stored as a list of strings in the ``install_time_test_callbacks`` attribute.
    """
    builder.pkg.run_test_callbacks(builder, builder.install_time_test_callbacks, "install")


class BaseBuilder(spack.builder.Builder):
    """Base class for builders to register common checks"""

    # Check that self.prefix is there after installation
    spack.builder.run_after("install")(sanity_check_prefix)
@@ -2,18 +2,36 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import llnl.util.filesystem as fs

-# Why doesn't this work for me?
-# from spack import *
-from llnl.util.filesystem import filter_file
+import spack.directives
+import spack.package_base
+import spack.util.executable

-from spack.build_systems.autotools import AutotoolsPackage
-from spack.directives import extends
-from spack.package_base import ExtensionError
-from spack.util.executable import which
+from .autotools import AutotoolsBuilder, AutotoolsPackage
+
+
+class AspellBuilder(AutotoolsBuilder):
+    """The Aspell builder is close enough to an autotools builder to allow
+    specializing the builder class, so to use variables that are specific
+    to the Aspell extensions.
+    """
+
+    def configure(self, pkg, spec, prefix):
+        aspell = spec["aspell"].prefix.bin.aspell
+        prezip = spec["aspell"].prefix.bin.prezip
+        destdir = prefix
+
+        sh = spack.util.executable.which("sh")
+        sh(
+            "./configure",
+            "--vars",
+            "ASPELL={0}".format(aspell),
+            "PREZIP={0}".format(prezip),
+            "DESTDIR={0}".format(destdir),
+        )
+

-#
 # Aspell dictionaries install their bits into their prefix.lib
 # and when activated they'll get symlinked into the appropriate aspell's
 # dict dir (see aspell's {de,}activate methods).
@@ -23,12 +41,17 @@
 class AspellDictPackage(AutotoolsPackage):
     """Specialized class for building aspell dictionairies."""

-    extends("aspell")
+    spack.directives.extends("aspell", when="build_system=autotools")
+
+    #: Override the default autotools builder
+    AutotoolsBuilder = AspellBuilder

     def view_destination(self, view):
         aspell_spec = self.spec["aspell"]
         if view.get_projection_for_spec(aspell_spec) != aspell_spec.prefix:
-            raise ExtensionError("aspell does not support non-global extensions")
+            raise spack.package_base.ExtensionError(
+                "aspell does not support non-global extensions"
+            )
         aspell = aspell_spec.command
         return aspell("dump", "config", "dict-dir", output=str).strip()

@@ -36,19 +59,5 @@ def view_source(self):
         return self.prefix.lib

     def patch(self):
-        filter_file(r"^dictdir=.*$", "dictdir=/lib", "configure")
-        filter_file(r"^datadir=.*$", "datadir=/lib", "configure")
-
-    def configure(self, spec, prefix):
-        aspell = spec["aspell"].prefix.bin.aspell
-        prezip = spec["aspell"].prefix.bin.prezip
-        destdir = prefix
-
-        sh = which("sh")
-        sh(
-            "./configure",
-            "--vars",
-            "ASPELL={0}".format(aspell),
-            "PREZIP={0}".format(prezip),
-            "DESTDIR={0}".format(destdir),
-        )
+        fs.filter_file(r"^dictdir=.*$", "dictdir=/lib", "configure")
+        fs.filter_file(r"^datadir=.*$", "datadir=/lib", "configure")
@@ -6,87 +6,140 @@
 import os
 import os.path
 import stat
-from subprocess import PIPE, check_call
+import subprocess
 from typing import List  # novm

 import llnl.util.filesystem as fs
 import llnl.util.tty as tty
-from llnl.util.filesystem import force_remove, working_dir

-from spack.build_environment import InstallError
-from spack.directives import conflicts, depends_on
+import spack.build_environment
+import spack.builder
+import spack.package_base
+from spack.directives import build_system, conflicts, depends_on
+from spack.multimethod import when
 from spack.operating_systems.mac_os import macos_version
-from spack.package_base import PackageBase, run_after, run_before
 from spack.util.executable import Executable
 from spack.version import Version

+from ._checks import (
+    BaseBuilder,
+    apply_macos_rpath_fixups,
+    ensure_build_dependencies_or_raise,
+    execute_build_time_tests,
+    execute_install_time_tests,
+)

-class AutotoolsPackage(PackageBase):
-    """Specialized class for packages built using GNU Autotools.
-
-    This class provides four phases that can be overridden:
-
-    1. :py:meth:`~.AutotoolsPackage.autoreconf`
-    2. :py:meth:`~.AutotoolsPackage.configure`
-    3. :py:meth:`~.AutotoolsPackage.build`
-    4. :py:meth:`~.AutotoolsPackage.install`
-
-    They all have sensible defaults and for many packages the only thing
-    necessary will be to override the helper method
-    :meth:`~spack.build_systems.autotools.AutotoolsPackage.configure_args`.
+
+class AutotoolsPackage(spack.package_base.PackageBase):
+    """Specialized class for packages built using GNU Autotools."""
+
+    #: This attribute is used in UI queries that need to know the build
+    #: system base class
+    build_system_class = "AutotoolsPackage"
+
+    #: Legacy buildsystem attribute used to deserialize and install old specs
+    legacy_buildsystem = "autotools"
+
+    build_system("autotools")
+
+    with when("build_system=autotools"):
+        depends_on("gnuconfig", type="build", when="target=ppc64le:")
+        depends_on("gnuconfig", type="build", when="target=aarch64:")
+        depends_on("gnuconfig", type="build", when="target=riscv64:")
+        conflicts("platform=windows")
+
+    def flags_to_build_system_args(self, flags):
+        """Produces a list of all command line arguments to pass specified
+        compiler flags to configure."""
+        # Has to be dynamic attribute due to caching.
+        setattr(self, "configure_flag_args", [])
+        for flag, values in flags.items():
+            if values:
+                values_str = "{0}={1}".format(flag.upper(), " ".join(values))
+                self.configure_flag_args.append(values_str)
+        # Spack's fflags are meant for both F77 and FC, therefore we
+        # additionaly set FCFLAGS if required.
+        values = flags.get("fflags", None)
+        if values:
+            values_str = "FCFLAGS={0}".format(" ".join(values))
+            self.configure_flag_args.append(values_str)
+
+    # Legacy methods (used by too many packages to change them,
+    # need to forward to the builder)
+    def enable_or_disable(self, *args, **kwargs):
+        return self.builder.enable_or_disable(*args, **kwargs)
+
+    def with_or_without(self, *args, **kwargs):
+        return self.builder.with_or_without(*args, **kwargs)
+
+
+@spack.builder.builder("autotools")
+class AutotoolsBuilder(BaseBuilder):
+    """The autotools builder encodes the default way of installing software built
+    with autotools. It has four phases that can be overridden, if need be:
+
+        1. :py:meth:`~.AutotoolsBuilder.autoreconf`
+        2. :py:meth:`~.AutotoolsBuilder.configure`
+        3. :py:meth:`~.AutotoolsBuilder.build`
+        4. :py:meth:`~.AutotoolsBuilder.install`
+
+    They all have sensible defaults and for many packages the only thing necessary
+    is to override the helper method
+    :meth:`~spack.build_systems.autotools.AutotoolsBuilder.configure_args`.

     For a finer tuning you may also override:

     +-----------------------------------------------+--------------------+
     | **Method**                                    | **Purpose**        |
     +===============================================+====================+
-    | :py:attr:`~.AutotoolsPackage.build_targets`   | Specify ``make``   |
+    | :py:attr:`~.AutotoolsBuilder.build_targets`   | Specify ``make``   |
     |                                               | targets for the    |
     |                                               | build phase        |
     +-----------------------------------------------+--------------------+
-    | :py:attr:`~.AutotoolsPackage.install_targets` | Specify ``make``   |
+    | :py:attr:`~.AutotoolsBuilder.install_targets` | Specify ``make``   |
     |                                               | targets for the    |
     |                                               | install phase      |
     +-----------------------------------------------+--------------------+
-    | :py:meth:`~.AutotoolsPackage.check`           | Run build time     |
+    | :py:meth:`~.AutotoolsBuilder.check`           | Run build time     |
     |                                               | tests if required  |
     +-----------------------------------------------+--------------------+

     """

     #: Phases of a GNU Autotools package
-    phases = ["autoreconf", "configure", "build", "install"]
-    #: This attribute is used in UI queries that need to know the build
-    #: system base class
-    build_system_class = "AutotoolsPackage"
+    phases = ("autoreconf", "configure", "build", "install")

-    @property
-    def patch_config_files(self):
-        """
-        Whether or not to update old ``config.guess`` and ``config.sub`` files
-        distributed with the tarball. This currently only applies to
-        ``ppc64le:``, ``aarch64:``, and ``riscv64`` target architectures. The
-        substitutes are taken from the ``gnuconfig`` package, which is
-        automatically added as a build dependency for these architectures. In
-        case system versions of these config files are required, the
-        ``gnuconfig`` package can be marked external with a prefix pointing to
-        the directory containing the system ``config.guess`` and ``config.sub``
-        files.
-        """
-        return (
-            self.spec.satisfies("target=ppc64le:")
-            or self.spec.satisfies("target=aarch64:")
-            or self.spec.satisfies("target=riscv64:")
-        )
+    #: Names associated with package methods in the old build-system format
+    legacy_methods = (
+        "configure_args",
+        "check",
+        "installcheck",
+    )

-    #: Whether or not to update ``libtool``
-    #: (currently only for Arm/Clang/Fujitsu/NVHPC compilers)
+    #: Names associated with package attributes in the old build-system format
+    legacy_attributes = (
+        "archive_files",
+        "patch_libtool",
+        "build_targets",
+        "install_targets",
+        "build_time_test_callbacks",
+        "install_time_test_callbacks",
+        "force_autoreconf",
+        "autoreconf_extra_args",
+        "install_libtool_archives",
+        "patch_config_files",
+        "configure_directory",
+        "configure_abs_path",
+        "build_directory",
+        "autoreconf_search_path_args",
+    )
+
+    #: Whether to update ``libtool`` (e.g. for Arm/Clang/Fujitsu/NVHPC compilers)
     patch_libtool = True

-    #: Targets for ``make`` during the :py:meth:`~.AutotoolsPackage.build`
-    #: phase
+    #: Targets for ``make`` during the :py:meth:`~.AutotoolsBuilder.build` phase
     build_targets = []  # type: List[str]
-    #: Targets for ``make`` during the :py:meth:`~.AutotoolsPackage.install`
-    #: phase
+    #: Targets for ``make`` during the :py:meth:`~.AutotoolsBuilder.install` phase
     install_targets = ["install"]

     #: Callback names for build-time test
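The flags_to_build_system_args hook added above turns Spack's flag dictionary into VAR=value assignments appended to the configure line. The same transformation, re-expressed as a standalone function so its effect is easy to see (function name is illustrative):

def flags_to_configure_args(flags):
    """Turn {"cflags": ["-O2", "-g"], "fflags": ["-fPIC"]} into configure-style args."""
    args = []
    for flag, values in flags.items():
        if values:
            args.append("{0}={1}".format(flag.upper(), " ".join(values)))
    # fflags are meant for both F77 and FC, so mirror them into FCFLAGS as well.
    if flags.get("fflags"):
        args.append("FCFLAGS={0}".format(" ".join(flags["fflags"])))
    return args

print(flags_to_configure_args({"cflags": ["-O2", "-g"], "fflags": ["-fPIC"]}))
# ['CFLAGS=-O2 -g', 'FFLAGS=-fPIC', 'FCFLAGS=-fPIC']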
@@ -97,24 +150,40 @@ def patch_config_files(self):

     #: Set to true to force the autoreconf step even if configure is present
     force_autoreconf = False

     #: Options to be passed to autoreconf when using the default implementation
     autoreconf_extra_args = []  # type: List[str]

-    #: If False deletes all the .la files in the prefix folder
-    #: after the installation. If True instead it installs them.
+    #: If False deletes all the .la files in the prefix folder after the installation.
+    #: If True instead it installs them.
     install_libtool_archives = False

-    depends_on("gnuconfig", type="build", when="target=ppc64le:")
-    depends_on("gnuconfig", type="build", when="target=aarch64:")
-    depends_on("gnuconfig", type="build", when="target=riscv64:")
-    conflicts("platform=windows")
+    @property
+    def patch_config_files(self):
+        """Whether to update old ``config.guess`` and ``config.sub`` files
+        distributed with the tarball.
+
+        This currently only applies to ``ppc64le:``, ``aarch64:``, and
+        ``riscv64`` target architectures.
+
+        The substitutes are taken from the ``gnuconfig`` package, which is
+        automatically added as a build dependency for these architectures. In case
+        system versions of these config files are required, the ``gnuconfig`` package
+        can be marked external, with a prefix pointing to the directory containing the
+        system ``config.guess`` and ``config.sub`` files.
+        """
+        return (
+            self.pkg.spec.satisfies("target=ppc64le:")
+            or self.pkg.spec.satisfies("target=aarch64:")
+            or self.pkg.spec.satisfies("target=riscv64:")
+        )

     @property
     def _removed_la_files_log(self):
-        """File containing the list of remove libtool archives"""
+        """File containing the list of removed libtool archives"""
         build_dir = self.build_directory
         if not os.path.isabs(self.build_directory):
-            build_dir = os.path.join(self.stage.path, build_dir)
+            build_dir = os.path.join(self.pkg.stage.path, build_dir)
         return os.path.join(build_dir, "removed_la_files.txt")

     @property
@@ -125,13 +194,13 @@ def archive_files(self):
             files.append(self._removed_la_files_log)
         return files

-    @run_after("autoreconf")
+    @spack.builder.run_after("autoreconf")
     def _do_patch_config_files(self):
-        """Some packages ship with older config.guess/config.sub files and
-        need to have these updated when installed on a newer architecture.
-        In particular, config.guess fails for PPC64LE for version prior
-        to a 2013-06-10 build date (automake 1.13.4) and for ARM (aarch64) and
-        RISC-V (riscv64).
+        """Some packages ship with older config.guess/config.sub files and need to
+        have these updated when installed on a newer architecture.
+
+        In particular, config.guess fails for PPC64LE for version prior to a
+        2013-06-10 build date (automake 1.13.4) and for AArch64 and RISC-V.
         """
         if not self.patch_config_files:
             return
@@ -139,11 +208,11 @@ def _do_patch_config_files(self):
         # TODO: Expand this to select the 'config.sub'-compatible architecture
         # for each platform (e.g. 'config.sub' doesn't accept 'power9le', but
         # does accept 'ppc64le').
-        if self.spec.satisfies("target=ppc64le:"):
+        if self.pkg.spec.satisfies("target=ppc64le:"):
             config_arch = "ppc64le"
-        elif self.spec.satisfies("target=aarch64:"):
+        elif self.pkg.spec.satisfies("target=aarch64:"):
             config_arch = "aarch64"
-        elif self.spec.satisfies("target=riscv64:"):
+        elif self.pkg.spec.satisfies("target=riscv64:"):
             config_arch = "riscv64"
         else:
             config_arch = "local"
@@ -155,7 +224,7 @@ def runs_ok(script_abs_path):
             args = [script_abs_path] + additional_args.get(script_name, [])

             try:
-                check_call(args, stdout=PIPE, stderr=PIPE)
+                subprocess.check_call(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
             except Exception as e:
                 tty.debug(e)
                 return False
@@ -163,7 +232,7 @@ def runs_ok(script_abs_path):
             return True

         # Get the list of files that needs to be patched
-        to_be_patched = fs.find(self.stage.path, files=["config.sub", "config.guess"])
+        to_be_patched = fs.find(self.pkg.stage.path, files=["config.sub", "config.guess"])
         to_be_patched = [f for f in to_be_patched if not runs_ok(f)]

         # If there are no files to be patched, return early
@@ -171,22 +240,21 @@ def runs_ok(script_abs_path):
             return

         # Otherwise, require `gnuconfig` to be a build dependency
-        self._require_build_deps(
-            pkgs=["gnuconfig"], spec=self.spec, err="Cannot patch config files"
+        ensure_build_dependencies_or_raise(
+            spec=self.pkg.spec, dependencies=["gnuconfig"], error_msg="Cannot patch config files"
         )

         # Get the config files we need to patch (config.sub / config.guess).
         to_be_found = list(set(os.path.basename(f) for f in to_be_patched))
-        gnuconfig = self.spec["gnuconfig"]
+        gnuconfig = self.pkg.spec["gnuconfig"]
         gnuconfig_dir = gnuconfig.prefix

         # An external gnuconfig may not not have a prefix.
         if gnuconfig_dir is None:
-            raise InstallError(
-                "Spack could not find substitutes for GNU config "
-                "files because no prefix is available for the "
-                "`gnuconfig` package. Make sure you set a prefix "
-                "path instead of modules for external `gnuconfig`."
+            raise spack.build_environment.InstallError(
+                "Spack could not find substitutes for GNU config files because no "
+                "prefix is available for the `gnuconfig` package. Make sure you set a "
+                "prefix path instead of modules for external `gnuconfig`."
             )

         candidates = fs.find(gnuconfig_dir, files=to_be_found, recursive=False)
@@ -203,7 +271,7 @@ def runs_ok(script_abs_path):
             msg += (
                 " or the `gnuconfig` package prefix is misconfigured as" " an external package"
             )
-            raise InstallError(msg)
+            raise spack.build_environment.InstallError(msg)

         # Filter working substitutes
         candidates = [f for f in candidates if runs_ok(f)]
@@ -228,7 +296,9 @@ def runs_ok(script_abs_path):
            and set the prefix to the directory containing the `config.guess` and
            `config.sub` files.
            """
-            raise InstallError(msg.format(", ".join(to_be_found), self.name))
+            raise spack.build_environment.InstallError(
+                msg.format(", ".join(to_be_found), self.name)
+            )

         # Copy the good files over the bad ones
         for abs_path in to_be_patched:
@@ -238,7 +308,7 @@ def runs_ok(script_abs_path):
|
|||||||
fs.copy(substitutes[name], abs_path)
|
fs.copy(substitutes[name], abs_path)
|
||||||
os.chmod(abs_path, mode)
|
os.chmod(abs_path, mode)
|
||||||
|
|
||||||
@run_before("configure")
|
@spack.builder.run_before("configure")
|
||||||
def _patch_usr_bin_file(self):
|
def _patch_usr_bin_file(self):
|
||||||
"""On NixOS file is not available in /usr/bin/file. Patch configure
|
"""On NixOS file is not available in /usr/bin/file. Patch configure
|
||||||
scripts to use file from path."""
|
scripts to use file from path."""
|
||||||
@@ -250,7 +320,7 @@ def _patch_usr_bin_file(self):
 with fs.keep_modification_time(*x.filenames):
 x.filter(regex="/usr/bin/file", repl="file", string=True)

-@run_before("configure")
+@spack.builder.run_before("configure")
 def _set_autotools_environment_variables(self):
 """Many autotools builds use a version of mknod.m4 that fails when
 running as root unless FORCE_UNSAFE_CONFIGURE is set to 1.
@@ -261,11 +331,10 @@ def _set_autotools_environment_variables(self):
 Without it, configure just fails halfway through, but it can
 still run things *before* this check. Forcing this just removes a
 nuisance -- this is not circumventing any real protection.

 """
 os.environ["FORCE_UNSAFE_CONFIGURE"] = "1"

-@run_before("configure")
+@spack.builder.run_before("configure")
 def _do_patch_libtool_configure(self):
 """Patch bugs that propagate from libtool macros into "configure" and
 further into "libtool". Note that patches that can be fixed by patching
@@ -293,7 +362,7 @@ def _do_patch_libtool_configure(self):
 # Support Libtool 2.4.2 and older:
 x.filter(regex=r'^(\s*test \$p = "-R")(; then\s*)$', repl=r'\1 || test x-l = x"$p"\2')

-@run_after("configure")
+@spack.builder.run_after("configure")
 def _do_patch_libtool(self):
 """If configure generates a "libtool" script that does not correctly
 detect the compiler (and patch_libtool is set), patch in the correct
@@ -328,31 +397,33 @@ def _do_patch_libtool(self):
 markers[tag] = "LIBTOOL TAG CONFIG: {0}".format(tag.upper())

 # Replace empty linker flag prefixes:
-if self.compiler.name == "nag":
+if self.pkg.compiler.name == "nag":
 # Nag is mixed with gcc and g++, which are recognized correctly.
 # Therefore, we change only Fortran values:
 for tag in ["fc", "f77"]:
 marker = markers[tag]
 x.filter(
 regex='^wl=""$',
-repl='wl="{0}"'.format(self.compiler.linker_arg),
+repl='wl="{0}"'.format(self.pkg.compiler.linker_arg),
 start_at="# ### BEGIN {0}".format(marker),
 stop_at="# ### END {0}".format(marker),
 )
 else:
-x.filter(regex='^wl=""$', repl='wl="{0}"'.format(self.compiler.linker_arg))
+x.filter(regex='^wl=""$', repl='wl="{0}"'.format(self.pkg.compiler.linker_arg))

 # Replace empty PIC flag values:
 for cc, marker in markers.items():
 x.filter(
 regex='^pic_flag=""$',
-repl='pic_flag="{0}"'.format(getattr(self.compiler, "{0}_pic_flag".format(cc))),
+repl='pic_flag="{0}"'.format(
+getattr(self.pkg.compiler, "{0}_pic_flag".format(cc))
+),
 start_at="# ### BEGIN {0}".format(marker),
 stop_at="# ### END {0}".format(marker),
 )

 # Other compiler-specific patches:
-if self.compiler.name == "fj":
+if self.pkg.compiler.name == "fj":
 x.filter(regex="-nostdlib", repl="", string=True)
 rehead = r"/\S*/"
 for o in [
@@ -365,12 +436,12 @@ def _do_patch_libtool(self):
 "crtendS.o",
 ]:
 x.filter(regex=(rehead + o), repl="", string=True)
-elif self.compiler.name == "dpcpp":
+elif self.pkg.compiler.name == "dpcpp":
 # Hack to filter out spurious predep_objects when building with Intel dpcpp
 # (see https://github.com/spack/spack/issues/32863):
 x.filter(regex=r"^(predep_objects=.*)/tmp/conftest-[0-9A-Fa-f]+\.o", repl=r"\1")
 x.filter(regex=r"^(predep_objects=.*)/tmp/a-[0-9A-Fa-f]+\.o", repl=r"\1")
-elif self.compiler.name == "nag":
+elif self.pkg.compiler.name == "nag":
 for tag in ["fc", "f77"]:
 marker = markers[tag]
 start_at = "# ### BEGIN {0}".format(marker)
@@ -446,11 +517,8 @@ def _do_patch_libtool(self):

 @property
 def configure_directory(self):
-"""Returns the directory where 'configure' resides.
-
-:return: directory where to find configure
-"""
-return self.stage.source_path
+"""Return the directory where 'configure' resides."""
+return self.pkg.stage.source_path

 @property
 def configure_abs_path(self):
@@ -463,34 +531,12 @@ def build_directory(self):
 """Override to provide another place to build the package"""
 return self.configure_directory

-@run_before("autoreconf")
+@spack.builder.run_before("autoreconf")
 def delete_configure_to_force_update(self):
 if self.force_autoreconf:
-force_remove(self.configure_abs_path)
+fs.force_remove(self.configure_abs_path)

-def _require_build_deps(self, pkgs, spec, err):
-"""Require `pkgs` to be direct build dependencies of `spec`. Raises a
-RuntimeError with a helpful error messages when any dep is missing."""
-
-build_deps = [d.name for d in spec.dependencies(deptype="build")]
-missing_deps = [x for x in pkgs if x not in build_deps]
-
-if not missing_deps:
-return
-
-# Raise an exception on missing deps.
-msg = (
-"{0}: missing dependencies: {1}.\n\nPlease add "
-"the following lines to the package:\n\n".format(err, ", ".join(missing_deps))
-)
-
-for dep in missing_deps:
-msg += " depends_on('{0}', type='build', when='@{1}')\n".format(dep, spec.version)
-
-msg += "\nUpdate the version (when='@{0}') as needed.".format(spec.version)
-raise RuntimeError(msg)
-
-def autoreconf(self, spec, prefix):
+def autoreconf(self, pkg, spec, prefix):
 """Not needed usually, configure should be already there"""

 # If configure exists nothing needs to be done
@@ -498,8 +544,10 @@ def autoreconf(self, spec, prefix):
 return

 # Else try to regenerate it, which reuquires a few build dependencies
-self._require_build_deps(
-pkgs=["autoconf", "automake", "libtool"], spec=spec, err="Cannot generate configure"
+ensure_build_dependencies_or_raise(
+spec=spec,
+dependencies=["autoconf", "automake", "libtool"],
+error_msg="Cannot generate configure",
 )

 tty.msg("Configure script not found: trying to generate it")
@@ -507,8 +555,8 @@ def autoreconf(self, spec, prefix):
 tty.warn("* If the default procedure fails, consider implementing *")
 tty.warn("* a custom AUTORECONF phase in the package *")
 tty.warn("*********************************************************")
-with working_dir(self.configure_directory):
-m = inspect.getmodule(self)
+with fs.working_dir(self.configure_directory):
+m = inspect.getmodule(self.pkg)
 # This line is what is needed most of the time
 # --install, --verbose, --force
 autoreconf_args = ["-ivf"]
@@ -524,98 +572,66 @@ def autoreconf_search_path_args(self):
 spack dependencies."""
 return _autoreconf_search_path_args(self.spec)

-@run_after("autoreconf")
+@spack.builder.run_after("autoreconf")
 def set_configure_or_die(self):
-"""Checks the presence of a ``configure`` file after the
-autoreconf phase. If it is found sets a module attribute
-appropriately, otherwise raises an error.
-
-:raises RuntimeError: if a configure script is not found in
-:py:meth:`~AutotoolsPackage.configure_directory`
+"""Ensure the presence of a "configure" script, or raise. If the "configure"
+is found, a module level attribute is set.
+
+Raises:
+RuntimeError: if the "configure" script is not found
 """
-# Check if a configure script is there. If not raise a RuntimeError.
+# Check if the "configure" script is there. If not raise a RuntimeError.
 if not os.path.exists(self.configure_abs_path):
 msg = "configure script not found in {0}"
 raise RuntimeError(msg.format(self.configure_directory))

 # Monkey-patch the configure script in the corresponding module
-inspect.getmodule(self).configure = Executable(self.configure_abs_path)
+inspect.getmodule(self.pkg).configure = Executable(self.configure_abs_path)

 def configure_args(self):
-"""Produces a list containing all the arguments that must be passed to
-configure, except ``--prefix`` which will be pre-pended to the list.
+"""Return the list of all the arguments that must be passed to configure,
+except ``--prefix`` which will be pre-pended to the list.

-:return: list of arguments for configure
 """
 return []

-def flags_to_build_system_args(self, flags):
-"""Produces a list of all command line arguments to pass specified
-compiler flags to configure."""
-# Has to be dynamic attribute due to caching.
-setattr(self, "configure_flag_args", [])
-for flag, values in flags.items():
-if values:
-values_str = "{0}={1}".format(flag.upper(), " ".join(values))
-self.configure_flag_args.append(values_str)
-# Spack's fflags are meant for both F77 and FC, therefore we
-# additionaly set FCFLAGS if required.
-values = flags.get("fflags", None)
-if values:
-values_str = "FCFLAGS={0}".format(" ".join(values))
-self.configure_flag_args.append(values_str)
-
-def configure(self, spec, prefix):
-"""Runs configure with the arguments specified in
-:meth:`~spack.build_systems.autotools.AutotoolsPackage.configure_args`
-and an appropriately set prefix.
+def configure(self, pkg, spec, prefix):
+"""Run "configure", with the arguments specified by the builder and an
+appropriately set prefix.
 """
-options = getattr(self, "configure_flag_args", [])
+options = getattr(self.pkg, "configure_flag_args", [])
 options += ["--prefix={0}".format(prefix)]
 options += self.configure_args()

-with working_dir(self.build_directory, create=True):
-inspect.getmodule(self).configure(*options)
+with fs.working_dir(self.build_directory, create=True):
+inspect.getmodule(self.pkg).configure(*options)

-def setup_build_environment(self, env):
-if self.spec.platform == "darwin" and macos_version() >= Version("11"):
-# Many configure files rely on matching '10.*' for macOS version
-# detection and fail to add flags if it shows as version 11.
-env.set("MACOSX_DEPLOYMENT_TARGET", "10.16")
-
-def build(self, spec, prefix):
-"""Makes the build targets specified by
-:py:attr:``~.AutotoolsPackage.build_targets``
-"""
+def build(self, pkg, spec, prefix):
+"""Run "make" on the build targets specified by the builder."""
 # See https://autotools.io/automake/silent.html
 params = ["V=1"]
 params += self.build_targets
-with working_dir(self.build_directory):
-inspect.getmodule(self).make(*params)
+with fs.working_dir(self.build_directory):
+inspect.getmodule(self.pkg).make(*params)

-def install(self, spec, prefix):
-"""Makes the install targets specified by
-:py:attr:``~.AutotoolsPackage.install_targets``
-"""
-with working_dir(self.build_directory):
-inspect.getmodule(self).make(*self.install_targets)
+def install(self, pkg, spec, prefix):
+"""Run "make" on the install targets specified by the builder."""
+with fs.working_dir(self.build_directory):
+inspect.getmodule(self.pkg).make(*self.install_targets)

-run_after("build")(PackageBase._run_default_build_time_test_callbacks)
+spack.builder.run_after("build")(execute_build_time_tests)

 def check(self):
-"""Searches the Makefile for targets ``test`` and ``check``
-and runs them if found.
-"""
-with working_dir(self.build_directory):
-self._if_make_target_execute("test")
-self._if_make_target_execute("check")
+"""Run "make" on the ``test`` and ``check`` targets, if found."""
+with fs.working_dir(self.build_directory):
+self.pkg._if_make_target_execute("test")
+self.pkg._if_make_target_execute("check")

 def _activate_or_not(
 self, name, activation_word, deactivation_word, activation_value=None, variant=None
 ):
-"""This function contains the current implementation details of
-:meth:`~spack.build_systems.autotools.AutotoolsPackage.with_or_without` and
-:meth:`~spack.build_systems.autotools.AutotoolsPackage.enable_or_disable`.
+"""This function contain the current implementation details of
+:meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without` and
+:meth:`~spack.build_systems.autotools.AutotoolsBuilder.enable_or_disable`.

 Args:
 name (str): name of the option that is being activated or not
@@ -671,7 +687,7 @@ def _activate_or_not(
 Raises:
 KeyError: if name is not among known variants
 """
-spec = self.spec
+spec = self.pkg.spec
 args = []

 if activation_value == "prefix":
@@ -681,16 +697,16 @@ def _activate_or_not(

 # Defensively look that the name passed as argument is among
 # variants
-if variant not in self.variants:
+if variant not in self.pkg.variants:
 msg = '"{0}" is not a variant of "{1}"'
-raise KeyError(msg.format(variant, self.name))
+raise KeyError(msg.format(variant, self.pkg.name))

 if variant not in spec.variants:
 return []

 # Create a list of pairs. Each pair includes a configuration
 # option and whether or not that option is activated
-variant_desc, _ = self.variants[variant]
+variant_desc, _ = self.pkg.variants[variant]
 if set(variant_desc.values) == set((True, False)):
 # BoolValuedVariant carry information about a single option.
 # Nonetheless, for uniformity of treatment we'll package them
@@ -718,14 +734,18 @@ def _activate_or_not(
 override_name = "{0}_or_{1}_{2}".format(
 activation_word, deactivation_word, option_value
 )
-line_generator = getattr(self, override_name, None)
+line_generator = getattr(self, override_name, None) or getattr(
+self.pkg, override_name, None
+)
 # If not available use a sensible default
 if line_generator is None:

 def _default_generator(is_activated):
 if is_activated:
 line = "--{0}-{1}".format(activation_word, option_value)
-if activation_value is not None and activation_value(option_value):
+if activation_value is not None and activation_value(
+option_value
+): # NOQA=ignore=E501
 line += "={0}".format(activation_value(option_value))
 return line
 return "--{0}-{1}".format(deactivation_word, option_value)
@@ -764,7 +784,7 @@ def with_or_without(self, name, activation_value=None, variant=None):

 def enable_or_disable(self, name, activation_value=None, variant=None):
 """Same as
-:meth:`~spack.build_systems.autotools.AutotoolsPackage.with_or_without`
+:meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without`
 but substitute ``with`` with ``enable`` and ``without`` with ``disable``.

 Args:
@@ -781,19 +801,14 @@ def enable_or_disable(self, name, activation_value=None, variant=None):
 """
 return self._activate_or_not(name, "enable", "disable", activation_value, variant)

-run_after("install")(PackageBase._run_default_install_time_test_callbacks)
+spack.builder.run_after("install")(execute_install_time_tests)

 def installcheck(self):
-"""Searches the Makefile for an ``installcheck`` target
-and runs it if found.
-"""
-with working_dir(self.build_directory):
-self._if_make_target_execute("installcheck")
+"""Run "make" on the ``installcheck`` target, if found."""
+with fs.working_dir(self.build_directory):
+self.pkg._if_make_target_execute("installcheck")

-# Check that self.prefix is there after installation
-run_after("install")(PackageBase.sanity_check_prefix)
-
-@run_after("install")
+@spack.builder.run_after("install")
 def remove_libtool_archives(self):
 """Remove all .la files in prefix sub-folders if the package sets
 ``install_libtool_archives`` to be False.
@@ -803,14 +818,20 @@ def remove_libtool_archives(self):
 return

 # Remove the files and create a log of what was removed
-libtool_files = fs.find(str(self.prefix), "*.la", recursive=True)
+libtool_files = fs.find(str(self.pkg.prefix), "*.la", recursive=True)
 with fs.safe_remove(*libtool_files):
 fs.mkdirp(os.path.dirname(self._removed_la_files_log))
 with open(self._removed_la_files_log, mode="w") as f:
 f.write("\n".join(libtool_files))

+def setup_build_environment(self, env):
+if self.spec.platform == "darwin" and macos_version() >= Version("11"):
+# Many configure files rely on matching '10.*' for macOS version
+# detection and fail to add flags if it shows as version 11.
+env.set("MACOSX_DEPLOYMENT_TARGET", "10.16")
+
 # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
-run_after("install")(PackageBase.apply_macos_rpath_fixups)
+spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)


 def _autoreconf_search_path_args(spec):
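For orientation, a package built on this build system still only overrides hooks such as configure_args(); the snippet below is an illustrative sketch, not part of this changeset (the Example package and its variants are hypothetical, and it assumes the usual `from spack.package import *` packaging API):

    from spack.package import *


    class Example(AutotoolsPackage):
        """Hypothetical package illustrating the AutotoolsPackage interface."""

        variant("shared", default=True, description="Build shared libraries")
        variant("mpi", default=False, description="Enable MPI support")

        def configure_args(self):
            # enable_or_disable/with_or_without read the variant's value from
            # the spec and emit --enable-X/--disable-X or --with-X/--without-X.
            args = self.enable_or_disable("shared")
            args += self.with_or_without("mpi")
            return args

With `+shared ~mpi` in the spec this would pass `--enable-shared --without-mpi` to configure, with `--prefix` prepended automatically by the configure phase above.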
lib/spack/spack/build_systems/bundle.py (new file, 31 lines)
@@ -0,0 +1,31 @@
+# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import spack.builder
+import spack.directives
+import spack.package_base
+
+
+class BundlePackage(spack.package_base.PackageBase):
+"""General purpose bundle, or no-code, package class."""
+
+#: This attribute is used in UI queries that require to know which
+#: build-system class we are using
+build_system_class = "BundlePackage"
+
+#: Legacy buildsystem attribute used to deserialize and install old specs
+legacy_buildsystem = "bundle"
+
+#: Bundle packages do not have associated source or binary code.
+has_code = False
+
+spack.directives.build_system("bundle")
+
+
+@spack.builder.builder("bundle")
+class BundleBuilder(spack.builder.Builder):
+phases = ("install",)
+
+def install(self, pkg, spec, prefix):
+pass
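A minimal consumer of the new BundlePackage class might look like the following sketch (the Mytoolchain package is hypothetical; bundle packages carry no source and only aggregate dependencies):

    from spack.package import *


    class Mytoolchain(BundlePackage):
        """Hypothetical meta-package that only pulls in its dependencies."""

        version("1.0")

        # No source and no build steps: only run-time dependencies are grouped.
        depends_on("cmake", type="run")
        depends_on("ninja", type="run")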
@@ -3,12 +3,14 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
+from typing import Tuple

+import llnl.util.filesystem as fs
 import llnl.util.tty as tty
-from llnl.util.filesystem import install, mkdirp

-from spack.build_systems.cmake import CMakePackage
-from spack.package_base import run_after
+import spack.builder
+from .cmake import CMakeBuilder, CMakePackage


 def cmake_cache_path(name, value, comment=""):
@@ -28,44 +30,50 @@ def cmake_cache_option(name, boolean_value, comment=""):
 return 'set({0} {1} CACHE BOOL "{2}")\n'.format(name, value, comment)


-class CachedCMakePackage(CMakePackage):
-"""Specialized class for packages built using CMake initial cache.
-
-This feature of CMake allows packages to increase reproducibility,
-especially between Spack- and manual builds. It also allows packages to
-sidestep certain parsing bugs in extremely long ``cmake`` commands, and to
-avoid system limits on the length of the command line."""
-
-phases = ["initconfig", "cmake", "build", "install"]
+class CachedCMakeBuilder(CMakeBuilder):
+
+#: Phases of a Cached CMake package
+#: Note: the initconfig phase is used for developer builds as a final phase to stop on
+phases = ("initconfig", "cmake", "build", "install") # type: Tuple[str, ...]
+
+#: Names associated with package methods in the old build-system format
+legacy_methods = CMakeBuilder.legacy_methods + (
+"initconfig_compiler_entries",
+"initconfig_mpi_entries",
+"initconfig_hardware_entries",
+"std_initconfig_entries",
+"initconfig_package_entries",
+) # type: Tuple[str, ...]
+
+#: Names associated with package attributes in the old build-system format
+legacy_attributes = CMakeBuilder.legacy_attributes + (
+"cache_name",
+"cache_path",
+) # type: Tuple[str, ...]

 @property
 def cache_name(self):
 return "{0}-{1}-{2}@{3}.cmake".format(
-self.name,
-self.spec.architecture,
-self.spec.compiler.name,
-self.spec.compiler.version,
+self.pkg.name,
+self.pkg.spec.architecture,
+self.pkg.spec.compiler.name,
+self.pkg.spec.compiler.version,
 )

 @property
 def cache_path(self):
-return os.path.join(self.stage.source_path, self.cache_name)
+return os.path.join(self.pkg.stage.source_path, self.cache_name)

-def flag_handler(self, name, flags):
-if name in ("cflags", "cxxflags", "cppflags", "fflags"):
-return (None, None, None) # handled in the cmake cache
-return (flags, None, None)
-
 def initconfig_compiler_entries(self):
 # This will tell cmake to use the Spack compiler wrappers when run
 # through Spack, but use the underlying compiler when run outside of
 # Spack
-spec = self.spec
+spec = self.pkg.spec

 # Fortran compiler is optional
 if "FC" in os.environ:
 spack_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", os.environ["FC"])
-system_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", self.compiler.fc)
+system_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", self.pkg.compiler.fc)
 else:
 spack_fc_entry = "# No Fortran compiler defined in spec"
 system_fc_entry = "# No Fortran compiler defined in spec"
@@ -81,8 +89,8 @@ def initconfig_compiler_entries(self):
 " " + cmake_cache_path("CMAKE_CXX_COMPILER", os.environ["CXX"]),
 " " + spack_fc_entry,
 "else()\n",
-" " + cmake_cache_path("CMAKE_C_COMPILER", self.compiler.cc),
-" " + cmake_cache_path("CMAKE_CXX_COMPILER", self.compiler.cxx),
+" " + cmake_cache_path("CMAKE_C_COMPILER", self.pkg.compiler.cc),
+" " + cmake_cache_path("CMAKE_CXX_COMPILER", self.pkg.compiler.cxx),
 " " + system_fc_entry,
 "endif()\n",
 ]
@@ -126,7 +134,7 @@ def initconfig_compiler_entries(self):
 return entries

 def initconfig_mpi_entries(self):
-spec = self.spec
+spec = self.pkg.spec

 if not spec.satisfies("^mpi"):
 return []
@@ -160,13 +168,13 @@ def initconfig_mpi_entries(self):
 mpiexec = os.path.join(spec["mpi"].prefix.bin, "mpiexec")

 if not os.path.exists(mpiexec):
-msg = "Unable to determine MPIEXEC, %s tests may fail" % self.name
+msg = "Unable to determine MPIEXEC, %s tests may fail" % self.pkg.name
 entries.append("# {0}\n".format(msg))
 tty.warn(msg)
 else:
 # starting with cmake 3.10, FindMPI expects MPIEXEC_EXECUTABLE
 # vs the older versions which expect MPIEXEC
-if self.spec["cmake"].satisfies("@3.10:"):
+if self.pkg.spec["cmake"].satisfies("@3.10:"):
 entries.append(cmake_cache_path("MPIEXEC_EXECUTABLE", mpiexec))
 else:
 entries.append(cmake_cache_path("MPIEXEC", mpiexec))
@@ -180,7 +188,7 @@ def initconfig_mpi_entries(self):
 return entries

 def initconfig_hardware_entries(self):
-spec = self.spec
+spec = self.pkg.spec

 entries = [
 "#------------------{0}".format("-" * 60),
@@ -212,7 +220,7 @@ def std_initconfig_entries(self):
 "#------------------{0}".format("-" * 60),
 "# !!!! This is a generated file, edit at own risk !!!!",
 "#------------------{0}".format("-" * 60),
-"# CMake executable path: {0}".format(self.spec["cmake"].command.path),
+"# CMake executable path: {0}".format(self.pkg.spec["cmake"].command.path),
 "#------------------{0}\n".format("-" * 60),
 ]

@@ -220,7 +228,7 @@ def initconfig_package_entries(self):
 """This method is to be overwritten by the package"""
 return []

-def initconfig(self, spec, prefix):
+def initconfig(self, pkg, spec, prefix):
 cache_entries = (
 self.std_initconfig_entries()
 + self.initconfig_compiler_entries()
@@ -236,11 +244,28 @@ def initconfig(self, spec, prefix):

 @property
 def std_cmake_args(self):
-args = super(CachedCMakePackage, self).std_cmake_args
+args = super(CachedCMakeBuilder, self).std_cmake_args
 args.extend(["-C", self.cache_path])
 return args

-@run_after("install")
+@spack.builder.run_after("install")
 def install_cmake_cache(self):
-mkdirp(self.spec.prefix.share.cmake)
-install(self.cache_path, self.spec.prefix.share.cmake)
+fs.mkdirp(self.pkg.spec.prefix.share.cmake)
+fs.install(self.cache_path, self.pkg.spec.prefix.share.cmake)
+
+
+class CachedCMakePackage(CMakePackage):
+"""Specialized class for packages built using CMake initial cache.
+
+This feature of CMake allows packages to increase reproducibility,
+especially between Spack- and manual builds. It also allows packages to
+sidestep certain parsing bugs in extremely long ``cmake`` commands, and to
+avoid system limits on the length of the command line.
+"""
+
+CMakeBuilder = CachedCMakeBuilder
+
+def flag_handler(self, name, flags):
+if name in ("cflags", "cxxflags", "cppflags", "fflags"):
+return None, None, None # handled in the cmake cache
+return flags, None, None
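To see how the split is meant to be consumed, a CachedCMakePackage subclass keeps overriding the initconfig hooks exactly as before. The package below is a hypothetical sketch (the Example name, variant, and cache entries are illustrative only), assuming the conventional `from spack.package import *` API re-exports the cmake_cache_* helpers defined in this module:

    from spack.package import *


    class Example(CachedCMakePackage):
        """Hypothetical package driven by a CMake initial-cache file."""

        variant("openmp", default=True, description="Enable OpenMP")

        def initconfig_package_entries(self):
            # Extra set(... CACHE ...) lines appended to the generated
            # <name>-<arch>-<compiler>@<version>.cmake cache file.
            return [
                cmake_cache_option("ENABLE_OPENMP", self.spec.satisfies("+openmp")),
                cmake_cache_path("EXAMPLE_DATA_DIR", self.prefix.share),
            ]

The generated cache file is then passed to cmake via `-C <cache_path>` by std_cmake_args, and a copy is installed to the prefix by install_cmake_cache.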
@@ -2,23 +2,26 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)


 import inspect
 import os
 import platform
 import re
 import sys
-from typing import List
+from typing import List, Tuple

 import six

+import llnl.util.filesystem as fs
 from llnl.util.compat import Sequence
-from llnl.util.filesystem import working_dir

 import spack.build_environment
-from spack.directives import conflicts, depends_on, variant
-from spack.package_base import InstallError, PackageBase, run_after
+import spack.builder
+import spack.package_base
+import spack.util.path
+from spack.directives import build_system, depends_on, variant
+from spack.multimethod import when

+from ._checks import BaseBuilder, execute_build_time_tests

 # Regex to extract the primary generator from the CMake generator
 # string.
@@ -34,56 +37,141 @@ def _extract_primary_generator(generator):
 return primary_generator


-class CMakePackage(PackageBase):
+class CMakePackage(spack.package_base.PackageBase):
 """Specialized class for packages built using CMake

 For more information on the CMake build system, see:
 https://cmake.org/cmake/help/latest/
+"""

-This class provides three phases that can be overridden:
+#: This attribute is used in UI queries that need to know the build
+#: system base class
+build_system_class = "CMakePackage"

-1. :py:meth:`~.CMakePackage.cmake`
-2. :py:meth:`~.CMakePackage.build`
-3. :py:meth:`~.CMakePackage.install`
+#: Legacy buildsystem attribute used to deserialize and install old specs
+legacy_buildsystem = "cmake"

+build_system("cmake")
+
+with when("build_system=cmake"):
+# https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
+variant(
+"build_type",
+default="RelWithDebInfo",
+description="CMake build type",
+values=("Debug", "Release", "RelWithDebInfo", "MinSizeRel"),
+)
+# CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9
+# https://cmake.org/cmake/help/latest/variable/CMAKE_INTERPROCEDURAL_OPTIMIZATION.html
+variant(
+"ipo",
+default=False,
+when="^cmake@3.9:",
+description="CMake interprocedural optimization",
+)
+depends_on("cmake", type="build")
+depends_on("ninja", type="build", when="platform=windows")
+
+def flags_to_build_system_args(self, flags):
+"""Return a list of all command line arguments to pass the specified
+compiler flags to cmake. Note CMAKE does not have a cppflags option,
+so cppflags will be added to cflags, cxxflags, and fflags to mimic the
+behavior in other tools.
+"""
+# Has to be dynamic attribute due to caching
+setattr(self, "cmake_flag_args", [])
+
+flag_string = "-DCMAKE_{0}_FLAGS={1}"
+langs = {"C": "c", "CXX": "cxx", "Fortran": "f"}
+
+# Handle language compiler flags
+for lang, pre in langs.items():
+flag = pre + "flags"
+# cmake has no explicit cppflags support -> add it to all langs
+lang_flags = " ".join(flags.get(flag, []) + flags.get("cppflags", []))
+if lang_flags:
+self.cmake_flag_args.append(flag_string.format(lang, lang_flags))
+
+# Cmake has different linker arguments for different build types.
+# We specify for each of them.
+if flags["ldflags"]:
+ldflags = " ".join(flags["ldflags"])
+ld_string = "-DCMAKE_{0}_LINKER_FLAGS={1}"
+# cmake has separate linker arguments for types of builds.
+for type in ["EXE", "MODULE", "SHARED", "STATIC"]:
+self.cmake_flag_args.append(ld_string.format(type, ldflags))
+
+# CMake has libs options separated by language. Apply ours to each.
+if flags["ldlibs"]:
+libs_flags = " ".join(flags["ldlibs"])
+libs_string = "-DCMAKE_{0}_STANDARD_LIBRARIES={1}"
+for lang in langs:
+self.cmake_flag_args.append(libs_string.format(lang, libs_flags))
+
+# Legacy methods (used by too many packages to change them,
+# need to forward to the builder)
+def define(self, *args, **kwargs):
+return self.builder.define(*args, **kwargs)
+
+def define_from_variant(self, *args, **kwargs):
+return self.builder.define_from_variant(*args, **kwargs)
+
+
+@spack.builder.builder("cmake")
+class CMakeBuilder(BaseBuilder):
+"""The cmake builder encodes the default way of building software with CMake. IT
+has three phases that can be overridden:
+
+1. :py:meth:`~.CMakeBuilder.cmake`
+2. :py:meth:`~.CMakeBuilder.build`
+3. :py:meth:`~.CMakeBuilder.install`

 They all have sensible defaults and for many packages the only thing
-necessary will be to override :py:meth:`~.CMakePackage.cmake_args`.
+necessary will be to override :py:meth:`~.CMakeBuilder.cmake_args`.

 For a finer tuning you may also override:

 +-----------------------------------------------+--------------------+
 | **Method** | **Purpose** |
 +===============================================+====================+
-| :py:meth:`~.CMakePackage.root_cmakelists_dir` | Location of the |
+| :py:meth:`~.CMakeBuilder.root_cmakelists_dir` | Location of the |
 | | root CMakeLists.txt|
 +-----------------------------------------------+--------------------+
-| :py:meth:`~.CMakePackage.build_directory` | Directory where to |
+| :py:meth:`~.CMakeBuilder.build_directory` | Directory where to |
 | | build the package |
 +-----------------------------------------------+--------------------+

-The generator used by CMake can be specified by providing the
-generator attribute. Per
+The generator used by CMake can be specified by providing the ``generator``
+attribute. Per
 https://cmake.org/cmake/help/git-master/manual/cmake-generators.7.html,
-the format is: [<secondary-generator> - ]<primary_generator>. The
-full list of primary and secondary generators supported by CMake may
-be found in the documentation for the version of CMake used;
-however, at this time Spack supports only the primary generators
-"Unix Makefiles" and "Ninja." Spack's CMake support is agnostic with
-respect to primary generators. Spack will generate a runtime error
-if the generator string does not follow the prescribed format, or if
+the format is: [<secondary-generator> - ]<primary_generator>.
+
+The full list of primary and secondary generators supported by CMake may be found
+in the documentation for the version of CMake used; however, at this time Spack
+supports only the primary generators "Unix Makefiles" and "Ninja." Spack's CMake
+support is agnostic with respect to primary generators. Spack will generate a
+runtime error if the generator string does not follow the prescribed format, or if
 the primary generator is not supported.
 """

 #: Phases of a CMake package
-phases = ["cmake", "build", "install"]
-#: This attribute is used in UI queries that need to know the build
-#: system base class
-build_system_class = "CMakePackage"
+phases = ("cmake", "build", "install") # type: Tuple[str, ...]

-build_targets = [] # type: List[str]
-install_targets = ["install"]
+#: Names associated with package methods in the old build-system format
+legacy_methods = ("cmake_args", "check") # type: Tuple[str, ...]

-build_time_test_callbacks = ["check"]
+#: Names associated with package attributes in the old build-system format
+legacy_attributes = (
+"generator",
+"build_targets",
+"install_targets",
+"build_time_test_callbacks",
+"archive_files",
+"root_cmakelists_dir",
+"std_cmake_args",
+"build_dirname",
+"build_directory",
+) # type: Tuple[str, ...]

 #: The build system generator to use.
 #:
@@ -93,27 +181,14 @@ class CMakePackage(PackageBase):
 #:
 #: See https://cmake.org/cmake/help/latest/manual/cmake-generators.7.html
 #: for more information.
+generator = "Ninja" if sys.platform == "win32" else "Unix Makefiles"

-generator = "Unix Makefiles"
-if sys.platform == "win32":
-generator = "Ninja"
-depends_on("ninja")
-
-# https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
-variant(
-"build_type",
-default="RelWithDebInfo",
-description="CMake build type",
-values=("Debug", "Release", "RelWithDebInfo", "MinSizeRel"),
-)
-
-# https://cmake.org/cmake/help/latest/variable/CMAKE_INTERPROCEDURAL_OPTIMIZATION.html
-variant("ipo", default=False, description="CMake interprocedural optimization")
-# CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9
-conflicts("+ipo", when="^cmake@:3.8", msg="+ipo is not supported by CMake < 3.9")
-
-depends_on("cmake", type="build")
+#: Targets to be used during the build phase
+build_targets = [] # type: List[str]
+#: Targets to be used during the install phase
+install_targets = ["install"]
+#: Callback names for build-time test
+build_time_test_callbacks = ["check"]

 @property
 def archive_files(self):
@@ -126,40 +201,30 @@ def root_cmakelists_dir(self):

 This path is relative to the root of the extracted tarball,
 not to the ``build_directory``. Defaults to the current directory.

-:return: directory containing CMakeLists.txt
 """
-return self.stage.source_path
+return self.pkg.stage.source_path

 @property
 def std_cmake_args(self):
 """Standard cmake arguments provided as a property for
 convenience of package writers

-:return: standard cmake arguments
 """
 # standard CMake arguments
-std_cmake_args = CMakePackage._std_args(self)
-std_cmake_args += getattr(self, "cmake_flag_args", [])
+std_cmake_args = CMakeBuilder.std_args(self.pkg, generator=self.generator)
+std_cmake_args += getattr(self.pkg, "cmake_flag_args", [])
 return std_cmake_args

 @staticmethod
-def _std_args(pkg):
+def std_args(pkg, generator=None):
 """Computes the standard cmake arguments for a generic package"""
+generator = generator or "Unix Makefiles"
-try:
-generator = pkg.generator
-except AttributeError:
-generator = CMakePackage.generator

-# Make sure a valid generator was chosen
 valid_primary_generators = ["Unix Makefiles", "Ninja"]
 primary_generator = _extract_primary_generator(generator)
 if primary_generator not in valid_primary_generators:
 msg = "Invalid CMake generator: '{0}'\n".format(generator)
 msg += "CMakePackage currently supports the following "
 msg += "primary generators: '{0}'".format("', '".join(valid_primary_generators))
-raise InstallError(msg)
+raise spack.package_base.InstallError(msg)

 try:
 build_type = pkg.spec.variants["build_type"].value
@@ -171,7 +236,7 @@ def _std_args(pkg):
 except KeyError:
 ipo = False

-define = CMakePackage.define
+define = CMakeBuilder.define
 args = [
 "-G",
 generator,
@@ -251,7 +316,7 @@ def define_from_variant(self, cmake_var, variant=None):
 of ``cmake_var``.

 This utility function is similar to
-:meth:`~spack.build_systems.autotools.AutotoolsPackage.with_or_without`.
+:meth:`~spack.build_systems.autotools.AutotoolsBuilder.with_or_without`.

 Examples:

@@ -291,122 +356,75 @@ def define_from_variant(self, cmake_var, variant=None):
|
|||||||
if variant is None:
|
if variant is None:
|
||||||
variant = cmake_var.lower()
|
variant = cmake_var.lower()
|
||||||
|
|
||||||
if variant not in self.variants:
|
if variant not in self.pkg.variants:
|
||||||
raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, self.name))
|
raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, self.pkg.name))
|
||||||
|
|
||||||
if variant not in self.spec.variants:
|
if variant not in self.pkg.spec.variants:
|
||||||
return ""
|
return ""
|
||||||
|
|
||||||
value = self.spec.variants[variant].value
|
value = self.pkg.spec.variants[variant].value
|
||||||
if isinstance(value, (tuple, list)):
|
if isinstance(value, (tuple, list)):
|
||||||
# Sort multi-valued variants for reproducibility
|
# Sort multi-valued variants for reproducibility
|
||||||
value = sorted(value)
|
value = sorted(value)
|
||||||
|
|
||||||
return self.define(cmake_var, value)
|
return self.define(cmake_var, value)
|
||||||
|
|
||||||
def flags_to_build_system_args(self, flags):
|
|
||||||
"""Produces a list of all command line arguments to pass the specified
|
|
||||||
compiler flags to cmake. Note CMAKE does not have a cppflags option,
|
|
||||||
so cppflags will be added to cflags, cxxflags, and fflags to mimic the
|
|
||||||
behavior in other tools."""
|
|
||||||
# Has to be dynamic attribute due to caching
|
|
||||||
setattr(self, "cmake_flag_args", [])
|
|
||||||
|
|
||||||
flag_string = "-DCMAKE_{0}_FLAGS={1}"
|
|
||||||
langs = {"C": "c", "CXX": "cxx", "Fortran": "f"}
|
|
||||||
|
|
||||||
# Handle language compiler flags
|
|
||||||
for lang, pre in langs.items():
|
|
||||||
flag = pre + "flags"
|
|
||||||
# cmake has no explicit cppflags support -> add it to all langs
|
|
||||||
lang_flags = " ".join(flags.get(flag, []) + flags.get("cppflags", []))
|
|
||||||
if lang_flags:
|
|
||||||
self.cmake_flag_args.append(flag_string.format(lang, lang_flags))
|
|
||||||
|
|
||||||
# Cmake has different linker arguments for different build types.
|
|
||||||
# We specify for each of them.
|
|
||||||
if flags["ldflags"]:
|
|
||||||
ldflags = " ".join(flags["ldflags"])
|
|
||||||
ld_string = "-DCMAKE_{0}_LINKER_FLAGS={1}"
|
|
||||||
# cmake has separate linker arguments for types of builds.
|
|
||||||
for type in ["EXE", "MODULE", "SHARED", "STATIC"]:
|
|
||||||
self.cmake_flag_args.append(ld_string.format(type, ldflags))
|
|
||||||
|
|
||||||
# CMake has libs options separated by language. Apply ours to each.
|
|
||||||
if flags["ldlibs"]:
|
|
||||||
libs_flags = " ".join(flags["ldlibs"])
|
|
||||||
libs_string = "-DCMAKE_{0}_STANDARD_LIBRARIES={1}"
|
|
||||||
for lang in langs:
|
|
||||||
self.cmake_flag_args.append(libs_string.format(lang, libs_flags))
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def build_dirname(self):
|
def build_dirname(self):
|
||||||
"""Returns the directory name to use when building the package
|
"""Directory name to use when building the package."""
|
||||||
|
return "spack-build-%s" % self.pkg.spec.dag_hash(7)
|
||||||
:return: name of the subdirectory for building the package
|
|
||||||
"""
|
|
||||||
return "spack-build-%s" % self.spec.dag_hash(7)
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def build_directory(self):
|
     def build_directory(self):
-        """Returns the directory to use when building the package
-
-        :return: directory where to build the package
-        """
-        return os.path.join(self.stage.path, self.build_dirname)
+        """Full-path to the directory to use when building the package."""
+        return os.path.join(self.pkg.stage.path, self.build_dirname)

     def cmake_args(self):
-        """Produces a list containing all the arguments that must be passed to
-        cmake, except:
+        """List of all the arguments that must be passed to cmake, except:

             * CMAKE_INSTALL_PREFIX
             * CMAKE_BUILD_TYPE
             * BUILD_TESTING

         which will be set automatically.
-
-        :return: list of arguments for cmake
         """
         return []

-    def cmake(self, spec, prefix):
+    def cmake(self, pkg, spec, prefix):
         """Runs ``cmake`` in the build directory"""
         options = self.std_cmake_args
         options += self.cmake_args()
         options.append(os.path.abspath(self.root_cmakelists_dir))
-        with working_dir(self.build_directory, create=True):
-            inspect.getmodule(self).cmake(*options)
+        with fs.working_dir(self.build_directory, create=True):
+            inspect.getmodule(self.pkg).cmake(*options)

-    def build(self, spec, prefix):
+    def build(self, pkg, spec, prefix):
         """Make the build targets"""
-        with working_dir(self.build_directory):
+        with fs.working_dir(self.build_directory):
             if self.generator == "Unix Makefiles":
-                inspect.getmodule(self).make(*self.build_targets)
+                inspect.getmodule(self.pkg).make(*self.build_targets)
             elif self.generator == "Ninja":
                 self.build_targets.append("-v")
-                inspect.getmodule(self).ninja(*self.build_targets)
+                inspect.getmodule(self.pkg).ninja(*self.build_targets)

-    def install(self, spec, prefix):
+    def install(self, pkg, spec, prefix):
         """Make the install targets"""
-        with working_dir(self.build_directory):
+        with fs.working_dir(self.build_directory):
             if self.generator == "Unix Makefiles":
-                inspect.getmodule(self).make(*self.install_targets)
+                inspect.getmodule(self.pkg).make(*self.install_targets)
             elif self.generator == "Ninja":
-                inspect.getmodule(self).ninja(*self.install_targets)
+                inspect.getmodule(self.pkg).ninja(*self.install_targets)

-    run_after("build")(PackageBase._run_default_build_time_test_callbacks)
+    spack.builder.run_after("build")(execute_build_time_tests)

     def check(self):
-        """Searches the CMake-generated Makefile for the target ``test``
-        and runs it if found.
+        """Search the CMake-generated files for the targets ``test`` and ``check``,
+        and runs them if found.
         """
-        with working_dir(self.build_directory):
+        with fs.working_dir(self.build_directory):
             if self.generator == "Unix Makefiles":
-                self._if_make_target_execute("test", jobs_env="CTEST_PARALLEL_LEVEL")
-                self._if_make_target_execute("check")
+                self.pkg._if_make_target_execute("test", jobs_env="CTEST_PARALLEL_LEVEL")
+                self.pkg._if_make_target_execute("check")
             elif self.generator == "Ninja":
-                self._if_ninja_target_execute("test", jobs_env="CTEST_PARALLEL_LEVEL")
-                self._if_ninja_target_execute("check")
+                self.pkg._if_ninja_target_execute("test", jobs_env="CTEST_PARALLEL_LEVEL")
+                self.pkg._if_ninja_target_execute("check")
-
-    # Check that self.prefix is there after installation
-    run_after("install")(PackageBase.sanity_check_prefix)
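For context, a package recipe consuming this interface still only overrides ``cmake_args``; the builder supplies the standard CMake arguments and runs the ``cmake``/``build``/``install`` phases. A minimal sketch (the ``example-lib`` name, URL, checksum and option are hypothetical and not part of this diff):

```python
# Hypothetical recipe illustrating the CMake-based package interface.
from spack.package import *


class ExampleLib(CMakePackage):
    """Example library built with CMake."""

    homepage = "https://example.org"  # hypothetical
    url = "https://example.org/example-lib-1.0.tar.gz"  # hypothetical

    version("1.0", sha256="0" * 64)  # placeholder checksum
    variant("shared", default=True, description="Build shared libraries")

    def cmake_args(self):
        # CMAKE_INSTALL_PREFIX, CMAKE_BUILD_TYPE and BUILD_TESTING are added
        # automatically by the builder, so only extra flags go here.
        return [
            "-DBUILD_SHARED_LIBS={0}".format("ON" if "+shared" in self.spec else "OFF"),
        ]
```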
lib/spack/spack/build_systems/generic.py  (new file, 44 lines)
@@ -0,0 +1,44 @@
+# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+from typing import Tuple
+
+import spack.builder
+import spack.directives
+import spack.package_base
+
+from ._checks import BaseBuilder, apply_macos_rpath_fixups
+
+
+class Package(spack.package_base.PackageBase):
+    """General purpose class with a single ``install`` phase that needs to be
+    coded by packagers.
+    """
+
+    #: This attribute is used in UI queries that require to know which
+    #: build-system class we are using
+    build_system_class = "Package"
+    #: Legacy buildsystem attribute used to deserialize and install old specs
+    legacy_buildsystem = "generic"
+
+    spack.directives.build_system("generic")
+
+
+@spack.builder.builder("generic")
+class GenericBuilder(BaseBuilder):
+    """A builder for a generic build system, that require packagers
+    to implement an "install" phase.
+    """
+
+    #: A generic package has only the "install" phase
+    phases = ("install",)
+
+    #: Names associated with package methods in the old build-system format
+    legacy_methods = ()  # type: Tuple[str, ...]
+
+    #: Names associated with package attributes in the old build-system format
+    legacy_attributes = ("archive_files",)  # type: Tuple[str, ...]
+
+    # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
+    spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
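A recipe for the generic build system only has to provide ``install``. A minimal sketch (the package name, URL and checksum are placeholders, not part of this diff):

```python
# Hypothetical recipe using the generic build system: only install() is required.
from spack.package import *


class ExampleTool(Package):
    """Example pre-built tool installed by copying files."""

    homepage = "https://example.org"  # hypothetical
    url = "https://example.org/example-tool-1.0.tar.gz"  # hypothetical

    version("1.0", sha256="0" * 64)  # placeholder checksum

    def install(self, spec, prefix):
        # Copy the unpacked sources straight into the installation prefix.
        install_tree(".", prefix)
```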
lib/spack/spack/build_systems/intel.py
@@ -2,8 +2,6 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-
 import glob
 import inspect
 import os
@@ -26,12 +24,14 @@

 import spack.error
 from spack.build_environment import dso_suffix
-from spack.package_base import InstallError, PackageBase, run_after
+from spack.package_base import InstallError
 from spack.util.environment import EnvironmentModifications
 from spack.util.executable import Executable
 from spack.util.prefix import Prefix
 from spack.version import Version, ver

+from .generic import Package
+
 # A couple of utility functions that might be useful in general. If so, they
 # should really be defined elsewhere, unless deemed heretical.
 # (Or na"ive on my part).
@@ -86,7 +86,7 @@ def _expand_fields(s):
     return s


-class IntelPackage(PackageBase):
+class IntelPackage(Package):
     """Specialized class for licensed Intel software.

     This class provides two phases that can be overridden:
@@ -99,9 +99,6 @@ class IntelPackage(PackageBase):
     to set the appropriate environment variables.
     """

-    #: Phases of an Intel package
-    phases = ["configure", "install"]
-
     #: This attribute is used in UI queries that need to know the build
     #: system base class
     build_system_class = "IntelPackage"
@@ -1184,12 +1181,13 @@ def _determine_license_type(self):
         debug_print(license_type)
         return license_type

-    def configure(self, spec, prefix):
+    @spack.builder.run_before("install")
+    def configure(self):
         """Generates the silent.cfg file to pass to installer.sh.

         See https://software.intel.com/en-us/articles/configuration-file-format
         """
+        prefix = self.prefix
         # Both tokens AND values of the configuration file are validated during
         # the run of the underlying binary installer. Any unknown token or
         # unacceptable value will cause that installer to fail. Notably, this
@@ -1270,7 +1268,7 @@ def install(self, spec, prefix):
         for f in glob.glob("%s/intel*log" % tmpdir):
             install(f, dst)

-    @run_after("install")
+    @spack.builder.run_after("install")
     def validate_install(self):
         # Sometimes the installer exits with an error but doesn't pass a
         # non-zero exit code to spack. Check for the existence of a 'bin'
@@ -1278,7 +1276,7 @@ def validate_install(self):
         if not os.path.exists(self.prefix.bin):
             raise InstallError("The installer has failed to install anything.")

-    @run_after("install")
+    @spack.builder.run_after("install")
     def configure_rpath(self):
         if "+rpath" not in self.spec:
             return
@@ -1296,7 +1294,7 @@ def configure_rpath(self):
         with open(compiler_cfg, "w") as fh:
             fh.write("-Xlinker -rpath={0}\n".format(compilers_lib_dir))

-    @run_after("install")
+    @spack.builder.run_after("install")
     def configure_auto_dispatch(self):
         if self._has_compilers:
             if "auto_dispatch=none" in self.spec:
@@ -1320,7 +1318,7 @@ def configure_auto_dispatch(self):
         with open(compiler_cfg, "a") as fh:
             fh.write("-ax{0}\n".format(",".join(ad)))

-    @run_after("install")
+    @spack.builder.run_after("install")
     def filter_compiler_wrappers(self):
         if ("+mpi" in self.spec or self.provides("mpi")) and "~newdtags" in self.spec:
             bin_dir = self.component_bin_dir("mpi")
@@ -1328,7 +1326,7 @@ def filter_compiler_wrappers(self):
             f = os.path.join(bin_dir, f)
             filter_file("-Xlinker --enable-new-dtags", " ", f, string=True)

-    @run_after("install")
+    @spack.builder.run_after("install")
     def uninstall_ism(self):
         # The "Intel(R) Software Improvement Program" [ahem] gets installed,
         # apparently regardless of PHONEHOME_SEND_USAGE_DATA.
@@ -1360,7 +1358,7 @@ def base_lib_dir(self):
         debug_print(d)
         return d

-    @run_after("install")
+    @spack.builder.run_after("install")
     def modify_LLVMgold_rpath(self):
         """Add libimf.so and other required libraries to the RUNPATH of LLVMgold.so.

@@ -1391,6 +1389,3 @@ def modify_LLVMgold_rpath(self):
             ]
         )
         patchelf("--set-rpath", rpath, lib)
-
-    # Check that self.prefix is there after installation
-    run_after("install")(PackageBase.sanity_check_prefix)
lib/spack/spack/build_systems/lua.py
@@ -2,59 +2,82 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import os

 from llnl.util.filesystem import find

-from spack.directives import depends_on, extends
+import spack.builder
+import spack.package_base
+import spack.util.executable
+from spack.directives import build_system, depends_on, extends
 from spack.multimethod import when
-from spack.package_base import PackageBase
-from spack.util.executable import Executable


-class LuaPackage(PackageBase):
+class LuaPackage(spack.package_base.PackageBase):
     """Specialized class for lua packages"""

-    phases = ["unpack", "generate_luarocks_config", "preprocess", "install"]
     #: This attribute is used in UI queries that need to know the build
     #: system base class
     build_system_class = "LuaPackage"

-    list_depth = 1  # LuaRocks requires at least one level of spidering to find versions
-    depends_on("lua-lang")
-    extends("lua", when="^lua")
-    with when("^lua-luajit"):
-        extends("lua-luajit")
-        depends_on("luajit")
-        depends_on("lua-luajit+lualinks")
-    with when("^lua-luajit-openresty"):
-        extends("lua-luajit-openresty")
-        depends_on("luajit")
-        depends_on("lua-luajit-openresty+lualinks")
+    #: Legacy buildsystem attribute used to deserialize and install old specs
+    legacy_buildsystem = "lua"

-    def unpack(self, spec, prefix):
-        if os.path.splitext(self.stage.archive_file)[1] == ".rock":
-            directory = self.luarocks("unpack", self.stage.archive_file, output=str)
+    list_depth = 1  # LuaRocks requires at least one level of spidering to find versions
+
+    build_system("lua")
+
+    with when("build_system=lua"):
+        depends_on("lua-lang")
+        extends("lua", when="^lua")
+        with when("^lua-luajit"):
+            extends("lua-luajit")
+            depends_on("luajit")
+            depends_on("lua-luajit+lualinks")
+        with when("^lua-luajit-openresty"):
+            extends("lua-luajit-openresty")
+            depends_on("luajit")
+            depends_on("lua-luajit-openresty+lualinks")
+
+    @property
+    def lua(self):
+        return spack.util.executable.Executable(self.spec["lua-lang"].prefix.bin.lua)
+
+    @property
+    def luarocks(self):
+        lr = spack.util.executable.Executable(self.spec["lua-lang"].prefix.bin.luarocks)
+        return lr
+
+
+@spack.builder.builder("lua")
+class LuaBuilder(spack.builder.Builder):
+    phases = ("unpack", "generate_luarocks_config", "preprocess", "install")
+
+    #: Names associated with package methods in the old build-system format
+    legacy_methods = ("luarocks_args",)
+
+    #: Names associated with package attributes in the old build-system format
+    legacy_attributes = ()
+
+    def unpack(self, pkg, spec, prefix):
+        if os.path.splitext(pkg.stage.archive_file)[1] == ".rock":
+            directory = pkg.luarocks("unpack", pkg.stage.archive_file, output=str)
             dirlines = directory.split("\n")
             # TODO: figure out how to scope this better
             os.chdir(dirlines[2])

-    def _generate_tree_line(self, name, prefix):
+    @staticmethod
+    def _generate_tree_line(name, prefix):
         return """{{ name = "{name}", root = "{prefix}" }};""".format(
             name=name,
             prefix=prefix,
         )

-    def _luarocks_config_path(self):
-        return os.path.join(self.stage.source_path, "spack_luarocks.lua")
-
-    def generate_luarocks_config(self, spec, prefix):
-        spec = self.spec
+    def generate_luarocks_config(self, pkg, spec, prefix):
+        spec = self.pkg.spec
         table_entries = []
-        for d in spec.traverse(deptypes=("build", "run"), deptype_query="run"):
-            if d.package.extends(self.extendee_spec):
+        for d in spec.traverse(deptype=("build", "run")):
+            if d.package.extends(self.pkg.extendee_spec):
                 table_entries.append(self._generate_tree_line(d.name, d.prefix))

         path = self._luarocks_config_path()
@@ -71,30 +94,24 @@ def generate_luarocks_config(self, spec, prefix):
         )
         return path

-    def setup_build_environment(self, env):
-        env.set("LUAROCKS_CONFIG", self._luarocks_config_path())
-
-    def preprocess(self, spec, prefix):
+    def preprocess(self, pkg, spec, prefix):
         """Override this to preprocess source before building with luarocks"""
         pass

-    @property
-    def lua(self):
-        return Executable(self.spec["lua-lang"].prefix.bin.lua)
-
-    @property
-    def luarocks(self):
-        lr = Executable(self.spec["lua-lang"].prefix.bin.luarocks)
-        return lr
-
     def luarocks_args(self):
         return []

-    def install(self, spec, prefix):
+    def install(self, pkg, spec, prefix):
         rock = "."
         specs = find(".", "*.rockspec", recursive=False)
         if specs:
             rock = specs[0]
         rocks_args = self.luarocks_args()
         rocks_args.append(rock)
-        self.luarocks("--tree=" + prefix, "make", *rocks_args)
+        self.pkg.luarocks("--tree=" + prefix, "make", *rocks_args)
+
+    def _luarocks_config_path(self):
+        return os.path.join(self.pkg.stage.source_path, "spack_luarocks.lua")
+
+    def setup_build_environment(self, env):
+        env.set("LUAROCKS_CONFIG", self._luarocks_config_path())
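A LuaRocks-based recipe normally only tweaks ``luarocks_args``; the builder handles unpacking rocks, writing the luarocks config and running ``luarocks make``. A minimal sketch (package name, URL, checksum and the extra flag are hypothetical):

```python
# Hypothetical LuaRocks-based recipe; luarocks_args is the usual override point.
from spack.package import *


class LuaExampleRock(LuaPackage):
    """Example Lua rock installed through luarocks."""

    homepage = "https://example.org"  # hypothetical
    url = "https://example.org/example-rock-1.0.src.rock"  # hypothetical

    version("1.0", sha256="0" * 64)  # placeholder checksum

    def luarocks_args(self):
        # Extra arguments appended to `luarocks --tree=<prefix> make`.
        return ["CFLAGS=-O2"]
```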
lib/spack/spack/build_systems/makefile.py
@@ -2,62 +2,85 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)


 import inspect
 from typing import List  # novm

-import llnl.util.tty as tty
-from llnl.util.filesystem import working_dir
+import llnl.util.filesystem as fs

-from spack.directives import conflicts
-from spack.package_base import PackageBase, run_after
+import spack.builder
+import spack.package_base
+from spack.directives import build_system, conflicts
+
+from ._checks import (
+    BaseBuilder,
+    apply_macos_rpath_fixups,
+    execute_build_time_tests,
+    execute_install_time_tests,
+)


-class MakefilePackage(PackageBase):
-    """Specialized class for packages that are built using editable Makefiles
+class MakefilePackage(spack.package_base.PackageBase):
+    """Specialized class for packages built using a Makefiles."""

-    This class provides three phases that can be overridden:
+    #: This attribute is used in UI queries that need to know the build
+    #: system base class
+    build_system_class = "MakefilePackage"
+    #: Legacy buildsystem attribute used to deserialize and install old specs
+    legacy_buildsystem = "makefile"

-        1. :py:meth:`~.MakefilePackage.edit`
-        2. :py:meth:`~.MakefilePackage.build`
-        3. :py:meth:`~.MakefilePackage.install`
+    build_system("makefile")
+    conflicts("platform=windows", when="build_system=makefile")

-    It is usually necessary to override the :py:meth:`~.MakefilePackage.edit`
-    phase, while :py:meth:`~.MakefilePackage.build` and
-    :py:meth:`~.MakefilePackage.install` have sensible defaults.
+
+@spack.builder.builder("makefile")
+class MakefileBuilder(BaseBuilder):
+    """The Makefile builder encodes the most common way of building software with
+    Makefiles. It has three phases that can be overridden, if need be:
+
+        1. :py:meth:`~.MakefileBuilder.edit`
+        2. :py:meth:`~.MakefileBuilder.build`
+        3. :py:meth:`~.MakefileBuilder.install`
+
+    It is usually necessary to override the :py:meth:`~.MakefileBuilder.edit`
+    phase (which is by default a no-op), while the other two have sensible defaults.
+
     For a finer tuning you may override:

         +-----------------------------------------------+--------------------+
         | **Method**                                    | **Purpose**        |
         +===============================================+====================+
-        | :py:attr:`~.MakefilePackage.build_targets`    | Specify ``make``   |
+        | :py:attr:`~.MakefileBuilder.build_targets`    | Specify ``make``   |
         |                                               | targets for the    |
         |                                               | build phase        |
         +-----------------------------------------------+--------------------+
-        | :py:attr:`~.MakefilePackage.install_targets`  | Specify ``make``   |
+        | :py:attr:`~.MakefileBuilder.install_targets`  | Specify ``make``   |
         |                                               | targets for the    |
         |                                               | install phase      |
         +-----------------------------------------------+--------------------+
-        | :py:meth:`~.MakefilePackage.build_directory`  | Directory where the|
+        | :py:meth:`~.MakefileBuilder.build_directory`  | Directory where the|
         |                                               | Makefile is located|
         +-----------------------------------------------+--------------------+
     """

-    #: Phases of a package that is built with an hand-written Makefile
-    phases = ["edit", "build", "install"]
-    #: This attribute is used in UI queries that need to know the build
-    #: system base class
-    build_system_class = "MakefilePackage"
+    phases = ("edit", "build", "install")

-    #: Targets for ``make`` during the :py:meth:`~.MakefilePackage.build`
-    #: phase
+    #: Names associated with package methods in the old build-system format
+    legacy_methods = ("check", "installcheck")
+
+    #: Names associated with package attributes in the old build-system format
+    legacy_attributes = (
+        "build_targets",
+        "install_targets",
+        "build_time_test_callbacks",
+        "install_time_test_callbacks",
+        "build_directory",
+    )
+
+    #: Targets for ``make`` during the :py:meth:`~.MakefileBuilder.build` phase
     build_targets = []  # type: List[str]
-    #: Targets for ``make`` during the :py:meth:`~.MakefilePackage.install`
-    #: phase
+    #: Targets for ``make`` during the :py:meth:`~.MakefileBuilder.install` phase
     install_targets = ["install"]

-    conflicts("platform=windows")
     #: Callback names for build-time test
     build_time_test_callbacks = ["check"]

@@ -66,53 +89,39 @@ class MakefilePackage(PackageBase):

     @property
     def build_directory(self):
-        """Returns the directory containing the main Makefile
-
-        :return: build directory
-        """
-        return self.stage.source_path
+        """Return the directory containing the main Makefile."""
+        return self.pkg.stage.source_path

-    def edit(self, spec, prefix):
-        """Edits the Makefile before calling make. This phase cannot
-        be defaulted.
-        """
-        tty.msg("Using default implementation: skipping edit phase.")
+    def edit(self, pkg, spec, prefix):
+        """Edit the Makefile before calling make. The default is a no-op."""
+        pass

-    def build(self, spec, prefix):
-        """Calls make, passing :py:attr:`~.MakefilePackage.build_targets`
-        as targets.
-        """
-        with working_dir(self.build_directory):
-            inspect.getmodule(self).make(*self.build_targets)
+    def build(self, pkg, spec, prefix):
+        """Run "make" on the build targets specified by the builder."""
+        with fs.working_dir(self.build_directory):
+            inspect.getmodule(self.pkg).make(*self.build_targets)

-    def install(self, spec, prefix):
-        """Calls make, passing :py:attr:`~.MakefilePackage.install_targets`
-        as targets.
-        """
-        with working_dir(self.build_directory):
-            inspect.getmodule(self).make(*self.install_targets)
+    def install(self, pkg, spec, prefix):
+        """Run "make" on the install targets specified by the builder."""
+        with fs.working_dir(self.build_directory):
+            inspect.getmodule(self.pkg).make(*self.install_targets)

-    run_after("build")(PackageBase._run_default_build_time_test_callbacks)
+    spack.builder.run_after("build")(execute_build_time_tests)

     def check(self):
-        """Searches the Makefile for targets ``test`` and ``check``
-        and runs them if found.
-        """
-        with working_dir(self.build_directory):
-            self._if_make_target_execute("test")
-            self._if_make_target_execute("check")
+        """Run "make" on the ``test`` and ``check`` targets, if found."""
+        with fs.working_dir(self.build_directory):
+            self.pkg._if_make_target_execute("test")
+            self.pkg._if_make_target_execute("check")

-    run_after("install")(PackageBase._run_default_install_time_test_callbacks)
+    spack.builder.run_after("install")(execute_install_time_tests)

     def installcheck(self):
         """Searches the Makefile for an ``installcheck`` target
         and runs it if found.
         """
-        with working_dir(self.build_directory):
-            self._if_make_target_execute("installcheck")
+        with fs.working_dir(self.build_directory):
+            self.pkg._if_make_target_execute("installcheck")

-    # Check that self.prefix is there after installation
-    run_after("install")(PackageBase.sanity_check_prefix)
-
     # On macOS, force rpaths for shared library IDs and remove duplicate rpaths
-    run_after("install")(PackageBase.apply_macos_rpath_fixups)
+    spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
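In practice a Makefile-based recipe usually overrides only ``edit`` to point the Makefile at Spack's prefix, and lets the default ``build`` and ``install`` phases run ``make``. A minimal sketch (package name, URL, checksum and Makefile variable are hypothetical):

```python
# Hypothetical Makefile-based recipe; edit() patches the Makefile, the builder runs make.
from spack.package import *


class ExampleMake(MakefilePackage):
    """Example program built from a hand-written Makefile."""

    homepage = "https://example.org"  # hypothetical
    url = "https://example.org/example-make-1.0.tar.gz"  # hypothetical

    version("1.0", sha256="0" * 64)  # placeholder checksum

    def edit(self, spec, prefix):
        # Point the Makefile at the installation prefix chosen by Spack.
        makefile = FileFilter("Makefile")
        makefile.filter(r"^PREFIX\s*=.*", "PREFIX = {0}".format(prefix))
```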
lib/spack/spack/build_systems/maven.py
@@ -2,60 +2,73 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import llnl.util.filesystem as fs

-from llnl.util.filesystem import install_tree, working_dir
-
-from spack.directives import depends_on
-from spack.package_base import PackageBase, run_after
+import spack.builder
+import spack.package_base
+from spack.directives import build_system, depends_on
+from spack.multimethod import when
 from spack.util.executable import which

+from ._checks import BaseBuilder

-class MavenPackage(PackageBase):
+
+class MavenPackage(spack.package_base.PackageBase):
     """Specialized class for packages that are built using the
     Maven build system. See https://maven.apache.org/index.html
     for more information.
-
-    This class provides the following phases that can be overridden:
-
-    * build
-    * install
     """

-    # Default phases
-    phases = ["build", "install"]
-
     # To be used in UI queries that require to know which
     # build-system class we are using
     build_system_class = "MavenPackage"

-    depends_on("java", type=("build", "run"))
-    depends_on("maven", type="build")
+    #: Legacy buildsystem attribute used to deserialize and install old specs
+    legacy_buildsystem = "maven"
+
+    build_system("maven")
+
+    with when("build_system=maven"):
+        depends_on("java", type=("build", "run"))
+        depends_on("maven", type="build")
+
+
+@spack.builder.builder("maven")
+class MavenBuilder(BaseBuilder):
+    """The Maven builder encodes the default way to build software with Maven.
+    It has two phases that can be overridden, if need be:
+
+        1. :py:meth:`~.MavenBuilder.build`
+        2. :py:meth:`~.MavenBuilder.install`
+    """
+
+    phases = ("build", "install")
+
+    #: Names associated with package methods in the old build-system format
+    legacy_methods = ("build_args",)
+
+    #: Names associated with package attributes in the old build-system format
+    legacy_attributes = ("build_directory",)

     @property
     def build_directory(self):
         """The directory containing the ``pom.xml`` file."""
-        return self.stage.source_path
+        return self.pkg.stage.source_path

     def build_args(self):
         """List of args to pass to build phase."""
         return []

-    def build(self, spec, prefix):
+    def build(self, pkg, spec, prefix):
         """Compile code and package into a JAR file."""
-        with working_dir(self.build_directory):
+        with fs.working_dir(self.build_directory):
             mvn = which("mvn")
-            if self.run_tests:
+            if self.pkg.run_tests:
                 mvn("verify", *self.build_args())
             else:
                 mvn("package", "-DskipTests", *self.build_args())

-    def install(self, spec, prefix):
+    def install(self, pkg, spec, prefix):
         """Copy to installation prefix."""
-        with working_dir(self.build_directory):
-            install_tree(".", prefix)
-
-    # Check that self.prefix is there after installation
-    run_after("install")(PackageBase.sanity_check_prefix)
+        with fs.working_dir(self.build_directory):
+            fs.install_tree(".", prefix)
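A Maven recipe typically only needs extra ``mvn`` flags via ``build_args``; the builder runs ``mvn package`` (or ``mvn verify`` when tests are requested) and then copies the tree into the prefix. A minimal sketch (package name, URL and checksum are hypothetical):

```python
# Hypothetical Maven-based recipe; build_args feeds extra flags to `mvn package`.
from spack.package import *


class ExampleMavenApp(MavenPackage):
    """Example Java application built with Maven."""

    homepage = "https://example.org"  # hypothetical
    url = "https://example.org/example-maven-app-1.0.tar.gz"  # hypothetical

    version("1.0", sha256="0" * 64)  # placeholder checksum

    def build_args(self):
        # Skip javadoc generation to speed up the build.
        return ["-Dmaven.javadoc.skip=true"]
```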
lib/spack/spack/build_systems/meson.py
@@ -2,76 +2,104 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)


 import inspect
 import os
 from typing import List  # novm

-from llnl.util.filesystem import working_dir
+import llnl.util.filesystem as fs

-from spack.directives import depends_on, variant
-from spack.package_base import PackageBase, run_after
+import spack.builder
+import spack.package_base
+from spack.directives import build_system, depends_on, variant
+from spack.multimethod import when
+
+from ._checks import BaseBuilder, execute_build_time_tests


-class MesonPackage(PackageBase):
-    """Specialized class for packages built using Meson
+class MesonPackage(spack.package_base.PackageBase):
+    """Specialized class for packages built using Meson. For more information
+    on the Meson build system, see https://mesonbuild.com/
+    """

-    For more information on the Meson build system, see:
-    https://mesonbuild.com/
+    #: This attribute is used in UI queries that need to know the build
+    #: system base class
+    build_system_class = "MesonPackage"

-    This class provides three phases that can be overridden:
+    #: Legacy buildsystem attribute used to deserialize and install old specs
+    legacy_buildsystem = "meson"

-    1. :py:meth:`~.MesonPackage.meson`
-    2. :py:meth:`~.MesonPackage.build`
-    3. :py:meth:`~.MesonPackage.install`
+    build_system("meson")
+
+    with when("build_system=meson"):
+        variant(
+            "buildtype",
+            default="debugoptimized",
+            description="Meson build type",
+            values=("plain", "debug", "debugoptimized", "release", "minsize"),
+        )
+        variant(
+            "default_library",
+            default="shared",
+            values=("shared", "static"),
+            multi=True,
+            description="Build shared libs, static libs or both",
+        )
+        variant("strip", default=False, description="Strip targets on install")
+        depends_on("meson", type="build")
+        depends_on("ninja", type="build")
+
+    def flags_to_build_system_args(self, flags):
+        """Produces a list of all command line arguments to pass the specified
+        compiler flags to meson."""
+        # Has to be dynamic attribute due to caching
+        setattr(self, "meson_flag_args", [])
+
+
+@spack.builder.builder("meson")
+class MesonBuilder(BaseBuilder):
+    """The Meson builder encodes the default way to build software with Meson.
+    The builder has three phases that can be overridden, if need be:
+
+        1. :py:meth:`~.MesonBuilder.meson`
+        2. :py:meth:`~.MesonBuilder.build`
+        3. :py:meth:`~.MesonBuilder.install`

     They all have sensible defaults and for many packages the only thing
-    necessary will be to override :py:meth:`~.MesonPackage.meson_args`.
+    necessary will be to override :py:meth:`~.MesonBuilder.meson_args`.

     For a finer tuning you may also override:

         +-----------------------------------------------+--------------------+
         | **Method**                                    | **Purpose**        |
         +===============================================+====================+
-        | :py:meth:`~.MesonPackage.root_mesonlists_dir` | Location of the    |
+        | :py:meth:`~.MesonBuilder.root_mesonlists_dir` | Location of the    |
         |                                               | root MesonLists.txt|
         +-----------------------------------------------+--------------------+
-        | :py:meth:`~.MesonPackage.build_directory`     | Directory where to |
+        | :py:meth:`~.MesonBuilder.build_directory`     | Directory where to |
         |                                               | build the package  |
         +-----------------------------------------------+--------------------+
     """

-    #: Phases of a Meson package
-    phases = ["meson", "build", "install"]
-    #: This attribute is used in UI queries that need to know the build
-    #: system base class
-    build_system_class = "MesonPackage"
+    phases = ("meson", "build", "install")
+
+    #: Names associated with package methods in the old build-system format
+    legacy_methods = ("meson_args", "check")
+
+    #: Names associated with package attributes in the old build-system format
+    legacy_attributes = (
+        "build_targets",
+        "install_targets",
+        "build_time_test_callbacks",
+        "root_mesonlists_dir",
+        "std_meson_args",
+        "build_directory",
+    )

     build_targets = []  # type: List[str]
     install_targets = ["install"]

     build_time_test_callbacks = ["check"]

-    variant(
-        "buildtype",
-        default="debugoptimized",
-        description="Meson build type",
-        values=("plain", "debug", "debugoptimized", "release", "minsize"),
-    )
-    variant(
-        "default_library",
-        default="shared",
-        values=("shared", "static"),
-        multi=True,
-        description="Build shared libs, static libs or both",
-    )
-    variant("strip", default=False, description="Strip targets on install")
-
-    depends_on("meson", type="build")
-    depends_on("ninja", type="build")
-
     @property
     def archive_files(self):
         """Files to archive for packages based on Meson"""
@@ -79,31 +107,26 @@ def archive_files(self):

     @property
     def root_mesonlists_dir(self):
-        """The relative path to the directory containing meson.build
+        """Relative path to the directory containing meson.build

         This path is relative to the root of the extracted tarball,
         not to the ``build_directory``. Defaults to the current directory.
-
-        :return: directory containing meson.build
         """
-        return self.stage.source_path
+        return self.pkg.stage.source_path

     @property
     def std_meson_args(self):
-        """Standard meson arguments provided as a property for
-        convenience of package writers
-
-        :return: standard meson arguments
+        """Standard meson arguments provided as a property for convenience
+        of package writers.
         """
         # standard Meson arguments
-        std_meson_args = MesonPackage._std_args(self)
+        std_meson_args = MesonBuilder.std_args(self.pkg)
         std_meson_args += getattr(self, "meson_flag_args", [])
         return std_meson_args

     @staticmethod
-    def _std_args(pkg):
-        """Computes the standard meson arguments for a generic package"""
+    def std_args(pkg):
+        """Standard meson arguments for a generic package."""
         try:
             build_type = pkg.spec.variants["buildtype"].value
         except KeyError:
@@ -132,31 +155,18 @@ def _std_args(pkg):

         return args

-    def flags_to_build_system_args(self, flags):
-        """Produces a list of all command line arguments to pass the specified
-        compiler flags to meson."""
-        # Has to be dynamic attribute due to caching
-        setattr(self, "meson_flag_args", [])
-
     @property
     def build_dirname(self):
-        """Returns the directory name to use when building the package
-
-        :return: name of the subdirectory for building the package
-        """
-        return "spack-build-%s" % self.spec.dag_hash(7)
+        """Returns the directory name to use when building the package."""
+        return "spack-build-{}".format(self.spec.dag_hash(7))

     @property
     def build_directory(self):
-        """Returns the directory to use when building the package
-
-        :return: directory where to build the package
-        """
-        return os.path.join(self.stage.path, self.build_dirname)
+        """Directory to use when building the package."""
+        return os.path.join(self.pkg.stage.path, self.build_dirname)

     def meson_args(self):
-        """Produces a list containing all the arguments that must be passed to
-        meson, except:
+        """List of arguments that must be passed to meson, except:

             * ``--prefix``
             * ``--libdir``
@@ -165,40 +175,33 @@ def meson_args(self):
             * ``--default_library``

         which will be set automatically.
-
-        :return: list of arguments for meson
         """
         return []

-    def meson(self, spec, prefix):
-        """Runs ``meson`` in the build directory"""
+    def meson(self, pkg, spec, prefix):
+        """Run ``meson`` in the build directory"""
         options = [os.path.abspath(self.root_mesonlists_dir)]
         options += self.std_meson_args
         options += self.meson_args()
-        with working_dir(self.build_directory, create=True):
-            inspect.getmodule(self).meson(*options)
+        with fs.working_dir(self.build_directory, create=True):
+            inspect.getmodule(self.pkg).meson(*options)

-    def build(self, spec, prefix):
+    def build(self, pkg, spec, prefix):
         """Make the build targets"""
         options = ["-v"]
         options += self.build_targets
-        with working_dir(self.build_directory):
-            inspect.getmodule(self).ninja(*options)
+        with fs.working_dir(self.build_directory):
+            inspect.getmodule(self.pkg).ninja(*options)

-    def install(self, spec, prefix):
+    def install(self, pkg, spec, prefix):
         """Make the install targets"""
-        with working_dir(self.build_directory):
-            inspect.getmodule(self).ninja(*self.install_targets)
+        with fs.working_dir(self.build_directory):
+            inspect.getmodule(self.pkg).ninja(*self.install_targets)

-    run_after("build")(PackageBase._run_default_build_time_test_callbacks)
+    spack.builder.run_after("build")(execute_build_time_tests)

     def check(self):
-        """Searches the Meson-generated file for the target ``test``
-        and runs it if found.
-        """
-        with working_dir(self.build_directory):
+        """Search Meson-generated files for the target ``test`` and run it if found."""
+        with fs.working_dir(self.build_directory):
             self._if_ninja_target_execute("test")
             self._if_ninja_target_execute("check")
-
-    # Check that self.prefix is there after installation
-    run_after("install")(PackageBase.sanity_check_prefix)
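A Meson recipe usually only supplies project-specific options through ``meson_args``; the standard ``--prefix``, ``--buildtype``, ``--strip`` and ``--default_library`` flags come from the variants defined on the package class above. A minimal sketch (package name, URL, checksum and the ``docs`` option are hypothetical):

```python
# Hypothetical Meson-based recipe; meson_args supplies project-specific options.
from spack.package import *


class ExampleMesonLib(MesonPackage):
    """Example library built with Meson and Ninja."""

    homepage = "https://example.org"  # hypothetical
    url = "https://example.org/example-meson-lib-1.0.tar.gz"  # hypothetical

    version("1.0", sha256="0" * 64)  # placeholder checksum
    variant("docs", default=False, description="Build documentation")

    def meson_args(self):
        # Standard arguments are added automatically; only extras go here.
        return ["-Ddocs={0}".format("true" if "+docs" in self.spec else "false")]
```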
lib/spack/spack/build_systems/nmake.py  (new file, 102 lines)
@@ -0,0 +1,102 @@
+# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import inspect
+from typing import List  # novm
+
+import llnl.util.filesystem as fs
+
+import spack.builder
+import spack.package_base
+from spack.directives import build_system, conflicts
+
+from ._checks import BaseBuilder
+
+
+class NMakePackage(spack.package_base.PackageBase):
+    """Specialized class for packages built using a Makefiles."""
+
+    #: This attribute is used in UI queries that need to know the build
+    #: system base class
+    build_system_class = "NmakePackage"
+
+    build_system("nmake")
+    conflicts("platform=linux", when="build_system=nmake")
+    conflicts("platform=darwin", when="build_system=nmake")
+    conflicts("platform=cray", when="build_system=nmake")
+
+
+@spack.builder.builder("nmake")
+class NMakeBuilder(BaseBuilder):
+    """The NMake builder encodes the most common way of building software with
+    NMake on Windows. It has three phases that can be overridden, if need be:
+
+        1. :py:meth:`~.NMakeBuilder.edit`
+        2. :py:meth:`~.NMakeBuilder.build`
+        3. :py:meth:`~.NMakeBuilder.install`
+
+    It is usually necessary to override the :py:meth:`~.NMakeBuilder.edit`
+    phase (which is by default a no-op), while the other two have sensible defaults.
+
+    For a finer tuning you may override:
+
+        +--------------------------------------------+--------------------+
+        | **Method**                                 | **Purpose**        |
+        +============================================+====================+
+        | :py:attr:`~.NMakeBuilder.build_targets`    | Specify ``nmake``  |
+        |                                            | targets for the    |
+        |                                            | build phase        |
+        +--------------------------------------------+--------------------+
+        | :py:attr:`~.NMakeBuilder.install_targets`  | Specify ``nmake``  |
+        |                                            | targets for the    |
+        |                                            | install phase      |
+        +--------------------------------------------+--------------------+
+        | :py:meth:`~.NMakeBuilder.build_directory`  | Directory where the|
+        |                                            | Makefile is located|
+        +--------------------------------------------+--------------------+
+    """
+
+    phases = ("edit", "build", "install")
+
+    #: Names associated with package methods in the old build-system format
+    legacy_methods = ("check", "installcheck")
+
+    #: Names associated with package attributes in the old build-system format
+    legacy_attributes = (
+        "build_targets",
+        "install_targets",
+        "build_time_test_callbacks",
+        "install_time_test_callbacks",
+        "build_directory",
+    )
+
+    #: Targets for ``make`` during the :py:meth:`~.NMakeBuilder.build` phase
+    build_targets = []  # type: List[str]
+    #: Targets for ``make`` during the :py:meth:`~.NMakeBuilder.install` phase
+    install_targets = ["install"]
+
+    #: Callback names for build-time test
+    build_time_test_callbacks = ["check"]
+
+    #: Callback names for install-time test
+    install_time_test_callbacks = ["installcheck"]
+
+    @property
+    def build_directory(self):
+        """Return the directory containing the main Makefile."""
+        return self.pkg.stage.source_path
+
+    def edit(self, pkg, spec, prefix):
+        """Edit the Makefile before calling make. The default is a no-op."""
+        pass
+
+    def build(self, pkg, spec, prefix):
+        """Run "make" on the build targets specified by the builder."""
+        with fs.working_dir(self.build_directory):
+            inspect.getmodule(self.pkg).nmake(*self.build_targets)
+
+    def install(self, pkg, spec, prefix):
+        """Run "make" on the install targets specified by the builder."""
+        with fs.working_dir(self.build_directory):
+            inspect.getmodule(self.pkg).nmake(*self.install_targets)
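An NMake-based recipe would typically only adjust the targets or the ``edit`` phase, and let the builder invoke ``nmake`` for build and install. A minimal sketch under the assumption that ``NMakePackage`` is importable from ``spack.package`` like the other base classes (package name, URL, checksum and target are hypothetical):

```python
# Hypothetical NMake-based recipe for Windows; only the targets are customized.
from spack.package import *


class ExampleWinTool(NMakePackage):
    """Example Windows tool built with nmake."""

    homepage = "https://example.org"  # hypothetical
    url = "https://example.org/example-win-tool-1.0.zip"  # hypothetical

    version("1.0", sha256="0" * 64)  # placeholder checksum

    # Build the hypothetical "release" target, keep the default "install" target.
    build_targets = ["release"]
```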
lib/spack/spack/build_systems/octave.py
@@ -2,51 +2,62 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import inspect

-from spack.directives import extends
-from spack.package_base import PackageBase, run_after
+import spack.builder
+import spack.package_base
+from spack.directives import build_system, extends
+from spack.multimethod import when
+
+from ._checks import BaseBuilder


-class OctavePackage(PackageBase):
+class OctavePackage(spack.package_base.PackageBase):
     """Specialized class for Octave packages. See
     https://www.gnu.org/software/octave/doc/v4.2.0/Installing-and-Removing-Packages.html
     for more information.
-
-    This class provides the following phases that can be overridden:
-
-    1. :py:meth:`~.OctavePackage.install`
-
     """

-    # Default phases
-    phases = ["install"]
-
     # To be used in UI queries that require to know which
     # build-system class we are using
     build_system_class = "OctavePackage"
+    #: Legacy buildsystem attribute used to deserialize and install old specs
+    legacy_buildsystem = "octave"

-    extends("octave")
+    build_system("octave")
+
+    with when("build_system=octave"):
+        extends("octave")
+
+
+@spack.builder.builder("octave")
+class OctaveBuilder(BaseBuilder):
+    """The octave builder provides the following phases that can be overridden:
+
+        1. :py:meth:`~.OctaveBuilder.install`
+    """
+
+    phases = ("install",)
+
+    #: Names associated with package methods in the old build-system format
+    legacy_methods = ()
+
+    #: Names associated with package attributes in the old build-system format
+    legacy_attributes = ()
+
+    def install(self, pkg, spec, prefix):
+        """Install the package from the archive file"""
+        inspect.getmodule(self.pkg).octave(
+            "--quiet",
+            "--norc",
+            "--built-in-docstrings-file=/dev/null",
+            "--texi-macros-file=/dev/null",
+            "--eval",
+            "pkg prefix %s; pkg install %s" % (prefix, self.pkg.stage.archive_file),
+        )

     def setup_build_environment(self, env):
         # octave does not like those environment variables to be set:
         env.unset("CC")
         env.unset("CXX")
         env.unset("FC")

-    def install(self, spec, prefix):
-        """Install the package from the archive file"""
-        inspect.getmodule(self).octave(
-            "--quiet",
-            "--norc",
-            "--built-in-docstrings-file=/dev/null",
-            "--texi-macros-file=/dev/null",
-            "--eval",
-            "pkg prefix %s; pkg install %s" % (prefix, self.stage.archive_file),
-        )
-
-    # Testing
-
-    # Check that self.prefix is there after installation
-    run_after("install")(PackageBase.sanity_check_prefix)
lib/spack/spack/build_systems/oneapi.py
@@ -2,11 +2,7 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-"""Common utilities for managing intel oneapi packages.
-
-
-"""
-
+"""Common utilities for managing intel oneapi packages."""
 import getpass
 import platform
 import shutil
@@ -14,18 +10,17 @@

 from llnl.util.filesystem import find_headers, find_libraries, join_path

-from spack.package_base import Package
 from spack.util.environment import EnvironmentModifications
 from spack.util.executable import Executable

+from .generic import Package
+

 class IntelOneApiPackage(Package):
     """Base class for Intel oneAPI packages."""

     homepage = "https://software.intel.com/oneapi"

-    phases = ["install"]
-
     # oneAPI license does not allow mirroring outside of the
     # organization (e.g. University/Company).
     redistribute_source = False
lib/spack/spack/build_systems/perl.py
@@ -2,73 +2,87 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)


 import inspect
 import os

 from llnl.util.filesystem import filter_file

-from spack.directives import extends
-from spack.package_base import PackageBase, run_after
+import spack.builder
+import spack.package_base
+from spack.directives import build_system, extends
+from spack.package_base import PackageBase
 from spack.util.executable import Executable

+from ._checks import BaseBuilder, execute_build_time_tests
+

 class PerlPackage(PackageBase):
-    """Specialized class for packages that are built using Perl.
+    """Specialized class for packages that are built using Perl."""

-    This class provides four phases that can be overridden if required:
+    #: This attribute is used in UI queries that need to know the build
+    #: system base class
+    build_system_class = "PerlPackage"
+    #: Legacy buildsystem attribute used to deserialize and install old specs
+    legacy_buildsystem = "perl"

-        1. :py:meth:`~.PerlPackage.configure`
-        2. :py:meth:`~.PerlPackage.build`
-        3. :py:meth:`~.PerlPackage.check`
-        4. :py:meth:`~.PerlPackage.install`
+    build_system("perl")
+
+    extends("perl", when="build_system=perl")
+
+
+@spack.builder.builder("perl")
+class PerlBuilder(BaseBuilder):
+    """The perl builder provides four phases that can be overridden, if required:
+
+        1. :py:meth:`~.PerlBuilder.configure`
+        2. :py:meth:`~.PerlBuilder.build`
+        3. :py:meth:`~.PerlBuilder.check`
+        4. :py:meth:`~.PerlBuilder.install`

     The default methods use, in order of preference:
     (1) Makefile.PL,
     (2) Build.PL.

-    Some packages may need to override
-    :py:meth:`~.PerlPackage.configure_args`,
-    which produces a list of arguments for
-    :py:meth:`~.PerlPackage.configure`.
+    Some packages may need to override :py:meth:`~.PerlBuilder.configure_args`,
+    which produces a list of arguments for :py:meth:`~.PerlBuilder.configure`.
     Arguments should not include the installation base directory.
     """

     #: Phases of a Perl package
-    phases = ["configure", "build", "install"]
+    phases = ("configure", "build", "install")

-    #: This attribute is used in UI queries that need to know the build
-    #: system base class
-    build_system_class = "PerlPackage"
+    #: Names associated with package methods in the old build-system format
+    legacy_methods = ("configure_args", "check")
+
+    #: Names associated with package attributes in the old build-system format
+    legacy_attributes = ()

     #: Callback names for build-time test
     build_time_test_callbacks = ["check"]

-    extends("perl")
-
     def configure_args(self):
-        """Produces a list containing the arguments that must be passed to
-        :py:meth:`~.PerlPackage.configure`. Arguments should not include
-        the installation base directory, which is prepended automatically.
-
-        :return: list of arguments for Makefile.PL or Build.PL
+        """List of arguments passed to :py:meth:`~.PerlBuilder.configure`.
+
+        Arguments should not include the installation base directory, which
+        is prepended automatically.
         """
         return []

-    def configure(self, spec, prefix):
-        """Runs Makefile.PL or Build.PL with arguments consisting of
+    def configure(self, pkg, spec, prefix):
+        """Run Makefile.PL or Build.PL with arguments consisting of
         an appropriate installation base directory followed by the
-        list returned by :py:meth:`~.PerlPackage.configure_args`.
+        list returned by :py:meth:`~.PerlBuilder.configure_args`.

-        :raise RuntimeError: if neither Makefile.PL or Build.PL exist
+        Raises:
+            RuntimeError: if neither Makefile.PL nor Build.PL exist
         """
         if os.path.isfile("Makefile.PL"):
             self.build_method = "Makefile.PL"
-            self.build_executable = inspect.getmodule(self).make
+            self.build_executable = inspect.getmodule(self.pkg).make
         elif os.path.isfile("Build.PL"):
             self.build_method = "Build.PL"
-            self.build_executable = Executable(os.path.join(self.stage.source_path, "Build"))
+            self.build_executable = Executable(os.path.join(self.pkg.stage.source_path, "Build"))
         else:
             raise RuntimeError("Unknown build_method for perl package")
|
|
||||||
@@ -78,33 +92,30 @@ def configure(self, spec, prefix):
|
|||||||
options = ["Build.PL", "--install_base", prefix]
|
options = ["Build.PL", "--install_base", prefix]
|
||||||
options += self.configure_args()
|
options += self.configure_args()
|
||||||
|
|
||||||
inspect.getmodule(self).perl(*options)
|
inspect.getmodule(self.pkg).perl(*options)
|
||||||
|
|
||||||
# It is possible that the shebang in the Build script that is created from
|
# It is possible that the shebang in the Build script that is created from
|
||||||
# Build.PL may be too long causing the build to fail. Patching the shebang
|
# Build.PL may be too long causing the build to fail. Patching the shebang
|
||||||
# does not happen until after install so set '/usr/bin/env perl' here in
|
# does not happen until after install so set '/usr/bin/env perl' here in
|
||||||
# the Build script.
|
# the Build script.
|
||||||
@run_after("configure")
|
@spack.builder.run_after("configure")
|
||||||
def fix_shebang(self):
|
def fix_shebang(self):
|
||||||
if self.build_method == "Build.PL":
|
if self.build_method == "Build.PL":
|
||||||
pattern = "#!{0}".format(self.spec["perl"].command.path)
|
pattern = "#!{0}".format(self.spec["perl"].command.path)
|
||||||
repl = "#!/usr/bin/env perl"
|
repl = "#!/usr/bin/env perl"
|
||||||
filter_file(pattern, repl, "Build", backup=False)
|
filter_file(pattern, repl, "Build", backup=False)
|
||||||
|
|
||||||
def build(self, spec, prefix):
|
def build(self, pkg, spec, prefix):
|
||||||
"""Builds a Perl package."""
|
"""Builds a Perl package."""
|
||||||
self.build_executable()
|
self.build_executable()
|
||||||
|
|
||||||
# Ensure that tests run after build (if requested):
|
# Ensure that tests run after build (if requested):
|
||||||
run_after("build")(PackageBase._run_default_build_time_test_callbacks)
|
spack.builder.run_after("build")(execute_build_time_tests)
|
||||||
|
|
||||||
def check(self):
|
def check(self):
|
||||||
"""Runs built-in tests of a Perl package."""
|
"""Runs built-in tests of a Perl package."""
|
||||||
self.build_executable("test")
|
self.build_executable("test")
|
||||||
|
|
||||||
def install(self, spec, prefix):
|
def install(self, pkg, spec, prefix):
|
||||||
"""Installs a Perl package."""
|
"""Installs a Perl package."""
|
||||||
self.build_executable("install")
|
self.build_executable("install")
|
||||||
|
|
||||||
# Check that self.prefix is there after installation
|
|
||||||
run_after("install")(PackageBase.sanity_check_prefix)
|
|
||||||
|
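A hedged sketch of how a recipe can sit on top of the PerlPackage/PerlBuilder split above. The package name, URL, and checksum are placeholders; the only behavior assumed beyond the hunk is that ``configure_args`` defined on the package class is still forwarded to the builder, as the ``legacy_methods`` tuple suggests.

# Hypothetical recipe sketch; names and values are illustrative.
from spack.package import *   # usual recipe preamble

class PerlExampleModule(PerlPackage):
    """Illustrative Perl distribution."""

    homepage = "https://example.com/perl-example-module"
    url = "https://example.com/Example-Module-1.0.tar.gz"

    version("1.0", sha256="<placeholder>")

    def configure_args(self):
        # Forwarded to PerlBuilder.configure_args; the installation base
        # directory is still prepended automatically by the builder.
        return ["EXTRA_OPTION=1"]   # hypothetical Makefile.PL argument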
@@ -8,93 +8,22 @@
 import shutil
 from typing import Optional

+import llnl.util.filesystem as fs
+import llnl.util.lang as lang
 import llnl.util.tty as tty
-from llnl.util.filesystem import (
-    filter_file,
-    find,
-    find_all_headers,
-    find_libraries,
-    is_nonsymlink_exe_with_shebang,
-    path_contains_subdirectory,
-    same_path,
-    working_dir,
-)
-from llnl.util.lang import classproperty, match_predicate

-from spack.directives import depends_on, extends
+import spack.builder
+import spack.multimethod
+import spack.package_base
+from spack.directives import build_system, depends_on, extends
 from spack.error import NoHeadersError, NoLibrariesError, SpecError
-from spack.package_base import PackageBase, run_after
 from spack.version import Version

+from ._checks import BaseBuilder, execute_install_time_tests
+

-class PythonPackage(PackageBase):
-    """Specialized class for packages that are built using pip."""
-
-    #: Package name, version, and extension on PyPI
-    pypi = None  # type: Optional[str]
-
-    maintainers = ["adamjstewart", "pradyunsg"]
-
-    # Default phases
-    phases = ["install"]
-
-    # To be used in UI queries that require to know which
-    # build-system class we are using
-    build_system_class = "PythonPackage"
-
-    #: Callback names for install-time test
-    install_time_test_callbacks = ["test"]
-
-    extends("python")
-    depends_on("py-pip", type="build")
-    # FIXME: technically wheel is only needed when building from source, not when
-    # installing a downloaded wheel, but I don't want to add wheel as a dep to every
-    # package manually
-    depends_on("py-wheel", type="build")
-
-    py_namespace = None  # type: Optional[str]
-
-    @staticmethod
-    def _std_args(cls):
-        return [
-            # Verbose
-            "-vvv",
-            # Disable prompting for input
-            "--no-input",
-            # Disable the cache
-            "--no-cache-dir",
-            # Don't check to see if pip is up-to-date
-            "--disable-pip-version-check",
-            # Install packages
-            "install",
-            # Don't install package dependencies
-            "--no-deps",
-            # Overwrite existing packages
-            "--ignore-installed",
-            # Use env vars like PYTHONPATH
-            "--no-build-isolation",
-            # Don't warn that prefix.bin is not in PATH
-            "--no-warn-script-location",
-            # Ignore the PyPI package index
-            "--no-index",
-        ]
-
-    @classproperty
-    def homepage(cls):
-        if cls.pypi:
-            name = cls.pypi.split("/")[0]
-            return "https://pypi.org/project/" + name + "/"
-
-    @classproperty
-    def url(cls):
-        if cls.pypi:
-            return "https://files.pythonhosted.org/packages/source/" + cls.pypi[0] + "/" + cls.pypi
-
-    @classproperty
-    def list_url(cls):
-        if cls.pypi:
-            name = cls.pypi.split("/")[0]
-            return "https://pypi.org/simple/" + name + "/"
+class PythonExtension(spack.package_base.PackageBase):
+    maintainers = ["adamjstewart"]

     @property
     def import_modules(self):
@@ -124,7 +53,7 @@ def import_modules(self):

         # Some Python libraries are packages: collections of modules
         # distributed in directories containing __init__.py files
-        for path in find(root, "__init__.py", recursive=True):
+        for path in fs.find(root, "__init__.py", recursive=True):
             modules.append(
                 path.replace(root + os.sep, "", 1)
                 .replace(os.sep + "__init__.py", "")
@@ -133,7 +62,7 @@ def import_modules(self):

         # Some Python libraries are modules: individual *.py files
         # found in the site-packages directory
-        for path in find(root, "*.py", recursive=False):
+        for path in fs.find(root, "*.py", recursive=False):
             modules.append(
                 path.replace(root + os.sep, "", 1).replace(".py", "").replace("/", ".")
             )
@@ -160,6 +89,208 @@ def skip_modules(self):
         """
         return []

+    def view_file_conflicts(self, view, merge_map):
+        """Report all file conflicts, excepting special cases for python.
+        Specifically, this does not report errors for duplicate
+        __init__.py files for packages in the same namespace.
+        """
+        conflicts = list(dst for src, dst in merge_map.items() if os.path.exists(dst))
+
+        if conflicts and self.py_namespace:
+            ext_map = view.extensions_layout.extension_map(self.extendee_spec)
+            namespaces = set(x.package.py_namespace for x in ext_map.values())
+            namespace_re = r"site-packages/{0}/__init__.py".format(self.py_namespace)
+            find_namespace = lang.match_predicate(namespace_re)
+            if self.py_namespace in namespaces:
+                conflicts = list(x for x in conflicts if not find_namespace(x))
+
+        return conflicts
+
+    def add_files_to_view(self, view, merge_map, skip_if_exists=True):
+        bin_dir = self.spec.prefix.bin
+        python_prefix = self.extendee_spec.prefix
+        python_is_external = self.extendee_spec.external
+        global_view = fs.same_path(python_prefix, view.get_projection_for_spec(self.spec))
+        for src, dst in merge_map.items():
+            if os.path.exists(dst):
+                continue
+            elif global_view or not fs.path_contains_subdirectory(src, bin_dir):
+                view.link(src, dst)
+            elif not os.path.islink(src):
+                shutil.copy2(src, dst)
+                is_script = fs.is_nonsymlink_exe_with_shebang(src)
+                if is_script and not python_is_external:
+                    fs.filter_file(
+                        python_prefix,
+                        os.path.abspath(view.get_projection_for_spec(self.spec)),
+                        dst,
+                    )
+            else:
+                orig_link_target = os.path.realpath(src)
+                new_link_target = os.path.abspath(merge_map[orig_link_target])
+                view.link(new_link_target, dst)
+
+    def remove_files_from_view(self, view, merge_map):
+        ignore_namespace = False
+        if self.py_namespace:
+            ext_map = view.extensions_layout.extension_map(self.extendee_spec)
+            remaining_namespaces = set(
+                spec.package.py_namespace for name, spec in ext_map.items() if name != self.name
+            )
+            if self.py_namespace in remaining_namespaces:
+                namespace_init = lang.match_predicate(
+                    r"site-packages/{0}/__init__.py".format(self.py_namespace)
+                )
+                ignore_namespace = True
+
+        bin_dir = self.spec.prefix.bin
+        global_view = self.extendee_spec.prefix == view.get_projection_for_spec(self.spec)
+
+        to_remove = []
+        for src, dst in merge_map.items():
+            if ignore_namespace and namespace_init(dst):
+                continue
+
+            if global_view or not fs.path_contains_subdirectory(src, bin_dir):
+                to_remove.append(dst)
+            else:
+                os.remove(dst)
+
+        view.remove_files(to_remove)
+
+    def test(self):
+        """Attempts to import modules of the installed package."""
+
+        # Make sure we are importing the installed modules,
+        # not the ones in the source directory
+        for module in self.import_modules:
+            self.run_test(
+                inspect.getmodule(self).python.path,
+                ["-c", "import {0}".format(module)],
+                purpose="checking import of {0}".format(module),
+                work_dir="spack-test",
+            )
+
+
+class PythonPackage(PythonExtension):
+    """Specialized class for packages that are built using pip."""
+
+    #: Package name, version, and extension on PyPI
+    pypi = None  # type: Optional[str]
+
+    maintainers = ["adamjstewart", "pradyunsg"]
+
+    # To be used in UI queries that require to know which
+    # build-system class we are using
+    build_system_class = "PythonPackage"
+    #: Legacy buildsystem attribute used to deserialize and install old specs
+    legacy_buildsystem = "python_pip"
+
+    #: Callback names for install-time test
+    install_time_test_callbacks = ["test"]
+
+    build_system("python_pip")
+
+    with spack.multimethod.when("build_system=python_pip"):
+        extends("python")
+        depends_on("py-pip", type="build")
+        # FIXME: technically wheel is only needed when building from source, not when
+        # installing a downloaded wheel, but I don't want to add wheel as a dep to every
+        # package manually
+        depends_on("py-wheel", type="build")
+
+    py_namespace = None  # type: Optional[str]
+
+    @lang.classproperty
+    def homepage(cls):
+        if cls.pypi:
+            name = cls.pypi.split("/")[0]
+            return "https://pypi.org/project/" + name + "/"
+
+    @lang.classproperty
+    def url(cls):
+        if cls.pypi:
+            return "https://files.pythonhosted.org/packages/source/" + cls.pypi[0] + "/" + cls.pypi
+
+    @lang.classproperty
+    def list_url(cls):
+        if cls.pypi:
+            name = cls.pypi.split("/")[0]
+            return "https://pypi.org/simple/" + name + "/"
+
+    @property
+    def headers(self):
+        """Discover header files in platlib."""
+
+        # Headers may be in either location
+        include = self.prefix.join(self.spec["python"].package.include)
+        platlib = self.prefix.join(self.spec["python"].package.platlib)
+        headers = fs.find_all_headers(include) + fs.find_all_headers(platlib)
+
+        if headers:
+            return headers
+
+        msg = "Unable to locate {} headers in {} or {}"
+        raise NoHeadersError(msg.format(self.spec.name, include, platlib))
+
+    @property
+    def libs(self):
+        """Discover libraries in platlib."""
+
+        # Remove py- prefix in package name
+        library = "lib" + self.spec.name[3:].replace("-", "?")
+        root = self.prefix.join(self.spec["python"].package.platlib)
+
+        for shared in [True, False]:
+            libs = fs.find_libraries(library, root, shared=shared, recursive=True)
+            if libs:
+                return libs
+
+        msg = "Unable to recursively locate {} libraries in {}"
+        raise NoLibrariesError(msg.format(self.spec.name, root))
+
+
+@spack.builder.builder("python_pip")
+class PythonPipBuilder(BaseBuilder):
+    phases = ("install",)
+
+    #: Names associated with package methods in the old build-system format
+    legacy_methods = ("test",)
+
+    #: Same as legacy_methods, but the signature is different
+    legacy_long_methods = ("install_options", "global_options", "config_settings")
+
+    #: Names associated with package attributes in the old build-system format
+    legacy_attributes = ("build_directory", "install_time_test_callbacks")
+
+    #: Callback names for install-time test
+    install_time_test_callbacks = ["test"]
+
+    @staticmethod
+    def std_args(cls):
+        return [
+            # Verbose
+            "-vvv",
+            # Disable prompting for input
+            "--no-input",
+            # Disable the cache
+            "--no-cache-dir",
+            # Don't check to see if pip is up-to-date
+            "--disable-pip-version-check",
+            # Install packages
+            "install",
+            # Don't install package dependencies
+            "--no-deps",
+            # Overwrite existing packages
+            "--ignore-installed",
+            # Use env vars like PYTHONPATH
+            "--no-build-isolation",
+            # Don't warn that prefix.bin is not in PATH
+            "--no-warn-script-location",
+            # Ignore the PyPI package index
+            "--no-index",
+        ]
+
     @property
     def build_directory(self):
         """The root directory of the Python package.
@@ -170,11 +301,10 @@ def build_directory(self):
         * ``setup.cfg``
         * ``setup.py``
         """
-        return self.stage.source_path
+        return self.pkg.stage.source_path

     def config_settings(self, spec, prefix):
         """Configuration settings to be passed to the PEP 517 build backend.

         Requires pip 22.1+, which requires Python 3.7+.

         Args:
@@ -211,10 +341,10 @@ def global_options(self, spec, prefix):
         """
         return []

-    def install(self, spec, prefix):
+    def install(self, pkg, spec, prefix):
         """Install everything from build directory."""

-        args = PythonPackage._std_args(self) + ["--prefix=" + prefix]
+        args = PythonPipBuilder.std_args(pkg) + ["--prefix=" + prefix]

         for key, value in self.config_settings(spec, prefix).items():
             if spec["py-pip"].version < Version("22.1"):
@@ -223,137 +353,21 @@ def install(self, spec, prefix):
                     "pip 22.1+. Add the following line to the package to fix this:\n\n"
                     '    depends_on("py-pip@22.1:", type="build")'.format(spec.name)
                 )

             args.append("--config-settings={}={}".format(key, value))

         for option in self.install_options(spec, prefix):
             args.append("--install-option=" + option)
         for option in self.global_options(spec, prefix):
             args.append("--global-option=" + option)

-        if self.stage.archive_file and self.stage.archive_file.endswith(".whl"):
-            args.append(self.stage.archive_file)
+        if pkg.stage.archive_file and pkg.stage.archive_file.endswith(".whl"):
+            args.append(pkg.stage.archive_file)
         else:
             args.append(".")

-        pip = inspect.getmodule(self).pip
-        with working_dir(self.build_directory):
+        pip = inspect.getmodule(pkg).pip
+        with fs.working_dir(self.build_directory):
             pip(*args)

-    @property
-    def headers(self):
-        """Discover header files in platlib."""
-
-        # Headers may be in either location
-        include = self.prefix.join(self.spec["python"].package.include)
-        platlib = self.prefix.join(self.spec["python"].package.platlib)
-        headers = find_all_headers(include) + find_all_headers(platlib)
-
-        if headers:
-            return headers
-
-        msg = "Unable to locate {} headers in {} or {}"
-        raise NoHeadersError(msg.format(self.spec.name, include, platlib))
-
-    @property
-    def libs(self):
-        """Discover libraries in platlib."""
-
-        # Remove py- prefix in package name
-        library = "lib" + self.spec.name[3:].replace("-", "?")
-        root = self.prefix.join(self.spec["python"].package.platlib)
-
-        for shared in [True, False]:
-            libs = find_libraries(library, root, shared=shared, recursive=True)
-            if libs:
-                return libs
-
-        msg = "Unable to recursively locate {} libraries in {}"
-        raise NoLibrariesError(msg.format(self.spec.name, root))
-
-    # Testing
-
-    def test(self):
-        """Attempts to import modules of the installed package."""
-
-        # Make sure we are importing the installed modules,
-        # not the ones in the source directory
-        for module in self.import_modules:
-            self.run_test(
-                inspect.getmodule(self).python.path,
-                ["-c", "import {0}".format(module)],
-                purpose="checking import of {0}".format(module),
-                work_dir="spack-test",
-            )
-
-    run_after("install")(PackageBase._run_default_install_time_test_callbacks)
-
-    # Check that self.prefix is there after installation
-    run_after("install")(PackageBase.sanity_check_prefix)
-
-    def view_file_conflicts(self, view, merge_map):
-        """Report all file conflicts, excepting special cases for python.
-        Specifically, this does not report errors for duplicate
-        __init__.py files for packages in the same namespace.
-        """
-        conflicts = list(dst for src, dst in merge_map.items() if os.path.exists(dst))
-
-        if conflicts and self.py_namespace:
-            ext_map = view.extensions_layout.extension_map(self.extendee_spec)
-            namespaces = set(x.package.py_namespace for x in ext_map.values())
-            namespace_re = r"site-packages/{0}/__init__.py".format(self.py_namespace)
-            find_namespace = match_predicate(namespace_re)
-            if self.py_namespace in namespaces:
-                conflicts = list(x for x in conflicts if not find_namespace(x))
-
-        return conflicts
-
-    def add_files_to_view(self, view, merge_map, skip_if_exists=True):
-        bin_dir = self.spec.prefix.bin
-        python_prefix = self.extendee_spec.prefix
-        python_is_external = self.extendee_spec.external
-        global_view = same_path(python_prefix, view.get_projection_for_spec(self.spec))
-        for src, dst in merge_map.items():
-            if os.path.exists(dst):
-                continue
-            elif global_view or not path_contains_subdirectory(src, bin_dir):
-                view.link(src, dst)
-            elif not os.path.islink(src):
-                shutil.copy2(src, dst)
-                is_script = is_nonsymlink_exe_with_shebang(src)
-                if is_script and not python_is_external:
-                    filter_file(
-                        python_prefix,
-                        os.path.abspath(view.get_projection_for_spec(self.spec)),
-                        dst,
-                    )
-            else:
-                orig_link_target = os.path.realpath(src)
-                new_link_target = os.path.abspath(merge_map[orig_link_target])
-                view.link(new_link_target, dst)
-
-    def remove_files_from_view(self, view, merge_map):
-        ignore_namespace = False
-        if self.py_namespace:
-            ext_map = view.extensions_layout.extension_map(self.extendee_spec)
-            remaining_namespaces = set(
-                spec.package.py_namespace for name, spec in ext_map.items() if name != self.name
-            )
-            if self.py_namespace in remaining_namespaces:
-                namespace_init = match_predicate(
-                    r"site-packages/{0}/__init__.py".format(self.py_namespace)
-                )
-                ignore_namespace = True
-
-        bin_dir = self.spec.prefix.bin
-        global_view = self.extendee_spec.prefix == view.get_projection_for_spec(self.spec)
-
-        to_remove = []
-        for src, dst in merge_map.items():
-            if ignore_namespace and namespace_init(dst):
-                continue
-
-            if global_view or not path_contains_subdirectory(src, bin_dir):
-                to_remove.append(dst)
-            else:
-                os.remove(dst)
-
-        view.remove_files(to_remove)
+    spack.builder.run_after("install")(execute_install_time_tests)
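A hedged sketch of the new split in use: PythonExtension carries the view and import machinery, PythonPackage carries the PyPI metadata and directives, and PythonPipBuilder owns the single install phase. All names below are placeholders; the only behavior assumed beyond the hunk is that ``config_settings``, being listed in ``legacy_long_methods``, can still be supplied by the package class.

# Hypothetical recipe sketch; names and values are illustrative.
from spack.package import *   # usual recipe preamble

class PyExampleLib(PythonPackage):
    """Illustrative pip-installed package."""

    # Placeholder; drives the homepage/url/list_url classproperties shown above.
    pypi = "example-lib/example-lib-1.0.tar.gz"

    version("1.0", sha256="<placeholder>")

    def config_settings(self, spec, prefix):
        # Only honored with py-pip 22.1+ (see PythonPipBuilder.install above),
        # so a recipe doing this would also constrain depends_on("py-pip@22.1:").
        return {"builddir": "build"}   # hypothetical PEP 517 backend setting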
@@ -2,82 +2,85 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import inspect

 from llnl.util.filesystem import working_dir

-from spack.directives import depends_on
-from spack.package_base import PackageBase, run_after
+import spack.builder
+import spack.package_base
+from spack.directives import build_system, depends_on
+
+from ._checks import BaseBuilder, execute_build_time_tests


-class QMakePackage(PackageBase):
+class QMakePackage(spack.package_base.PackageBase):
     """Specialized class for packages built using qmake.

     For more information on the qmake build system, see:
     http://doc.qt.io/qt-5/qmake-manual.html
-
-    This class provides three phases that can be overridden:
-
-    1. :py:meth:`~.QMakePackage.qmake`
-    2. :py:meth:`~.QMakePackage.build`
-    3. :py:meth:`~.QMakePackage.install`
-
-    They all have sensible defaults and for many packages the only thing
-    necessary will be to override :py:meth:`~.QMakePackage.qmake_args`.
     """

-    #: Phases of a qmake package
-    phases = ["qmake", "build", "install"]
-
     #: This attribute is used in UI queries that need to know the build
     #: system base class
     build_system_class = "QMakePackage"
+    #: Legacy buildsystem attribute used to deserialize and install old specs
+    legacy_buildsystem = "qmake"
+
+    build_system("qmake")
+
+    depends_on("qt", type="build", when="build_system=qmake")
+
+
+@spack.builder.builder("qmake")
+class QMakeBuilder(BaseBuilder):
+    """The qmake builder provides three phases that can be overridden:
+
+    1. :py:meth:`~.QMakeBuilder.qmake`
+    2. :py:meth:`~.QMakeBuilder.build`
+    3. :py:meth:`~.QMakeBuilder.install`
+
+    They all have sensible defaults and for many packages the only thing
+    necessary will be to override :py:meth:`~.QMakeBuilder.qmake_args`.
+    """
+
+    phases = ("qmake", "build", "install")
+
+    #: Names associated with package methods in the old build-system format
+    legacy_methods = ("qmake_args", "check")
+
+    #: Names associated with package attributes in the old build-system format
+    legacy_attributes = ("build_directory", "build_time_test_callbacks")

     #: Callback names for build-time test
     build_time_test_callbacks = ["check"]

-    depends_on("qt", type="build")
-
     @property
     def build_directory(self):
         """The directory containing the ``*.pro`` file."""
         return self.stage.source_path

     def qmake_args(self):
-        """Produces a list containing all the arguments that must be passed to
-        qmake
-        """
+        """List of arguments passed to qmake."""
         return []

-    def qmake(self, spec, prefix):
+    def qmake(self, pkg, spec, prefix):
         """Run ``qmake`` to configure the project and generate a Makefile."""
         with working_dir(self.build_directory):
-            inspect.getmodule(self).qmake(*self.qmake_args())
+            inspect.getmodule(self.pkg).qmake(*self.qmake_args())

-    def build(self, spec, prefix):
+    def build(self, pkg, spec, prefix):
         """Make the build targets"""
         with working_dir(self.build_directory):
-            inspect.getmodule(self).make()
+            inspect.getmodule(self.pkg).make()

-    def install(self, spec, prefix):
+    def install(self, pkg, spec, prefix):
         """Make the install targets"""
         with working_dir(self.build_directory):
-            inspect.getmodule(self).make("install")
+            inspect.getmodule(self.pkg).make("install")

-    # Tests
-
     def check(self):
-        """Searches the Makefile for a ``check:`` target and runs it if found."""
+        """Search the Makefile for a ``check:`` target and runs it if found."""
         with working_dir(self.build_directory):
             self._if_make_target_execute("check")

-    run_after("build")(PackageBase._run_default_build_time_test_callbacks)
-
-    # Check that self.prefix is there after installation
-    run_after("install")(PackageBase.sanity_check_prefix)
+    spack.builder.run_after("build")(execute_build_time_tests)
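A hedged sketch of a recipe against the refactored qmake support; the names, URL, and flag are placeholders. Since ``qmake_args`` appears in ``legacy_methods``, it is assumed a package-level definition still feeds QMakeBuilder.qmake.

# Hypothetical recipe sketch; illustrative only.
from spack.package import *   # usual recipe preamble

class QtExampleViewer(QMakePackage):
    """Illustrative qmake-based application."""

    homepage = "https://example.com/qt-example-viewer"
    url = "https://example.com/qt-example-viewer-2.1.tar.gz"

    version("2.1", sha256="<placeholder>")

    def qmake_args(self):
        # Passed straight through to ``qmake`` by QMakeBuilder.qmake().
        return ["CONFIG+=release"]   # ordinary qmake switch; adjust as needed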
@@ -3,30 +3,64 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import inspect
-from typing import Optional
+from typing import Optional, Tuple

 import llnl.util.lang as lang

 from spack.directives import extends
-from spack.package_base import PackageBase, run_after
+
+from .generic import GenericBuilder, Package
+
+
+class RBuilder(GenericBuilder):
+    """The R builder provides a single phase that can be overridden:
+
+    1. :py:meth:`~.RBuilder.install`
+
+    It has sensible defaults, and for many packages the only thing
+    necessary will be to add dependencies.
+    """
+
+    #: Names associated with package methods in the old build-system format
+    legacy_methods = (
+        "configure_args",
+        "configure_vars",
+    ) + GenericBuilder.legacy_methods  # type: Tuple[str, ...]
+
+    def configure_args(self):
+        """Arguments to pass to install via ``--configure-args``."""
+        return []
+
+    def configure_vars(self):
+        """Arguments to pass to install via ``--configure-vars``."""
+        return []
+
+    def install(self, pkg, spec, prefix):
+        """Installs an R package."""
+
+        config_args = self.configure_args()
+        config_vars = self.configure_vars()
+
+        args = ["--vanilla", "CMD", "INSTALL"]
+
+        if config_args:
+            args.append("--configure-args={0}".format(" ".join(config_args)))
+
+        if config_vars:
+            args.append("--configure-vars={0}".format(" ".join(config_vars)))
+
+        args.extend(["--library={0}".format(self.pkg.module.r_lib_dir), self.stage.source_path])
+
+        inspect.getmodule(self.pkg).R(*args)


-class RPackage(PackageBase):
+class RPackage(Package):
     """Specialized class for packages that are built using R.

     For more information on the R build system, see:
     https://stat.ethz.ch/R-manual/R-devel/library/utils/html/INSTALL.html
-
-    This class provides a single phase that can be overridden:
-
-    1. :py:meth:`~.RPackage.install`
-
-    It has sensible defaults, and for many packages the only thing
-    necessary will be to add dependencies
     """

-    phases = ["install"]
-
     # package attributes that can be expanded to set the homepage, url,
     # list_url, and git values
     # For CRAN packages
@@ -35,6 +69,8 @@ class RPackage(PackageBase):
     # For Bioconductor packages
     bioc = None  # type: Optional[str]

+    GenericBuilder = RBuilder
+
     maintainers = ["glennpj"]

     #: This attribute is used in UI queries that need to know the build
@@ -70,32 +106,3 @@ def list_url(cls):
     def git(self):
         if self.bioc:
             return "https://git.bioconductor.org/packages/" + self.bioc
-
-    def configure_args(self):
-        """Arguments to pass to install via ``--configure-args``."""
-        return []
-
-    def configure_vars(self):
-        """Arguments to pass to install via ``--configure-vars``."""
-        return []
-
-    def install(self, spec, prefix):
-        """Installs an R package."""
-
-        config_args = self.configure_args()
-        config_vars = self.configure_vars()
-
-        args = ["--vanilla", "CMD", "INSTALL"]
-
-        if config_args:
-            args.append("--configure-args={0}".format(" ".join(config_args)))
-
-        if config_vars:
-            args.append("--configure-vars={0}".format(" ".join(config_vars)))
-
-        args.extend(["--library={0}".format(self.module.r_lib_dir), self.stage.source_path])
-
-        inspect.getmodule(self).R(*args)
-
-    # Check that self.prefix is there after installation
-    run_after("install")(PackageBase.sanity_check_prefix)
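A hedged sketch of an R recipe under the new arrangement, where RPackage reuses the generic build system and RBuilder adds the ``--configure-args``/``--configure-vars`` handling. The package name, value, and flag are placeholders; ``configure_args`` on the package class is assumed to keep working through ``legacy_methods``.

# Hypothetical recipe sketch; illustrative only.
from spack.package import *   # usual recipe preamble

class RExampleAnalysis(RPackage):
    """Illustrative Bioconductor-style package."""

    # Expands the git URL via the ``git`` property shown above.
    bioc = "ExampleAnalysis"

    version("0.5", sha256="<placeholder>")

    def configure_args(self):
        # Joined and passed as --configure-args=... by RBuilder.install().
        return ["--with-example-lib=/usr"]   # hypothetical flag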
@@ -3,14 +3,15 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 import os
-from typing import Optional
+from typing import Optional, Tuple

+import llnl.util.filesystem as fs
 import llnl.util.lang as lang
 import llnl.util.tty as tty
-from llnl.util.filesystem import working_dir

+import spack.builder
 from spack.build_environment import SPACK_NO_PARALLEL_MAKE, determine_number_of_jobs
-from spack.directives import extends
+from spack.directives import build_system, extends
 from spack.package_base import PackageBase
 from spack.util.environment import env_flag
 from spack.util.executable import Executable, ProcessError
@@ -19,34 +20,52 @@
 class RacketPackage(PackageBase):
     """Specialized class for packages that are built using Racket's
     `raco pkg install` and `raco setup` commands.
-
-    This class provides the following phases that can be overridden:
-
-    * install
-    * setup
     """

     #: Package name, version, and extension on PyPI
     maintainers = ["elfprince13"]

-    # Default phases
-    phases = ["install"]
-
     # To be used in UI queries that require to know which
     # build-system class we are using
     build_system_class = "RacketPackage"
+    #: Legacy buildsystem attribute used to deserialize and install old specs
+    legacy_buildsystem = "racket"

-    extends("racket")
+    build_system("racket")
+
+    extends("racket", when="build_system=racket")

-    pkgs = False
-    subdirectory = None  # type: Optional[str]
     racket_name = None  # type: Optional[str]
     parallel = True

     @lang.classproperty
     def homepage(cls):
-        if cls.pkgs:
+        if cls.racket_name:
             return "https://pkgs.racket-lang.org/package/{0}".format(cls.racket_name)
+        return None
+
+
+@spack.builder.builder("racket")
+class RacketBuilder(spack.builder.Builder):
+    """The Racket builder provides an ``install`` phase that can be overridden."""
+
+    phases = ("install",)
+
+    #: Names associated with package methods in the old build-system format
+    legacy_methods = tuple()  # type: Tuple[str, ...]
+
+    #: Names associated with package attributes in the old build-system format
+    legacy_attributes = ("build_directory", "build_time_test_callbacks", "subdirectory")
+
+    #: Callback names for build-time test
+    build_time_test_callbacks = ["check"]
+
+    racket_name = None  # type: Optional[str]
+
+    @property
+    def subdirectory(self):
+        if self.racket_name:
+            return "pkgs/{0}".format(self.pkg.racket_name)
+        return None

     @property
     def build_directory(self):
@@ -55,25 +74,25 @@ def build_directory(self):
             ret = os.path.join(ret, self.subdirectory)
         return ret

-    def install(self, spec, prefix):
+    def install(self, pkg, spec, prefix):
         """Install everything from build directory."""
         raco = Executable("raco")
-        with working_dir(self.build_directory):
-            allow_parallel = self.parallel and (not env_flag(SPACK_NO_PARALLEL_MAKE))
+        with fs.working_dir(self.build_directory):
+            parallel = self.pkg.parallel and (not env_flag(SPACK_NO_PARALLEL_MAKE))
             args = [
                 "pkg",
                 "install",
                 "-t",
                 "dir",
                 "-n",
-                self.racket_name,
+                self.pkg.racket_name,
                 "--deps",
                 "fail",
                 "--ignore-implies",
                 "--copy",
                 "-i",
                 "-j",
-                str(determine_number_of_jobs(allow_parallel)),
+                str(determine_number_of_jobs(parallel)),
                 "--",
                 os.getcwd(),
             ]
@@ -82,9 +101,8 @@ def install(self, spec, prefix):
         except ProcessError:
             args.insert(-2, "--skip-installed")
             raco(*args)
-            tty.warn(
-                (
-                    "Racket package {0} was already installed, uninstalling via "
-                    "Spack may make someone unhappy!"
-                ).format(self.racket_name)
-            )
+            msg = (
+                "Racket package {0} was already installed, uninstalling via "
+                "Spack may make someone unhappy!"
+            )
+            tty.warn(msg.format(self.pkg.racket_name))
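A hedged sketch of a Racket recipe against RacketBuilder; the class name and git URL are placeholders. The only assumption beyond the hunk is that setting ``racket_name`` on the package is enough for the builder's ``subdirectory`` and ``raco pkg install -n`` handling shown above.

# Hypothetical recipe sketch; illustrative only.
from spack.package import *   # usual recipe preamble

class RktExampleLib(RacketPackage):
    """Illustrative Racket package."""

    git = "https://example.com/example/rkt-example-lib.git"

    racket_name = "example-lib"   # fed to ``raco pkg install -n example-lib``
    # parallel = False            # opt out of -j parallelism if the build needs it

    version("main", branch="main")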
@@ -2,35 +2,49 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import glob
 import inspect

-from spack.directives import extends
-from spack.package_base import PackageBase, run_after
+import spack.builder
+import spack.package_base
+from spack.directives import build_system, extends
+
+from ._checks import BaseBuilder


-class RubyPackage(PackageBase):
-    """Specialized class for building Ruby gems.
-
-    This class provides two phases that can be overridden if required:
-
-    #. :py:meth:`~.RubyPackage.build`
-    #. :py:meth:`~.RubyPackage.install`
-    """
+class RubyPackage(spack.package_base.PackageBase):
+    """Specialized class for building Ruby gems."""

     maintainers = ["Kerilk"]

-    #: Phases of a Ruby package
-    phases = ["build", "install"]
-
     #: This attribute is used in UI queries that need to know the build
     #: system base class
     build_system_class = "RubyPackage"
+    #: Legacy buildsystem attribute used to deserialize and install old specs
+    legacy_buildsystem = "ruby"

-    extends("ruby")
+    build_system("ruby")

-    def build(self, spec, prefix):
+    extends("ruby", when="build_system=ruby")
+
+
+@spack.builder.builder("ruby")
+class RubyBuilder(BaseBuilder):
+    """The Ruby builder provides two phases that can be overridden if required:
+
+    #. :py:meth:`~.RubyBuilder.build`
+    #. :py:meth:`~.RubyBuilder.install`
+    """
+
+    phases = ("build", "install")
+
+    #: Names associated with package methods in the old build-system format
+    legacy_methods = ()
+
+    #: Names associated with package attributes in the old build-system format
+    legacy_attributes = ()
+
+    def build(self, pkg, spec, prefix):
         """Build a Ruby gem."""

         # ruby-rake provides both rake.gemspec and Rakefile, but only
@@ -38,15 +52,15 @@ def build(self, spec, prefix):
         gemspecs = glob.glob("*.gemspec")
         rakefiles = glob.glob("Rakefile")
         if gemspecs:
-            inspect.getmodule(self).gem("build", "--norc", gemspecs[0])
+            inspect.getmodule(self.pkg).gem("build", "--norc", gemspecs[0])
         elif rakefiles:
-            jobs = inspect.getmodule(self).make_jobs
-            inspect.getmodule(self).rake("package", "-j{0}".format(jobs))
+            jobs = inspect.getmodule(self.pkg).make_jobs
+            inspect.getmodule(self.pkg).rake("package", "-j{0}".format(jobs))
         else:
             # Some Ruby packages only ship `*.gem` files, so nothing to build
             pass

-    def install(self, spec, prefix):
+    def install(self, pkg, spec, prefix):
         """Install a Ruby gem.

         The ruby package sets ``GEM_HOME`` to tell gem where to install to."""
@@ -56,9 +70,6 @@ def install(self, spec, prefix):
         # if --install-dir is not used, GEM_PATH is deleted from the
         # environement, and Gems required to build native extensions will
         # not be found. Those extensions are built during `gem install`.
-        inspect.getmodule(self).gem(
+        inspect.getmodule(self.pkg).gem(
             "install", "--norc", "--ignore-dependencies", "--install-dir", prefix, gems[0]
         )
-
-    # Check that self.prefix is there after installation
-    run_after("install")(PackageBase.sanity_check_prefix)
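A hedged sketch of a gem recipe against RubyBuilder; every identifier and URL below is a placeholder. Nothing normally needs overriding, since the builder picks between gemspec, Rakefile, and prebuilt ``*.gem`` handling on its own as shown above.

# Hypothetical recipe sketch; illustrative only.
from spack.package import *   # usual recipe preamble

class RubyExampleGem(RubyPackage):
    """Illustrative Ruby gem."""

    homepage = "https://example.com/ruby-example-gem"
    url = "https://example.com/example-gem-3.2.0.tar.gz"

    version("3.2.0", sha256="<placeholder>")

    depends_on("ruby@2.7:", type=("build", "run"))   # hypothetical constraint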
@@ -2,63 +2,75 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import inspect

-from spack.directives import depends_on
-from spack.package_base import PackageBase, run_after
+import spack.builder
+import spack.package_base
+from spack.directives import build_system, depends_on
+
+from ._checks import BaseBuilder, execute_build_time_tests


-class SConsPackage(PackageBase):
+class SConsPackage(spack.package_base.PackageBase):
     """Specialized class for packages built using SCons.

     See http://scons.org/documentation.html for more information.
-
-    This class provides the following phases that can be overridden:
-
-    1. :py:meth:`~.SConsPackage.build`
-    2. :py:meth:`~.SConsPackage.install`
-
-    Packages that use SCons as a build system are less uniform than packages
-    that use other build systems. Developers can add custom subcommands or
-    variables that control the build. You will likely need to override
-    :py:meth:`~.SConsPackage.build_args` to pass the appropriate variables.
     """

-    #: Phases of a SCons package
-    phases = ["build", "install"]
-
     #: To be used in UI queries that require to know which
     #: build-system class we are using
     build_system_class = "SConsPackage"

     #: Callback names for build-time test
     build_time_test_callbacks = ["build_test"]
+    #: Legacy buildsystem attribute used to deserialize and install old specs
+    legacy_buildsystem = "scons"

-    depends_on("scons", type="build")
+    build_system("scons")

-    def build_args(self, spec, prefix):
+    depends_on("scons", type="build", when="build_system=scons")
+
+
+@spack.builder.builder("scons")
+class SConsBuilder(BaseBuilder):
+    """The Scons builder provides the following phases that can be overridden:
+
+    1. :py:meth:`~.SConsBuilder.build`
+    2. :py:meth:`~.SConsBuilder.install`
+
+    Packages that use SCons as a build system are less uniform than packages that use
+    other build systems. Developers can add custom subcommands or variables that
+    control the build. You will likely need to override
+    :py:meth:`~.SConsBuilder.build_args` to pass the appropriate variables.
+    """
+
+    #: Phases of a SCons package
+    phases = ("build", "install")
+
+    #: Names associated with package methods in the old build-system format
+    legacy_methods = ("build_args", "install_args", "build_test")
+
+    #: Names associated with package attributes in the old build-system format
+    legacy_attributes = ()
+
+    def build_args(self):
         """Arguments to pass to build."""
         return []

-    def build(self, spec, prefix):
+    def build(self, pkg, spec, prefix):
         """Build the package."""
-        args = self.build_args(spec, prefix)
-
-        inspect.getmodule(self).scons(*args)
+        args = self.build_args()
+        inspect.getmodule(self.pkg).scons(*args)

-    def install_args(self, spec, prefix):
+    def install_args(self):
         """Arguments to pass to install."""
         return []

-    def install(self, spec, prefix):
+    def install(self, pkg, spec, prefix):
         """Install the package."""
-        args = self.install_args(spec, prefix)
-
-        inspect.getmodule(self).scons("install", *args)
+        args = self.install_args()
+        inspect.getmodule(self.pkg).scons("install", *args)

-    # Testing
-
     def build_test(self):
         """Run unit tests after build.
@@ -68,7 +80,4 @@ def build_test(self):
         """
         pass

-    run_after("build")(PackageBase._run_default_build_time_test_callbacks)
-
-    # Check that self.prefix is there after installation
-    run_after("install")(PackageBase.sanity_check_prefix)
+    spack.builder.run_after("build")(execute_build_time_tests)
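A hedged sketch of an SCons recipe; names, URL, and variables are placeholders. Note that the builder's ``build_args``/``install_args`` above no longer take ``spec, prefix``; whether a package-level override is adapted through ``legacy_methods`` with the old signature is an assumption, so the sketch uses the new, argument-free form.

# Hypothetical recipe sketch; illustrative only.
from spack.package import *   # usual recipe preamble

class ExampleSconsTool(SConsPackage):
    """Illustrative SCons-based package."""

    homepage = "https://example.com/example-scons-tool"
    url = "https://example.com/example-scons-tool-1.4.tar.gz"

    version("1.4", sha256="<placeholder>")

    def build_args(self):
        # Consumed by SConsBuilder.build() and passed to ``scons``.
        return ["STRICT=0"]                          # hypothetical SCons variable

    def install_args(self):
        return ["PREFIX={0}".format(self.prefix)]    # hypothetical install variable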
|||||||
@@ -2,7 +2,6 @@
|
|||||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||||
#
|
#
|
||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
|
||||||
import inspect
|
import inspect
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
@@ -10,28 +9,20 @@
|
|||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
from llnl.util.filesystem import find, join_path, working_dir
|
from llnl.util.filesystem import find, join_path, working_dir
|
||||||
|
|
||||||
from spack.directives import depends_on, extends
|
import spack.builder
|
||||||
from spack.package_base import PackageBase, run_after
|
import spack.package_base
|
||||||
|
from spack.directives import build_system, depends_on, extends
|
||||||
|
from spack.multimethod import when
|
||||||
|
|
||||||
|
from ._checks import BaseBuilder, execute_install_time_tests
|
||||||
|
|
||||||
|
|
||||||
class SIPPackage(PackageBase):
|
class SIPPackage(spack.package_base.PackageBase):
|
||||||
"""Specialized class for packages that are built using the
|
"""Specialized class for packages that are built using the
|
||||||
SIP build system. See https://www.riverbankcomputing.com/software/sip/intro
|
SIP build system. See https://www.riverbankcomputing.com/software/sip/intro
|
||||||
for more information.
|
for more information.
|
||||||
|
|
||||||
This class provides the following phases that can be overridden:
|
|
||||||
|
|
||||||
* configure
|
|
||||||
* build
|
|
||||||
* install
|
|
||||||
|
|
||||||
The configure phase already adds a set of default flags. To see more
|
|
||||||
options, run ``python configure.py --help``.
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
# Default phases
|
|
||||||
phases = ["configure", "build", "install"]
|
|
||||||
|
|
||||||
# To be used in UI queries that require to know which
|
# To be used in UI queries that require to know which
|
||||||
# build-system class we are using
|
# build-system class we are using
|
||||||
build_system_class = "SIPPackage"
|
build_system_class = "SIPPackage"
|
||||||
@@ -41,11 +32,15 @@ class SIPPackage(PackageBase):
|
|||||||
|
|
||||||
#: Callback names for install-time test
|
#: Callback names for install-time test
|
||||||
install_time_test_callbacks = ["test"]
|
install_time_test_callbacks = ["test"]
|
||||||
|
#: Legacy buildsystem attribute used to deserialize and install old specs
|
||||||
|
legacy_buildsystem = "sip"
|
||||||
|
|
||||||
extends("python")
|
build_system("sip")
|
||||||
|
|
||||||
depends_on("qt")
|
with when("build_system=sip"):
|
||||||
depends_on("py-sip")
|
extends("python")
|
||||||
|
depends_on("qt")
|
||||||
|
depends_on("py-sip")
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def import_modules(self):
|
def import_modules(self):
|
||||||
@@ -95,11 +90,51 @@ def python(self, *args, **kwargs):
|
|||||||
"""The python ``Executable``."""
|
"""The python ``Executable``."""
|
||||||
inspect.getmodule(self).python(*args, **kwargs)
|
inspect.getmodule(self).python(*args, **kwargs)
|
||||||
|
|
||||||
|
def test(self):
|
||||||
|
"""Attempts to import modules of the installed package."""
|
||||||
|
|
||||||
|
# Make sure we are importing the installed modules,
|
||||||
|
# not the ones in the source directory
|
||||||
|
for module in self.import_modules:
|
||||||
|
self.run_test(
|
||||||
|
inspect.getmodule(self).python.path,
|
||||||
|
["-c", "import {0}".format(module)],
|
||||||
|
purpose="checking import of {0}".format(module),
|
||||||
|
work_dir="spack-test",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@spack.builder.builder("sip")
|
||||||
|
class SIPBuilder(BaseBuilder):
|
||||||
|
"""The SIP builder provides the following phases that can be overridden:
|
||||||
|
|
||||||
|
* configure
|
||||||
|
* build
|
||||||
|
* install
|
||||||
|
|
||||||
|
The configure phase already adds a set of default flags. To see more
|
||||||
|
options, run ``python configure.py --help``.
|
||||||
|
"""
|
||||||
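A minimal sketch of the compatibility path this split preserves (package name, version, and flag are hypothetical): because ``configure_args`` is listed in ``legacy_methods`` just below, an old-style package can keep defining it on the package class and the adapter forwards it to this builder.

```python
from spack.package import *  # assumption: the usual package.py prelude


class PyExampleSip(SIPPackage):
    """Hypothetical SIP-based package, for illustration only."""

    version("1.0", sha256="0" * 64)  # placeholder checksum

    def configure_args(self):
        # Extra flags appended to the default `python configure.py` invocation
        return ["--verbose"]
```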
|
|
||||||
|
phases = ("configure", "build", "install")
|
||||||
|
|
||||||
|
#: Names associated with package methods in the old build-system format
|
||||||
|
legacy_methods = ("configure_file", "configure_args", "build_args", "install_args")
|
||||||
|
|
||||||
|
#: Names associated with package attributes in the old build-system format
|
||||||
|
legacy_attributes = (
|
||||||
|
"build_targets",
|
||||||
|
"install_targets",
|
||||||
|
"build_time_test_callbacks",
|
||||||
|
"install_time_test_callbacks",
|
||||||
|
"build_directory",
|
||||||
|
)
|
||||||
|
|
||||||
def configure_file(self):
|
def configure_file(self):
|
||||||
"""Returns the name of the configure file to use."""
|
"""Returns the name of the configure file to use."""
|
||||||
return "configure.py"
|
return "configure.py"
|
||||||
|
|
||||||
def configure(self, spec, prefix):
|
def configure(self, pkg, spec, prefix):
|
||||||
"""Configure the package."""
|
"""Configure the package."""
|
||||||
configure = self.configure_file()
|
configure = self.configure_file()
|
||||||
|
|
||||||
@@ -118,7 +153,7 @@ def configure(self, spec, prefix):
|
|||||||
"--bindir",
|
"--bindir",
|
||||||
prefix.bin,
|
prefix.bin,
|
||||||
"--destdir",
|
"--destdir",
|
||||||
inspect.getmodule(self).python_platlib,
|
inspect.getmodule(self.pkg).python_platlib,
|
||||||
]
|
]
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -128,53 +163,35 @@ def configure_args(self):
|
|||||||
"""Arguments to pass to configure."""
|
"""Arguments to pass to configure."""
|
||||||
return []
|
return []
|
||||||
|
|
||||||
def build(self, spec, prefix):
|
def build(self, pkg, spec, prefix):
|
||||||
"""Build the package."""
|
"""Build the package."""
|
||||||
args = self.build_args()
|
args = self.build_args()
|
||||||
|
|
||||||
inspect.getmodule(self).make(*args)
|
inspect.getmodule(self.pkg).make(*args)
|
||||||
|
|
||||||
def build_args(self):
|
def build_args(self):
|
||||||
"""Arguments to pass to build."""
|
"""Arguments to pass to build."""
|
||||||
return []
|
return []
|
||||||
|
|
||||||
def install(self, spec, prefix):
|
def install(self, pkg, spec, prefix):
|
||||||
"""Install the package."""
|
"""Install the package."""
|
||||||
args = self.install_args()
|
args = self.install_args()
|
||||||
|
|
||||||
inspect.getmodule(self).make("install", parallel=False, *args)
|
inspect.getmodule(self.pkg).make("install", parallel=False, *args)
|
||||||
|
|
||||||
def install_args(self):
|
def install_args(self):
|
||||||
"""Arguments to pass to install."""
|
"""Arguments to pass to install."""
|
||||||
return []
|
return []
|
||||||
|
|
||||||
# Testing
|
spack.builder.run_after("install")(execute_install_time_tests)
|
||||||
|
|
||||||
def test(self):
|
@spack.builder.run_after("install")
|
||||||
"""Attempts to import modules of the installed package."""
|
|
||||||
|
|
||||||
# Make sure we are importing the installed modules,
|
|
||||||
# not the ones in the source directory
|
|
||||||
for module in self.import_modules:
|
|
||||||
self.run_test(
|
|
||||||
inspect.getmodule(self).python.path,
|
|
||||||
["-c", "import {0}".format(module)],
|
|
||||||
purpose="checking import of {0}".format(module),
|
|
||||||
work_dir="spack-test",
|
|
||||||
)
|
|
||||||
|
|
||||||
run_after("install")(PackageBase._run_default_install_time_test_callbacks)
|
|
||||||
|
|
||||||
# Check that self.prefix is there after installation
|
|
||||||
run_after("install")(PackageBase.sanity_check_prefix)
|
|
||||||
|
|
||||||
@run_after("install")
|
|
||||||
def extend_path_setup(self):
|
def extend_path_setup(self):
|
||||||
# See github issue #14121 and PR #15297
|
# See github issue #14121 and PR #15297
|
||||||
module = self.spec["py-sip"].variants["module"].value
|
module = self.pkg.spec["py-sip"].variants["module"].value
|
||||||
if module != "sip":
|
if module != "sip":
|
||||||
module = module.split(".")[0]
|
module = module.split(".")[0]
|
||||||
with working_dir(inspect.getmodule(self).python_platlib):
|
with working_dir(inspect.getmodule(self.pkg).python_platlib):
|
||||||
with open(os.path.join(module, "__init__.py"), "a") as f:
|
with open(os.path.join(module, "__init__.py"), "a") as f:
|
||||||
f.write("from pkgutil import extend_path\n")
|
f.write("from pkgutil import extend_path\n")
|
||||||
f.write("__path__ = extend_path(__path__, __name__)\n")
|
f.write("__path__ = extend_path(__path__, __name__)\n")
|
||||||
|
|||||||
@@ -2,21 +2,38 @@
|
|||||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||||
#
|
#
|
||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
|
||||||
|
|
||||||
import inspect
|
import inspect
|
||||||
|
|
||||||
from llnl.util.filesystem import working_dir
|
from llnl.util.filesystem import working_dir
|
||||||
|
|
||||||
from spack.directives import depends_on
|
import spack.builder
|
||||||
from spack.package_base import PackageBase, run_after
|
import spack.package_base
|
||||||
|
from spack.directives import build_system, depends_on
|
||||||
|
|
||||||
|
from ._checks import BaseBuilder, execute_build_time_tests, execute_install_time_tests
|
||||||
|
|
||||||
|
|
||||||
class WafPackage(PackageBase):
|
class WafPackage(spack.package_base.PackageBase):
|
||||||
"""Specialized class for packages that are built using the
|
"""Specialized class for packages that are built using the
|
||||||
Waf build system. See https://waf.io/book/ for more information.
|
Waf build system. See https://waf.io/book/ for more information.
|
||||||
|
"""
|
||||||
|
|
||||||
This class provides the following phases that can be overridden:
|
# To be used in UI queries that require to know which
|
||||||
|
# build-system class we are using
|
||||||
|
build_system_class = "WafPackage"
|
||||||
|
#: Legacy buildsystem attribute used to deserialize and install old specs
|
||||||
|
legacy_buildsystem = "waf"
|
||||||
|
|
||||||
|
build_system("waf")
|
||||||
|
# Much like AutotoolsPackage does not require automake and autoconf
|
||||||
|
# to build, WafPackage does not require waf to build. It only requires
|
||||||
|
# python to run the waf build script.
|
||||||
|
depends_on("python@2.5:", type="build", when="build_system=waf")
|
||||||
|
|
||||||
|
|
||||||
|
@spack.builder.builder("waf")
|
||||||
|
class WafBuilder(BaseBuilder):
|
||||||
|
"""The WAF builder provides the following phases that can be overridden:
|
||||||
|
|
||||||
* configure
|
* configure
|
||||||
* build
|
* build
|
||||||
@@ -40,12 +57,25 @@ class WafPackage(PackageBase):
|
|||||||
function, which passes ``--prefix=/path/to/installation/prefix``.
|
function, which passes ``--prefix=/path/to/installation/prefix``.
|
||||||
"""
|
"""
|
||||||
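A hedged sketch of the split layout this file moves to (package name, version, flag, and module path are assumptions): metadata stays on the package class, while build customizations live in a builder defined alongside it, which takes precedence over this default one.

```python
import spack.build_systems.waf
from spack.package import *


class ExampleWaf(WafPackage):
    """Hypothetical Waf-based package, for illustration only."""

    version("1.0", sha256="0" * 64)  # placeholder checksum


class WafBuilder(spack.build_systems.waf.WafBuilder):
    def configure_args(self):
        # Appended after the default --prefix=<install prefix> argument
        return ["--enable-shared"]
```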
|
|
||||||
# Default phases
|
phases = ("configure", "build", "install")
|
||||||
phases = ["configure", "build", "install"]
|
|
||||||
|
|
||||||
# To be used in UI queries that require to know which
|
#: Names associated with package methods in the old build-system format
|
||||||
# build-system class we are using
|
legacy_methods = (
|
||||||
build_system_class = "WafPackage"
|
"build_test",
|
||||||
|
"install_test",
|
||||||
|
"configure_args",
|
||||||
|
"build_args",
|
||||||
|
"install_args",
|
||||||
|
"build_test",
|
||||||
|
"install_test",
|
||||||
|
)
|
||||||
|
|
||||||
|
#: Names associated with package attributes in the old build-system format
|
||||||
|
legacy_attributes = (
|
||||||
|
"build_time_test_callbacks",
|
||||||
|
"build_time_test_callbacks",
|
||||||
|
"build_directory",
|
||||||
|
)
|
||||||
|
|
||||||
# Callback names for build-time test
|
# Callback names for build-time test
|
||||||
build_time_test_callbacks = ["build_test"]
|
build_time_test_callbacks = ["build_test"]
|
||||||
@@ -53,11 +83,6 @@ class WafPackage(PackageBase):
|
|||||||
# Callback names for install-time test
|
# Callback names for install-time test
|
||||||
install_time_test_callbacks = ["install_test"]
|
install_time_test_callbacks = ["install_test"]
|
||||||
|
|
||||||
# Much like AutotoolsPackage does not require automake and autoconf
|
|
||||||
# to build, WafPackage does not require waf to build. It only requires
|
|
||||||
# python to run the waf build script.
|
|
||||||
depends_on("python@2.5:", type="build")
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def build_directory(self):
|
def build_directory(self):
|
||||||
"""The directory containing the ``waf`` file."""
|
"""The directory containing the ``waf`` file."""
|
||||||
@@ -65,18 +90,18 @@ def build_directory(self):
|
|||||||
|
|
||||||
def python(self, *args, **kwargs):
|
def python(self, *args, **kwargs):
|
||||||
"""The python ``Executable``."""
|
"""The python ``Executable``."""
|
||||||
inspect.getmodule(self).python(*args, **kwargs)
|
inspect.getmodule(self.pkg).python(*args, **kwargs)
|
||||||
|
|
||||||
def waf(self, *args, **kwargs):
|
def waf(self, *args, **kwargs):
|
||||||
"""Runs the waf ``Executable``."""
|
"""Runs the waf ``Executable``."""
|
||||||
jobs = inspect.getmodule(self).make_jobs
|
jobs = inspect.getmodule(self.pkg).make_jobs
|
||||||
|
|
||||||
with working_dir(self.build_directory):
|
with working_dir(self.build_directory):
|
||||||
self.python("waf", "-j{0}".format(jobs), *args, **kwargs)
|
self.python("waf", "-j{0}".format(jobs), *args, **kwargs)
|
||||||
|
|
||||||
def configure(self, spec, prefix):
|
def configure(self, pkg, spec, prefix):
|
||||||
"""Configures the project."""
|
"""Configures the project."""
|
||||||
args = ["--prefix={0}".format(self.prefix)]
|
args = ["--prefix={0}".format(self.pkg.prefix)]
|
||||||
args += self.configure_args()
|
args += self.configure_args()
|
||||||
|
|
||||||
self.waf("configure", *args)
|
self.waf("configure", *args)
|
||||||
@@ -85,7 +110,7 @@ def configure_args(self):
|
|||||||
"""Arguments to pass to configure."""
|
"""Arguments to pass to configure."""
|
||||||
return []
|
return []
|
||||||
|
|
||||||
def build(self, spec, prefix):
|
def build(self, pkg, spec, prefix):
|
||||||
"""Executes the build."""
|
"""Executes the build."""
|
||||||
args = self.build_args()
|
args = self.build_args()
|
||||||
|
|
||||||
@@ -95,7 +120,7 @@ def build_args(self):
|
|||||||
"""Arguments to pass to build."""
|
"""Arguments to pass to build."""
|
||||||
return []
|
return []
|
||||||
|
|
||||||
def install(self, spec, prefix):
|
def install(self, pkg, spec, prefix):
|
||||||
"""Installs the targets on the system."""
|
"""Installs the targets on the system."""
|
||||||
args = self.install_args()
|
args = self.install_args()
|
||||||
|
|
||||||
@@ -105,8 +130,6 @@ def install_args(self):
|
|||||||
"""Arguments to pass to install."""
|
"""Arguments to pass to install."""
|
||||||
return []
|
return []
|
||||||
|
|
||||||
# Testing
|
|
||||||
|
|
||||||
def build_test(self):
|
def build_test(self):
|
||||||
"""Run unit tests after build.
|
"""Run unit tests after build.
|
||||||
|
|
||||||
@@ -115,7 +138,7 @@ def build_test(self):
|
|||||||
"""
|
"""
|
||||||
pass
|
pass
|
||||||
|
|
||||||
run_after("build")(PackageBase._run_default_build_time_test_callbacks)
|
spack.builder.run_after("build")(execute_build_time_tests)
|
||||||
|
|
||||||
def install_test(self):
|
def install_test(self):
|
||||||
"""Run unit tests after install.
|
"""Run unit tests after install.
|
||||||
@@ -125,7 +148,4 @@ def install_test(self):
|
|||||||
"""
|
"""
|
||||||
pass
|
pass
|
||||||
|
|
||||||
run_after("install")(PackageBase._run_default_install_time_test_callbacks)
|
spack.builder.run_after("install")(execute_install_time_tests)
|
||||||
|
|
||||||
# Check that self.prefix is there after installation
|
|
||||||
run_after("install")(PackageBase.sanity_check_prefix)
|
|
||||||
|
|||||||
574
lib/spack/spack/builder.py
Normal file
@@ -0,0 +1,574 @@
|
|||||||
|
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||||
|
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
import collections
|
||||||
|
import copy
|
||||||
|
import functools
|
||||||
|
import inspect
|
||||||
|
from typing import List, Optional, Tuple
|
||||||
|
|
||||||
|
import six
|
||||||
|
|
||||||
|
import llnl.util.compat
|
||||||
|
|
||||||
|
import spack.build_environment
|
||||||
|
|
||||||
|
#: Builder classes, as registered by the "builder" decorator
|
||||||
|
BUILDER_CLS = {}
|
||||||
|
|
||||||
|
#: An object of this kind is a shared global state used to collect callbacks during
|
||||||
|
#: class definition time, and is flushed when the class object is created at the end
|
||||||
|
#: of the class definition
|
||||||
|
#:
|
||||||
|
#: Args:
|
||||||
|
#: attribute_name (str): name of the attribute that will be attached to the builder
|
||||||
|
#: callbacks (list): container used to temporarily aggregate the callbacks
|
||||||
|
CallbackTemporaryStage = collections.namedtuple(
|
||||||
|
"CallbackTemporaryStage", ["attribute_name", "callbacks"]
|
||||||
|
)
|
||||||
|
|
||||||
|
#: Shared global state to aggregate "@run_before" callbacks
|
||||||
|
_RUN_BEFORE = CallbackTemporaryStage(attribute_name="run_before_callbacks", callbacks=[])
|
||||||
|
#: Shared global state to aggregate "@run_after" callbacks
|
||||||
|
_RUN_AFTER = CallbackTemporaryStage(attribute_name="run_after_callbacks", callbacks=[])
|
||||||
|
|
||||||
|
#: Map id(pkg) to a builder, to avoid creating multiple
|
||||||
|
#: builders for the same package object.
|
||||||
|
_BUILDERS = {}
|
||||||
|
|
||||||
|
|
||||||
|
def builder(build_system_name):
|
||||||
|
"""Class decorator used to register the default builder
|
||||||
|
for a given build-system.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
build_system_name (str): name of the build-system
|
||||||
|
"""
|
||||||
|
|
||||||
|
def _decorator(cls):
|
||||||
|
cls.build_system = build_system_name
|
||||||
|
BUILDER_CLS[build_system_name] = cls
|
||||||
|
return cls
|
||||||
|
|
||||||
|
return _decorator
|
||||||
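A usage sketch of the decorator above (build-system name and builder class are hypothetical); it mirrors the ``@spack.builder.builder("sip")`` and ``@spack.builder.builder("waf")`` registrations elsewhere in this diff.

```python
import spack.builder


@spack.builder.builder("mybuild")
class MyBuildBuilder(spack.builder.Builder):
    """Hypothetical default builder registered for the 'mybuild' build system."""

    phases = ("build", "install")

    def build(self, pkg, spec, prefix):
        # Phase methods receive the package plus its spec and install prefix
        pass

    def install(self, pkg, spec, prefix):
        pass
```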
|
|
||||||
|
|
||||||
|
def create(pkg):
|
||||||
|
"""Given a package object with an associated concrete spec,
|
||||||
|
return the builder object that can install it.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
pkg (spack.package_base.PackageBase): package for which we want the builder
|
||||||
|
"""
|
||||||
|
if id(pkg) not in _BUILDERS:
|
||||||
|
_BUILDERS[id(pkg)] = _create(pkg)
|
||||||
|
return _BUILDERS[id(pkg)]
|
||||||
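A hedged sketch of how calling code might consume the result (the driver loop itself is not part of this diff): the returned builder behaves like a sequence of installation phases.

```python
import spack.builder


def run_install_phases(pkg):
    # pkg is a spack.package_base.PackageBase instance with a concrete spec
    builder = spack.builder.create(pkg)  # cached per package object, see above
    for phase in builder:                # yields InstallationPhase objects in order
        phase.execute()                  # run_before callbacks, the phase, run_after callbacks
```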
|
|
||||||
|
|
||||||
|
class _PhaseAdapter(object):
|
||||||
|
def __init__(self, builder, phase_fn):
|
||||||
|
self.builder = builder
|
||||||
|
self.phase_fn = phase_fn
|
||||||
|
|
||||||
|
def __call__(self, spec, prefix):
|
||||||
|
return self.phase_fn(self.builder.pkg, spec, prefix)
|
||||||
|
|
||||||
|
|
||||||
|
def _create(pkg):
|
||||||
|
"""Return a new builder object for the package object being passed as argument.
|
||||||
|
|
||||||
|
The function inspects the build-system used by the package object and tries to:
|
||||||
|
|
||||||
|
1. Return a custom builder, if any is defined in the same ``package.py`` file.
|
||||||
|
2. Return a customization of more generic builders, if any is defined in the
|
||||||
|
class hierarchy (look at AspellDictPackage for an example of that)
|
||||||
|
3. Return a run-time generated adapter builder otherwise
|
||||||
|
|
||||||
|
The run-time generated adapter builder is capable of adapting an old-style package
|
||||||
|
to the new architecture, where the installation procedure has been extracted from
|
||||||
|
the ``*Package`` hierarchy into a ``*Builder`` hierarchy. This means that the
|
||||||
|
adapter looks for attribute or method overrides preferably in the ``*Package``
|
||||||
|
before using the default builder implementation.
|
||||||
|
|
||||||
|
Note that in case a builder is explicitly coded in ``package.py``, no attempt is made
|
||||||
|
to look for build-related methods in the ``*Package``.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
pkg (spack.package_base.PackageBase): package object for which we need a builder
|
||||||
|
"""
|
||||||
|
package_module = inspect.getmodule(pkg)
|
||||||
|
package_buildsystem = buildsystem_name(pkg)
|
||||||
|
default_builder_cls = BUILDER_CLS[package_buildsystem]
|
||||||
|
builder_cls_name = default_builder_cls.__name__
|
||||||
|
builder_cls = getattr(package_module, builder_cls_name, None)
|
||||||
|
if builder_cls:
|
||||||
|
return builder_cls(pkg)
|
||||||
|
|
||||||
|
# Specialized version of a given buildsystem can subclass some
|
||||||
|
# base classes and specialize certain phases or methods or attributes.
|
||||||
|
# In that case they can store their builder class as a class level attribute.
|
||||||
|
# See e.g. AspellDictPackage as an example.
|
||||||
|
base_cls = getattr(pkg, builder_cls_name, default_builder_cls)
|
||||||
|
|
||||||
|
# From here on we define classes to construct a special builder that adapts to the
|
||||||
|
# old, single class, package format. The adapter forwards any call or access to an
|
||||||
|
# attribute related to the installation procedure to a package object wrapped in
|
||||||
|
# a class that falls-back on calling the base builder if no override is found on the
|
||||||
|
# package. The semantics should be the same as if the method in the base builder were still
|
||||||
|
# present in the base class of the package.
|
||||||
|
|
||||||
|
class _ForwardToBaseBuilder(object):
|
||||||
|
def __init__(self, wrapped_pkg_object, root_builder):
|
||||||
|
self.wrapped_package_object = wrapped_pkg_object
|
||||||
|
self.root_builder = root_builder
|
||||||
|
|
||||||
|
package_cls = type(wrapped_pkg_object)
|
||||||
|
wrapper_cls = type(self)
|
||||||
|
bases = (package_cls, wrapper_cls)
|
||||||
|
new_cls_name = package_cls.__name__ + "Wrapper"
|
||||||
|
new_cls = type(new_cls_name, bases, {})
|
||||||
|
new_cls.__module__ = package_cls.__module__
|
||||||
|
self.__class__ = new_cls
|
||||||
|
self.__dict__.update(wrapped_pkg_object.__dict__)
|
||||||
|
|
||||||
|
def __getattr__(self, item):
|
||||||
|
result = getattr(super(type(self.root_builder), self.root_builder), item)
|
||||||
|
if item in super(type(self.root_builder), self.root_builder).phases:
|
||||||
|
result = _PhaseAdapter(self.root_builder, result)
|
||||||
|
return result
|
||||||
|
|
||||||
|
def forward_method_to_getattr(fn_name):
|
||||||
|
def __forward(self, *args, **kwargs):
|
||||||
|
return self.__getattr__(fn_name)(*args, **kwargs)
|
||||||
|
|
||||||
|
return __forward
|
||||||
|
|
||||||
|
# Add fallback methods for the Package object to refer to the builder. If a method
|
||||||
|
# with the same name is defined in the Package, it will override this definition
|
||||||
|
# (when _ForwardToBaseBuilder is initialized)
|
||||||
|
for method_name in (
|
||||||
|
base_cls.phases
|
||||||
|
+ base_cls.legacy_methods
|
||||||
|
+ getattr(base_cls, "legacy_long_methods", tuple())
|
||||||
|
+ ("setup_build_environment", "setup_dependent_build_environment")
|
||||||
|
):
|
||||||
|
setattr(_ForwardToBaseBuilder, method_name, forward_method_to_getattr(method_name))
|
||||||
|
|
||||||
|
def forward_property_to_getattr(property_name):
|
||||||
|
def __forward(self):
|
||||||
|
return self.__getattr__(property_name)
|
||||||
|
|
||||||
|
return __forward
|
||||||
|
|
||||||
|
for attribute_name in base_cls.legacy_attributes:
|
||||||
|
setattr(
|
||||||
|
_ForwardToBaseBuilder,
|
||||||
|
attribute_name,
|
||||||
|
property(forward_property_to_getattr(attribute_name)),
|
||||||
|
)
|
||||||
|
|
||||||
|
class Adapter(six.with_metaclass(_PackageAdapterMeta, base_cls)):
|
||||||
|
def __init__(self, pkg):
|
||||||
|
# Deal with custom phases in packages here
|
||||||
|
if hasattr(pkg, "phases"):
|
||||||
|
self.phases = pkg.phases
|
||||||
|
for phase in self.phases:
|
||||||
|
setattr(Adapter, phase, _PackageAdapterMeta.phase_method_adapter(phase))
|
||||||
|
|
||||||
|
# Attribute containing the package wrapped in dispatcher with a `__getattr__`
|
||||||
|
# method that will forward certain calls to the default builder.
|
||||||
|
self.pkg_with_dispatcher = _ForwardToBaseBuilder(pkg, root_builder=self)
|
||||||
|
super(Adapter, self).__init__(pkg)
|
||||||
|
|
||||||
|
# These two methods don't follow the (self, spec, prefix) signature of phases nor
|
||||||
|
# the (self) signature of methods, so they are added explicitly to avoid using a
|
||||||
|
# catch-all (*args, **kwargs)
|
||||||
|
def setup_build_environment(self, env):
|
||||||
|
return self.pkg_with_dispatcher.setup_build_environment(env)
|
||||||
|
|
||||||
|
def setup_dependent_build_environment(self, env, dependent_spec):
|
||||||
|
return self.pkg_with_dispatcher.setup_dependent_build_environment(env, dependent_spec)
|
||||||
|
|
||||||
|
return Adapter(pkg)
|
||||||
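A hedged sketch of case 2 above (all names hypothetical, module path assumed): an intermediate package class can point this machinery at a specialized builder through a class-level attribute, the pattern the docstring attributes to AspellDictPackage.

```python
import inspect

import spack.build_systems.scons
from spack.package import *


class QuietSConsBuilder(spack.build_systems.scons.SConsBuilder):
    def build(self, pkg, spec, prefix):
        # Hypothetical specialization of the build phase
        inspect.getmodule(self.pkg).scons("--quiet")


class QuietSConsPackage(SConsPackage):
    """Hypothetical intermediate base class for a family of SCons packages."""

    #: Found via getattr(pkg, "SConsBuilder", default_builder_cls) above
    SConsBuilder = QuietSConsBuilder
```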
|
|
||||||
|
|
||||||
|
def buildsystem_name(pkg):
|
||||||
|
"""Given a package object with an associated concrete spec,
|
||||||
|
return the name of its build system.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
pkg (spack.package_base.PackageBase): package for which we want
|
||||||
|
the build system name
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
return pkg.spec.variants["build_system"].value
|
||||||
|
except KeyError:
|
||||||
|
# We are reading an old spec without the build_system variant
|
||||||
|
return pkg.legacy_buildsystem
|
||||||
|
|
||||||
|
|
||||||
|
class PhaseCallbacksMeta(type):
|
||||||
|
"""Permit to register arbitrary functions during class definition and run them
|
||||||
|
later, before or after a given install phase.
|
||||||
|
|
||||||
|
Each method decorated with ``run_before`` or ``run_after`` gets temporarily
|
||||||
|
stored in a global shared state when a class being defined is parsed by the Python
|
||||||
|
interpreter. At class definition time that temporary storage gets flushed and a list
|
||||||
|
of callbacks is attached to the class being defined.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __new__(mcs, name, bases, attr_dict):
|
||||||
|
for temporary_stage in (_RUN_BEFORE, _RUN_AFTER):
|
||||||
|
staged_callbacks = temporary_stage.callbacks
|
||||||
|
|
||||||
|
# We don't have callbacks in this class, move on
|
||||||
|
if not staged_callbacks:
|
||||||
|
continue
|
||||||
|
|
||||||
|
# If we are here we have callbacks. To get a complete list, get first what
|
||||||
|
# was attached to parent classes, then prepend what we have registered here.
|
||||||
|
#
|
||||||
|
# The order should be:
|
||||||
|
# 1. Callbacks are registered in order within the same class
|
||||||
|
# 2. Callbacks defined in derived classes precede those defined in base
|
||||||
|
# classes
|
||||||
|
for base in bases:
|
||||||
|
callbacks_from_base = getattr(base, temporary_stage.attribute_name, None)
|
||||||
|
if callbacks_from_base:
|
||||||
|
break
|
||||||
|
callbacks_from_base = callbacks_from_base or []
|
||||||
|
|
||||||
|
# Set the callbacks in this class and flush the temporary stage
|
||||||
|
attr_dict[temporary_stage.attribute_name] = staged_callbacks[:] + callbacks_from_base
|
||||||
|
del temporary_stage.callbacks[:]
|
||||||
|
|
||||||
|
return super(PhaseCallbacksMeta, mcs).__new__(mcs, name, bases, attr_dict)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def run_after(phase, when=None):
|
||||||
|
"""Decorator to register a function for running after a given phase.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
phase (str): phase after which the function must run.
|
||||||
|
when (str): condition under which the function is run (if None, it is always run).
|
||||||
|
"""
|
||||||
|
|
||||||
|
def _decorator(fn):
|
||||||
|
key = (phase, when)
|
||||||
|
item = (key, fn)
|
||||||
|
_RUN_AFTER.callbacks.append(item)
|
||||||
|
return fn
|
||||||
|
|
||||||
|
return _decorator
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def run_before(phase, when=None):
|
||||||
|
"""Decorator to register a function for running before a given phase.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
phase (str): phase before which the function must run.
|
||||||
|
when (str): condition under which the function is run (if None, it is always run).
|
||||||
|
"""
|
||||||
|
|
||||||
|
def _decorator(fn):
|
||||||
|
key = (phase, when)
|
||||||
|
item = (key, fn)
|
||||||
|
_RUN_BEFORE.callbacks.append(item)
|
||||||
|
return fn
|
||||||
|
|
||||||
|
return _decorator
|
||||||
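A hedged usage sketch of the two decorators above (builder name, variant, and method are hypothetical); it mirrors the ``spack.builder.run_after("install")(execute_install_time_tests)`` registrations elsewhere in this diff, with the optional ``when=`` condition.

```python
import spack.builder


@spack.builder.builder("checks")
class ChecksBuilder(spack.builder.Builder):
    phases = ("install",)

    def install(self, pkg, spec, prefix):
        pass

    @spack.builder.run_after("install", when="+tests")
    def check_install(self):
        # Staged by PhaseCallbacksMeta and attached to the class; runs after
        # the install phase only when the spec satisfies "+tests"
        pass
```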
|
|
||||||
|
|
||||||
|
class BuilderMeta(PhaseCallbacksMeta, type(llnl.util.compat.Sequence)): # type: ignore
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class _PackageAdapterMeta(BuilderMeta):
|
||||||
|
"""Metaclass to adapt old-style packages to the new architecture based on builders
|
||||||
|
for the installation phase.
|
||||||
|
|
||||||
|
This class does the necessary mangling of function arguments so that a call to a
|
||||||
|
builder object can delegate to a package object.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def phase_method_adapter(phase_name):
|
||||||
|
def _adapter(self, pkg, spec, prefix):
|
||||||
|
phase_fn = getattr(self.pkg_with_dispatcher, phase_name)
|
||||||
|
return phase_fn(spec, prefix)
|
||||||
|
|
||||||
|
return _adapter
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def legacy_long_method_adapter(method_name):
|
||||||
|
def _adapter(self, spec, prefix):
|
||||||
|
bind_method = getattr(self.pkg_with_dispatcher, method_name)
|
||||||
|
return bind_method(spec, prefix)
|
||||||
|
|
||||||
|
return _adapter
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def legacy_method_adapter(method_name):
|
||||||
|
def _adapter(self):
|
||||||
|
bind_method = getattr(self.pkg_with_dispatcher, method_name)
|
||||||
|
return bind_method()
|
||||||
|
|
||||||
|
return _adapter
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def legacy_attribute_adapter(attribute_name):
|
||||||
|
def _adapter(self):
|
||||||
|
return getattr(self.pkg_with_dispatcher, attribute_name)
|
||||||
|
|
||||||
|
return property(_adapter)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def combine_callbacks(pipeline_attribute_name):
|
||||||
|
"""This function combines callbacks from old-style packages with callbacks that might
|
||||||
|
be registered for the default builder.
|
||||||
|
|
||||||
|
It works by:
|
||||||
|
1. Extracting the callbacks from the old-style package
|
||||||
|
2. Transforming those callbacks by adding an adapter that receives a builder as argument
|
||||||
|
and calls the wrapped function with ``builder.pkg``
|
||||||
|
3. Combining the list of transformed callbacks with those that might be present in the
|
||||||
|
default builder
|
||||||
|
"""
|
||||||
|
|
||||||
|
def _adapter(self):
|
||||||
|
def unwrap_pkg(fn):
|
||||||
|
@functools.wraps(fn)
|
||||||
|
def _wrapped(builder):
|
||||||
|
return fn(builder.pkg_with_dispatcher)
|
||||||
|
|
||||||
|
return _wrapped
|
||||||
|
|
||||||
|
# Concatenate the current list with the one from package
|
||||||
|
callbacks_from_package = getattr(self.pkg, pipeline_attribute_name, [])
|
||||||
|
callbacks_from_package = [(key, unwrap_pkg(x)) for key, x in callbacks_from_package]
|
||||||
|
callbacks_from_builder = getattr(super(type(self), self), pipeline_attribute_name, [])
|
||||||
|
return callbacks_from_package + callbacks_from_builder
|
||||||
|
|
||||||
|
return property(_adapter)
|
||||||
|
|
||||||
|
def __new__(mcs, name, bases, attr_dict):
|
||||||
|
# Add ways to intercept methods and attribute calls and dispatch
|
||||||
|
# them first to a package object
|
||||||
|
default_builder_cls = bases[0]
|
||||||
|
for phase_name in default_builder_cls.phases:
|
||||||
|
attr_dict[phase_name] = _PackageAdapterMeta.phase_method_adapter(phase_name)
|
||||||
|
|
||||||
|
for method_name in default_builder_cls.legacy_methods:
|
||||||
|
attr_dict[method_name] = _PackageAdapterMeta.legacy_method_adapter(method_name)
|
||||||
|
|
||||||
|
# These exist e.g. for Python, see discussion in https://github.com/spack/spack/pull/32068
|
||||||
|
for method_name in getattr(default_builder_cls, "legacy_long_methods", []):
|
||||||
|
attr_dict[method_name] = _PackageAdapterMeta.legacy_long_method_adapter(method_name)
|
||||||
|
|
||||||
|
for attribute_name in default_builder_cls.legacy_attributes:
|
||||||
|
attr_dict[attribute_name] = _PackageAdapterMeta.legacy_attribute_adapter(
|
||||||
|
attribute_name
|
||||||
|
)
|
||||||
|
|
||||||
|
combine_callbacks = _PackageAdapterMeta.combine_callbacks
|
||||||
|
attr_dict[_RUN_BEFORE.attribute_name] = combine_callbacks(_RUN_BEFORE.attribute_name)
|
||||||
|
attr_dict[_RUN_AFTER.attribute_name] = combine_callbacks(_RUN_AFTER.attribute_name)
|
||||||
|
|
||||||
|
return super(_PackageAdapterMeta, mcs).__new__(mcs, name, bases, attr_dict)
|
||||||
|
|
||||||
|
|
||||||
|
class InstallationPhase(object):
|
||||||
|
"""Manages a single phase of the installation.
|
||||||
|
|
||||||
|
This descriptor stores at creation time the name of the method it should
|
||||||
|
search for execution. The method is retrieved at __get__ time, so that
|
||||||
|
it can be overridden by subclasses of whatever class declared the phases.
|
||||||
|
|
||||||
|
It also provides hooks to execute arbitrary callbacks before and after
|
||||||
|
the phase.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, name, builder):
|
||||||
|
self.name = name
|
||||||
|
self.builder = builder
|
||||||
|
self.phase_fn = self._select_phase_fn()
|
||||||
|
self.run_before = self._make_callbacks(_RUN_BEFORE.attribute_name)
|
||||||
|
self.run_after = self._make_callbacks(_RUN_AFTER.attribute_name)
|
||||||
|
|
||||||
|
def _make_callbacks(self, callbacks_attribute):
|
||||||
|
result = []
|
||||||
|
callbacks = getattr(self.builder, callbacks_attribute, [])
|
||||||
|
for (phase, condition), fn in callbacks:
|
||||||
|
# Same if it is for another phase
|
||||||
|
if phase != self.name:
|
||||||
|
continue
|
||||||
|
|
||||||
|
# If we have no condition or the callback satisfies a condition, register it
|
||||||
|
if condition is None or self.builder.pkg.spec.satisfies(condition):
|
||||||
|
result.append(fn)
|
||||||
|
return result
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
msg = '{0}: executing "{1}" phase'
|
||||||
|
return msg.format(self.builder, self.name)
|
||||||
|
|
||||||
|
def execute(self):
|
||||||
|
pkg = self.builder.pkg
|
||||||
|
self._on_phase_start(pkg)
|
||||||
|
|
||||||
|
for callback in self.run_before:
|
||||||
|
callback(self.builder)
|
||||||
|
|
||||||
|
self.phase_fn(pkg, pkg.spec, pkg.prefix)
|
||||||
|
|
||||||
|
for callback in self.run_after:
|
||||||
|
callback(self.builder)
|
||||||
|
|
||||||
|
self._on_phase_exit(pkg)
|
||||||
|
|
||||||
|
def _select_phase_fn(self):
|
||||||
|
phase_fn = getattr(self.builder, self.name, None)
|
||||||
|
|
||||||
|
if not phase_fn:
|
||||||
|
msg = (
|
||||||
|
'unexpected error: package "{0.fullname}" must implement an '
|
||||||
|
'"{1}" phase for the "{2}" build system'
|
||||||
|
)
|
||||||
|
raise RuntimeError(msg.format(self.builder.pkg, self.name, self.builder.build_system))
|
||||||
|
|
||||||
|
return phase_fn
|
||||||
|
|
||||||
|
def _on_phase_start(self, instance):
|
||||||
|
# If a phase has a matching stop_before_phase attribute,
|
||||||
|
# stop the installation process raising a StopPhase
|
||||||
|
if getattr(instance, "stop_before_phase", None) == self.name:
|
||||||
|
raise spack.build_environment.StopPhase(
|
||||||
|
"Stopping before '{0}' phase".format(self.name)
|
||||||
|
)
|
||||||
|
|
||||||
|
def _on_phase_exit(self, instance):
|
||||||
|
# If a phase has a matching last_phase attribute,
|
||||||
|
# stop the installation process raising a StopPhase
|
||||||
|
if getattr(instance, "last_phase", None) == self.name:
|
||||||
|
raise spack.build_environment.StopPhase("Stopping at '{0}' phase".format(self.name))
|
||||||
|
|
||||||
|
def copy(self):
|
||||||
|
return copy.deepcopy(self)
|
||||||
|
|
||||||
|
|
||||||
|
class Builder(six.with_metaclass(BuilderMeta, llnl.util.compat.Sequence)):
|
||||||
|
"""A builder is a class that, given a package object (i.e. associated with
|
||||||
|
concrete spec), knows how to install it.
|
||||||
|
|
||||||
|
The builder behaves like a sequence, and when iterated over returns the
|
||||||
|
"phases" of the installation in the correct order.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
pkg (spack.package_base.PackageBase): package object to be built
|
||||||
|
"""
|
||||||
|
|
||||||
|
#: Sequence of phases. Must be defined in derived classes
|
||||||
|
phases = () # type: Tuple[str, ...]
|
||||||
|
#: Build system name. Must also be defined in derived classes.
|
||||||
|
build_system = None # type: Optional[str]
|
||||||
|
|
||||||
|
legacy_methods = () # type: Tuple[str, ...]
|
||||||
|
legacy_attributes = () # type: Tuple[str, ...]
|
||||||
|
|
||||||
|
#: List of glob expressions. Each expression must either be
|
||||||
|
#: absolute or relative to the package source path.
|
||||||
|
#: Matching artifacts found at the end of the build process will be
|
||||||
|
#: copied in the same directory tree as _spack_build_logfile and
|
||||||
|
#: _spack_build_envfile.
|
||||||
|
archive_files = [] # type: List[str]
|
||||||
|
|
||||||
|
def __init__(self, pkg):
|
||||||
|
self.pkg = pkg
|
||||||
|
self.callbacks = {}
|
||||||
|
for phase in self.phases:
|
||||||
|
self.callbacks[phase] = InstallationPhase(phase, self)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def spec(self):
|
||||||
|
return self.pkg.spec
|
||||||
|
|
||||||
|
@property
|
||||||
|
def stage(self):
|
||||||
|
return self.pkg.stage
|
||||||
|
|
||||||
|
@property
|
||||||
|
def prefix(self):
|
||||||
|
return self.pkg.prefix
|
||||||
|
|
||||||
|
def test(self):
|
||||||
|
# Defer tests to virtual and concrete packages
|
||||||
|
pass
|
||||||
|
|
||||||
|
def setup_build_environment(self, env):
|
||||||
|
"""Sets up the build environment for a package.
|
||||||
|
|
||||||
|
This method will be called before the current package prefix exists in
|
||||||
|
Spack's store.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
env (spack.util.environment.EnvironmentModifications): environment
|
||||||
|
modifications to be applied when the package is built. Package authors
|
||||||
|
can call methods on it to alter the build environment.
|
||||||
|
"""
|
||||||
|
if not hasattr(super(Builder, self), "setup_build_environment"):
|
||||||
|
return
|
||||||
|
super(Builder, self).setup_build_environment(env)
|
||||||
|
|
||||||
|
def setup_dependent_build_environment(self, env, dependent_spec):
|
||||||
|
"""Sets up the build environment of packages that depend on this one.
|
||||||
|
|
||||||
|
This is similar to ``setup_build_environment``, but it is used to
|
||||||
|
modify the build environments of packages that *depend* on this one.
|
||||||
|
|
||||||
|
This gives packages like Python and others that follow the extension
|
||||||
|
model a way to implement common environment or compile-time settings
|
||||||
|
for dependencies.
|
||||||
|
|
||||||
|
This method will be called before the dependent package prefix exists
|
||||||
|
in Spack's store.
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
1. Installing python modules generally requires ``PYTHONPATH``
|
||||||
|
to point to the ``lib/pythonX.Y/site-packages`` directory in the
|
||||||
|
module's install prefix. This method could be used to set that
|
||||||
|
variable.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
env (spack.util.environment.EnvironmentModifications): environment
|
||||||
|
modifications to be applied when the dependent package is built.
|
||||||
|
Package authors can call methods on it to alter the build environment.
|
||||||
|
|
||||||
|
dependent_spec (spack.spec.Spec): the spec of the dependent package
|
||||||
|
about to be built. This allows the extendee (self) to query
|
||||||
|
the dependent's state. Note that *this* package's spec is
|
||||||
|
available as ``self.spec``
|
||||||
|
"""
|
||||||
|
if not hasattr(super(Builder, self), "setup_dependent_build_environment"):
|
||||||
|
return
|
||||||
|
super(Builder, self).setup_dependent_build_environment(env, dependent_spec)
|
||||||
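A hedged illustration of example 1 in the docstring above (builder name and prefix layout are hypothetical): a builder for a Python extension prepending the dependent's ``PYTHONPATH``.

```python
import spack.builder


@spack.builder.builder("pyext")
class PyExtBuilder(spack.builder.Builder):
    phases = ("install",)

    def install(self, pkg, spec, prefix):
        pass

    def setup_dependent_build_environment(self, env, dependent_spec):
        # env is a spack.util.environment.EnvironmentModifications object
        env.prepend_path("PYTHONPATH", self.pkg.prefix.lib)
```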
|
|
||||||
|
def __getitem__(self, idx):
|
||||||
|
key = self.phases[idx]
|
||||||
|
return self.callbacks[key]
|
||||||
|
|
||||||
|
def __len__(self):
|
||||||
|
return len(self.phases)
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
msg = "{0}({1})"
|
||||||
|
return msg.format(type(self).__name__, self.pkg.spec.format("{name}/{hash:7}"))
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
msg = '"{0}" builder for "{1}"'
|
||||||
|
return msg.format(type(self).build_system, self.pkg.spec.format("{name}/{hash:7}"))
|
||||||
|
|
||||||
|
|
||||||
|
# Export these names as standalone to be used in packages
|
||||||
|
run_after = PhaseCallbacksMeta.run_after
|
||||||
|
run_before = PhaseCallbacksMeta.run_before
|
||||||
@@ -17,7 +17,7 @@
|
|||||||
import time
|
import time
|
||||||
import zipfile
|
import zipfile
|
||||||
|
|
||||||
from six import iteritems
|
from six import iteritems, string_types
|
||||||
from six.moves.urllib.error import HTTPError, URLError
|
from six.moves.urllib.error import HTTPError, URLError
|
||||||
from six.moves.urllib.parse import urlencode
|
from six.moves.urllib.parse import urlencode
|
||||||
from six.moves.urllib.request import HTTPHandler, Request, build_opener
|
from six.moves.urllib.request import HTTPHandler, Request, build_opener
|
||||||
@@ -43,7 +43,6 @@
|
|||||||
from spack.error import SpackError
|
from spack.error import SpackError
|
||||||
from spack.reporters.cdash import CDash
|
from spack.reporters.cdash import CDash
|
||||||
from spack.reporters.cdash import build_stamp as cdash_build_stamp
|
from spack.reporters.cdash import build_stamp as cdash_build_stamp
|
||||||
from spack.spec import Spec
|
|
||||||
from spack.util.pattern import Bunch
|
from spack.util.pattern import Bunch
|
||||||
|
|
||||||
JOB_RETRY_CONDITIONS = [
|
JOB_RETRY_CONDITIONS = [
|
||||||
@@ -143,13 +142,6 @@ def _get_spec_string(spec):
|
|||||||
return spec.format("".join(format_elements))
|
return spec.format("".join(format_elements))
|
||||||
|
|
||||||
|
|
||||||
def _format_root_spec(spec, main_phase, strip_compiler):
|
|
||||||
if main_phase is False and strip_compiler is True:
|
|
||||||
return "{0}@{1} arch={2}".format(spec.name, spec.version, spec.architecture)
|
|
||||||
else:
|
|
||||||
return spec.dag_hash()
|
|
||||||
|
|
||||||
|
|
||||||
def _spec_deps_key(s):
|
def _spec_deps_key(s):
|
||||||
return "{0}/{1}".format(s.name, s.dag_hash(7))
|
return "{0}/{1}".format(s.name, s.dag_hash(7))
|
||||||
|
|
||||||
@@ -175,8 +167,7 @@ def _get_spec_dependencies(
|
|||||||
|
|
||||||
for entry in specs:
|
for entry in specs:
|
||||||
spec_labels[entry["label"]] = {
|
spec_labels[entry["label"]] = {
|
||||||
"spec": Spec(entry["spec"]),
|
"spec": entry["spec"],
|
||||||
"rootSpec": entry["root_spec"],
|
|
||||||
"needs_rebuild": entry["needs_rebuild"],
|
"needs_rebuild": entry["needs_rebuild"],
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -203,7 +194,7 @@ def stage_spec_jobs(specs, check_index_only=False, mirrors_to_check=None):
|
|||||||
and stages:
|
and stages:
|
||||||
|
|
||||||
spec_labels: A dictionary mapping the spec labels which are made of
|
spec_labels: A dictionary mapping the spec labels which are made of
|
||||||
(pkg-name/hash-prefix), to objects containing "rootSpec" and "spec"
|
(pkg-name/hash-prefix), to objects containing "spec" and "needs_rebuild"
|
||||||
keys. The root spec is the spec of which this spec is a dependency
|
keys. The root spec is the spec of which this spec is a dependency
|
||||||
and the spec is the formatted spec string for this spec.
|
and the spec is the formatted spec string for this spec.
|
||||||
|
|
||||||
@@ -318,17 +309,14 @@ def _compute_spec_deps(spec_list, check_index_only=False, mirrors_to_check=None)
|
|||||||
],
|
],
|
||||||
"specs": [
|
"specs": [
|
||||||
{
|
{
|
||||||
"root_spec": "readline@7.0%apple-clang@9.1.0 arch=darwin-...",
|
|
||||||
"spec": "readline@7.0%apple-clang@9.1.0 arch=darwin-highs...",
|
"spec": "readline@7.0%apple-clang@9.1.0 arch=darwin-highs...",
|
||||||
"label": "readline/ip6aiun"
|
"label": "readline/ip6aiun"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"root_spec": "readline@7.0%apple-clang@9.1.0 arch=darwin-...",
|
|
||||||
"spec": "ncurses@6.1%apple-clang@9.1.0 arch=darwin-highsi...",
|
"spec": "ncurses@6.1%apple-clang@9.1.0 arch=darwin-highsi...",
|
||||||
"label": "ncurses/y43rifz"
|
"label": "ncurses/y43rifz"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"root_spec": "readline@7.0%apple-clang@9.1.0 arch=darwin-...",
|
|
||||||
"spec": "pkgconf@1.5.4%apple-clang@9.1.0 arch=darwin-high...",
|
"spec": "pkgconf@1.5.4%apple-clang@9.1.0 arch=darwin-high...",
|
||||||
"label": "pkgconf/eg355zb"
|
"label": "pkgconf/eg355zb"
|
||||||
}
|
}
|
||||||
@@ -350,8 +338,6 @@ def append_dep(s, d):
|
|||||||
)
|
)
|
||||||
|
|
||||||
for spec in spec_list:
|
for spec in spec_list:
|
||||||
root_spec = spec
|
|
||||||
|
|
||||||
for s in spec.traverse(deptype=all):
|
for s in spec.traverse(deptype=all):
|
||||||
if s.external:
|
if s.external:
|
||||||
tty.msg("Will not stage external pkg: {0}".format(s))
|
tty.msg("Will not stage external pkg: {0}".format(s))
|
||||||
@@ -363,8 +349,7 @@ def append_dep(s, d):
|
|||||||
|
|
||||||
skey = _spec_deps_key(s)
|
skey = _spec_deps_key(s)
|
||||||
spec_labels[skey] = {
|
spec_labels[skey] = {
|
||||||
"spec": _get_spec_string(s),
|
"spec": s,
|
||||||
"root": root_spec,
|
|
||||||
"needs_rebuild": not up_to_date_mirrors,
|
"needs_rebuild": not up_to_date_mirrors,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -381,7 +366,6 @@ def append_dep(s, d):
|
|||||||
{
|
{
|
||||||
"label": spec_label,
|
"label": spec_label,
|
||||||
"spec": spec_holder["spec"],
|
"spec": spec_holder["spec"],
|
||||||
"root_spec": spec_holder["root"],
|
|
||||||
"needs_rebuild": spec_holder["needs_rebuild"],
|
"needs_rebuild": spec_holder["needs_rebuild"],
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
@@ -458,10 +442,6 @@ def _find_matching_config(spec, gitlab_ci):
|
|||||||
return runner_attributes if matched else None
|
return runner_attributes if matched else None
|
||||||
|
|
||||||
|
|
||||||
def _pkg_name_from_spec_label(spec_label):
|
|
||||||
return spec_label[: spec_label.index("/")]
|
|
||||||
|
|
||||||
|
|
||||||
def _format_job_needs(
|
def _format_job_needs(
|
||||||
phase_name,
|
phase_name,
|
||||||
strip_compilers,
|
strip_compilers,
|
||||||
@@ -535,38 +515,36 @@ def compute_affected_packages(rev1="HEAD^", rev2="HEAD"):
|
|||||||
return spack.repo.get_all_package_diffs("ARC", rev1=rev1, rev2=rev2)
|
return spack.repo.get_all_package_diffs("ARC", rev1=rev1, rev2=rev2)
|
||||||
|
|
||||||
|
|
||||||
def get_spec_filter_list(env, affected_pkgs, dependencies=True, dependents=True):
|
def get_spec_filter_list(env, affected_pkgs):
|
||||||
"""Given a list of package names, and assuming an active and
|
"""Given a list of package names and an active/concretized
|
||||||
concretized environment, return a set of concrete specs from
|
environment, return the set of all concrete specs from the
|
||||||
the environment corresponding to any of the affected pkgs (or
|
environment that could have been affected by changing the
|
||||||
optionally to any of their dependencies/dependents).
|
list of packages.
|
||||||
|
|
||||||
Arguments:
|
Arguments:
|
||||||
|
|
||||||
env (spack.environment.Environment): Active concrete environment
|
env (spack.environment.Environment): Active concrete environment
|
||||||
affected_pkgs (List[str]): Affected package names
|
affected_pkgs (List[str]): Affected package names
|
||||||
dependencies (bool): Include dependencies of affected packages
|
|
||||||
dependents (bool): Include dependents of affected packages
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
|
|
||||||
A list of concrete specs from the active environment including
|
A set of concrete specs from the active environment including
|
||||||
those associated with affected packages, and possible their
|
those associated with affected packages, their dependencies and
|
||||||
dependencies and dependents as well.
|
dependents, as well as their dependents' dependencies.
|
||||||
"""
|
"""
|
||||||
affected_specs = set()
|
affected_specs = set()
|
||||||
all_concrete_specs = env.all_specs()
|
all_concrete_specs = env.all_specs()
|
||||||
tty.debug("All concrete environment specs:")
|
tty.debug("All concrete environment specs:")
|
||||||
for s in all_concrete_specs:
|
for s in all_concrete_specs:
|
||||||
tty.debug(" {0}/{1}".format(s.name, s.dag_hash()[:7]))
|
tty.debug(" {0}/{1}".format(s.name, s.dag_hash()[:7]))
|
||||||
for pkg in affected_pkgs:
|
env_matches = [s for s in all_concrete_specs if s.name in frozenset(affected_pkgs)]
|
||||||
env_matches = [s for s in all_concrete_specs if s.name == pkg]
|
visited = set()
|
||||||
for match in env_matches:
|
dag_hash = lambda s: s.dag_hash()
|
||||||
affected_specs.add(match)
|
for match in env_matches:
|
||||||
if dependencies:
|
for parent in match.traverse(direction="parents", key=dag_hash):
|
||||||
affected_specs.update(match.traverse(direction="children", root=False))
|
affected_specs.update(
|
||||||
if dependents:
|
parent.traverse(direction="children", visited=visited, key=dag_hash)
|
||||||
affected_specs.update(match.traverse(direction="parents", root=False))
|
)
|
||||||
return affected_specs
|
return affected_specs
|
||||||
|
|
||||||
|
|
||||||
@@ -645,7 +623,7 @@ def generate_gitlab_ci_yaml(
|
|||||||
affected_specs = get_spec_filter_list(env, affected_pkgs)
|
affected_specs = get_spec_filter_list(env, affected_pkgs)
|
||||||
tty.debug("all affected specs:")
|
tty.debug("all affected specs:")
|
||||||
for s in affected_specs:
|
for s in affected_specs:
|
||||||
tty.debug(" {0}".format(s.name))
|
tty.debug(" {0}/{1}".format(s.name, s.dag_hash()[:7]))
|
||||||
|
|
||||||
# Allow overriding --prune-dag cli opt with environment variable
|
# Allow overriding --prune-dag cli opt with environment variable
|
||||||
prune_dag_override = os.environ.get("SPACK_PRUNE_UP_TO_DATE", None)
|
prune_dag_override = os.environ.get("SPACK_PRUNE_UP_TO_DATE", None)
|
||||||
@@ -854,7 +832,6 @@ def generate_gitlab_ci_yaml(
|
|||||||
phase_name = phase["name"]
|
phase_name = phase["name"]
|
||||||
strip_compilers = phase["strip-compilers"]
|
strip_compilers = phase["strip-compilers"]
|
||||||
|
|
||||||
main_phase = _is_main_phase(phase_name)
|
|
||||||
spec_labels, dependencies, stages = staged_phases[phase_name]
|
spec_labels, dependencies, stages = staged_phases[phase_name]
|
||||||
|
|
||||||
for stage_jobs in stages:
|
for stage_jobs in stages:
|
||||||
@@ -864,14 +841,16 @@ def generate_gitlab_ci_yaml(
|
|||||||
|
|
||||||
for spec_label in stage_jobs:
|
for spec_label in stage_jobs:
|
||||||
spec_record = spec_labels[spec_label]
|
spec_record = spec_labels[spec_label]
|
||||||
root_spec = spec_record["rootSpec"]
|
release_spec = spec_record["spec"]
|
||||||
pkg_name = _pkg_name_from_spec_label(spec_label)
|
|
||||||
release_spec = root_spec[pkg_name]
|
|
||||||
release_spec_dag_hash = release_spec.dag_hash()
|
release_spec_dag_hash = release_spec.dag_hash()
|
||||||
|
|
||||||
if prune_untouched_packages:
|
if prune_untouched_packages:
|
||||||
if release_spec not in affected_specs:
|
if release_spec not in affected_specs:
|
||||||
tty.debug("Pruning {0}, untouched by change.".format(release_spec.name))
|
tty.debug(
|
||||||
|
"Pruning {0}/{1}, untouched by change.".format(
|
||||||
|
release_spec.name, release_spec.dag_hash()[:7]
|
||||||
|
)
|
||||||
|
)
|
||||||
spec_record["needs_rebuild"] = False
|
spec_record["needs_rebuild"] = False
|
||||||
continue
|
continue
|
||||||
|
|
||||||
@@ -936,7 +915,6 @@ def generate_gitlab_ci_yaml(
|
|||||||
compiler_action = "INSTALL_MISSING"
|
compiler_action = "INSTALL_MISSING"
|
||||||
|
|
||||||
job_vars = {
|
job_vars = {
|
||||||
"SPACK_ROOT_SPEC": _format_root_spec(root_spec, main_phase, strip_compilers),
|
|
||||||
"SPACK_JOB_SPEC_DAG_HASH": release_spec_dag_hash,
|
"SPACK_JOB_SPEC_DAG_HASH": release_spec_dag_hash,
|
||||||
"SPACK_JOB_SPEC_PKG_NAME": release_spec.name,
|
"SPACK_JOB_SPEC_PKG_NAME": release_spec.name,
|
||||||
"SPACK_COMPILER_ACTION": compiler_action,
|
"SPACK_COMPILER_ACTION": compiler_action,
|
||||||
@@ -953,9 +931,7 @@ def generate_gitlab_ci_yaml(
|
|||||||
# purposes, so we only get the direct dependencies.
|
# purposes, so we only get the direct dependencies.
|
||||||
dep_jobs = []
|
dep_jobs = []
|
||||||
for dep_label in dependencies[spec_label]:
|
for dep_label in dependencies[spec_label]:
|
||||||
dep_pkg = _pkg_name_from_spec_label(dep_label)
|
dep_jobs.append(spec_labels[dep_label]["spec"])
|
||||||
dep_root = spec_labels[dep_label]["rootSpec"]
|
|
||||||
dep_jobs.append(dep_root[dep_pkg])
|
|
||||||
|
|
||||||
job_dependencies.extend(
|
job_dependencies.extend(
|
||||||
_format_job_needs(
|
_format_job_needs(
|
||||||
@@ -1039,7 +1015,11 @@ def generate_gitlab_ci_yaml(
|
|||||||
tty.debug(debug_msg)
|
tty.debug(debug_msg)
|
||||||
|
|
||||||
if prune_dag and not rebuild_spec:
|
if prune_dag and not rebuild_spec:
|
||||||
tty.debug("Pruning {0}, does not need rebuild.".format(release_spec.name))
|
tty.debug(
|
||||||
|
"Pruning {0}/{1}, does not need rebuild.".format(
|
||||||
|
release_spec.name, release_spec.dag_hash()
|
||||||
|
)
|
||||||
|
)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if broken_spec_urls is not None and release_spec_dag_hash in broken_spec_urls:
|
if broken_spec_urls is not None and release_spec_dag_hash in broken_spec_urls:
|
||||||
@@ -1482,64 +1462,6 @@ def configure_compilers(compiler_action, scope=None):
|
|||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
def get_concrete_specs(env, root_spec, job_name, compiler_action):
|
|
||||||
"""Build a dictionary of concrete specs relevant to a particular
|
|
||||||
rebuild job. This includes the root spec and the spec to be
|
|
||||||
rebuilt (which could be the same).
|
|
||||||
|
|
||||||
Arguments:
|
|
||||||
|
|
||||||
env (spack.environment.Environment): Activated spack environment
|
|
||||||
used to get concrete root spec by hash in case compiler_action
|
|
||||||
is anything other than FIND_ANY.
|
|
||||||
root_spec (str): If compiler_action is FIND_ANY root_spec is
|
|
||||||
a string representation which can be turned directly into
|
|
||||||
a spec, otherwise, it's a hash used to index the activated
|
|
||||||
spack environment.
|
|
||||||
job_name (str): Name of package to be built, used to index the
|
|
||||||
concrete root spec and produce the concrete spec to be
|
|
||||||
built.
|
|
||||||
compiler_action (str): Determines how to interpret the root_spec
|
|
||||||
parameter, either as a string representation or as a hash.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
|
|
||||||
.. code-block:: JSON
|
|
||||||
|
|
||||||
{
|
|
||||||
"root": "<spec>",
|
|
||||||
"<job-pkg-name>": "<spec>",
|
|
||||||
}
|
|
||||||
|
|
||||||
"""
|
|
||||||
spec_map = {
|
|
||||||
"root": None,
|
|
||||||
}
|
|
||||||
|
|
||||||
if compiler_action == "FIND_ANY":
|
|
||||||
# This corresponds to a bootstrapping phase where we need to
|
|
||||||
# rely on any available compiler to build the package (i.e. the
|
|
||||||
# compiler needed to be stripped from the spec when we generated
|
|
||||||
# the job), and thus we need to concretize the root spec again.
|
|
||||||
tty.debug("About to concretize {0}".format(root_spec))
|
|
||||||
concrete_root = Spec(root_spec).concretized()
|
|
||||||
tty.debug("Resulting concrete root: {0}".format(concrete_root))
|
|
||||||
else:
|
|
||||||
# in this case, either we're relying on Spack to install missing
|
|
||||||
# compiler bootstrapped in a previous phase, or else we only had one
|
|
||||||
# phase (like a site which already knows what compilers are available
|
|
||||||
# on its runners), so we don't want to concretize that root spec
|
|
||||||
# again. The reason we take this path in the first case (bootstrapped
|
|
||||||
# compiler), is that we can't concretize a spec at this point if we're
|
|
||||||
# going to ask spack to "install_missing_compilers".
|
|
||||||
concrete_root = env.specs_by_hash[root_spec]
|
|
||||||
|
|
||||||
spec_map["root"] = concrete_root
|
|
||||||
spec_map[job_name] = concrete_root[job_name]
|
|
||||||
|
|
||||||
return spec_map
|
|
||||||
|
|
||||||
|
|
||||||
def _push_mirror_contents(env, specfile_path, sign_binaries, mirror_url):
|
def _push_mirror_contents(env, specfile_path, sign_binaries, mirror_url):
|
||||||
"""Unchecked version of the public API, for easier mocking"""
|
"""Unchecked version of the public API, for easier mocking"""
|
||||||
unsigned = not sign_binaries
|
unsigned = not sign_binaries
|
||||||
@@ -2016,26 +1938,35 @@ def reproduce_ci_job(url, work_dir):
|
|||||||
print("".join(inst_list))
|
print("".join(inst_list))
|
||||||
|
|
||||||
|
|
||||||
def process_command(cmd, cmd_args, repro_dir):
|
def process_command(name, commands, repro_dir):
|
||||||
"""
|
"""
|
||||||
Create a script for and run the command. Copy the script to the
|
Create a script for and run the command. Copy the script to the
|
||||||
reproducibility directory.
|
reproducibility directory.
|
||||||
|
|
||||||
Arguments:
|
Arguments:
|
||||||
cmd (str): name of the command being processed
|
name (str): name of the command being processed
|
||||||
cmd_args (list): string arguments to pass to the command
|
commands (list): list of arguments for single command or list of lists of
|
||||||
|
arguments for multiple commands. No shell escape is performed.
|
||||||
repro_dir (str): Job reproducibility directory
|
repro_dir (str): Job reproducibility directory
|
||||||
|
|
||||||
Returns: the exit code from processing the command
|
Returns: the exit code from processing the command
|
||||||
"""
|
"""
|
||||||
tty.debug("spack {0} arguments: {1}".format(cmd, cmd_args))
|
tty.debug("spack {0} arguments: {1}".format(name, commands))
|
||||||
|
|
||||||
|
if len(commands) == 0 or isinstance(commands[0], string_types):
|
||||||
|
commands = [commands]
|
||||||
|
|
||||||
|
# Create a string [command 1] && [command 2] && ... && [command n] with commands
|
||||||
|
# quoted using double quotes.
|
||||||
|
args_to_string = lambda args: " ".join('"{}"'.format(arg) for arg in args)
|
||||||
|
full_command = " && ".join(map(args_to_string, commands))
|
||||||
|
|
||||||
# Write the command to a shell script
|
# Write the command to a shell script
|
||||||
script = "{0}.sh".format(cmd)
|
script = "{0}.sh".format(name)
|
||||||
with open(script, "w") as fd:
|
with open(script, "w") as fd:
|
||||||
fd.write("#!/bin/bash\n\n")
|
fd.write("#!/bin/sh\n\n")
|
||||||
fd.write("\n# spack {0} command\n".format(cmd))
|
fd.write("\n# spack {0} command\n".format(name))
|
||||||
fd.write(" ".join(['"{0}"'.format(i) for i in cmd_args]))
|
fd.write(full_command)
|
||||||
fd.write("\n")
|
fd.write("\n")
|
||||||
|
|
||||||
st = os.stat(script)
|
st = os.stat(script)
|
||||||
@@ -2047,15 +1978,15 @@ def process_command(cmd, cmd_args, repro_dir):
|
|||||||
# Run the generated install.sh shell script as if it were being run in
|
# Run the generated install.sh shell script as if it were being run in
|
||||||
# a login shell.
|
# a login shell.
|
||||||
try:
|
try:
|
||||||
cmd_process = subprocess.Popen(["bash", "./{0}".format(script)])
|
cmd_process = subprocess.Popen(["/bin/sh", "./{0}".format(script)])
|
||||||
cmd_process.wait()
|
cmd_process.wait()
|
||||||
exit_code = cmd_process.returncode
|
exit_code = cmd_process.returncode
|
||||||
except (ValueError, subprocess.CalledProcessError, OSError) as err:
|
except (ValueError, subprocess.CalledProcessError, OSError) as err:
|
||||||
tty.error("Encountered error running {0} script".format(cmd))
|
tty.error("Encountered error running {0} script".format(name))
|
||||||
tty.error(err)
|
tty.error(err)
|
||||||
exit_code = 1
|
exit_code = 1
|
||||||
|
|
||||||
tty.debug("spack {0} exited {1}".format(cmd, exit_code))
|
tty.debug("spack {0} exited {1}".format(name, exit_code))
|
||||||
return exit_code
|
return exit_code
|
||||||
|
|
||||||
|
|
||||||
@@ -2198,8 +2129,9 @@ def run_standalone_tests(**kwargs):
|
|||||||
|
|
||||||
test_args = [
|
test_args = [
|
||||||
"spack",
|
"spack",
|
||||||
"-d",
|
"--color=always",
|
||||||
"-v",
|
"--backtrace",
|
||||||
|
"--verbose",
|
||||||
"test",
|
"test",
|
||||||
"run",
|
"run",
|
||||||
]
|
]
|
||||||
|
|||||||
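A minimal, self-contained sketch of the command-joining behavior added to process_command above; the argv lists are illustrative, not taken from a real CI job.

commands = [
    ["spack", "-e", "/env", "bootstrap", "now"],
    ["spack", "-e", "/env", "install", "--no-add", "/abc123"],
]

# A single flat argv list would first be wrapped as [commands].
args_to_string = lambda args: " ".join('"{}"'.format(arg) for arg in args)
full_command = " && ".join(map(args_to_string, commands))

# "spack" "-e" "/env" "bootstrap" "now" && "spack" "-e" "/env" "install" "--no-add" "/abc123"
print(full_command)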
@@ -234,7 +234,8 @@ def parse_specs(args, **kwargs):
         msg = e.message
         if e.long_message:
             msg += e.long_message
-        if unquoted_flags:
+        # Unquoted flags will be read as a variant or hash
+        if unquoted_flags and ("variant" in msg or "hash" in msg):
             msg += "\n\n"
             msg += unquoted_flags.report()

@@ -5,8 +5,10 @@
 from __future__ import print_function

 import os.path
+import platform
 import shutil
 import tempfile
+import warnings

 import llnl.util.filesystem
 import llnl.util.tty
@@ -28,7 +30,7 @@


 # Tarball to be downloaded if binary packages are requested in a local mirror
-BINARY_TARBALL = "https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.2/bootstrap-buildcache.tar.gz"
+BINARY_TARBALL = "https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.4/bootstrap-buildcache.tar.gz"

 #: Subdirectory where to create the mirror
 LOCAL_MIRROR_DIR = "bootstrap_cache"
@@ -48,8 +50,8 @@
     },
 }

-CLINGO_JSON = "$spack/share/spack/bootstrap/github-actions-v0.2/clingo.json"
-GNUPG_JSON = "$spack/share/spack/bootstrap/github-actions-v0.2/gnupg.json"
+CLINGO_JSON = "$spack/share/spack/bootstrap/github-actions-v0.4/clingo.json"
+GNUPG_JSON = "$spack/share/spack/bootstrap/github-actions-v0.4/gnupg.json"

 # Metadata for a generated source mirror
 SOURCE_METADATA = {
@@ -73,6 +75,8 @@ def _add_scope_option(parser):
 def setup_parser(subparser):
     sp = subparser.add_subparsers(dest="subcommand")

+    sp.add_parser("now", help="Spack ready, right now!")
+
     status = sp.add_parser("status", help="get the status of Spack")
     status.add_argument(
         "--optional",
@@ -89,9 +93,11 @@ def setup_parser(subparser):

     enable = sp.add_parser("enable", help="enable bootstrapping")
     _add_scope_option(enable)
+    enable.add_argument("name", help="name of the source to be enabled", nargs="?", default=None)

     disable = sp.add_parser("disable", help="disable bootstrapping")
     _add_scope_option(disable)
+    disable.add_argument("name", help="name of the source to be disabled", nargs="?", default=None)

     reset = sp.add_parser("reset", help="reset bootstrapping configuration to Spack defaults")
     spack.cmd.common.arguments.add_common_arguments(reset, ["yes_to_all"])
@@ -105,11 +111,11 @@ def setup_parser(subparser):
     list = sp.add_parser("list", help="list all the sources of software to bootstrap Spack")
     _add_scope_option(list)

-    trust = sp.add_parser("trust", help="trust a bootstrapping source")
+    trust = sp.add_parser("trust", help="(DEPRECATED) trust a bootstrapping source")
     _add_scope_option(trust)
     trust.add_argument("name", help="name of the source to be trusted")

-    untrust = sp.add_parser("untrust", help="untrust a bootstrapping source")
+    untrust = sp.add_parser("untrust", help="(DEPRECATED) untrust a bootstrapping source")
     _add_scope_option(untrust)
     untrust.add_argument("name", help="name of the source to be untrusted")

@@ -137,9 +143,21 @@ def setup_parser(subparser):


 def _enable_or_disable(args):
-    # Set to True if we called "enable", otherwise set to false
     value = args.subcommand == "enable"
-    spack.config.set("bootstrap:enable", value, scope=args.scope)
+    if args.name is None:
+        # Set to True if we called "enable", otherwise set to false
+        old_value = spack.config.get("bootstrap:enable", scope=args.scope)
+        if old_value == value:
+            llnl.util.tty.msg("Bootstrapping is already {}d".format(args.subcommand))
+        else:
+            spack.config.set("bootstrap:enable", value, scope=args.scope)
+            llnl.util.tty.msg("Bootstrapping has been {}d".format(args.subcommand))
+        return
+
+    if value is True:
+        _trust(args)
+    else:
+        _untrust(args)


 def _reset(args):
@@ -170,6 +188,8 @@ def _reset(args):
         if os.path.exists(bootstrap_yaml):
             shutil.move(bootstrap_yaml, backup_file)

+    spack.config.config.clear_caches()
+

 def _root(args):
     if args.path:
@@ -194,30 +214,41 @@ def fmt(header, content):
             header_fmt = "@*b{{{0}:}} {1}"
             color.cprint(header_fmt.format(header, content))

-        trust_str = "@*y{UNKNOWN}"
+        trust_str = "@*y{DISABLED}"
         if trusted is True:
-            trust_str = "@*g{TRUSTED}"
+            trust_str = "@*g{ENABLED}"
         elif trusted is False:
-            trust_str = "@*r{UNTRUSTED}"
+            trust_str = "@*r{DISABLED}"

         fmt("Name", source["name"] + " " + trust_str)
         print()
-        fmt("  Type", source["type"])
-        print()
+        if trusted is True or args.verbose:
+            fmt("  Type", source["type"])
+            print()

         info_lines = ["\n"]
         for key, value in source.get("info", {}).items():
             info_lines.append(" " * 4 + "@*{{{0}}}: {1}\n".format(key, value))
         if len(info_lines) > 1:
             fmt("  Info", "".join(info_lines))

         description_lines = ["\n"]
         for line in source["description"].split("\n"):
             description_lines.append(" " * 4 + line + "\n")

         fmt("  Description", "".join(description_lines))

     trusted = spack.config.get("bootstrap:trusted", {})

+    def sort_fn(x):
+        x_trust = trusted.get(x["name"], None)
+        if x_trust is True:
+            return 0
+        elif x_trust is None:
+            return 1
+        return 2
+
+    sources = sorted(sources, key=sort_fn)
     for s in sources:
         _print_method(s, trusted.get(s["name"], None))
@@ -249,15 +280,27 @@ def _write_trust_state(args, value):
     spack.config.add("bootstrap:trusted:{0}:{1}".format(name, str(value)), scope=scope)


+def _deprecate_command(deprecated_cmd, suggested_cmd):
+    msg = (
+        "the 'spack bootstrap {} ...' command is deprecated and will be "
+        "removed in v0.20, use 'spack bootstrap {} ...' instead"
+    )
+    warnings.warn(msg.format(deprecated_cmd, suggested_cmd))
+
+
 def _trust(args):
+    if args.subcommand == "trust":
+        _deprecate_command("trust", "enable")
     _write_trust_state(args, value=True)
-    msg = '"{0}" is now trusted for bootstrapping'
+    msg = '"{0}" is now enabled for bootstrapping'
     llnl.util.tty.msg(msg.format(args.name))


 def _untrust(args):
+    if args.subcommand == "untrust":
+        _deprecate_command("untrust", "disable")
     _write_trust_state(args, value=False)
-    msg = '"{0}" is now untrusted and will not be used for bootstrapping'
+    msg = '"{0}" is now disabled and will not be used for bootstrapping'
     llnl.util.tty.msg(msg.format(args.name))


@@ -404,6 +447,14 @@ def write_metadata(subdir, metadata):
     print(instructions)


+def _now(args):
+    with spack.bootstrap.ensure_bootstrap_configuration():
+        if platform.system().lower() == "linux":
+            spack.bootstrap.ensure_patchelf_in_path_or_raise()
+        spack.bootstrap.ensure_clingo_importable_or_raise()
+        spack.bootstrap.ensure_gpg_in_path_or_raise()
+
+
 def bootstrap(parser, args):
     callbacks = {
         "status": _status,
@@ -417,5 +468,6 @@ def bootstrap(parser, args):
         "add": _add,
         "remove": _remove,
         "mirror": _mirror,
+        "now": _now,
     }
     callbacks[args.subcommand](args)
@@ -8,7 +8,6 @@
 import shutil
 import sys
 import tempfile
-import warnings

 import llnl.util.tty as tty

@@ -258,19 +257,6 @@ def setup_parser(subparser):
     )
     savespecfile.set_defaults(func=save_specfile_fn)

-    # Copy buildcache from some directory to another mirror url
-    copy = subparsers.add_parser("copy", help=copy_fn.__doc__)
-    copy.add_argument(
-        "--base-dir", default=None, help="Path to mirror directory (root of existing buildcache)"
-    )
-    copy.add_argument(
-        "--spec-file",
-        default=None,
-        help=("Path to spec json or yaml file representing buildcache entry to" + " copy"),
-    )
-    copy.add_argument("--destination-url", default=None, help="Destination mirror url")
-    copy.set_defaults(func=copy_fn)
-
     # Sync buildcache entries from one mirror to another
     sync = subparsers.add_parser("sync", help=sync_fn.__doc__)
     sync.add_argument(
@@ -549,78 +535,6 @@ def save_specfile_fn(args):
     sys.exit(0)


-def copy_fn(args):
-    """Copy a buildcache entry and all its files from one mirror, given as
-    '--base-dir', to some other mirror, specified as '--destination-url'.
-    The specific buildcache entry to be copied from one location to the
-    other is identified using the '--spec-file' argument."""
-    # TODO: Remove after v0.18.0 release
-    msg = (
-        '"spack buildcache copy" is deprecated and will be removed from '
-        "Spack starting in v0.19.0"
-    )
-    warnings.warn(msg)
-
-    if not args.spec_file:
-        tty.msg("No spec yaml provided, exiting.")
-        sys.exit(1)
-
-    if not args.base_dir:
-        tty.msg("No base directory provided, exiting.")
-        sys.exit(1)
-
-    if not args.destination_url:
-        tty.msg("No destination mirror url provided, exiting.")
-        sys.exit(1)
-
-    dest_url = args.destination_url
-
-    if dest_url[0:7] != "file://" and dest_url[0] != "/":
-        tty.msg('Only urls beginning with "file://" or "/" are supported ' + "by buildcache copy.")
-        sys.exit(1)
-
-    try:
-        with open(args.spec_file, "r") as fd:
-            spec = Spec.from_yaml(fd.read())
-    except Exception as e:
-        tty.debug(e)
-        tty.error("Unable to concrectize spec from yaml {0}".format(args.spec_file))
-        sys.exit(1)
-
-    dest_root_path = dest_url
-    if dest_url[0:7] == "file://":
-        dest_root_path = dest_url[7:]
-
-    build_cache_dir = bindist.build_cache_relative_path()
-
-    tarball_rel_path = os.path.join(build_cache_dir, bindist.tarball_path_name(spec, ".spack"))
-    tarball_src_path = os.path.join(args.base_dir, tarball_rel_path)
-    tarball_dest_path = os.path.join(dest_root_path, tarball_rel_path)
-
-    specfile_rel_path = os.path.join(build_cache_dir, bindist.tarball_name(spec, ".spec.json"))
-    specfile_src_path = os.path.join(args.base_dir, specfile_rel_path)
-    specfile_dest_path = os.path.join(dest_root_path, specfile_rel_path)
-
-    specfile_rel_path_yaml = os.path.join(
-        build_cache_dir, bindist.tarball_name(spec, ".spec.yaml")
-    )
-    specfile_src_path_yaml = os.path.join(args.base_dir, specfile_rel_path)
-    specfile_dest_path_yaml = os.path.join(dest_root_path, specfile_rel_path)
-
-    # Make sure directory structure exists before attempting to copy
-    os.makedirs(os.path.dirname(tarball_dest_path))
-
-    # Now copy the specfile and tarball files to the destination mirror
-    tty.msg("Copying {0}".format(tarball_rel_path))
-    shutil.copyfile(tarball_src_path, tarball_dest_path)
-
-    tty.msg("Copying {0}".format(specfile_rel_path))
-    shutil.copyfile(specfile_src_path, specfile_dest_path)
-
-    tty.msg("Copying {0}".format(specfile_rel_path_yaml))
-    shutil.copyfile(specfile_src_path_yaml, specfile_dest_path_yaml)
-
-
 def copy_buildcache_file(src_url, dest_url, local_path=None):
     """Copy from source url to destination url"""
     tmpdir = None
@@ -15,7 +15,7 @@
 import spack.spec
 import spack.stage
 import spack.util.crypto
-from spack.package_base import preferred_version
+from spack.package_base import deprecated_version, preferred_version
 from spack.util.naming import valid_fully_qualified_module_name
 from spack.version import VersionBase, ver

@@ -81,6 +81,9 @@ def checksum(parser, args):
     if versions:
         remote_versions = None
         for version in versions:
+            if deprecated_version(pkg, version):
+                tty.warn("Version {0} is deprecated".format(version))
+
             version = ver(version)
             if not isinstance(version, VersionBase):
                 tty.die(
@@ -101,7 +104,7 @@ def checksum(parser, args):
         url_dict = pkg.fetch_remote_versions()

     if not url_dict:
-        tty.die("Could not find any versions for {0}".format(pkg.name))
+        tty.die("Could not find any remote versions for {0}".format(pkg.name))

     version_lines = spack.stage.get_checksums_for_versions(
         url_dict,
|
|||||||
section = "build"
|
section = "build"
|
||||||
level = "long"
|
level = "long"
|
||||||
|
|
||||||
CI_REBUILD_INSTALL_BASE_ARGS = ["spack", "-d", "-v"]
|
SPACK_COMMAND = "spack"
|
||||||
|
MAKE_COMMAND = "make"
|
||||||
INSTALL_FAIL_CODE = 1
|
INSTALL_FAIL_CODE = 1
|
||||||
|
|
||||||
|
|
||||||
@@ -277,8 +278,8 @@ def ci_rebuild(args):
|
|||||||
ci_pipeline_id = get_env_var("CI_PIPELINE_ID")
|
ci_pipeline_id = get_env_var("CI_PIPELINE_ID")
|
||||||
ci_job_name = get_env_var("CI_JOB_NAME")
|
ci_job_name = get_env_var("CI_JOB_NAME")
|
||||||
signing_key = get_env_var("SPACK_SIGNING_KEY")
|
signing_key = get_env_var("SPACK_SIGNING_KEY")
|
||||||
root_spec = get_env_var("SPACK_ROOT_SPEC")
|
|
||||||
job_spec_pkg_name = get_env_var("SPACK_JOB_SPEC_PKG_NAME")
|
job_spec_pkg_name = get_env_var("SPACK_JOB_SPEC_PKG_NAME")
|
||||||
|
job_spec_dag_hash = get_env_var("SPACK_JOB_SPEC_DAG_HASH")
|
||||||
compiler_action = get_env_var("SPACK_COMPILER_ACTION")
|
compiler_action = get_env_var("SPACK_COMPILER_ACTION")
|
||||||
spack_pipeline_type = get_env_var("SPACK_PIPELINE_TYPE")
|
spack_pipeline_type = get_env_var("SPACK_PIPELINE_TYPE")
|
||||||
remote_mirror_override = get_env_var("SPACK_REMOTE_MIRROR_OVERRIDE")
|
remote_mirror_override = get_env_var("SPACK_REMOTE_MIRROR_OVERRIDE")
|
||||||
@@ -297,7 +298,6 @@ def ci_rebuild(args):
|
|||||||
|
|
||||||
# Debug print some of the key environment variables we should have received
|
# Debug print some of the key environment variables we should have received
|
||||||
tty.debug("pipeline_artifacts_dir = {0}".format(pipeline_artifacts_dir))
|
tty.debug("pipeline_artifacts_dir = {0}".format(pipeline_artifacts_dir))
|
||||||
tty.debug("root_spec = {0}".format(root_spec))
|
|
||||||
tty.debug("remote_mirror_url = {0}".format(remote_mirror_url))
|
tty.debug("remote_mirror_url = {0}".format(remote_mirror_url))
|
||||||
tty.debug("job_spec_pkg_name = {0}".format(job_spec_pkg_name))
|
tty.debug("job_spec_pkg_name = {0}".format(job_spec_pkg_name))
|
||||||
tty.debug("compiler_action = {0}".format(compiler_action))
|
tty.debug("compiler_action = {0}".format(compiler_action))
|
||||||
@@ -360,10 +360,11 @@ def ci_rebuild(args):
|
|||||||
mirror_msg = "artifact buildcache enabled, mirror url: {0}".format(pipeline_mirror_url)
|
mirror_msg = "artifact buildcache enabled, mirror url: {0}".format(pipeline_mirror_url)
|
||||||
tty.debug(mirror_msg)
|
tty.debug(mirror_msg)
|
||||||
|
|
||||||
# Whatever form of root_spec we got, use it to get a map giving us concrete
|
# Get the concrete spec to be built by this job.
|
||||||
# specs for this job and all of its dependencies.
|
try:
|
||||||
spec_map = spack_ci.get_concrete_specs(env, root_spec, job_spec_pkg_name, compiler_action)
|
job_spec = env.get_one_by_hash(job_spec_dag_hash)
|
||||||
job_spec = spec_map[job_spec_pkg_name]
|
except AssertionError:
|
||||||
|
tty.die("Could not find environment spec with hash {0}".format(job_spec_dag_hash))
|
||||||
|
|
||||||
job_spec_json_file = "{0}.json".format(job_spec_pkg_name)
|
job_spec_json_file = "{0}.json".format(job_spec_pkg_name)
|
||||||
job_spec_json_path = os.path.join(repro_dir, job_spec_json_file)
|
job_spec_json_path = os.path.join(repro_dir, job_spec_json_file)
|
||||||
@@ -427,17 +428,11 @@ def ci_rebuild(args):
|
|||||||
with open(job_spec_json_path, "w") as fd:
|
with open(job_spec_json_path, "w") as fd:
|
||||||
fd.write(job_spec.to_json(hash=ht.dag_hash))
|
fd.write(job_spec.to_json(hash=ht.dag_hash))
|
||||||
|
|
||||||
# Write the concrete root spec json into the reproduction directory
|
|
||||||
root_spec_json_path = os.path.join(repro_dir, "root.json")
|
|
||||||
with open(root_spec_json_path, "w") as fd:
|
|
||||||
fd.write(spec_map["root"].to_json(hash=ht.dag_hash))
|
|
||||||
|
|
||||||
# Write some other details to aid in reproduction into an artifact
|
# Write some other details to aid in reproduction into an artifact
|
||||||
repro_file = os.path.join(repro_dir, "repro.json")
|
repro_file = os.path.join(repro_dir, "repro.json")
|
||||||
repro_details = {
|
repro_details = {
|
||||||
"job_name": ci_job_name,
|
"job_name": ci_job_name,
|
||||||
"job_spec_json": job_spec_json_file,
|
"job_spec_json": job_spec_json_file,
|
||||||
"root_spec_json": "root.json",
|
|
||||||
"ci_project_dir": ci_project_dir,
|
"ci_project_dir": ci_project_dir,
|
||||||
}
|
}
|
||||||
with open(repro_file, "w") as fd:
|
with open(repro_file, "w") as fd:
|
||||||
@@ -515,42 +510,88 @@ def ci_rebuild(args):
|
|||||||
# No hash match anywhere means we need to rebuild spec
|
# No hash match anywhere means we need to rebuild spec
|
||||||
|
|
||||||
# Start with spack arguments
|
# Start with spack arguments
|
||||||
install_args = [base_arg for base_arg in CI_REBUILD_INSTALL_BASE_ARGS]
|
spack_cmd = [SPACK_COMMAND, "--color=always", "--backtrace", "--verbose"]
|
||||||
|
|
||||||
config = cfg.get("config")
|
config = cfg.get("config")
|
||||||
if not config["verify_ssl"]:
|
if not config["verify_ssl"]:
|
||||||
install_args.append("-k")
|
spack_cmd.append("-k")
|
||||||
|
|
||||||
install_args.extend(
|
install_args = []
|
||||||
[
|
|
||||||
"install",
|
|
||||||
"--show-log-on-error", # Print full log on fails
|
|
||||||
"--keep-stage",
|
|
||||||
"--use-buildcache",
|
|
||||||
"dependencies:only,package:never",
|
|
||||||
]
|
|
||||||
)
|
|
||||||
|
|
||||||
can_verify = spack_ci.can_verify_binaries()
|
can_verify = spack_ci.can_verify_binaries()
|
||||||
verify_binaries = can_verify and spack_is_pr_pipeline is False
|
verify_binaries = can_verify and spack_is_pr_pipeline is False
|
||||||
if not verify_binaries:
|
if not verify_binaries:
|
||||||
install_args.append("--no-check-signature")
|
install_args.append("--no-check-signature")
|
||||||
|
|
||||||
|
cdash_args = []
|
||||||
if cdash_handler:
|
if cdash_handler:
|
||||||
# Add additional arguments to `spack install` for CDash reporting.
|
# Add additional arguments to `spack install` for CDash reporting.
|
||||||
install_args.extend(cdash_handler.args())
|
cdash_args.extend(cdash_handler.args())
|
||||||
|
|
||||||
# A compiler action of 'FIND_ANY' means we are building a bootstrap
|
slash_hash = "/{}".format(job_spec.dag_hash())
|
||||||
# compiler or one of its deps.
|
deps_install_args = install_args
|
||||||
# TODO: when compilers are dependencies, we should include --no-add
|
root_install_args = install_args + [
|
||||||
if compiler_action != "FIND_ANY":
|
"--no-add",
|
||||||
install_args.append("--no-add")
|
"--keep-stage",
|
||||||
|
"--only=package",
|
||||||
|
"--use-buildcache=package:never,dependencies:only",
|
||||||
|
slash_hash,
|
||||||
|
]
|
||||||
|
|
||||||
# Identify spec to install by hash
|
# ["x", "y"] -> "'x' 'y'"
|
||||||
install_args.append("/{0}".format(job_spec.dag_hash()))
|
args_to_string = lambda args: " ".join("'{}'".format(arg) for arg in args)
|
||||||
|
|
||||||
|
commands = [
|
||||||
|
# apparently there's a race when spack bootstraps? do it up front once
|
||||||
|
[
|
||||||
|
SPACK_COMMAND,
|
||||||
|
"-e",
|
||||||
|
env.path,
|
||||||
|
"bootstrap",
|
||||||
|
"now",
|
||||||
|
],
|
||||||
|
[
|
||||||
|
SPACK_COMMAND,
|
||||||
|
"-e",
|
||||||
|
env.path,
|
||||||
|
"config",
|
||||||
|
"add",
|
||||||
|
"config:db_lock_timeout:120", # 2 minutes for processes to fight for a db lock
|
||||||
|
],
|
||||||
|
[
|
||||||
|
SPACK_COMMAND,
|
||||||
|
"-e",
|
||||||
|
env.path,
|
||||||
|
"env",
|
||||||
|
"depfile",
|
||||||
|
"-o",
|
||||||
|
"Makefile",
|
||||||
|
"--use-buildcache=package:never,dependencies:only",
|
||||||
|
"--make-target-prefix",
|
||||||
|
"ci",
|
||||||
|
slash_hash, # limit to spec we're building
|
||||||
|
],
|
||||||
|
[
|
||||||
|
# --output-sync requires GNU make 4.x.
|
||||||
|
# Old make errors when you pass it a flag it doesn't recognize,
|
||||||
|
# but it doesn't error or warn when you set unrecognized flags in
|
||||||
|
# this variable.
|
||||||
|
"export",
|
||||||
|
"GNUMAKEFLAGS=--output-sync=recurse",
|
||||||
|
],
|
||||||
|
[
|
||||||
|
MAKE_COMMAND,
|
||||||
|
"SPACK={}".format(args_to_string(spack_cmd)),
|
||||||
|
"SPACK_COLOR=always",
|
||||||
|
"SPACK_INSTALL_FLAGS={}".format(args_to_string(deps_install_args)),
|
||||||
|
"-j$(nproc)",
|
||||||
|
"ci/.install-deps/{}".format(job_spec.dag_hash()),
|
||||||
|
],
|
||||||
|
spack_cmd + ["install"] + root_install_args,
|
||||||
|
]
|
||||||
|
|
||||||
tty.debug("Installing {0} from source".format(job_spec.name))
|
tty.debug("Installing {0} from source".format(job_spec.name))
|
||||||
install_exit_code = spack_ci.process_command("install", install_args, repro_dir)
|
install_exit_code = spack_ci.process_command("install", commands, repro_dir)
|
||||||
|
|
||||||
# Now do the post-install tasks
|
# Now do the post-install tasks
|
||||||
tty.debug("spack install exited {0}".format(install_exit_code))
|
tty.debug("spack install exited {0}".format(install_exit_code))
|
||||||
|
|||||||
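A small sketch of the single-quote joining used above to pass argv lists through the generated Make variables; the flag values are illustrative only.

spack_cmd = ["spack", "--color=always", "--backtrace", "--verbose"]
deps_install_args = ["--no-check-signature"]

args_to_string = lambda args: " ".join("'{}'".format(arg) for arg in args)

# SPACK='spack' '--color=always' '--backtrace' '--verbose'
print("SPACK={}".format(args_to_string(spack_cmd)))
# SPACK_INSTALL_FLAGS='--no-check-signature'
print("SPACK_INSTALL_FLAGS={}".format(args_to_string(deps_install_args)))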
@@ -6,6 +6,8 @@

 import argparse

+from llnl.util.lang import stable_partition
+
 import spack.cmd
 import spack.config
 import spack.dependency as dep
@@ -437,3 +439,57 @@ def add_s3_connection_args(subparser, add_help):
     subparser.add_argument(
         "--s3-endpoint-url", help="Endpoint URL to use to connect to this S3 mirror"
     )
+
+
+def use_buildcache(cli_arg_value):
+    """Translate buildcache related command line arguments into a pair of strings,
+    representing whether the root or its dependencies can use buildcaches.
+
+    Argument type that accepts comma-separated subargs:
+
+        1. auto|only|never
+        2. package:auto|only|never
+        3. dependencies:auto|only|never
+
+    Args:
+        cli_arg_value (str): command line argument value to be translated
+
+    Return:
+        Tuple of two strings
+    """
+    valid_keys = frozenset(["package", "dependencies"])
+    valid_values = frozenset(["only", "never", "auto"])
+
+    # Split in args, split in key/value, and trim whitespace
+    args = [tuple(map(lambda x: x.strip(), part.split(":"))) for part in cli_arg_value.split(",")]
+
+    # Verify keys and values
+    def is_valid(arg):
+        if len(arg) == 1:
+            return arg[0] in valid_values
+        if len(arg) == 2:
+            return arg[0] in valid_keys and arg[1] in valid_values
+        return False
+
+    valid, invalid = stable_partition(args, is_valid)
+
+    # print first error
+    if invalid:
+        raise argparse.ArgumentTypeError("invalid argument `{}`".format(":".join(invalid[0])))
+
+    # Default values
+    package = "auto"
+    dependencies = "auto"
+
+    # Override in order.
+    for arg in valid:
+        if len(arg) == 1:
+            package = dependencies = arg[0]
+            continue
+        key, val = arg
+        if key == "package":
+            package = val
+        else:
+            dependencies = val
+
+    return package, dependencies
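A usage sketch for the use_buildcache argument type added above, assuming Spack's lib/spack directory is importable; the expected tuples follow directly from the parsing rules in the function.

import spack.cmd.common.arguments as arguments

# a bare value applies to both the root package and its dependencies
assert arguments.use_buildcache("only") == ("only", "only")
# explicit key:value pairs set the two sides independently
assert arguments.use_buildcache("package:never,dependencies:auto") == ("never", "auto")
# later key:value sub-arguments override a bare value, in order
assert arguments.use_buildcache("auto,dependencies:only") == ("auto", "only")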
@@ -91,6 +91,6 @@ def deactivate(parser, args):
         )

     if not args.force and not spec.package.is_activated(view):
-        tty.die("Package %s is not activated." % specs[0].short_spec)
+        tty.die("Package %s is not activated." % spec.short_spec)

     spec.package.do_deactivate(view, force=args.force)
@@ -117,7 +117,7 @@ def deprecate(parser, args):
     all_deprecators = []

     generator = (
-        deprecate.traverse(order="post", type="link", root=True)
+        deprecate.traverse(order="post", deptype="link", root=True)
         if args.dependencies
         else [deprecate]
     )
@@ -3,6 +3,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

+import argparse
 import os
 import shutil
 import sys
@@ -15,6 +16,8 @@
 from llnl.util.tty.colify import colify
 from llnl.util.tty.color import colorize

+import spack.cmd
+import spack.cmd.common
 import spack.cmd.common.arguments
 import spack.cmd.common.arguments as arguments
 import spack.cmd.install
@@ -25,6 +28,7 @@
 import spack.environment.shell
 import spack.schema.env
 import spack.tengine
+import spack.traverse as traverse
 import spack.util.string as string
 from spack.util.environment import EnvironmentModifications

@@ -599,6 +603,15 @@ def env_depfile_setup_parser(subparser):
         dest="jobserver",
         help="disable POSIX jobserver support.",
     )
+    subparser.add_argument(
+        "--use-buildcache",
+        dest="use_buildcache",
+        type=arguments.use_buildcache,
+        default="package:auto,dependencies:auto",
+        metavar="[{auto,only,never},][package:{auto,only,never},][dependencies:{auto,only,never}]",
+        help="When using `only`, redundant build dependencies are pruned from the DAG. "
+        "This flag is passed on to the generated spack install commands.",
+    )
     subparser.add_argument(
         "-o",
         "--output",
@@ -613,6 +626,64 @@ def env_depfile_setup_parser(subparser):
         choices=("make",),
         help="specify the depfile type. Currently only make is supported.",
     )
+    subparser.add_argument(
+        metavar="specs",
+        dest="specs",
+        nargs=argparse.REMAINDER,
+        default=None,
+        help="generate a depfile only for matching specs in the environment",
+    )
+
+
+def _deptypes(use_buildcache):
+    """What edges should we follow for a given node? If it's a cache-only
+    node, then we can drop build type deps."""
+    return ("link", "run") if use_buildcache == "only" else ("build", "link", "run")
+
+
+class MakeTargetVisitor(object):
+    """This visitor produces an adjacency list of a (reduced) DAG, which
+    is used to generate Makefile targets with their prerequisites."""
+
+    def __init__(self, target, pkg_buildcache, deps_buildcache):
+        """
+        Args:
+            target: function that maps dag_hash -> make target string
+            pkg_buildcache (str): "only", "never", "auto": when "only",
+                redundant build deps of roots are dropped
+            deps_buildcache (str): same as pkg_buildcache, but for non-root specs.
+        """
+        self.adjacency_list = []
+        self.target = target
+        self.pkg_buildcache = pkg_buildcache
+        self.deps_buildcache = deps_buildcache
+        self.deptypes_root = _deptypes(pkg_buildcache)
+        self.deptypes_deps = _deptypes(deps_buildcache)
+
+    def neighbors(self, node):
+        """Produce a list of spec to follow from node"""
+        deptypes = self.deptypes_root if node.depth == 0 else self.deptypes_deps
+        return traverse.sort_edges(node.edge.spec.edges_to_dependencies(deptype=deptypes))
+
+    def build_cache_flag(self, depth):
+        setting = self.pkg_buildcache if depth == 0 else self.deps_buildcache
+        if setting == "only":
+            return "--use-buildcache=only"
+        elif setting == "never":
+            return "--use-buildcache=never"
+        return ""
+
+    def accept(self, node):
+        dag_hash = node.edge.spec.dag_hash()
+        spec_str = node.edge.spec.format(
+            "{name}{@version}{%compiler}{variants}{arch=architecture}"
+        )
+        buildcache_flag = self.build_cache_flag(node.depth)
+        prereqs = " ".join([self.target(dep.spec.dag_hash()) for dep in self.neighbors(node)])
+        self.adjacency_list.append((dag_hash, spec_str, buildcache_flag, prereqs))
+
+        # We already accepted this
+        return True
+
+
 def env_depfile(args):
@@ -620,10 +691,6 @@ def env_depfile(args):
     spack.cmd.require_active_env(cmd_name="env depfile")
     env = ev.active_environment()

-    # Maps each hash in the environment to a string of install prereqs
-    hash_to_prereqs = {}
-    hash_to_spec = {}
-
     if args.make_target_prefix is None:
         target_prefix = os.path.join(env.env_subdir_path, "makedeps")
     else:
@@ -645,48 +712,49 @@ def get_install_target(name):
     def get_install_deps_target(name):
         return os.path.join(target_prefix, ".install-deps", name)

-    for _, spec in env.concretized_specs():
-        for s in spec.traverse(root=True):
-            hash_to_spec[s.dag_hash()] = s
-            hash_to_prereqs[s.dag_hash()] = [
-                get_install_target(dep.dag_hash()) for dep in s.dependencies()
-            ]
-
-    root_dags = [s.dag_hash() for _, s in env.concretized_specs()]
+    # What things do we build when running make? By default, we build the
+    # root specs. If specific specs are provided as input, we build those.
+    if args.specs:
+        abstract_specs = spack.cmd.parse_specs(args.specs)
+        roots = [env.matching_spec(s) for s in abstract_specs]
+    else:
+        roots = [s for _, s in env.concretized_specs()]
+
+    # We produce a sub-DAG from the DAG induced by roots, where we drop build
+    # edges for those specs that are installed through a binary cache.
+    pkg_buildcache, dep_buildcache = args.use_buildcache
+    make_targets = MakeTargetVisitor(get_install_target, pkg_buildcache, dep_buildcache)
+    traverse.traverse_breadth_first_with_visitor(
+        roots, traverse.CoverNodesVisitor(make_targets, key=lambda s: s.dag_hash())
+    )

     # Root specs without deps are the prereqs for the environment target
-    root_install_targets = [get_install_target(h) for h in root_dags]
+    root_install_targets = [get_install_target(h.dag_hash()) for h in roots]

-    # All package install targets, not just roots.
-    all_install_targets = [get_install_target(h) for h in hash_to_spec.keys()]
-    all_install_deps_targets = [get_install_deps_target(h) for h, _ in hash_to_prereqs.items()]
+    # Cleanable targets...
+    cleanable_targets = [get_install_target(h) for h, _, _, _ in make_targets.adjacency_list]
+    cleanable_targets.extend(
+        [get_install_deps_target(h) for h, _, _, _ in make_targets.adjacency_list]
+    )

     buf = six.StringIO()

     template = spack.tengine.make_environment().get_template(os.path.join("depfile", "Makefile"))

-    fmt = "{name}{@version}{%compiler}{variants}{arch=architecture}"
-    hash_with_name = [(h, hash_to_spec[h].format(fmt)) for h in hash_to_prereqs.keys()]
-    targets_to_prereqs = [
-        (get_install_deps_target(h), " ".join(prereqs)) for h, prereqs in hash_to_prereqs.items()
-    ]
-
     rendered = template.render(
         {
             "all_target": get_target("all"),
             "env_target": get_target("env"),
             "clean_target": get_target("clean"),
-            "all_install_targets": " ".join(all_install_targets),
-            "all_install_deps_targets": " ".join(all_install_deps_targets),
+            "cleanable_targets": " ".join(cleanable_targets),
             "root_install_targets": " ".join(root_install_targets),
             "dirs_target": get_target("dirs"),
             "environment": env.path,
             "install_target": get_target(".install"),
             "install_deps_target": get_target(".install-deps"),
             "any_hash_target": get_target("%"),
-            "hash_with_name": hash_with_name,
             "jobserver_support": "+" if args.jobserver else "",
-            "targets_to_prereqs": targets_to_prereqs,
+            "adjacency_list": make_targets.adjacency_list,
         }
     )
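A standalone restatement of the edge-pruning rule in _deptypes above: cache-only nodes drop their build-type edges, so build tools never become Makefile prerequisites for specs that come from a binary cache.

def _deptypes(use_buildcache):
    # cache-only nodes do not need their build-time dependencies installed
    return ("link", "run") if use_buildcache == "only" else ("build", "link", "run")

print(_deptypes("only"))   # ('link', 'run')
print(_deptypes("auto"))   # ('build', 'link', 'run')
print(_deptypes("never"))  # ('build', 'link', 'run')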
@@ -76,7 +76,7 @@ def extensions(parser, args):
         spec = cmd.disambiguate_spec(spec[0], env)

     if not spec.package.extendable:
-        tty.die("%s is not an extendable package." % spec[0].name)
+        tty.die("%s is not an extendable package." % spec.name)

     if not spec.package.extendable:
         tty.die("%s does not have extensions." % spec.short_spec)
@@ -210,11 +210,11 @@ def print_maintainers(pkg):
 def print_phases(pkg):
     """output installation phases"""

-    if hasattr(pkg, "phases") and pkg.phases:
+    if hasattr(pkg.builder, "phases") and pkg.builder.phases:
         color.cprint("")
         color.cprint(section_title("Installation Phases:"))
         phase_str = ""
-        for phase in pkg.phases:
+        for phase in pkg.builder.phases:
             phase_str += "    {0}".format(phase)
         color.cprint(phase_str)

@@ -5,7 +5,6 @@

 import argparse
 import os
-import re
 import shutil
 import sys
 import textwrap
@@ -32,33 +31,6 @@
 level = "short"


-# Pass in the value string passed to use-buildcache and get back
-# the package and dependencies values.
-def parse_use_buildcache(opt):
-    bc_keys = ["package:", "dependencies:", ""]
-    bc_values = ["only", "never", "auto"]
-    kv_list = re.findall("([a-z]+:)?([a-z]+)", opt)
-
-    # Verify keys and values
-    bc_map = {k: v for k, v in kv_list if k in bc_keys and v in bc_values}
-    if not len(kv_list) == len(bc_map):
-        tty.error("Unrecognized arguments passed to use-buildcache")
-        tty.error(
-            "Expected: --use-buildcache "
-            "[[auto|only|never],[package:[auto|only|never]],[dependencies:[auto|only|never]]]"
-        )
-        exit(1)
-
-    for _group in ["package:", "dependencies:"]:
-        if _group not in bc_map:
-            if "" in bc_map:
-                bc_map[_group] = bc_map[""]
-            else:
-                bc_map[_group] = "auto"
-
-    return bc_map["package:"], bc_map["dependencies:"]
-
-
 # Determine value of cache flag
 def cache_opt(default_opt, use_buildcache):
     if use_buildcache == "auto":
@@ -73,8 +45,7 @@ def install_kwargs_from_args(args):
     """Translate command line arguments into a dictionary that will be passed
     to the package installer.
     """
-    pkg_use_bc, dep_use_bc = parse_use_buildcache(args.use_buildcache)
-
+    pkg_use_bc, dep_use_bc = args.use_buildcache

     return {
         "fail_fast": args.fail_fast,
@@ -169,6 +140,7 @@ def setup_parser(subparser):
     cache_group.add_argument(
         "--use-buildcache",
         dest="use_buildcache",
+        type=arguments.use_buildcache,
        default="package:auto,dependencies:auto",
         metavar="[{auto,only,never},][package:{auto,only,never},][dependencies:{auto,only,never}]",
         help="""select the mode of buildcache for the 'package' and 'dependencies'.
@@ -76,8 +76,7 @@ def setup_parser(subparser):
         "-t", "--types", action="store_true", default=False, help="show dependency types"
     )
     arguments.add_common_arguments(subparser, ["specs"])
-    spack.cmd.common.arguments.add_concretizer_args(subparser)
+    arguments.add_concretizer_args(subparser)


 def spec(parser, args):
@@ -54,6 +54,12 @@ def setup_parser(subparser):
     run_parser.add_argument(
         "--externals", action="store_true", help="Test packages that are externally installed."
     )
+    run_parser.add_argument(
+        "-x",
+        "--explicit",
+        action="store_true",
+        help="Only test packages that are explicitly installed.",
+    )
     run_parser.add_argument(
         "--keep-stage", action="store_true", help="Keep testing directory for debugging"
     )
@@ -188,6 +194,9 @@ def test_run(args):
     if args.fail_fast:
         spack.config.set("config:fail_fast", True, scope="command_line")

+    explicit = args.explicit or any
+    explicit_str = "explicitly " if args.explicit else ""
+
     # Get specs to test
     env = ev.active_environment()
     hashes = env.all_hashes() if env else None
@@ -195,9 +204,13 @@ def test_run(args):
     specs = spack.cmd.parse_specs(args.specs) if args.specs else [None]
     specs_to_test = []
     for spec in specs:
-        matching = spack.store.db.query_local(spec, hashes=hashes)
+        matching = spack.store.db.query_local(
+            spec,
+            hashes=hashes,
+            explicit=explicit,
+        )
         if spec and not matching:
-            tty.warn("No installed packages match spec %s" % spec)
+            tty.warn("No {0}installed packages match spec {1}".format(explicit_str, spec))
             """
             TODO: Need to write out a log message and/or CDASH Testing
             output that package not installed IF continue to process
@@ -208,6 +221,7 @@ def test_run(args):
             # to ensure report package as skipped (e.g., for CI)
             specs_to_test.append(spec)
             """
+
         specs_to_test.extend(matching)

     # test_stage_dir
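The `args.explicit or any` idiom above leans on the builtin `any` doubling as a "match everything" sentinel in the database query. A minimal standalone sketch of the same pattern, with a made-up record type:

from collections import namedtuple

Record = namedtuple("Record", ["name", "explicit"])
records = [Record("zlib", True), Record("cmake", False)]

def query(records, explicit=any):
    # `any` means "do not filter on the explicit attribute"
    return [r.name for r in records if explicit is any or r.explicit == explicit]

print(query(records, False or any))  # flag not given: ['zlib', 'cmake']
print(query(records, True or any))   # --explicit given: ['zlib']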
@@ -228,7 +228,7 @@ def do_uninstall(env, specs, force):
         except spack.repo.UnknownEntityError:
             # The package.py file has gone away -- but still
             # want to uninstall.
-            spack.package_base.Package.uninstall_by_spec(item, force=True)
+            spack.package_base.PackageBase.uninstall_by_spec(item, force=True)

     # A package is ready to be uninstalled when nothing else references it,
     # unless we are requested to force uninstall it.
@@ -56,25 +56,25 @@ def get_compiler_version_output(compiler_path, *args, **kwargs):
     return _get_compiler_version_output(compiler_path, *args, **kwargs)


-def tokenize_flags(flags_str):
+def tokenize_flags(flags_values, propagate=False):
     """Given a compiler flag specification as a string, this returns a list
     where the entries are the flags. For compiler options which set values
     using the syntax "-flag value", this function groups flags and their
     values together. Any token not preceded by a "-" is considered the
     value of a prior flag."""
-    tokens = flags_str.split()
+    tokens = flags_values.split()
     if not tokens:
         return []
     flag = tokens[0]
-    flags = []
+    flags_with_propagation = []
     for token in tokens[1:]:
         if not token.startswith("-"):
             flag += " " + token
         else:
-            flags.append(flag)
+            flags_with_propagation.append((flag, propagate))
             flag = token
-    flags.append(flag)
-    return flags
+    flags_with_propagation.append((flag, propagate))
+    return flags_with_propagation


 #: regex for parsing linker lines
@@ -311,11 +311,13 @@ def __init__(
         # Unfortunately have to make sure these params are accepted
         # in the same order they are returned by sorted(flags)
         # in compilers/__init__.py
-        self.flags = {}
-        for flag in spack.spec.FlagMap.valid_compiler_flags():
+        self.flags = spack.spec.FlagMap(self.spec)
+        for flag in self.flags.valid_compiler_flags():
             value = kwargs.get(flag, None)
             if value is not None:
-                self.flags[flag] = tokenize_flags(value)
+                values_with_propagation = tokenize_flags(value, False)
+                for value, propagation in values_with_propagation:
+                    self.flags.add_flag(flag, value, propagation)

         # caching value for compiler reported version
         # used for version checks for API, e.g. C++11 flag
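A self-contained sketch mirroring the new tokenize_flags() shown above: every flag now carries a propagation boolean, and "-flag value" pairs stay grouped. The example flag string is arbitrary.

def tokenize_flags(flags_values, propagate=False):
    tokens = flags_values.split()
    if not tokens:
        return []
    flag = tokens[0]
    flags_with_propagation = []
    for token in tokens[1:]:
        if not token.startswith("-"):
            flag += " " + token  # value token: keep it attached to its flag
        else:
            flags_with_propagation.append((flag, propagate))
            flag = token
    flags_with_propagation.append((flag, propagate))
    return flags_with_propagation

# [('-O2', False), ('-framework Accelerate', False)]
print(tokenize_flags("-O2 -framework Accelerate"))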
@@ -346,6 +346,10 @@ def compilers_for_arch(arch_spec, scope=None):
     return list(get_compilers(config, arch_spec=arch_spec))


+def compiler_specs_for_arch(arch_spec, scope=None):
+    return [c.spec for c in compilers_for_arch(arch_spec, scope)]
+
+
 class CacheReference(object):
     """This acts as a hashable reference to any object (regardless of whether
     the object itself is hashable) and also prevents the object from being
Some files were not shown because too many files have changed in this diff.