Compare commits
366 Commits
v0.19.1
...
features/r
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
88d364a6e2 | ||
|
|
2b84985aa7 | ||
|
|
ab6499ce1e | ||
|
|
412bec45aa | ||
|
|
c3dcd94ebc | ||
|
|
cb8f642297 | ||
|
|
92f19c8491 | ||
|
|
f3f8b31be5 | ||
|
|
63cadf04ea | ||
|
|
541e75350f | ||
|
|
8806e74419 | ||
|
|
381f8161b1 | ||
|
|
884123b7ce | ||
|
|
35aa875762 | ||
|
|
9b0e79fcab | ||
|
|
8ba0faa9ee | ||
|
|
d464185bba | ||
|
|
7f4d71252b | ||
|
|
7950311767 | ||
|
|
194f9a9ca9 | ||
|
|
a72021fd63 | ||
|
|
d910b3725b | ||
|
|
99f209019e | ||
|
|
c11a4e0ad3 | ||
|
|
4a429ec315 | ||
|
|
eadccfe332 | ||
|
|
dfab5b5ceb | ||
|
|
862029215c | ||
|
|
559c3de213 | ||
|
|
e3bf7358d7 | ||
|
|
b58ec9e2b9 | ||
|
|
95b5d54129 | ||
|
|
bcce9c3e9c | ||
|
|
4c05fe569c | ||
|
|
e550665df7 | ||
|
|
d92d34b162 | ||
|
|
f27be808a4 | ||
|
|
855d3519b6 | ||
|
|
37f232e319 | ||
|
|
ac1c29eac0 | ||
|
|
56072172f5 | ||
|
|
64d957dece | ||
|
|
3edc85ec21 | ||
|
|
d8006a9495 | ||
|
|
a2cfc07412 | ||
|
|
1295ea5d40 | ||
|
|
4664b3cd1e | ||
|
|
dc7e0e3ef6 | ||
|
|
9aa615aa98 | ||
|
|
85b6bf99a4 | ||
|
|
78ec3d5662 | ||
|
|
a7b5f2ef39 | ||
|
|
f71701f39d | ||
|
|
54008a2342 | ||
|
|
1670c325c6 | ||
|
|
534a994b4c | ||
|
|
359efca201 | ||
|
|
65809140f3 | ||
|
|
3f1622f9e7 | ||
|
|
8332a59194 | ||
|
|
05abea3a3a | ||
|
|
e7fc9ea243 | ||
|
|
eea3ea7675 | ||
|
|
895ac2626d | ||
|
|
94dc86e163 | ||
|
|
729b1c9fa6 | ||
|
|
82b7fe649f | ||
|
|
76417d6ac6 | ||
|
|
fe995542ab | ||
|
|
8f5209063d | ||
|
|
241a8f6be6 | ||
|
|
a8a0a6916a | ||
|
|
8d10dce651 | ||
|
|
a2938c9348 | ||
|
|
8017f4b55b | ||
|
|
588d2e295f | ||
|
|
c10b84f08d | ||
|
|
99044bedd7 | ||
|
|
3afe6f1adc | ||
|
|
fcd9038225 | ||
|
|
9d82024f1a | ||
|
|
bcefe6a73e | ||
|
|
87562042df | ||
|
|
10d10b612a | ||
|
|
69dd742dc9 | ||
|
|
18efd817b1 | ||
|
|
65a5369d6a | ||
|
|
f66ec00fa9 | ||
|
|
f63fb2f521 | ||
|
|
dfa00f5a8d | ||
|
|
6602780657 | ||
|
|
8420c610fa | ||
|
|
b139cab687 | ||
|
|
99fcc57607 | ||
|
|
5a394d37b7 | ||
|
|
472074cb7c | ||
|
|
45c8d7f457 | ||
|
|
dce1f01f1a | ||
|
|
03cc83bc67 | ||
|
|
f452741e3d | ||
|
|
99b68e646d | ||
|
|
f78c8265f4 | ||
|
|
5d3efbba14 | ||
|
|
7423f52cd3 | ||
|
|
43d93f7773 | ||
|
|
f8dec3e87f | ||
|
|
ef06b9db5b | ||
|
|
c64c9649be | ||
|
|
2c6b52f137 | ||
|
|
33422acef0 | ||
|
|
428f635142 | ||
|
|
c6c74e98ff | ||
|
|
d9b438ec76 | ||
|
|
c6ee30497c | ||
|
|
1270ae1526 | ||
|
|
d15fead30c | ||
|
|
23aaaf2d28 | ||
|
|
56f9c76394 | ||
|
|
49cda811fc | ||
|
|
a97312535a | ||
|
|
a0180ef741 | ||
|
|
d640a573a8 | ||
|
|
b3679406d0 | ||
|
|
587488882a | ||
|
|
a17844a367 | ||
|
|
093a37750c | ||
|
|
173cc7e973 | ||
|
|
451e3ff50b | ||
|
|
523c4c2b63 | ||
|
|
35e5a916bc | ||
|
|
1374577659 | ||
|
|
4c017403db | ||
|
|
fdfda72371 | ||
|
|
efa1dba9e4 | ||
|
|
2a7ae2a700 | ||
|
|
a1b4e1bccd | ||
|
|
066ec31604 | ||
|
|
bb1888dbd4 | ||
|
|
bc17b6cefb | ||
|
|
46a0cd8e55 | ||
|
|
b2ceb23165 | ||
|
|
2fad966139 | ||
|
|
0b01c8c950 | ||
|
|
613d0b7e8e | ||
|
|
21c29ee375 | ||
|
|
e236339e5a | ||
|
|
7a03525c35 | ||
|
|
17ca86a309 | ||
|
|
ce71a38703 | ||
|
|
12c23f2724 | ||
|
|
b8ae0fbbf4 | ||
|
|
6b5c86e0be | ||
|
|
1ed1b49c9b | ||
|
|
4265d5e111 | ||
|
|
8c0fb91d4e | ||
|
|
567532b9e5 | ||
|
|
47d59e571e | ||
|
|
93ff19c9b7 | ||
|
|
2167cbf72c | ||
|
|
7a5e527cab | ||
|
|
a25868594c | ||
|
|
dd5263694b | ||
|
|
5fca1c9aff | ||
|
|
1d7393c281 | ||
|
|
f0bc551718 | ||
|
|
46b9a09843 | ||
|
|
c0898565b9 | ||
|
|
3018e7f63d | ||
|
|
dfa1a42420 | ||
|
|
2c8ab85e6a | ||
|
|
b2505aed5c | ||
|
|
7847d4332e | ||
|
|
70bcbba5eb | ||
|
|
0182603609 | ||
|
|
bf1b846f26 | ||
|
|
d06fd26c9a | ||
|
|
5d2c9636ff | ||
|
|
63e4406514 | ||
|
|
d56380fc07 | ||
|
|
f89cc96b0c | ||
|
|
cf952d41d8 | ||
|
|
5f737c5a71 | ||
|
|
a845b1f984 | ||
|
|
b8d059e8f4 | ||
|
|
1006c77374 | ||
|
|
38d4fd7711 | ||
|
|
643ce586de | ||
|
|
5b3b0130f2 | ||
|
|
55c77d659e | ||
|
|
fe1c105161 | ||
|
|
09f2b6f5f5 | ||
|
|
73fe21ba41 | ||
|
|
81fb87cedf | ||
|
|
7de39c44b1 | ||
|
|
c902e27e52 | ||
|
|
65b991a4c5 | ||
|
|
65520311a6 | ||
|
|
def79731d0 | ||
|
|
0fd3c9f451 | ||
|
|
c5883fffd7 | ||
|
|
4bf964e6b3 | ||
|
|
bcc0fda4e2 | ||
|
|
69987fd323 | ||
|
|
9a16234ed4 | ||
|
|
bd198312c9 | ||
|
|
7f9af8d4a0 | ||
|
|
793a7bc6a9 | ||
|
|
376afd631c | ||
|
|
e287c6ac4b | ||
|
|
e864744b60 | ||
|
|
5b3af53b10 | ||
|
|
44c22a54c9 | ||
|
|
f97f37550a | ||
|
|
0e4ee3d352 | ||
|
|
05fc800db9 | ||
|
|
2387c116ad | ||
|
|
6411cbd803 | ||
|
|
8ea366b33f | ||
|
|
9a2fbf373c | ||
|
|
9e1fef8813 | ||
|
|
f8a6e3ad90 | ||
|
|
0706919b09 | ||
|
|
b9b93ce272 | ||
|
|
87cb9760ce | ||
|
|
d472e28bfe | ||
|
|
dbc81549db | ||
|
|
dc00c4fdae | ||
|
|
f1b9da16c8 | ||
|
|
93ce943301 | ||
|
|
632b36ab5d | ||
|
|
94c76c5823 | ||
|
|
45b4cedb7e | ||
|
|
6d0a8f78b2 | ||
|
|
409cf185ce | ||
|
|
602984460d | ||
|
|
62b1d52a1e | ||
|
|
790bd175e0 | ||
|
|
2f057d729d | ||
|
|
a124185090 | ||
|
|
c5235bbe86 | ||
|
|
e715901cb2 | ||
|
|
1db914f567 | ||
|
|
688dae7058 | ||
|
|
ddb460ec8d | ||
|
|
703e5fe44a | ||
|
|
c601bdf7bf | ||
|
|
778dddc523 | ||
|
|
acc19ad34f | ||
|
|
f4826e1b33 | ||
|
|
05ff7e657c | ||
|
|
839a14c0ba | ||
|
|
9aafbec121 | ||
|
|
20071e0c04 | ||
|
|
51bb2f23a3 | ||
|
|
2060d51bd0 | ||
|
|
e5af0ccc09 | ||
|
|
284859e742 | ||
|
|
37e77f7a15 | ||
|
|
d2432e1ba4 | ||
|
|
5809ba0e3f | ||
|
|
95e294b2e8 | ||
|
|
cdaac58488 | ||
|
|
13389f7eb8 | ||
|
|
4964633614 | ||
|
|
381bedf369 | ||
|
|
6811651a0f | ||
|
|
22aada0e20 | ||
|
|
4a71020cd2 | ||
|
|
294e6f80a0 | ||
|
|
cc2d0eade6 | ||
|
|
f00e411287 | ||
|
|
da0a6280ac | ||
|
|
6ee6844473 | ||
|
|
69822b0d82 | ||
|
|
93eecae0c3 | ||
|
|
61f5d85525 | ||
|
|
a90e86de75 | ||
|
|
7247a493ab | ||
|
|
cd8ec60ae9 | ||
|
|
fe597dfb0c | ||
|
|
c721aab006 | ||
|
|
6a08e9ed08 | ||
|
|
7637efb363 | ||
|
|
b31f1b0353 | ||
|
|
497682260f | ||
|
|
e47beceb8a | ||
|
|
067976f4b8 | ||
|
|
1263b5c444 | ||
|
|
90f0a8eacc | ||
|
|
61a7420c94 | ||
|
|
39a1f1462b | ||
|
|
6de5d8e68c | ||
|
|
b0f2523350 | ||
|
|
bc8cc39871 | ||
|
|
b36a8f4f2e | ||
|
|
0a952f8b7b | ||
|
|
26a0384171 | ||
|
|
d18cccf7c5 | ||
|
|
fbe6b4b486 | ||
|
|
5fe08a5647 | ||
|
|
ac2fc4f271 | ||
|
|
93430496e2 | ||
|
|
901b31a7aa | ||
|
|
0cec2d3110 | ||
|
|
40e4884e8b | ||
|
|
f18425a51f | ||
|
|
472893c5c4 | ||
|
|
289bbf74f6 | ||
|
|
a0182c069f | ||
|
|
4ecb6ecaff | ||
|
|
d5193f73d8 | ||
|
|
1aab5bb9f2 | ||
|
|
8dda4ff60b | ||
|
|
0811f81a09 | ||
|
|
af74680405 | ||
|
|
d1715c5fdf | ||
|
|
b245f1ece1 | ||
|
|
e10c47c53d | ||
|
|
0697d20fd4 | ||
|
|
fd4f905ce5 | ||
|
|
d36c7b20d2 | ||
|
|
2948248d7a | ||
|
|
850c54c3b1 | ||
|
|
90fb16033e | ||
|
|
13a68d547d | ||
|
|
857ae5a74b | ||
|
|
b3124bff7c | ||
|
|
5c4137baf1 | ||
|
|
a9dcd4c01e | ||
|
|
2cd7322b11 | ||
|
|
f9e9ecd0c1 | ||
|
|
d756034161 | ||
|
|
2460c4fc28 | ||
|
|
6e39efbb9a | ||
|
|
277e35c3b0 | ||
|
|
bf1b2a828c | ||
|
|
2913f8b42b | ||
|
|
57f4c922e9 | ||
|
|
8d82fecce9 | ||
|
|
96126cbf17 | ||
|
|
6ecb57e91f | ||
|
|
a75af62fe3 | ||
|
|
e4e02dbeae | ||
|
|
3efa4ee26f | ||
|
|
f4c3d98064 | ||
|
|
a4cec82841 | ||
|
|
3812edd0db | ||
|
|
3ea9c8529a | ||
|
|
ed28797f83 | ||
|
|
eadb6ae774 | ||
|
|
a5d35c3077 | ||
|
|
3d811617e6 | ||
|
|
03224e52d2 | ||
|
|
4ebe57cd64 | ||
|
|
343cd04a54 | ||
|
|
ed45385b7b | ||
|
|
8a3b596042 | ||
|
|
1792327874 | ||
|
|
d0dedda9a9 | ||
|
|
368dde437a | ||
|
|
022a2d2eaf | ||
|
|
5f8511311c | ||
|
|
2d2c591633 | ||
|
|
d49c992b23 | ||
|
|
f1392bbd49 | ||
|
|
c14dc2f56a |
2
.github/workflows/bootstrap.yml
vendored
2
.github/workflows/bootstrap.yml
vendored
@@ -214,7 +214,7 @@ jobs:
|
|||||||
- name: Bootstrap clingo
|
- name: Bootstrap clingo
|
||||||
run: |
|
run: |
|
||||||
set -ex
|
set -ex
|
||||||
for ver in '2.7' '3.6' '3.7' '3.8' '3.9' '3.10' ; do
|
for ver in '3.6' '3.7' '3.8' '3.9' '3.10' ; do
|
||||||
not_found=1
|
not_found=1
|
||||||
ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
|
ver_dir="$(find $RUNNER_TOOL_CACHE/Python -wholename "*/${ver}.*/*/bin" | grep . || true)"
|
||||||
echo "Testing $ver_dir"
|
echo "Testing $ver_dir"
|
||||||
|
|||||||
8
.github/workflows/ci.yaml
vendored
8
.github/workflows/ci.yaml
vendored
@@ -20,12 +20,6 @@ jobs:
|
|||||||
uses: ./.github/workflows/valid-style.yml
|
uses: ./.github/workflows/valid-style.yml
|
||||||
with:
|
with:
|
||||||
with_coverage: ${{ needs.changes.outputs.core }}
|
with_coverage: ${{ needs.changes.outputs.core }}
|
||||||
audit-ancient-python:
|
|
||||||
uses: ./.github/workflows/audit.yaml
|
|
||||||
needs: [ changes ]
|
|
||||||
with:
|
|
||||||
with_coverage: ${{ needs.changes.outputs.core }}
|
|
||||||
python_version: 2.7
|
|
||||||
all-prechecks:
|
all-prechecks:
|
||||||
needs: [ prechecks ]
|
needs: [ prechecks ]
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
@@ -85,7 +79,7 @@ jobs:
|
|||||||
needs: [ prechecks ]
|
needs: [ prechecks ]
|
||||||
uses: ./.github/workflows/windows_python.yml
|
uses: ./.github/workflows/windows_python.yml
|
||||||
all:
|
all:
|
||||||
needs: [ windows, unit-tests, bootstrap, audit-ancient-python ]
|
needs: [ windows, unit-tests, bootstrap ]
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Success
|
- name: Success
|
||||||
|
|||||||
46
.github/workflows/unit_tests.yaml
vendored
46
.github/workflows/unit_tests.yaml
vendored
@@ -11,31 +11,38 @@ concurrency:
|
|||||||
jobs:
|
jobs:
|
||||||
# Run unit tests with different configurations on linux
|
# Run unit tests with different configurations on linux
|
||||||
ubuntu:
|
ubuntu:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ${{ matrix.os }}
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
python-version: ['2.7', '3.6', '3.7', '3.8', '3.9', '3.10', '3.11']
|
os: [ubuntu-latest]
|
||||||
|
python-version: ['3.7', '3.8', '3.9', '3.10', '3.11']
|
||||||
concretizer: ['clingo']
|
concretizer: ['clingo']
|
||||||
on_develop:
|
on_develop:
|
||||||
- ${{ github.ref == 'refs/heads/develop' }}
|
- ${{ github.ref == 'refs/heads/develop' }}
|
||||||
include:
|
include:
|
||||||
- python-version: 2.7
|
- python-version: '3.11'
|
||||||
|
os: ubuntu-latest
|
||||||
concretizer: original
|
concretizer: original
|
||||||
on_develop: ${{ github.ref == 'refs/heads/develop' }}
|
on_develop: ${{ github.ref == 'refs/heads/develop' }}
|
||||||
- python-version: '3.11'
|
- python-version: '3.6'
|
||||||
concretizer: original
|
os: ubuntu-20.04
|
||||||
|
concretizer: clingo
|
||||||
on_develop: ${{ github.ref == 'refs/heads/develop' }}
|
on_develop: ${{ github.ref == 'refs/heads/develop' }}
|
||||||
exclude:
|
exclude:
|
||||||
- python-version: '3.7'
|
- python-version: '3.7'
|
||||||
|
os: ubuntu-latest
|
||||||
concretizer: 'clingo'
|
concretizer: 'clingo'
|
||||||
on_develop: false
|
on_develop: false
|
||||||
- python-version: '3.8'
|
- python-version: '3.8'
|
||||||
|
os: ubuntu-latest
|
||||||
concretizer: 'clingo'
|
concretizer: 'clingo'
|
||||||
on_develop: false
|
on_develop: false
|
||||||
- python-version: '3.9'
|
- python-version: '3.9'
|
||||||
|
os: ubuntu-latest
|
||||||
concretizer: 'clingo'
|
concretizer: 'clingo'
|
||||||
on_develop: false
|
on_develop: false
|
||||||
- python-version: '3.10'
|
- python-version: '3.10'
|
||||||
|
os: ubuntu-latest
|
||||||
concretizer: 'clingo'
|
concretizer: 'clingo'
|
||||||
on_develop: false
|
on_develop: false
|
||||||
|
|
||||||
@@ -52,24 +59,11 @@ jobs:
|
|||||||
# Needed for unit tests
|
# Needed for unit tests
|
||||||
sudo apt-get -y install \
|
sudo apt-get -y install \
|
||||||
coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build \
|
coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build \
|
||||||
patchelf cmake bison libbison-dev kcov
|
cmake bison libbison-dev kcov
|
||||||
- name: Install Python packages
|
- name: Install Python packages
|
||||||
run: |
|
run: |
|
||||||
pip install --upgrade pip six setuptools pytest codecov[toml] pytest-xdist
|
pip install --upgrade pip six setuptools pytest codecov[toml] pytest-xdist pytest-cov
|
||||||
# Install pytest-cov only on recent Python, to avoid stalling on Python 2.7 due
|
pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click" "black"
|
||||||
# to bugs on an unmaintained version of the package when used with xdist.
|
|
||||||
if [[ ${{ matrix.python-version }} != "2.7" ]]; then
|
|
||||||
pip install --upgrade pytest-cov
|
|
||||||
fi
|
|
||||||
# ensure style checks are not skipped in unit tests for python >= 3.6
|
|
||||||
# note that true/false (i.e., 1/0) are opposite in conditions in python and bash
|
|
||||||
if python -c 'import sys; sys.exit(not sys.version_info >= (3, 6))'; then
|
|
||||||
pip install --upgrade flake8 "isort>=4.3.5" "mypy>=0.900" "click==8.0.4" "black<=21.12b0"
|
|
||||||
fi
|
|
||||||
- name: Pin pathlib for Python 2.7
|
|
||||||
if: ${{ matrix.python-version == 2.7 }}
|
|
||||||
run: |
|
|
||||||
pip install -U pathlib2==2.3.6 toml
|
|
||||||
- name: Setup git configuration
|
- name: Setup git configuration
|
||||||
run: |
|
run: |
|
||||||
# Need this for the git tests to succeed.
|
# Need this for the git tests to succeed.
|
||||||
@@ -82,6 +76,7 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
. share/spack/setup-env.sh
|
. share/spack/setup-env.sh
|
||||||
spack bootstrap disable spack-install
|
spack bootstrap disable spack-install
|
||||||
|
spack bootstrap now
|
||||||
spack -v solve zlib
|
spack -v solve zlib
|
||||||
- name: Run unit tests
|
- name: Run unit tests
|
||||||
env:
|
env:
|
||||||
@@ -89,7 +84,7 @@ jobs:
|
|||||||
SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
|
SPACK_TEST_SOLVER: ${{ matrix.concretizer }}
|
||||||
SPACK_TEST_PARALLEL: 2
|
SPACK_TEST_PARALLEL: 2
|
||||||
COVERAGE: true
|
COVERAGE: true
|
||||||
UNIT_TEST_COVERAGE: ${{ (matrix.python-version == '3.11') }}
|
UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
|
||||||
run: |
|
run: |
|
||||||
share/spack/qa/run-unit-tests
|
share/spack/qa/run-unit-tests
|
||||||
- uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
|
- uses: codecov/codecov-action@d9f34f8cd5cb3b3eb79b3e4b5dae3a16df499a70
|
||||||
@@ -150,7 +145,7 @@ jobs:
|
|||||||
shell: runuser -u spack-test -- bash {0}
|
shell: runuser -u spack-test -- bash {0}
|
||||||
run: |
|
run: |
|
||||||
source share/spack/setup-env.sh
|
source share/spack/setup-env.sh
|
||||||
spack -d solve zlib
|
spack -d bootstrap now --dev
|
||||||
spack unit-test -k 'not cvs and not svn and not hg' -x --verbose
|
spack unit-test -k 'not cvs and not svn and not hg' -x --verbose
|
||||||
# Test for the clingo based solver (using clingo-cffi)
|
# Test for the clingo based solver (using clingo-cffi)
|
||||||
clingo-cffi:
|
clingo-cffi:
|
||||||
@@ -165,10 +160,7 @@ jobs:
|
|||||||
- name: Install System packages
|
- name: Install System packages
|
||||||
run: |
|
run: |
|
||||||
sudo apt-get -y update
|
sudo apt-get -y update
|
||||||
# Needed for unit tests
|
sudo apt-get -y install coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build kcov
|
||||||
sudo apt-get -y install \
|
|
||||||
coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build \
|
|
||||||
patchelf kcov
|
|
||||||
- name: Install Python packages
|
- name: Install Python packages
|
||||||
run: |
|
run: |
|
||||||
pip install --upgrade pip six setuptools pytest codecov coverage[toml] pytest-cov clingo pytest-xdist
|
pip install --upgrade pip six setuptools pytest codecov coverage[toml] pytest-cov clingo pytest-xdist
|
||||||
|
|||||||
6
.github/workflows/valid-style.yml
vendored
6
.github/workflows/valid-style.yml
vendored
@@ -28,9 +28,9 @@ jobs:
|
|||||||
pip install --upgrade pip
|
pip install --upgrade pip
|
||||||
pip install --upgrade vermin
|
pip install --upgrade vermin
|
||||||
- name: vermin (Spack's Core)
|
- name: vermin (Spack's Core)
|
||||||
run: vermin --backport argparse --violations --backport typing -t=2.7- -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
|
run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv lib/spack/spack/ lib/spack/llnl/ bin/
|
||||||
- name: vermin (Repositories)
|
- name: vermin (Repositories)
|
||||||
run: vermin --backport argparse --violations --backport typing -t=2.7- -t=3.6- -vvv var/spack/repos
|
run: vermin --backport importlib --backport argparse --violations --backport typing -t=3.6- -vvv var/spack/repos
|
||||||
# Run style checks on the files that have been changed
|
# Run style checks on the files that have been changed
|
||||||
style:
|
style:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
@@ -44,7 +44,7 @@ jobs:
|
|||||||
cache: 'pip'
|
cache: 'pip'
|
||||||
- name: Install Python packages
|
- name: Install Python packages
|
||||||
run: |
|
run: |
|
||||||
python3 -m pip install --upgrade pip six setuptools types-six click==8.0.2 'black==21.12b0' mypy isort clingo flake8
|
python3 -m pip install --upgrade pip six setuptools types-six black mypy isort clingo flake8
|
||||||
- name: Setup git configuration
|
- name: Setup git configuration
|
||||||
run: |
|
run: |
|
||||||
# Need this for the git tests to succeed.
|
# Need this for the git tests to succeed.
|
||||||
|
|||||||
274
CHANGELOG.md
274
CHANGELOG.md
@@ -1,16 +1,284 @@
|
|||||||
|
# v0.19.0 (2022-11-11)
|
||||||
|
|
||||||
|
`v0.19.0` is a major feature release.
|
||||||
|
|
||||||
|
## Major features in this release
|
||||||
|
|
||||||
|
1. **Package requirements**
|
||||||
|
|
||||||
|
Spack's traditional [package preferences](
|
||||||
|
https://spack.readthedocs.io/en/latest/build_settings.html#package-preferences)
|
||||||
|
are soft, but we've added hard requriements to `packages.yaml` and `spack.yaml`
|
||||||
|
(#32528, #32369). Package requirements use the same syntax as specs:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
packages:
|
||||||
|
libfabric:
|
||||||
|
require: "@1.13.2"
|
||||||
|
mpich:
|
||||||
|
require:
|
||||||
|
- one_of: ["+cuda", "+rocm"]
|
||||||
|
```
|
||||||
|
|
||||||
|
More details in [the docs](
|
||||||
|
https://spack.readthedocs.io/en/latest/build_settings.html#package-requirements).
|
||||||
|
|
||||||
|
2. **Environment UI Improvements**
|
||||||
|
|
||||||
|
* Fewer surprising modifications to `spack.yaml` (#33711):
|
||||||
|
|
||||||
|
* `spack install` in an environment will no longer add to the `specs:` list; you'll
|
||||||
|
need to either use `spack add <spec>` or `spack install --add <spec>`.
|
||||||
|
|
||||||
|
* Similarly, `spack uninstall` will not remove from your environment's `specs:`
|
||||||
|
list; you'll need to use `spack remove` or `spack uninstall --remove`.
|
||||||
|
|
||||||
|
This will make it easier to manage an environment, as there is clear separation
|
||||||
|
between the stack to be installed (`spack.yaml`/`spack.lock`) and which parts of
|
||||||
|
it should be installed (`spack install` / `spack uninstall`).
|
||||||
|
|
||||||
|
* `concretizer:unify:true` is now the default mode for new environments (#31787)
|
||||||
|
|
||||||
|
We see more users creating `unify:true` environments now. Users who need
|
||||||
|
`unify:false` can add it to their environment to get the old behavior. This will
|
||||||
|
concretize every spec in the environment independently.
|
||||||
|
|
||||||
|
* Include environment configuration from URLs (#29026, [docs](
|
||||||
|
https://spack.readthedocs.io/en/latest/environments.html#included-configurations))
|
||||||
|
|
||||||
|
You can now include configuration in your environment directly from a URL:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
spack:
|
||||||
|
include:
|
||||||
|
- https://github.com/path/to/raw/config/compilers.yaml
|
||||||
|
```
|
||||||
|
|
||||||
|
4. **Multiple Build Systems**
|
||||||
|
|
||||||
|
An increasing number of packages in the ecosystem need the ability to support
|
||||||
|
multiple build systems (#30738, [docs](
|
||||||
|
https://spack.readthedocs.io/en/latest/packaging_guide.html#multiple-build-systems)),
|
||||||
|
either across versions, across platforms, or within the same version of the software.
|
||||||
|
This has been hard to support through multiple inheritance, as methods from different
|
||||||
|
build system superclasses would conflict. `package.py` files can now define separate
|
||||||
|
builder classes with installation logic for different build systems, e.g.:
|
||||||
|
|
||||||
|
```python
|
||||||
|
class ArpackNg(CMakePackage, AutotoolsPackage):
|
||||||
|
|
||||||
|
build_system(
|
||||||
|
conditional("cmake", when="@0.64:"),
|
||||||
|
conditional("autotools", when="@:0.63"),
|
||||||
|
default="cmake",
|
||||||
|
)
|
||||||
|
|
||||||
|
class CMakeBuilder(spack.build_systems.cmake.CMakeBuilder):
|
||||||
|
def cmake_args(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
class Autotoolsbuilder(spack.build_systems.autotools.AutotoolsBuilder):
|
||||||
|
def configure_args(self):
|
||||||
|
pass
|
||||||
|
```
|
||||||
|
|
||||||
|
5. **Compiler and variant propagation**
|
||||||
|
|
||||||
|
Currently, compiler flags and variants are inconsistent: compiler flags set for a
|
||||||
|
package are inherited by its dependencies, while variants are not. We should have
|
||||||
|
these be consistent by allowing for inheritance to be enabled or disabled for both
|
||||||
|
variants and compiler flags.
|
||||||
|
|
||||||
|
Example syntax:
|
||||||
|
- `package ++variant`:
|
||||||
|
enabled variant that will be propagated to dependencies
|
||||||
|
- `package +variant`:
|
||||||
|
enabled variant that will NOT be propagated to dependencies
|
||||||
|
- `package ~~variant`:
|
||||||
|
disabled variant that will be propagated to dependencies
|
||||||
|
- `package ~variant`:
|
||||||
|
disabled variant that will NOT be propagated to dependencies
|
||||||
|
- `package cflags==-g`:
|
||||||
|
`cflags` will be propagated to dependencies
|
||||||
|
- `package cflags=-g`:
|
||||||
|
`cflags` will NOT be propagated to dependencies
|
||||||
|
|
||||||
|
Syntax for non-boolan variants is similar to compiler flags. More in the docs for
|
||||||
|
[variants](
|
||||||
|
https://spack.readthedocs.io/en/latest/basic_usage.html#variants) and [compiler flags](
|
||||||
|
https://spack.readthedocs.io/en/latest/basic_usage.html#compiler-flags).
|
||||||
|
|
||||||
|
6. **Enhancements to git version specifiers**
|
||||||
|
|
||||||
|
* `v0.18.0` added the ability to use git commits as versions. You can now use the
|
||||||
|
`git.` prefix to specify git tags or branches as versions. All of these are valid git
|
||||||
|
versions in `v0.19` (#31200):
|
||||||
|
|
||||||
|
```console
|
||||||
|
foo@abcdef1234abcdef1234abcdef1234abcdef1234 # raw commit
|
||||||
|
foo@git.abcdef1234abcdef1234abcdef1234abcdef1234 # commit with git prefix
|
||||||
|
foo@git.develop # the develop branch
|
||||||
|
foo@git.0.19 # use the 0.19 tag
|
||||||
|
```
|
||||||
|
|
||||||
|
* `v0.19` also gives you more control over how Spack interprets git versions, in case
|
||||||
|
Spack cannot detect the version from the git repository. You can suffix a git
|
||||||
|
version with `=<version>` to force Spack to concretize it as a particular version
|
||||||
|
(#30998, #31914, #32257):
|
||||||
|
|
||||||
|
```console
|
||||||
|
# use mybranch, but treat it as version 3.2 for version comparison
|
||||||
|
foo@git.mybranch=3.2
|
||||||
|
|
||||||
|
# use the given commit, but treat it as develop for version comparison
|
||||||
|
foo@git.abcdef1234abcdef1234abcdef1234abcdef1234=develop
|
||||||
|
```
|
||||||
|
|
||||||
|
More in [the docs](
|
||||||
|
https://spack.readthedocs.io/en/latest/basic_usage.html#version-specifier)
|
||||||
|
|
||||||
|
7. **Changes to Cray EX Support**
|
||||||
|
|
||||||
|
Cray machines have historically had their own "platform" within Spack, because we
|
||||||
|
needed to go through the module system to leverage compilers and MPI installations on
|
||||||
|
these machines. The Cray EX programming environment now provides standalone `craycc`
|
||||||
|
executables and proper `mpicc` wrappers, so Spack can treat EX machines like Linux
|
||||||
|
with extra packages (#29392).
|
||||||
|
|
||||||
|
We expect this to greatly reduce bugs, as external packages and compilers can now be
|
||||||
|
used by prefix instead of through modules. We will also no longer be subject to
|
||||||
|
reproducibility issues when modules change from Cray PE release to release and from
|
||||||
|
site to site. This also simplifies dealing with the underlying Linux OS on cray
|
||||||
|
systems, as Spack will properly model the machine's OS as either SuSE or RHEL.
|
||||||
|
|
||||||
|
8. **Improvements to tests and testing in CI**
|
||||||
|
|
||||||
|
* `spack ci generate --tests` will generate a `.gitlab-ci.yml` file that not only does
|
||||||
|
builds but also runs tests for built packages (#27877). Public GitHub pipelines now
|
||||||
|
also run tests in CI.
|
||||||
|
|
||||||
|
* `spack test run --explicit` will only run tests for packages that are explicitly
|
||||||
|
installed, instead of all packages.
|
||||||
|
|
||||||
|
9. **Experimental binding link model**
|
||||||
|
|
||||||
|
You can add a new option to `config.yaml` to make Spack embed absolute paths to
|
||||||
|
needed shared libraries in ELF executables and shared libraries on Linux (#31948, [docs](
|
||||||
|
https://spack.readthedocs.io/en/latest/config_yaml.html#shared-linking-bind)):
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
config:
|
||||||
|
shared_linking:
|
||||||
|
type: rpath
|
||||||
|
bind: true
|
||||||
|
```
|
||||||
|
|
||||||
|
This can improve launch time at scale for parallel applications, and it can make
|
||||||
|
installations less susceptible to environment variables like `LD_LIBRARY_PATH`, even
|
||||||
|
especially when dealing with external libraries that use `RUNPATH`. You can think of
|
||||||
|
this as a faster, even higher-precedence version of `RPATH`.
|
||||||
|
|
||||||
|
## Other new features of note
|
||||||
|
|
||||||
|
* `spack spec` prints dependencies more legibly. Dependencies in the output now appear
|
||||||
|
at the *earliest* level of indentation possible (#33406)
|
||||||
|
* You can override `package.py` attributes like `url`, directly in `packages.yaml`
|
||||||
|
(#33275, [docs](
|
||||||
|
https://spack.readthedocs.io/en/latest/build_settings.html#assigning-package-attributes))
|
||||||
|
* There are a number of new architecture-related format strings you can use in Spack
|
||||||
|
configuration files to specify paths (#29810, [docs](
|
||||||
|
https://spack.readthedocs.io/en/latest/configuration.html#config-file-variables))
|
||||||
|
* Spack now supports bootstrapping Clingo on Windows (#33400)
|
||||||
|
* There is now support for an `RPATH`-like library model on Windows (#31930)
|
||||||
|
|
||||||
|
## Performance Improvements
|
||||||
|
|
||||||
|
* Major performance improvements for installation from binary caches (#27610, #33628,
|
||||||
|
#33636, #33608, #33590, #33496)
|
||||||
|
* Test suite can now be parallelized using `xdist` (used in GitHub Actions) (#32361)
|
||||||
|
* Reduce lock contention for parallel builds in environments (#31643)
|
||||||
|
|
||||||
|
## New binary caches and stacks
|
||||||
|
|
||||||
|
* We now build nearly all of E4S with `oneapi` in our buildcache (#31781, #31804,
|
||||||
|
#31804, #31803, #31840, #31991, #32117, #32107, #32239)
|
||||||
|
* Added 3 new machine learning-centric stacks to binary cache: `x86_64_v3`, CUDA, ROCm
|
||||||
|
(#31592, #33463)
|
||||||
|
|
||||||
|
## Removals and Deprecations
|
||||||
|
|
||||||
|
* Support for Python 3.5 is dropped (#31908). Only Python 2.7 and 3.6+ are officially
|
||||||
|
supported.
|
||||||
|
|
||||||
|
* This is the last Spack release that will support Python 2 (#32615). Spack `v0.19`
|
||||||
|
will emit a deprecation warning if you run it with Python 2, and Python 2 support will
|
||||||
|
soon be removed from the `develop` branch.
|
||||||
|
|
||||||
|
* `LD_LIBRARY_PATH` is no longer set by default by `spack load` or module loads.
|
||||||
|
|
||||||
|
Setting `LD_LIBRARY_PATH` in Spack environments/modules can cause binaries from
|
||||||
|
outside of Spack to crash, and Spack's own builds use `RPATH` and do not need
|
||||||
|
`LD_LIBRARY_PATH` set in order to run. If you still want the old behavior, you
|
||||||
|
can run these commands to configure Spack to set `LD_LIBRARY_PATH`:
|
||||||
|
|
||||||
|
```console
|
||||||
|
spack config add modules:prefix_inspections:lib64:[LD_LIBRARY_PATH]
|
||||||
|
spack config add modules:prefix_inspections:lib:[LD_LIBRARY_PATH]
|
||||||
|
```
|
||||||
|
|
||||||
|
* The `spack:concretization:[together|separately]` has been removed after being
|
||||||
|
deprecated in `v0.18`. Use `concretizer:unify:[true|false]`.
|
||||||
|
* `config:module_roots` is no longer supported after being deprecated in `v0.18`. Use
|
||||||
|
configuration in module sets instead (#28659, [docs](
|
||||||
|
https://spack.readthedocs.io/en/latest/module_file_support.html)).
|
||||||
|
* `spack activate` and `spack deactivate` are no longer supported, having been
|
||||||
|
deprecated in `v0.18`. Use an environment with a view instead of
|
||||||
|
activating/deactivating ([docs](
|
||||||
|
https://spack.readthedocs.io/en/latest/environments.html#configuration-in-spack-yaml)).
|
||||||
|
* The old YAML format for buildcaches is now deprecated (#33707). If you are using an
|
||||||
|
old buildcache with YAML metadata you will need to regenerate it with JSON metadata.
|
||||||
|
* `spack bootstrap trust` and `spack bootstrap untrust` are deprecated in favor of
|
||||||
|
`spack bootstrap enable` and `spack bootstrap disable` and will be removed in `v0.20`.
|
||||||
|
(#33600)
|
||||||
|
* The `graviton2` architecture has been renamed to `neoverse_n1`, and `graviton3`
|
||||||
|
is now `neoverse_v1`. Buildcaches using the old architecture names will need to be rebuilt.
|
||||||
|
* The terms `blacklist` and `whitelist` have been replaced with `include` and `exclude`
|
||||||
|
in all configuration files (#31569). You can use `spack config update` to
|
||||||
|
automatically fix your configuration files.
|
||||||
|
|
||||||
|
## Notable Bugfixes
|
||||||
|
|
||||||
|
* Permission setting on installation now handles effective uid properly (#19980)
|
||||||
|
* `buildable:true` for an MPI implementation now overrides `buildable:false` for `mpi` (#18269)
|
||||||
|
* Improved error messages when attempting to use an unconfigured compiler (#32084)
|
||||||
|
* Do not punish explicitly requested compiler mismatches in the solver (#30074)
|
||||||
|
* `spack stage`: add missing --fresh and --reuse (#31626)
|
||||||
|
* Fixes for adding build system executables like `cmake` to package scope (#31739)
|
||||||
|
* Bugfix for binary relocation with aliased strings produced by newer `binutils` (#32253)
|
||||||
|
|
||||||
|
## Spack community stats
|
||||||
|
|
||||||
|
* 6,751 total packages, 335 new since `v0.18.0`
|
||||||
|
* 141 new Python packages
|
||||||
|
* 89 new R packages
|
||||||
|
* 303 people contributed to this release
|
||||||
|
* 287 committers to packages
|
||||||
|
* 57 committers to core
|
||||||
|
|
||||||
|
|
||||||
# v0.18.1 (2022-07-19)
|
# v0.18.1 (2022-07-19)
|
||||||
|
|
||||||
### Spack Bugfixes
|
### Spack Bugfixes
|
||||||
* Fix several bugs related to bootstrapping (#30834,#31042,#31180)
|
* Fix several bugs related to bootstrapping (#30834,#31042,#31180)
|
||||||
* Fix a regression that was causing spec hashes to differ between
|
* Fix a regression that was causing spec hashes to differ between
|
||||||
Python 2 and Python 3 (#31092)
|
Python 2 and Python 3 (#31092)
|
||||||
* Fixed compiler flags for oneAPI and DPC++ (#30856)
|
* Fixed compiler flags for oneAPI and DPC++ (#30856)
|
||||||
* Fixed several issues related to concretization (#31142,#31153,#31170,#31226)
|
* Fixed several issues related to concretization (#31142,#31153,#31170,#31226)
|
||||||
* Improved support for Cray manifest file and `spack external find` (#31144,#31201,#31173,#31186)
|
* Improved support for Cray manifest file and `spack external find` (#31144,#31201,#31173,#31186)
|
||||||
* Assign a version to openSUSE Tumbleweed according to the GLIBC version
|
* Assign a version to openSUSE Tumbleweed according to the GLIBC version
|
||||||
in the system (#19895)
|
in the system (#19895)
|
||||||
* Improved Dockerfile generation for `spack containerize` (#29741,#31321)
|
* Improved Dockerfile generation for `spack containerize` (#29741,#31321)
|
||||||
* Fixed a few bugs related to concurrent execution of commands (#31509,#31493,#31477)
|
* Fixed a few bugs related to concurrent execution of commands (#31509,#31493,#31477)
|
||||||
|
|
||||||
### Package updates
|
### Package updates
|
||||||
* WarpX: add v22.06, fixed libs property (#30866,#31102)
|
* WarpX: add v22.06, fixed libs property (#30866,#31102)
|
||||||
|
|||||||
@@ -10,8 +10,8 @@ For more on Spack's release structure, see
|
|||||||
| Version | Supported |
|
| Version | Supported |
|
||||||
| ------- | ------------------ |
|
| ------- | ------------------ |
|
||||||
| develop | :white_check_mark: |
|
| develop | :white_check_mark: |
|
||||||
| 0.17.x | :white_check_mark: |
|
| 0.19.x | :white_check_mark: |
|
||||||
| 0.16.x | :white_check_mark: |
|
| 0.18.x | :white_check_mark: |
|
||||||
|
|
||||||
## Reporting a Vulnerability
|
## Reporting a Vulnerability
|
||||||
|
|
||||||
|
|||||||
@@ -31,13 +31,11 @@ import os
|
|||||||
import os.path
|
import os.path
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
min_python3 = (3, 5)
|
min_python3 = (3, 6)
|
||||||
|
|
||||||
if sys.version_info[:2] < (2, 7) or (
|
if sys.version_info[:2] < min_python3:
|
||||||
sys.version_info[:2] >= (3, 0) and sys.version_info[:2] < min_python3
|
|
||||||
):
|
|
||||||
v_info = sys.version_info[:3]
|
v_info = sys.version_info[:3]
|
||||||
msg = "Spack requires Python 2.7 or %d.%d or higher " % min_python3
|
msg = "Spack requires Python %d.%d or higher " % min_python3
|
||||||
msg += "You are running spack with Python %d.%d.%d." % v_info
|
msg += "You are running spack with Python %d.%d.%d." % v_info
|
||||||
sys.exit(msg)
|
sys.exit(msg)
|
||||||
|
|
||||||
|
|||||||
@@ -19,7 +19,7 @@ config:
|
|||||||
install_tree:
|
install_tree:
|
||||||
root: $spack/opt/spack
|
root: $spack/opt/spack
|
||||||
projections:
|
projections:
|
||||||
all: "${ARCHITECTURE}/${COMPILERNAME}-${COMPILERVER}/${PACKAGE}-${VERSION}-${HASH}"
|
all: "{architecture}/{compiler.name}-{compiler.version}/{name}-{version}-{hash}"
|
||||||
# install_tree can include an optional padded length (int or boolean)
|
# install_tree can include an optional padded length (int or boolean)
|
||||||
# default is False (do not pad)
|
# default is False (do not pad)
|
||||||
# if padded_length is True, Spack will pad as close to the system max path
|
# if padded_length is True, Spack will pad as close to the system max path
|
||||||
@@ -214,4 +214,8 @@ config:
|
|||||||
|
|
||||||
# Number of seconds a buildcache's index.json is cached locally before probing
|
# Number of seconds a buildcache's index.json is cached locally before probing
|
||||||
# for updates, within a single Spack invocation. Defaults to 10 minutes.
|
# for updates, within a single Spack invocation. Defaults to 10 minutes.
|
||||||
binary_index_ttl: 600
|
binary_index_ttl: 600
|
||||||
|
|
||||||
|
flags:
|
||||||
|
# Whether to keep -Werror flags active in package builds.
|
||||||
|
keep_werror: 'none'
|
||||||
|
|||||||
@@ -1244,8 +1244,8 @@ For example, for the ``stackstart`` variant:
|
|||||||
|
|
||||||
.. code-block:: sh
|
.. code-block:: sh
|
||||||
|
|
||||||
mpileaks stackstart=4 # variant will be propagated to dependencies
|
mpileaks stackstart==4 # variant will be propagated to dependencies
|
||||||
mpileaks stackstart==4 # only mpileaks will have this variant value
|
mpileaks stackstart=4 # only mpileaks will have this variant value
|
||||||
|
|
||||||
^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^
|
||||||
Compiler Flags
|
Compiler Flags
|
||||||
@@ -1672,9 +1672,13 @@ own install prefix. However, certain packages are typically installed
|
|||||||
`Python <https://www.python.org>`_ packages are typically installed in the
|
`Python <https://www.python.org>`_ packages are typically installed in the
|
||||||
``$prefix/lib/python-2.7/site-packages`` directory.
|
``$prefix/lib/python-2.7/site-packages`` directory.
|
||||||
|
|
||||||
Spack has support for this type of installation as well. In Spack,
|
In Spack, installation prefixes are immutable, so this type of installation
|
||||||
a package that can live inside the prefix of another package is called
|
is not directly supported. However, it is possible to create views that
|
||||||
an *extension*. Suppose you have Python installed like so:
|
allow you to merge install prefixes of multiple packages into a single new prefix.
|
||||||
|
Views are a convenient way to get a more traditional filesystem structure.
|
||||||
|
Using *extensions*, you can ensure that Python packages always share the
|
||||||
|
same prefix in the view as Python itself. Suppose you have
|
||||||
|
Python installed like so:
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
@@ -1712,8 +1716,6 @@ You can find extensions for your Python installation like this:
|
|||||||
py-ipython@2.3.1 py-pygments@2.0.1 py-setuptools@11.3.1
|
py-ipython@2.3.1 py-pygments@2.0.1 py-setuptools@11.3.1
|
||||||
py-matplotlib@1.4.2 py-pyparsing@2.0.3 py-six@1.9.0
|
py-matplotlib@1.4.2 py-pyparsing@2.0.3 py-six@1.9.0
|
||||||
|
|
||||||
==> None activated.
|
|
||||||
|
|
||||||
The extensions are a subset of what's returned by ``spack list``, and
|
The extensions are a subset of what's returned by ``spack list``, and
|
||||||
they are packages like any other. They are installed into their own
|
they are packages like any other. They are installed into their own
|
||||||
prefixes, and you can see this with ``spack find --paths``:
|
prefixes, and you can see this with ``spack find --paths``:
|
||||||
@@ -1741,32 +1743,72 @@ directly when you run ``python``:
|
|||||||
ImportError: No module named numpy
|
ImportError: No module named numpy
|
||||||
>>>
|
>>>
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
Using Extensions
|
Using Extensions in Environments
|
||||||
^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
There are multiple ways to get ``numpy`` working in Python. The first is
|
The recommended way of working with extensions such as ``py-numpy``
|
||||||
to use :ref:`shell-support`. You can simply ``load`` the extension,
|
above is through :ref:`Environments <environments>`. For example,
|
||||||
and it will be added to the ``PYTHONPATH`` in your current shell, and
|
the following creates an environment in the current working directory
|
||||||
Python itself will be available in the ``PATH``:
|
with a filesystem view in the ``./view`` directory:
|
||||||
|
|
||||||
|
.. code-block:: console
|
||||||
|
|
||||||
|
$ spack env create --with-view view --dir .
|
||||||
|
$ spack -e . add py-numpy
|
||||||
|
$ spack -e . concretize
|
||||||
|
$ spack -e . install
|
||||||
|
|
||||||
|
We recommend environments for two reasons. Firstly, environments
|
||||||
|
can be activated (requires :ref:`shell-support`):
|
||||||
|
|
||||||
|
.. code-block:: console
|
||||||
|
|
||||||
|
$ spack env activate .
|
||||||
|
|
||||||
|
which sets all the right environment variables such as ``PATH`` and
|
||||||
|
``PYTHONPATH``. This ensures that
|
||||||
|
|
||||||
|
.. code-block:: console
|
||||||
|
|
||||||
|
$ python
|
||||||
|
>>> import numpy
|
||||||
|
|
||||||
|
works. Secondly, even without shell support, the view ensures
|
||||||
|
that Python can locate its extensions:
|
||||||
|
|
||||||
|
.. code-block:: console
|
||||||
|
|
||||||
|
$ ./view/bin/python
|
||||||
|
>>> import numpy
|
||||||
|
|
||||||
|
See :ref:`environments` for a more in-depth description of Spack
|
||||||
|
environments and customizations to views.
|
||||||
|
|
||||||
|
^^^^^^^^^^^^^^^^^^^^
|
||||||
|
Using ``spack load``
|
||||||
|
^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
|
A more traditional way of using Spack and extensions is ``spack load``
|
||||||
|
(requires :ref:`shell-support`). This will add the extension to ``PYTHONPATH``
|
||||||
|
in your current shell, and Python itself will be available in the ``PATH``:
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ spack load py-numpy
|
$ spack load py-numpy
|
||||||
|
$ python
|
||||||
|
>>> import numpy
|
||||||
|
|
||||||
Now ``import numpy`` will succeed for as long as you keep your current
|
|
||||||
session open.
|
|
||||||
The loaded packages can be checked using ``spack find --loaded``
|
The loaded packages can be checked using ``spack find --loaded``
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
Loading Extensions via Modules
|
Loading Extensions via Modules
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
Instead of using Spack's environment modification capabilities through
|
Apart from ``spack env activate`` and ``spack load``, you can load numpy
|
||||||
the ``spack load`` command, you can load numpy through your
|
through your environment modules (using ``environment-modules`` or
|
||||||
environment modules (using ``environment-modules`` or ``lmod``). This
|
``lmod``). This will also add the extension to the ``PYTHONPATH`` in
|
||||||
will also add the extension to the ``PYTHONPATH`` in your current
|
your current shell.
|
||||||
shell.
|
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
@@ -1776,15 +1818,6 @@ If you do not know the name of the specific numpy module you wish to
|
|||||||
load, you can use the ``spack module tcl|lmod loads`` command to get
|
load, you can use the ``spack module tcl|lmod loads`` command to get
|
||||||
the name of the module from the Spack spec.
|
the name of the module from the Spack spec.
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
Extensions in an Environment
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
Another way to use extensions is to create a view, which merges the
|
|
||||||
python installation along with the extensions into a single prefix.
|
|
||||||
See :ref:`environments` for a more in-depth description
|
|
||||||
of environment views.
|
|
||||||
|
|
||||||
-----------------------
|
-----------------------
|
||||||
Filesystem requirements
|
Filesystem requirements
|
||||||
-----------------------
|
-----------------------
|
||||||
|
|||||||
@@ -5,9 +5,9 @@
|
|||||||
|
|
||||||
.. _cachedcmakepackage:
|
.. _cachedcmakepackage:
|
||||||
|
|
||||||
------------------
|
-----------
|
||||||
CachedCMakePackage
|
CachedCMake
|
||||||
------------------
|
-----------
|
||||||
|
|
||||||
The CachedCMakePackage base class is used for CMake-based workflows
|
The CachedCMakePackage base class is used for CMake-based workflows
|
||||||
that create a CMake cache file prior to running ``cmake``. This is
|
that create a CMake cache file prior to running ``cmake``. This is
|
||||||
|
|||||||
@@ -5,9 +5,9 @@
|
|||||||
|
|
||||||
.. _cudapackage:
|
.. _cudapackage:
|
||||||
|
|
||||||
-----------
|
----
|
||||||
CudaPackage
|
Cuda
|
||||||
-----------
|
----
|
||||||
|
|
||||||
Different from other packages, ``CudaPackage`` does not represent a build system.
|
Different from other packages, ``CudaPackage`` does not represent a build system.
|
||||||
Instead its goal is to simplify and unify usage of ``CUDA`` in other packages by providing a `mixin-class <https://en.wikipedia.org/wiki/Mixin>`_.
|
Instead its goal is to simplify and unify usage of ``CUDA`` in other packages by providing a `mixin-class <https://en.wikipedia.org/wiki/Mixin>`_.
|
||||||
@@ -80,7 +80,7 @@ standard CUDA compiler flags.
|
|||||||
|
|
||||||
**cuda_flags**
|
**cuda_flags**
|
||||||
|
|
||||||
This built-in static method returns a list of command line flags
|
This built-in static method returns a list of command line flags
|
||||||
for the chosen ``cuda_arch`` value(s). The flags are intended to
|
for the chosen ``cuda_arch`` value(s). The flags are intended to
|
||||||
be passed to the CUDA compiler driver (i.e., ``nvcc``).
|
be passed to the CUDA compiler driver (i.e., ``nvcc``).
|
||||||
|
|
||||||
|
|||||||
@@ -6,9 +6,9 @@
|
|||||||
.. _inteloneapipackage:
|
.. _inteloneapipackage:
|
||||||
|
|
||||||
|
|
||||||
====================
|
===========
|
||||||
IntelOneapiPackage
|
IntelOneapi
|
||||||
====================
|
===========
|
||||||
|
|
||||||
|
|
||||||
.. contents::
|
.. contents::
|
||||||
@@ -36,7 +36,7 @@ For more information on a specific package, do::
|
|||||||
|
|
||||||
Intel no longer releases new versions of Parallel Studio, which can be
|
Intel no longer releases new versions of Parallel Studio, which can be
|
||||||
used in Spack via the :ref:`intelpackage`. All of its components can
|
used in Spack via the :ref:`intelpackage`. All of its components can
|
||||||
now be found in oneAPI.
|
now be found in oneAPI.
|
||||||
|
|
||||||
Examples
|
Examples
|
||||||
========
|
========
|
||||||
|
|||||||
@@ -5,9 +5,9 @@
|
|||||||
|
|
||||||
.. _intelpackage:
|
.. _intelpackage:
|
||||||
|
|
||||||
------------
|
-----
|
||||||
IntelPackage
|
Intel
|
||||||
------------
|
-----
|
||||||
|
|
||||||
.. contents::
|
.. contents::
|
||||||
|
|
||||||
|
|||||||
@@ -5,9 +5,9 @@
|
|||||||
|
|
||||||
.. _pythonpackage:
|
.. _pythonpackage:
|
||||||
|
|
||||||
-------------
|
------
|
||||||
PythonPackage
|
Python
|
||||||
-------------
|
------
|
||||||
|
|
||||||
Python packages and modules have their own special build system. This
|
Python packages and modules have their own special build system. This
|
||||||
documentation covers everything you'll need to know in order to write
|
documentation covers everything you'll need to know in order to write
|
||||||
@@ -724,10 +724,9 @@ extends vs. depends_on
|
|||||||
|
|
||||||
This is very similar to the naming dilemma above, with a slight twist.
|
This is very similar to the naming dilemma above, with a slight twist.
|
||||||
As mentioned in the :ref:`Packaging Guide <packaging_extensions>`,
|
As mentioned in the :ref:`Packaging Guide <packaging_extensions>`,
|
||||||
``extends`` and ``depends_on`` are very similar, but ``extends`` adds
|
``extends`` and ``depends_on`` are very similar, but ``extends`` ensures
|
||||||
the ability to *activate* the package. Activation involves symlinking
|
that the extension and extendee share the same prefix in views.
|
||||||
everything in the installation prefix of the package to the installation
|
This allows the user to import a Python module without
|
||||||
prefix of Python. This allows the user to import a Python module without
|
|
||||||
having to add that module to ``PYTHONPATH``.
|
having to add that module to ``PYTHONPATH``.
|
||||||
|
|
||||||
When deciding between ``extends`` and ``depends_on``, the best rule of
|
When deciding between ``extends`` and ``depends_on``, the best rule of
|
||||||
@@ -735,7 +734,7 @@ thumb is to check the installation prefix. If Python libraries are
|
|||||||
installed to ``<prefix>/lib/pythonX.Y/site-packages``, then you
|
installed to ``<prefix>/lib/pythonX.Y/site-packages``, then you
|
||||||
should use ``extends``. If Python libraries are installed elsewhere
|
should use ``extends``. If Python libraries are installed elsewhere
|
||||||
or the only files that get installed reside in ``<prefix>/bin``, then
|
or the only files that get installed reside in ``<prefix>/bin``, then
|
||||||
don't use ``extends``, as symlinking the package wouldn't be useful.
|
don't use ``extends``.
|
||||||
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^
|
||||||
Alternatives to Spack
|
Alternatives to Spack
|
||||||
|
|||||||
@@ -5,9 +5,9 @@
|
|||||||
|
|
||||||
.. _rocmpackage:
|
.. _rocmpackage:
|
||||||
|
|
||||||
-----------
|
----
|
||||||
ROCmPackage
|
ROCm
|
||||||
-----------
|
----
|
||||||
|
|
||||||
The ``ROCmPackage`` is not a build system but a helper package. Like ``CudaPackage``,
|
The ``ROCmPackage`` is not a build system but a helper package. Like ``CudaPackage``,
|
||||||
it provides standard variants, dependencies, and conflicts to facilitate building
|
it provides standard variants, dependencies, and conflicts to facilitate building
|
||||||
@@ -25,7 +25,7 @@ This package provides the following variants:
|
|||||||
|
|
||||||
* **rocm**
|
* **rocm**
|
||||||
|
|
||||||
This variant is used to enable/disable building with ``rocm``.
|
This variant is used to enable/disable building with ``rocm``.
|
||||||
The default is disabled (or ``False``).
|
The default is disabled (or ``False``).
|
||||||
|
|
||||||
* **amdgpu_target**
|
* **amdgpu_target**
|
||||||
|
|||||||
@@ -5,9 +5,9 @@
|
|||||||
|
|
||||||
.. _rpackage:
|
.. _rpackage:
|
||||||
|
|
||||||
--------
|
--
|
||||||
RPackage
|
R
|
||||||
--------
|
--
|
||||||
|
|
||||||
Like Python, R has its own built-in build system.
|
Like Python, R has its own built-in build system.
|
||||||
|
|
||||||
@@ -193,10 +193,10 @@ Build system dependencies
|
|||||||
|
|
||||||
As an extension of the R ecosystem, your package will obviously depend
|
As an extension of the R ecosystem, your package will obviously depend
|
||||||
on R to build and run. Normally, we would use ``depends_on`` to express
|
on R to build and run. Normally, we would use ``depends_on`` to express
|
||||||
this, but for R packages, we use ``extends``. ``extends`` is similar to
|
this, but for R packages, we use ``extends``. This implies a special
|
||||||
``depends_on``, but adds an additional feature: the ability to "activate"
|
dependency on R, which is used to set environment variables such as
|
||||||
the package by symlinking it to the R installation directory. Since
|
``R_LIBS`` uniformly. Since every R package needs this, the ``RPackage``
|
||||||
every R package needs this, the ``RPackage`` base class contains:
|
base class contains:
|
||||||
|
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
|
|
||||||
|
|||||||
@@ -5,15 +5,15 @@
|
|||||||
|
|
||||||
.. _sourceforgepackage:
|
.. _sourceforgepackage:
|
||||||
|
|
||||||
------------------
|
-----------
|
||||||
SourceforgePackage
|
Sourceforge
|
||||||
------------------
|
-----------
|
||||||
|
|
||||||
``SourceforgePackage`` is a
|
``SourceforgePackage`` is a
|
||||||
`mixin-class <https://en.wikipedia.org/wiki/Mixin>`_. It automatically
|
`mixin-class <https://en.wikipedia.org/wiki/Mixin>`_. It automatically
|
||||||
sets the URL based on a list of Sourceforge mirrors listed in
|
sets the URL based on a list of Sourceforge mirrors listed in
|
||||||
`sourceforge_mirror_path`, which defaults to a half dozen known mirrors.
|
`sourceforge_mirror_path`, which defaults to a half dozen known mirrors.
|
||||||
Refer to the package source
|
Refer to the package source
|
||||||
(`<https://github.com/spack/spack/blob/develop/lib/spack/spack/build_systems/sourceforge.py>`__) for the current list of mirrors used by Spack.
|
(`<https://github.com/spack/spack/blob/develop/lib/spack/spack/build_systems/sourceforge.py>`__) for the current list of mirrors used by Spack.
|
||||||
|
|
||||||
|
|
||||||
@@ -29,7 +29,7 @@ This package provides a method for populating mirror URLs.
|
|||||||
It is decorated with `property` so its results are treated as
|
It is decorated with `property` so its results are treated as
|
||||||
a package attribute.
|
a package attribute.
|
||||||
|
|
||||||
Refer to
|
Refer to
|
||||||
`<https://spack.readthedocs.io/en/latest/packaging_guide.html#mirrors-of-the-main-url>`__
|
`<https://spack.readthedocs.io/en/latest/packaging_guide.html#mirrors-of-the-main-url>`__
|
||||||
for information on how Spack uses the `urls` attribute during
|
for information on how Spack uses the `urls` attribute during
|
||||||
fetching.
|
fetching.
|
||||||
|
|||||||
@@ -37,12 +37,6 @@
|
|||||||
os.symlink(os.path.abspath("../../.."), link_name, target_is_directory=True)
|
os.symlink(os.path.abspath("../../.."), link_name, target_is_directory=True)
|
||||||
sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external"))
|
sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external"))
|
||||||
sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/pytest-fallback"))
|
sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/pytest-fallback"))
|
||||||
|
|
||||||
if sys.version_info[0] < 3:
|
|
||||||
sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/yaml/lib"))
|
|
||||||
else:
|
|
||||||
sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/yaml/lib3"))
|
|
||||||
|
|
||||||
sys.path.append(os.path.abspath("_spack_root/lib/spack/"))
|
sys.path.append(os.path.abspath("_spack_root/lib/spack/"))
|
||||||
|
|
||||||
# Add the Spack bin directory to the path so that we can use its output in docs.
|
# Add the Spack bin directory to the path so that we can use its output in docs.
|
||||||
@@ -160,8 +154,8 @@ def setup(sphinx):
|
|||||||
master_doc = "index"
|
master_doc = "index"
|
||||||
|
|
||||||
# General information about the project.
|
# General information about the project.
|
||||||
project = u"Spack"
|
project = "Spack"
|
||||||
copyright = u"2013-2021, Lawrence Livermore National Laboratory."
|
copyright = "2013-2021, Lawrence Livermore National Laboratory."
|
||||||
|
|
||||||
# The version info for the project you're documenting, acts as replacement for
|
# The version info for the project you're documenting, acts as replacement for
|
||||||
# |version| and |release|, also used in various other places throughout the
|
# |version| and |release|, also used in various other places throughout the
|
||||||
@@ -350,7 +344,7 @@ class SpackStyle(DefaultStyle):
|
|||||||
# Grouping the document tree into LaTeX files. List of tuples
|
# Grouping the document tree into LaTeX files. List of tuples
|
||||||
# (source start file, target name, title, author, documentclass [howto/manual]).
|
# (source start file, target name, title, author, documentclass [howto/manual]).
|
||||||
latex_documents = [
|
latex_documents = [
|
||||||
("index", "Spack.tex", u"Spack Documentation", u"Todd Gamblin", "manual"),
|
("index", "Spack.tex", "Spack Documentation", "Todd Gamblin", "manual"),
|
||||||
]
|
]
|
||||||
|
|
||||||
# The name of an image file (relative to this directory) to place at the top of
|
# The name of an image file (relative to this directory) to place at the top of
|
||||||
@@ -378,7 +372,7 @@ class SpackStyle(DefaultStyle):
|
|||||||
|
|
||||||
# One entry per manual page. List of tuples
|
# One entry per manual page. List of tuples
|
||||||
# (source start file, name, description, authors, manual section).
|
# (source start file, name, description, authors, manual section).
|
||||||
man_pages = [("index", "spack", u"Spack Documentation", [u"Todd Gamblin"], 1)]
|
man_pages = [("index", "spack", "Spack Documentation", ["Todd Gamblin"], 1)]
|
||||||
|
|
||||||
# If true, show URL addresses after external links.
|
# If true, show URL addresses after external links.
|
||||||
# man_show_urls = False
|
# man_show_urls = False
|
||||||
@@ -393,8 +387,8 @@ class SpackStyle(DefaultStyle):
|
|||||||
(
|
(
|
||||||
"index",
|
"index",
|
||||||
"Spack",
|
"Spack",
|
||||||
u"Spack Documentation",
|
"Spack Documentation",
|
||||||
u"Todd Gamblin",
|
"Todd Gamblin",
|
||||||
"Spack",
|
"Spack",
|
||||||
"One line description of project.",
|
"One line description of project.",
|
||||||
"Miscellaneous",
|
"Miscellaneous",
|
||||||
|
|||||||
@@ -394,7 +394,7 @@ are indicated at the start of the path with ``~`` or ``~user``.
|
|||||||
Spack-specific variables
|
Spack-specific variables
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
Spack understands several special variables. These are:
|
Spack understands over a dozen special variables. These are:
|
||||||
|
|
||||||
* ``$env``: name of the currently active :ref:`environment <environments>`
|
* ``$env``: name of the currently active :ref:`environment <environments>`
|
||||||
* ``$spack``: path to the prefix of this Spack installation
|
* ``$spack``: path to the prefix of this Spack installation
|
||||||
@@ -416,6 +416,8 @@ Spack understands several special variables. These are:
|
|||||||
ArchSpec. E.g. ``skylake`` or ``neoverse-n1``.
|
ArchSpec. E.g. ``skylake`` or ``neoverse-n1``.
|
||||||
* ``$target_family``. The target family for the current host, as
|
* ``$target_family``. The target family for the current host, as
|
||||||
detected by ArchSpec. E.g. ``x86_64`` or ``aarch64``.
|
detected by ArchSpec. E.g. ``x86_64`` or ``aarch64``.
|
||||||
|
* ``$date``: the current date in the format YYYY-MM-DD
|
||||||
|
|
||||||
|
|
||||||
Note that, as with shell variables, you can write these as ``$varname``
|
Note that, as with shell variables, you can write these as ``$varname``
|
||||||
or with braces to distinguish the variable from surrounding characters:
|
or with braces to distinguish the variable from surrounding characters:
|
||||||
|
|||||||
@@ -175,14 +175,11 @@ Spec-related modules
|
|||||||
^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
:mod:`spack.spec`
|
:mod:`spack.spec`
|
||||||
Contains :class:`~spack.spec.Spec` and :class:`~spack.spec.SpecParser`.
|
Contains :class:`~spack.spec.Spec`. Also implements most of the logic for concretization
|
||||||
Also implements most of the logic for normalization and concretization
|
|
||||||
of specs.
|
of specs.
|
||||||
|
|
||||||
:mod:`spack.parse`
|
:mod:`spack.parser`
|
||||||
Contains some base classes for implementing simple recursive descent
|
Contains :class:`~spack.parser.SpecParser` and functions related to parsing specs.
|
||||||
parsers: :class:`~spack.parse.Parser` and :class:`~spack.parse.Lexer`.
|
|
||||||
Used by :class:`~spack.spec.SpecParser`.
|
|
||||||
|
|
||||||
:mod:`spack.concretize`
|
:mod:`spack.concretize`
|
||||||
Contains :class:`~spack.concretize.Concretizer` implementation,
|
Contains :class:`~spack.concretize.Concretizer` implementation,
|
||||||
|
|||||||
@@ -1070,19 +1070,23 @@ the include is conditional.
|
|||||||
Building a subset of the environment
|
Building a subset of the environment
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
|
||||||
The generated ``Makefile``\s contain install targets for each spec. Given the hash
|
The generated ``Makefile``\s contain install targets for each spec, identified
|
||||||
of a particular spec, you can use the ``.install/<hash>`` target to install the
|
by ``<name>-<version>-<hash>``. This allows you to install only a subset of the
|
||||||
spec with its dependencies. There is also ``.install-deps/<hash>`` to *only* install
|
packages in the environment. When packages are unique in the environment, it's
|
||||||
|
enough to know the name and let tab-completion fill out the version and hash.
|
||||||
|
|
||||||
|
The following phony targets are available: ``install/<spec>`` to install the
|
||||||
|
spec with its dependencies, and ``install-deps/<spec>`` to *only* install
|
||||||
its dependencies. This can be useful when certain flags should only apply to
|
its dependencies. This can be useful when certain flags should only apply to
|
||||||
dependencies. Below we show a use case where a spec is installed with verbose
|
dependencies. Below we show a use case where a spec is installed with verbose
|
||||||
output (``spack install --verbose``) while its dependencies are installed silently:
|
output (``spack install --verbose``) while its dependencies are installed silently:
|
||||||
|
|
||||||
.. code:: console
|
.. code:: console
|
||||||
|
|
||||||
$ spack env depfile -o Makefile --make-target-prefix my_env
|
$ spack env depfile -o Makefile
|
||||||
|
|
||||||
# Install dependencies in parallel, only show a log on error.
|
# Install dependencies in parallel, only show a log on error.
|
||||||
$ make -j16 my_env/.install-deps/<hash> SPACK_INSTALL_FLAGS=--show-log-on-error
|
$ make -j16 install-deps/python-3.11.0-<hash> SPACK_INSTALL_FLAGS=--show-log-on-error
|
||||||
|
|
||||||
# Install the root spec with verbose output.
|
# Install the root spec with verbose output.
|
||||||
$ make -j16 my_env/.install/<hash> SPACK_INSTALL_FLAGS=--verbose
|
$ make -j16 install/python-3.11.0-<hash> SPACK_INSTALL_FLAGS=--verbose
|
||||||
@@ -21,8 +21,9 @@ be present on the machine where Spack is run:
|
|||||||
:header-rows: 1
|
:header-rows: 1
|
||||||
|
|
||||||
These requirements can be easily installed on most modern Linux systems;
|
These requirements can be easily installed on most modern Linux systems;
|
||||||
on macOS, XCode is required. Spack is designed to run on HPC
|
on macOS, the Command Line Tools package is required, and a full XCode suite
|
||||||
platforms like Cray. Not all packages should be expected
|
may be necessary for some packages such as Qt and apple-gl. Spack is designed
|
||||||
|
to run on HPC platforms like Cray. Not all packages should be expected
|
||||||
to work on all platforms.
|
to work on all platforms.
|
||||||
|
|
||||||
A build matrix showing which packages are working on which systems is shown below.
|
A build matrix showing which packages are working on which systems is shown below.
|
||||||
@@ -1704,9 +1705,11 @@ dependencies or incompatible build tools like autoconf. Here are several
|
|||||||
packages known to work on Windows:
|
packages known to work on Windows:
|
||||||
|
|
||||||
* abseil-cpp
|
* abseil-cpp
|
||||||
|
* bzip2
|
||||||
* clingo
|
* clingo
|
||||||
* cpuinfo
|
* cpuinfo
|
||||||
* cmake
|
* cmake
|
||||||
|
* hdf5
|
||||||
* glm
|
* glm
|
||||||
* nasm
|
* nasm
|
||||||
* netlib-lapack (requires Intel Fortran)
|
* netlib-lapack (requires Intel Fortran)
|
||||||
|
|||||||
@@ -2634,9 +2634,12 @@ extendable package:
|
|||||||
extends('python')
|
extends('python')
|
||||||
...
|
...
|
||||||
|
|
||||||
Now, the ``py-numpy`` package can be used as an argument to ``spack
|
This accomplishes a few things. Firstly, the Python package can set special
|
||||||
activate``. When it is activated, all the files in its prefix will be
|
variables such as ``PYTHONPATH`` for all extensions when the run or build
|
||||||
symbolically linked into the prefix of the python package.
|
environment is set up. Secondly, filesystem views can ensure that extensions
|
||||||
|
are put in the same prefix as their extendee. This ensures that Python in
|
||||||
|
a view can always locate its Python packages, even without environment
|
||||||
|
variables set.
|
||||||
|
|
||||||
A package can only extend one other package at a time. To support packages
|
A package can only extend one other package at a time. To support packages
|
||||||
that may extend one of a list of other packages, Spack supports multiple
|
that may extend one of a list of other packages, Spack supports multiple
|
||||||
@@ -2684,9 +2687,8 @@ variant(s) are selected. This may be accomplished with conditional
|
|||||||
...
|
...
|
||||||
|
|
||||||
Sometimes, certain files in one package will conflict with those in
|
Sometimes, certain files in one package will conflict with those in
|
||||||
another, which means they cannot both be activated (symlinked) at the
|
another, which means they cannot both be used in a view at the
|
||||||
same time. In this case, you can tell Spack to ignore those files
|
same time. In this case, you can tell Spack to ignore those files:
|
||||||
when it does the activation:
|
|
||||||
|
|
||||||
.. code-block:: python
|
.. code-block:: python
|
||||||
|
|
||||||
@@ -2698,7 +2700,7 @@ when it does the activation:
|
|||||||
...
|
...
|
||||||
|
|
||||||
The code above will prevent everything in the ``$prefix/bin/`` directory
|
The code above will prevent everything in the ``$prefix/bin/`` directory
|
||||||
from being linked in at activation time.
|
from being linked in a view.
|
||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
@@ -3523,7 +3525,7 @@ will likely contain some overriding of default builder methods:
|
|||||||
def cmake_args(self):
|
def cmake_args(self):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
class Autotoolsbuilder(spack.build_systems.autotools.AutotoolsBuilder):
|
class AutotoolsBuilder(spack.build_systems.autotools.AutotoolsBuilder):
|
||||||
def configure_args(self):
|
def configure_args(self):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
Name, Supported Versions, Notes, Requirement Reason
|
Name, Supported Versions, Notes, Requirement Reason
|
||||||
Python, 2.7/3.6-3.11, , Interpreter for Spack
|
Python, 3.6--3.11, , Interpreter for Spack
|
||||||
C/C++ Compilers, , , Building software
|
C/C++ Compilers, , , Building software
|
||||||
make, , , Build software
|
make, , , Build software
|
||||||
patch, , , Build software
|
patch, , , Build software
|
||||||
|
|||||||
|
41
lib/spack/env/cc
vendored
41
lib/spack/env/cc
vendored
@@ -440,6 +440,47 @@ while [ $# -ne 0 ]; do
|
|||||||
continue
|
continue
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
if [ -n "${SPACK_COMPILER_FLAGS_KEEP}" ] ; then
|
||||||
|
# NOTE: the eval is required to allow `|` alternatives inside the variable
|
||||||
|
eval "\
|
||||||
|
case \"\$1\" in
|
||||||
|
$SPACK_COMPILER_FLAGS_KEEP)
|
||||||
|
append other_args_list \"\$1\"
|
||||||
|
shift
|
||||||
|
continue
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
"
|
||||||
|
fi
|
||||||
|
# the replace list is a space-separated list of pipe-separated pairs,
|
||||||
|
# the first in each pair is the original prefix to be matched, the
|
||||||
|
# second is the replacement prefix
|
||||||
|
if [ -n "${SPACK_COMPILER_FLAGS_REPLACE}" ] ; then
|
||||||
|
for rep in ${SPACK_COMPILER_FLAGS_REPLACE} ; do
|
||||||
|
before=${rep%|*}
|
||||||
|
after=${rep#*|}
|
||||||
|
eval "\
|
||||||
|
stripped=\"\${1##$before}\"
|
||||||
|
"
|
||||||
|
if [ "$stripped" = "$1" ] ; then
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
|
||||||
|
replaced="$after$stripped"
|
||||||
|
|
||||||
|
# it matched, remove it
|
||||||
|
shift
|
||||||
|
|
||||||
|
if [ -z "$replaced" ] ; then
|
||||||
|
# completely removed, continue OUTER loop
|
||||||
|
continue 2
|
||||||
|
fi
|
||||||
|
|
||||||
|
# re-build argument list with replacement
|
||||||
|
set -- "$replaced" "$@"
|
||||||
|
done
|
||||||
|
fi
|
||||||
|
|
||||||
case "$1" in
|
case "$1" in
|
||||||
-isystem*)
|
-isystem*)
|
||||||
arg="${1#-isystem}"
|
arg="${1#-isystem}"
|
||||||
|
|||||||
7
lib/spack/external/ctest_log_parser.py
vendored
7
lib/spack/external/ctest_log_parser.py
vendored
@@ -71,13 +71,12 @@
|
|||||||
import re
|
import re
|
||||||
import math
|
import math
|
||||||
import multiprocessing
|
import multiprocessing
|
||||||
|
import io
|
||||||
import sys
|
import sys
|
||||||
import threading
|
import threading
|
||||||
import time
|
import time
|
||||||
from contextlib import contextmanager
|
from contextlib import contextmanager
|
||||||
|
|
||||||
from six import StringIO
|
|
||||||
from six import string_types
|
|
||||||
|
|
||||||
_error_matches = [
|
_error_matches = [
|
||||||
"^FAIL: ",
|
"^FAIL: ",
|
||||||
@@ -246,7 +245,7 @@ def __getitem__(self, line_no):
|
|||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
"""Returns event lines and context."""
|
"""Returns event lines and context."""
|
||||||
out = StringIO()
|
out = io.StringIO()
|
||||||
for i in range(self.start, self.end):
|
for i in range(self.start, self.end):
|
||||||
if i == self.line_no:
|
if i == self.line_no:
|
||||||
out.write(' >> %-6d%s' % (i, self[i]))
|
out.write(' >> %-6d%s' % (i, self[i]))
|
||||||
@@ -386,7 +385,7 @@ def parse(self, stream, context=6, jobs=None):
|
|||||||
(tuple): two lists containing ``BuildError`` and
|
(tuple): two lists containing ``BuildError`` and
|
||||||
``BuildWarning`` objects.
|
``BuildWarning`` objects.
|
||||||
"""
|
"""
|
||||||
if isinstance(stream, string_types):
|
if isinstance(stream, str):
|
||||||
with open(stream) as f:
|
with open(stream) as f:
|
||||||
return self.parse(f, context, jobs)
|
return self.parse(f, context, jobs)
|
||||||
|
|
||||||
|
|||||||
2392
lib/spack/external/py2/argparse.py
vendored
2392
lib/spack/external/py2/argparse.py
vendored
File diff suppressed because it is too large
Load Diff
289
lib/spack/external/py2/functools32/LICENSE
vendored
289
lib/spack/external/py2/functools32/LICENSE
vendored
@@ -1,289 +0,0 @@
|
|||||||
A. HISTORY OF THE SOFTWARE
|
|
||||||
==========================
|
|
||||||
|
|
||||||
Python was created in the early 1990s by Guido van Rossum at Stichting
|
|
||||||
Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
|
|
||||||
as a successor of a language called ABC. Guido remains Python's
|
|
||||||
principal author, although it includes many contributions from others.
|
|
||||||
|
|
||||||
In 1995, Guido continued his work on Python at the Corporation for
|
|
||||||
National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
|
|
||||||
in Reston, Virginia where he released several versions of the
|
|
||||||
software.
|
|
||||||
|
|
||||||
In May 2000, Guido and the Python core development team moved to
|
|
||||||
BeOpen.com to form the BeOpen PythonLabs team. In October of the same
|
|
||||||
year, the PythonLabs team moved to Digital Creations (now Zope
|
|
||||||
Corporation, see http://www.zope.com). In 2001, the Python Software
|
|
||||||
Foundation (PSF, see http://www.python.org/psf/) was formed, a
|
|
||||||
non-profit organization created specifically to own Python-related
|
|
||||||
Intellectual Property. Zope Corporation is a sponsoring member of
|
|
||||||
the PSF.
|
|
||||||
|
|
||||||
All Python releases are Open Source (see http://www.opensource.org for
|
|
||||||
the Open Source Definition). Historically, most, but not all, Python
|
|
||||||
releases have also been GPL-compatible; the table below summarizes
|
|
||||||
the various releases.
|
|
||||||
|
|
||||||
Release Derived Year Owner GPL-
|
|
||||||
from compatible? (1)
|
|
||||||
|
|
||||||
0.9.0 thru 1.2 1991-1995 CWI yes
|
|
||||||
1.3 thru 1.5.2 1.2 1995-1999 CNRI yes
|
|
||||||
1.6 1.5.2 2000 CNRI no
|
|
||||||
2.0 1.6 2000 BeOpen.com no
|
|
||||||
1.6.1 1.6 2001 CNRI yes (2)
|
|
||||||
2.1 2.0+1.6.1 2001 PSF no
|
|
||||||
2.0.1 2.0+1.6.1 2001 PSF yes
|
|
||||||
2.1.1 2.1+2.0.1 2001 PSF yes
|
|
||||||
2.2 2.1.1 2001 PSF yes
|
|
||||||
2.1.2 2.1.1 2002 PSF yes
|
|
||||||
2.1.3 2.1.2 2002 PSF yes
|
|
||||||
2.2.1 2.2 2002 PSF yes
|
|
||||||
2.2.2 2.2.1 2002 PSF yes
|
|
||||||
2.2.3 2.2.2 2003 PSF yes
|
|
||||||
2.3 2.2.2 2002-2003 PSF yes
|
|
||||||
2.3.1 2.3 2002-2003 PSF yes
|
|
||||||
2.3.2 2.3.1 2002-2003 PSF yes
|
|
||||||
2.3.3 2.3.2 2002-2003 PSF yes
|
|
||||||
2.3.4 2.3.3 2004 PSF yes
|
|
||||||
2.3.5 2.3.4 2005 PSF yes
|
|
||||||
2.4 2.3 2004 PSF yes
|
|
||||||
2.4.1 2.4 2005 PSF yes
|
|
||||||
2.4.2 2.4.1 2005 PSF yes
|
|
||||||
2.4.3 2.4.2 2006 PSF yes
|
|
||||||
2.4.4 2.4.3 2006 PSF yes
|
|
||||||
2.5 2.4 2006 PSF yes
|
|
||||||
2.5.1 2.5 2007 PSF yes
|
|
||||||
2.5.2 2.5.1 2008 PSF yes
|
|
||||||
2.5.3 2.5.2 2008 PSF yes
|
|
||||||
2.6 2.5 2008 PSF yes
|
|
||||||
2.6.1 2.6 2008 PSF yes
|
|
||||||
2.6.2 2.6.1 2009 PSF yes
|
|
||||||
2.6.3 2.6.2 2009 PSF yes
|
|
||||||
2.6.4 2.6.3 2009 PSF yes
|
|
||||||
2.6.5 2.6.4 2010 PSF yes
|
|
||||||
3.0 2.6 2008 PSF yes
|
|
||||||
3.0.1 3.0 2009 PSF yes
|
|
||||||
3.1 3.0.1 2009 PSF yes
|
|
||||||
3.1.1 3.1 2009 PSF yes
|
|
||||||
3.1.2 3.1.1 2010 PSF yes
|
|
||||||
3.1.3 3.1.2 2010 PSF yes
|
|
||||||
3.1.4 3.1.3 2011 PSF yes
|
|
||||||
3.2 3.1 2011 PSF yes
|
|
||||||
3.2.1 3.2 2011 PSF yes
|
|
||||||
3.2.2 3.2.1 2011 PSF yes
|
|
||||||
3.2.3 3.2.2 2012 PSF yes
|
|
||||||
|
|
||||||
Footnotes:
|
|
||||||
|
|
||||||
(1) GPL-compatible doesn't mean that we're distributing Python under
|
|
||||||
the GPL. All Python licenses, unlike the GPL, let you distribute
|
|
||||||
a modified version without making your changes open source. The
|
|
||||||
GPL-compatible licenses make it possible to combine Python with
|
|
||||||
other software that is released under the GPL; the others don't.
|
|
||||||
|
|
||||||
(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
|
|
||||||
because its license has a choice of law clause. According to
|
|
||||||
CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
|
|
||||||
is "not incompatible" with the GPL.
|
|
||||||
|
|
||||||
Thanks to the many outside volunteers who have worked under Guido's
|
|
||||||
direction to make these releases possible.
|
|
||||||
|
|
||||||
|
|
||||||
B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
|
|
||||||
===============================================================
|
|
||||||
|
|
||||||
PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
|
|
||||||
--------------------------------------------
|
|
||||||
|
|
||||||
1. This LICENSE AGREEMENT is between the Python Software Foundation
|
|
||||||
("PSF"), and the Individual or Organization ("Licensee") accessing and
|
|
||||||
otherwise using this software ("Python") in source or binary form and
|
|
||||||
its associated documentation.
|
|
||||||
|
|
||||||
2. Subject to the terms and conditions of this License Agreement, PSF hereby
|
|
||||||
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
|
|
||||||
analyze, test, perform and/or display publicly, prepare derivative works,
|
|
||||||
distribute, and otherwise use Python alone or in any derivative version,
|
|
||||||
provided, however, that PSF's License Agreement and PSF's notice of copyright,
|
|
||||||
i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
|
|
||||||
2011, 2012 Python Software Foundation; All Rights Reserved" are retained in Python
|
|
||||||
alone or in any derivative version prepared by Licensee.
|
|
||||||
|
|
||||||
3. In the event Licensee prepares a derivative work that is based on
|
|
||||||
or incorporates Python or any part thereof, and wants to make
|
|
||||||
the derivative work available to others as provided herein, then
|
|
||||||
Licensee hereby agrees to include in any such work a brief summary of
|
|
||||||
the changes made to Python.
|
|
||||||
|
|
||||||
4. PSF is making Python available to Licensee on an "AS IS"
|
|
||||||
basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
|
||||||
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
|
|
||||||
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
|
||||||
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
|
|
||||||
INFRINGE ANY THIRD PARTY RIGHTS.
|
|
||||||
|
|
||||||
5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
|
|
||||||
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
|
|
||||||
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
|
|
||||||
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
|
||||||
|
|
||||||
6. This License Agreement will automatically terminate upon a material
|
|
||||||
breach of its terms and conditions.
|
|
||||||
|
|
||||||
7. Nothing in this License Agreement shall be deemed to create any
|
|
||||||
relationship of agency, partnership, or joint venture between PSF and
|
|
||||||
Licensee. This License Agreement does not grant permission to use PSF
|
|
||||||
trademarks or trade name in a trademark sense to endorse or promote
|
|
||||||
products or services of Licensee, or any third party.
|
|
||||||
|
|
||||||
8. By copying, installing or otherwise using Python, Licensee
|
|
||||||
agrees to be bound by the terms and conditions of this License
|
|
||||||
Agreement.
|
|
||||||
|
|
||||||
|
|
||||||
BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
|
|
||||||
-------------------------------------------
|
|
||||||
|
|
||||||
BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
|
|
||||||
|
|
||||||
1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
|
|
||||||
office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
|
|
||||||
Individual or Organization ("Licensee") accessing and otherwise using
|
|
||||||
this software in source or binary form and its associated
|
|
||||||
documentation ("the Software").
|
|
||||||
|
|
||||||
2. Subject to the terms and conditions of this BeOpen Python License
|
|
||||||
Agreement, BeOpen hereby grants Licensee a non-exclusive,
|
|
||||||
royalty-free, world-wide license to reproduce, analyze, test, perform
|
|
||||||
and/or display publicly, prepare derivative works, distribute, and
|
|
||||||
otherwise use the Software alone or in any derivative version,
|
|
||||||
provided, however, that the BeOpen Python License is retained in the
|
|
||||||
Software, alone or in any derivative version prepared by Licensee.
|
|
||||||
|
|
||||||
3. BeOpen is making the Software available to Licensee on an "AS IS"
|
|
||||||
basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
|
||||||
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
|
|
||||||
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
|
||||||
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
|
|
||||||
INFRINGE ANY THIRD PARTY RIGHTS.
|
|
||||||
|
|
||||||
4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
|
|
||||||
SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
|
|
||||||
AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
|
|
||||||
DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
|
||||||
|
|
||||||
5. This License Agreement will automatically terminate upon a material
|
|
||||||
breach of its terms and conditions.
|
|
||||||
|
|
||||||
6. This License Agreement shall be governed by and interpreted in all
|
|
||||||
respects by the law of the State of California, excluding conflict of
|
|
||||||
law provisions. Nothing in this License Agreement shall be deemed to
|
|
||||||
create any relationship of agency, partnership, or joint venture
|
|
||||||
between BeOpen and Licensee. This License Agreement does not grant
|
|
||||||
permission to use BeOpen trademarks or trade names in a trademark
|
|
||||||
sense to endorse or promote products or services of Licensee, or any
|
|
||||||
third party. As an exception, the "BeOpen Python" logos available at
|
|
||||||
http://www.pythonlabs.com/logos.html may be used according to the
|
|
||||||
permissions granted on that web page.
|
|
||||||
|
|
||||||
7. By copying, installing or otherwise using the software, Licensee
|
|
||||||
agrees to be bound by the terms and conditions of this License
|
|
||||||
Agreement.
|
|
||||||
|
|
||||||
|
|
||||||
CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
|
|
||||||
---------------------------------------
|
|
||||||
|
|
||||||
1. This LICENSE AGREEMENT is between the Corporation for National
|
|
||||||
Research Initiatives, having an office at 1895 Preston White Drive,
|
|
||||||
Reston, VA 20191 ("CNRI"), and the Individual or Organization
|
|
||||||
("Licensee") accessing and otherwise using Python 1.6.1 software in
|
|
||||||
source or binary form and its associated documentation.
|
|
||||||
|
|
||||||
2. Subject to the terms and conditions of this License Agreement, CNRI
|
|
||||||
hereby grants Licensee a nonexclusive, royalty-free, world-wide
|
|
||||||
license to reproduce, analyze, test, perform and/or display publicly,
|
|
||||||
prepare derivative works, distribute, and otherwise use Python 1.6.1
|
|
||||||
alone or in any derivative version, provided, however, that CNRI's
|
|
||||||
License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
|
|
||||||
1995-2001 Corporation for National Research Initiatives; All Rights
|
|
||||||
Reserved" are retained in Python 1.6.1 alone or in any derivative
|
|
||||||
version prepared by Licensee. Alternately, in lieu of CNRI's License
|
|
||||||
Agreement, Licensee may substitute the following text (omitting the
|
|
||||||
quotes): "Python 1.6.1 is made available subject to the terms and
|
|
||||||
conditions in CNRI's License Agreement. This Agreement together with
|
|
||||||
Python 1.6.1 may be located on the Internet using the following
|
|
||||||
unique, persistent identifier (known as a handle): 1895.22/1013. This
|
|
||||||
Agreement may also be obtained from a proxy server on the Internet
|
|
||||||
using the following URL: http://hdl.handle.net/1895.22/1013".
|
|
||||||
|
|
||||||
3. In the event Licensee prepares a derivative work that is based on
|
|
||||||
or incorporates Python 1.6.1 or any part thereof, and wants to make
|
|
||||||
the derivative work available to others as provided herein, then
|
|
||||||
Licensee hereby agrees to include in any such work a brief summary of
|
|
||||||
the changes made to Python 1.6.1.
|
|
||||||
|
|
||||||
4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
|
|
||||||
basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
|
||||||
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
|
|
||||||
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
|
||||||
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
|
|
||||||
INFRINGE ANY THIRD PARTY RIGHTS.
|
|
||||||
|
|
||||||
5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
|
|
||||||
1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
|
|
||||||
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
|
|
||||||
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
|
||||||
|
|
||||||
6. This License Agreement will automatically terminate upon a material
|
|
||||||
breach of its terms and conditions.
|
|
||||||
|
|
||||||
7. This License Agreement shall be governed by the federal
|
|
||||||
intellectual property law of the United States, including without
|
|
||||||
limitation the federal copyright law, and, to the extent such
|
|
||||||
U.S. federal law does not apply, by the law of the Commonwealth of
|
|
||||||
Virginia, excluding Virginia's conflict of law provisions.
|
|
||||||
Notwithstanding the foregoing, with regard to derivative works based
|
|
||||||
on Python 1.6.1 that incorporate non-separable material that was
|
|
||||||
previously distributed under the GNU General Public License (GPL), the
|
|
||||||
law of the Commonwealth of Virginia shall govern this License
|
|
||||||
Agreement only as to issues arising under or with respect to
|
|
||||||
Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
|
|
||||||
License Agreement shall be deemed to create any relationship of
|
|
||||||
agency, partnership, or joint venture between CNRI and Licensee. This
|
|
||||||
License Agreement does not grant permission to use CNRI trademarks or
|
|
||||||
trade name in a trademark sense to endorse or promote products or
|
|
||||||
services of Licensee, or any third party.
|
|
||||||
|
|
||||||
8. By clicking on the "ACCEPT" button where indicated, or by copying,
|
|
||||||
installing or otherwise using Python 1.6.1, Licensee agrees to be
|
|
||||||
bound by the terms and conditions of this License Agreement.
|
|
||||||
|
|
||||||
ACCEPT
|
|
||||||
|
|
||||||
|
|
||||||
CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
|
|
||||||
--------------------------------------------------
|
|
||||||
|
|
||||||
Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
|
|
||||||
The Netherlands. All rights reserved.
|
|
||||||
|
|
||||||
Permission to use, copy, modify, and distribute this software and its
|
|
||||||
documentation for any purpose and without fee is hereby granted,
|
|
||||||
provided that the above copyright notice appear in all copies and that
|
|
||||||
both that copyright notice and this permission notice appear in
|
|
||||||
supporting documentation, and that the name of Stichting Mathematisch
|
|
||||||
Centrum or CWI not be used in advertising or publicity pertaining to
|
|
||||||
distribution of the software without specific, written prior
|
|
||||||
permission.
|
|
||||||
|
|
||||||
STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
|
|
||||||
THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
|
||||||
FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
|
|
||||||
FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
|
||||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
|
||||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
|
||||||
OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
|
||||||
@@ -1 +0,0 @@
|
|||||||
from .functools32 import *
|
|
||||||
@@ -1,158 +0,0 @@
|
|||||||
"""Drop-in replacement for the thread module.
|
|
||||||
|
|
||||||
Meant to be used as a brain-dead substitute so that threaded code does
|
|
||||||
not need to be rewritten for when the thread module is not present.
|
|
||||||
|
|
||||||
Suggested usage is::
|
|
||||||
|
|
||||||
try:
|
|
||||||
try:
|
|
||||||
import _thread # Python >= 3
|
|
||||||
except:
|
|
||||||
import thread as _thread # Python < 3
|
|
||||||
except ImportError:
|
|
||||||
import _dummy_thread as _thread
|
|
||||||
|
|
||||||
"""
|
|
||||||
# Exports only things specified by thread documentation;
|
|
||||||
# skipping obsolete synonyms allocate(), start_new(), exit_thread().
|
|
||||||
__all__ = ['error', 'start_new_thread', 'exit', 'get_ident', 'allocate_lock',
|
|
||||||
'interrupt_main', 'LockType']
|
|
||||||
|
|
||||||
# A dummy value
|
|
||||||
TIMEOUT_MAX = 2**31
|
|
||||||
|
|
||||||
# NOTE: this module can be imported early in the extension building process,
|
|
||||||
# and so top level imports of other modules should be avoided. Instead, all
|
|
||||||
# imports are done when needed on a function-by-function basis. Since threads
|
|
||||||
# are disabled, the import lock should not be an issue anyway (??).
|
|
||||||
|
|
||||||
class error(Exception):
|
|
||||||
"""Dummy implementation of _thread.error."""
|
|
||||||
|
|
||||||
def __init__(self, *args):
|
|
||||||
self.args = args
|
|
||||||
|
|
||||||
def start_new_thread(function, args, kwargs={}):
|
|
||||||
"""Dummy implementation of _thread.start_new_thread().
|
|
||||||
|
|
||||||
Compatibility is maintained by making sure that ``args`` is a
|
|
||||||
tuple and ``kwargs`` is a dictionary. If an exception is raised
|
|
||||||
and it is SystemExit (which can be done by _thread.exit()) it is
|
|
||||||
caught and nothing is done; all other exceptions are printed out
|
|
||||||
by using traceback.print_exc().
|
|
||||||
|
|
||||||
If the executed function calls interrupt_main the KeyboardInterrupt will be
|
|
||||||
raised when the function returns.
|
|
||||||
|
|
||||||
"""
|
|
||||||
if type(args) != type(tuple()):
|
|
||||||
raise TypeError("2nd arg must be a tuple")
|
|
||||||
if type(kwargs) != type(dict()):
|
|
||||||
raise TypeError("3rd arg must be a dict")
|
|
||||||
global _main
|
|
||||||
_main = False
|
|
||||||
try:
|
|
||||||
function(*args, **kwargs)
|
|
||||||
except SystemExit:
|
|
||||||
pass
|
|
||||||
except:
|
|
||||||
import traceback
|
|
||||||
traceback.print_exc()
|
|
||||||
_main = True
|
|
||||||
global _interrupt
|
|
||||||
if _interrupt:
|
|
||||||
_interrupt = False
|
|
||||||
raise KeyboardInterrupt
|
|
||||||
|
|
||||||
def exit():
|
|
||||||
"""Dummy implementation of _thread.exit()."""
|
|
||||||
raise SystemExit
|
|
||||||
|
|
||||||
def get_ident():
|
|
||||||
"""Dummy implementation of _thread.get_ident().
|
|
||||||
|
|
||||||
Since this module should only be used when _threadmodule is not
|
|
||||||
available, it is safe to assume that the current process is the
|
|
||||||
only thread. Thus a constant can be safely returned.
|
|
||||||
"""
|
|
||||||
return -1
|
|
||||||
|
|
||||||
def allocate_lock():
|
|
||||||
"""Dummy implementation of _thread.allocate_lock()."""
|
|
||||||
return LockType()
|
|
||||||
|
|
||||||
def stack_size(size=None):
|
|
||||||
"""Dummy implementation of _thread.stack_size()."""
|
|
||||||
if size is not None:
|
|
||||||
raise error("setting thread stack size not supported")
|
|
||||||
return 0
|
|
||||||
|
|
||||||
class LockType(object):
|
|
||||||
"""Class implementing dummy implementation of _thread.LockType.
|
|
||||||
|
|
||||||
Compatibility is maintained by maintaining self.locked_status
|
|
||||||
which is a boolean that stores the state of the lock. Pickling of
|
|
||||||
the lock, though, should not be done since if the _thread module is
|
|
||||||
then used with an unpickled ``lock()`` from here problems could
|
|
||||||
occur from this class not having atomic methods.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
self.locked_status = False
|
|
||||||
|
|
||||||
def acquire(self, waitflag=None, timeout=-1):
|
|
||||||
"""Dummy implementation of acquire().
|
|
||||||
|
|
||||||
For blocking calls, self.locked_status is automatically set to
|
|
||||||
True and returned appropriately based on value of
|
|
||||||
``waitflag``. If it is non-blocking, then the value is
|
|
||||||
actually checked and not set if it is already acquired. This
|
|
||||||
is all done so that threading.Condition's assert statements
|
|
||||||
aren't triggered and throw a little fit.
|
|
||||||
|
|
||||||
"""
|
|
||||||
if waitflag is None or waitflag:
|
|
||||||
self.locked_status = True
|
|
||||||
return True
|
|
||||||
else:
|
|
||||||
if not self.locked_status:
|
|
||||||
self.locked_status = True
|
|
||||||
return True
|
|
||||||
else:
|
|
||||||
if timeout > 0:
|
|
||||||
import time
|
|
||||||
time.sleep(timeout)
|
|
||||||
return False
|
|
||||||
|
|
||||||
__enter__ = acquire
|
|
||||||
|
|
||||||
def __exit__(self, typ, val, tb):
|
|
||||||
self.release()
|
|
||||||
|
|
||||||
def release(self):
|
|
||||||
"""Release the dummy lock."""
|
|
||||||
# XXX Perhaps shouldn't actually bother to test? Could lead
|
|
||||||
# to problems for complex, threaded code.
|
|
||||||
if not self.locked_status:
|
|
||||||
raise error
|
|
||||||
self.locked_status = False
|
|
||||||
return True
|
|
||||||
|
|
||||||
def locked(self):
|
|
||||||
return self.locked_status
|
|
||||||
|
|
||||||
# Used to signal that interrupt_main was called in a "thread"
|
|
||||||
_interrupt = False
|
|
||||||
# True when not executing in a "thread"
|
|
||||||
_main = True
|
|
||||||
|
|
||||||
def interrupt_main():
|
|
||||||
"""Set _interrupt flag to True to have start_new_thread raise
|
|
||||||
KeyboardInterrupt upon exiting."""
|
|
||||||
if _main:
|
|
||||||
raise KeyboardInterrupt
|
|
||||||
else:
|
|
||||||
global _interrupt
|
|
||||||
_interrupt = True
|
|
||||||
423
lib/spack/external/py2/functools32/functools32.py
vendored
423
lib/spack/external/py2/functools32/functools32.py
vendored
@@ -1,423 +0,0 @@
|
|||||||
"""functools.py - Tools for working with functions and callable objects
|
|
||||||
"""
|
|
||||||
# Python module wrapper for _functools C module
|
|
||||||
# to allow utilities written in Python to be added
|
|
||||||
# to the functools module.
|
|
||||||
# Written by Nick Coghlan <ncoghlan at gmail.com>
|
|
||||||
# and Raymond Hettinger <python at rcn.com>
|
|
||||||
# Copyright (C) 2006-2010 Python Software Foundation.
|
|
||||||
# See C source code for _functools credits/copyright
|
|
||||||
|
|
||||||
__all__ = ['update_wrapper', 'wraps', 'WRAPPER_ASSIGNMENTS', 'WRAPPER_UPDATES',
|
|
||||||
'total_ordering', 'cmp_to_key', 'lru_cache', 'reduce', 'partial']
|
|
||||||
|
|
||||||
from _functools import partial, reduce
|
|
||||||
from collections import MutableMapping, namedtuple
|
|
||||||
from .reprlib32 import recursive_repr as _recursive_repr
|
|
||||||
from weakref import proxy as _proxy
|
|
||||||
import sys as _sys
|
|
||||||
try:
|
|
||||||
from thread import allocate_lock as Lock
|
|
||||||
except ImportError:
|
|
||||||
from ._dummy_thread32 import allocate_lock as Lock
|
|
||||||
|
|
||||||
################################################################################
|
|
||||||
### OrderedDict
|
|
||||||
################################################################################
|
|
||||||
|
|
||||||
class _Link(object):
|
|
||||||
__slots__ = 'prev', 'next', 'key', '__weakref__'
|
|
||||||
|
|
||||||
class OrderedDict(dict):
|
|
||||||
'Dictionary that remembers insertion order'
|
|
||||||
# An inherited dict maps keys to values.
|
|
||||||
# The inherited dict provides __getitem__, __len__, __contains__, and get.
|
|
||||||
# The remaining methods are order-aware.
|
|
||||||
# Big-O running times for all methods are the same as regular dictionaries.
|
|
||||||
|
|
||||||
# The internal self.__map dict maps keys to links in a doubly linked list.
|
|
||||||
# The circular doubly linked list starts and ends with a sentinel element.
|
|
||||||
# The sentinel element never gets deleted (this simplifies the algorithm).
|
|
||||||
# The sentinel is in self.__hardroot with a weakref proxy in self.__root.
|
|
||||||
# The prev links are weakref proxies (to prevent circular references).
|
|
||||||
# Individual links are kept alive by the hard reference in self.__map.
|
|
||||||
# Those hard references disappear when a key is deleted from an OrderedDict.
|
|
||||||
|
|
||||||
def __init__(self, *args, **kwds):
|
|
||||||
'''Initialize an ordered dictionary. The signature is the same as
|
|
||||||
regular dictionaries, but keyword arguments are not recommended because
|
|
||||||
their insertion order is arbitrary.
|
|
||||||
|
|
||||||
'''
|
|
||||||
if len(args) > 1:
|
|
||||||
raise TypeError('expected at most 1 arguments, got %d' % len(args))
|
|
||||||
try:
|
|
||||||
self.__root
|
|
||||||
except AttributeError:
|
|
||||||
self.__hardroot = _Link()
|
|
||||||
self.__root = root = _proxy(self.__hardroot)
|
|
||||||
root.prev = root.next = root
|
|
||||||
self.__map = {}
|
|
||||||
self.__update(*args, **kwds)
|
|
||||||
|
|
||||||
def __setitem__(self, key, value,
|
|
||||||
dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link):
|
|
||||||
'od.__setitem__(i, y) <==> od[i]=y'
|
|
||||||
# Setting a new item creates a new link at the end of the linked list,
|
|
||||||
# and the inherited dictionary is updated with the new key/value pair.
|
|
||||||
if key not in self:
|
|
||||||
self.__map[key] = link = Link()
|
|
||||||
root = self.__root
|
|
||||||
last = root.prev
|
|
||||||
link.prev, link.next, link.key = last, root, key
|
|
||||||
last.next = link
|
|
||||||
root.prev = proxy(link)
|
|
||||||
dict_setitem(self, key, value)
|
|
||||||
|
|
||||||
def __delitem__(self, key, dict_delitem=dict.__delitem__):
|
|
||||||
'od.__delitem__(y) <==> del od[y]'
|
|
||||||
# Deleting an existing item uses self.__map to find the link which gets
|
|
||||||
# removed by updating the links in the predecessor and successor nodes.
|
|
||||||
dict_delitem(self, key)
|
|
||||||
link = self.__map.pop(key)
|
|
||||||
link_prev = link.prev
|
|
||||||
link_next = link.next
|
|
||||||
link_prev.next = link_next
|
|
||||||
link_next.prev = link_prev
|
|
||||||
|
|
||||||
def __iter__(self):
|
|
||||||
'od.__iter__() <==> iter(od)'
|
|
||||||
# Traverse the linked list in order.
|
|
||||||
root = self.__root
|
|
||||||
curr = root.next
|
|
||||||
while curr is not root:
|
|
||||||
yield curr.key
|
|
||||||
curr = curr.next
|
|
||||||
|
|
||||||
def __reversed__(self):
|
|
||||||
'od.__reversed__() <==> reversed(od)'
|
|
||||||
# Traverse the linked list in reverse order.
|
|
||||||
root = self.__root
|
|
||||||
curr = root.prev
|
|
||||||
while curr is not root:
|
|
||||||
yield curr.key
|
|
||||||
curr = curr.prev
|
|
||||||
|
|
||||||
def clear(self):
|
|
||||||
'od.clear() -> None. Remove all items from od.'
|
|
||||||
root = self.__root
|
|
||||||
root.prev = root.next = root
|
|
||||||
self.__map.clear()
|
|
||||||
dict.clear(self)
|
|
||||||
|
|
||||||
def popitem(self, last=True):
|
|
||||||
'''od.popitem() -> (k, v), return and remove a (key, value) pair.
|
|
||||||
Pairs are returned in LIFO order if last is true or FIFO order if false.
|
|
||||||
|
|
||||||
'''
|
|
||||||
if not self:
|
|
||||||
raise KeyError('dictionary is empty')
|
|
||||||
root = self.__root
|
|
||||||
if last:
|
|
||||||
link = root.prev
|
|
||||||
link_prev = link.prev
|
|
||||||
link_prev.next = root
|
|
||||||
root.prev = link_prev
|
|
||||||
else:
|
|
||||||
link = root.next
|
|
||||||
link_next = link.next
|
|
||||||
root.next = link_next
|
|
||||||
link_next.prev = root
|
|
||||||
key = link.key
|
|
||||||
del self.__map[key]
|
|
||||||
value = dict.pop(self, key)
|
|
||||||
return key, value
|
|
||||||
|
|
||||||
def move_to_end(self, key, last=True):
    '''Move an existing element to the end (or beginning if last==False).

    Raises KeyError if the element does not exist.
    When last=True, acts like a fast version of self[key]=self.pop(key).

    '''
    # Raises KeyError here when key is absent.
    link = self.__map[key]
    # Splice the link out of its current position by joining its
    # neighbors to each other.
    link_prev = link.prev
    link_next = link.next
    link_prev.next = link_next
    link_next.prev = link_prev
    root = self.__root
    if last:
        # Re-insert just before the sentinel (i.e. at the end).
        # NOTE: `last` is rebound here from the bool flag to the current
        # last node; the flag is not needed past this point.
        last = root.prev
        link.prev = last
        link.next = root
        last.next = root.prev = link
    else:
        # Re-insert just after the sentinel (i.e. at the beginning).
        first = root.next
        link.prev = root
        link.next = first
        root.next = first.prev = link
|
|
||||||
|
|
||||||
def __sizeof__(self):
    # Approximate memory footprint: sum the shallow sizes of the parts
    # this implementation allocates.  getsizeof is shallow, so this
    # intentionally does not count keys/values themselves.
    sizeof = _sys.getsizeof
    n = len(self) + 1                       # number of links including root
    size = sizeof(self.__dict__)            # instance dictionary
    size += sizeof(self.__map) * 2          # internal dict and inherited dict
    size += sizeof(self.__hardroot) * n     # link objects
    size += sizeof(self.__root) * n         # proxy objects
    return size
|
|
||||||
|
|
||||||
# Reuse the MutableMapping mixin implementations, which are written in
# terms of the mapping protocol (__setitem__/__getitem__/__iter__), so
# bulk operations respect this class's ordering behavior.
# __update is a name-mangled private alias — presumably so internal
# callers (e.g. __init__) are insulated from subclasses overriding
# update(); TODO confirm against the callers above this chunk.
update = __update = MutableMapping.update
keys = MutableMapping.keys
values = MutableMapping.values
items = MutableMapping.items
__ne__ = MutableMapping.__ne__

# Unique sentinel so pop() can distinguish "no default supplied" from
# an explicit default of None.
__marker = object()
|
|
||||||
|
|
||||||
def pop(self, key, default=__marker):
    '''od.pop(k[,d]) -> v, remove specified key and return the corresponding
    value. If key is not found, d is returned if given, otherwise KeyError
    is raised.

    '''
    # Guard clause: a missing key either raises or yields the caller's
    # default; only a present key falls through to the removal path.
    if key not in self:
        if default is self.__marker:
            raise KeyError(key)
        return default
    value = self[key]
    del self[key]
    return value
|
|
||||||
|
|
||||||
def setdefault(self, key, default=None):
    'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
    # Existing keys win: return the stored value untouched.  Otherwise
    # insert the default (which also records insertion order) and hand
    # the same object back to the caller.
    if key in self:
        return self[key]
    self[key] = default
    return default
|
|
||||||
|
|
||||||
@_recursive_repr()
def __repr__(self):
    'od.__repr__() <==> repr(od)'
    # _recursive_repr substitutes '...' if this object appears inside
    # its own items, preventing infinite recursion.
    cls_name = self.__class__.__name__
    if not self:
        return '%s()' % (cls_name,)
    return '%s(%r)' % (cls_name, list(self.items()))
|
|
||||||
|
|
||||||
def __reduce__(self):
    'Return state information for pickling'
    # The ordered pair list alone is enough to rebuild the mapping.
    pairs = [[key, self[key]] for key in self]
    # Copy the instance dict, then drop every attribute that a fresh
    # OrderedDict also carries (the internal linked-list bookkeeping),
    # leaving only user-added attributes as extra pickle state.
    state = vars(self).copy()
    for attr in vars(OrderedDict()):
        state.pop(attr, None)
    if state:
        return (self.__class__, (pairs,), state)
    return self.__class__, (pairs,)
|
|
||||||
|
|
||||||
def copy(self):
    'od.copy() -> a shallow copy of od'
    # Rebuild through the constructor so both the concrete subclass and
    # the key insertion order carry over to the copy.
    return self.__class__(self)
|
|
||||||
|
|
||||||
@classmethod
def fromkeys(cls, iterable, value=None):
    '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S.
    If not specified, the value defaults to None.

    '''
    # Insert through __setitem__ so each key is recorded in iteration
    # order; every key maps to the same `value` object.
    new_od = cls()
    for k in iterable:
        new_od[k] = value
    return new_od
|
|
||||||
|
|
||||||
def __eq__(self, other):
|
|
||||||
'''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
|
|
||||||
while comparison to a regular mapping is order-insensitive.
|
|
||||||
|
|
||||||
'''
|
|
||||||
if isinstance(other, OrderedDict):
|
|
||||||
return len(self)==len(other) and \
|
|
||||||
all(p==q for p, q in zip(self.items(), other.items()))
|
|
||||||
return dict.__eq__(self, other)
|
|
||||||
|
|
||||||
# update_wrapper() and wraps() are tools to help write
|
|
||||||
# wrapper functions that can handle naive introspection
|
|
||||||
|
|
||||||
# Attributes copied directly from the wrapped function, and attributes
# of the wrapper merged (dict.update) from the wrapped function.
WRAPPER_ASSIGNMENTS = ('__module__', '__name__', '__doc__')
WRAPPER_UPDATES = ('__dict__',)
def update_wrapper(wrapper,
                   wrapped,
                   assigned = WRAPPER_ASSIGNMENTS,
                   updated = WRAPPER_UPDATES):
    """Update a wrapper function to look like the wrapped function

    wrapper is the function to be updated
    wrapped is the original function
    assigned is a tuple naming the attributes assigned directly
    from the wrapped function to the wrapper function (defaults to
    functools.WRAPPER_ASSIGNMENTS)
    updated is a tuple naming the attributes of the wrapper that
    are updated with the corresponding attribute from the wrapped
    function (defaults to functools.WRAPPER_UPDATES)
    """
    for attr in assigned:
        try:
            value = getattr(wrapped, attr)
        except AttributeError:
            # The wrapped callable may lack e.g. __name__ (partials do);
            # simply skip attributes it does not have.
            pass
        else:
            setattr(wrapper, attr, value)
    for attr in updated:
        getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
    # Set __wrapped__ LAST: the __dict__ merge above may carry a stale
    # '__wrapped__' key from an earlier wrapping of `wrapped`, which
    # would otherwise clobber this assignment (CPython issue #17482,
    # fixed the same way in functools for Python 3.4).
    wrapper.__wrapped__ = wrapped
    # Return the wrapper so this can be used as a decorator via partial()
    return wrapper
|
|
||||||
|
|
||||||
def wraps(wrapped,
          assigned = WRAPPER_ASSIGNMENTS,
          updated = WRAPPER_UPDATES):
    """Decorator factory to apply update_wrapper() to a wrapper function

    Returns a decorator that invokes update_wrapper() with the decorated
    function as the wrapper argument and the arguments to wraps() as the
    remaining arguments. Default arguments are as for update_wrapper().
    This is a convenience function to simplify applying partial() to
    update_wrapper().
    """
    # partial() freezes everything except the `wrapper` positional slot,
    # so `@wraps(f)` applies update_wrapper(decorated, f, ...).
    return partial(update_wrapper,
                   wrapped=wrapped,
                   assigned=assigned,
                   updated=updated)
|
|
||||||
|
|
||||||
def total_ordering(cls):
|
|
||||||
"""Class decorator that fills in missing ordering methods"""
|
|
||||||
convert = {
|
|
||||||
'__lt__': [('__gt__', lambda self, other: not (self < other or self == other)),
|
|
||||||
('__le__', lambda self, other: self < other or self == other),
|
|
||||||
('__ge__', lambda self, other: not self < other)],
|
|
||||||
'__le__': [('__ge__', lambda self, other: not self <= other or self == other),
|
|
||||||
('__lt__', lambda self, other: self <= other and not self == other),
|
|
||||||
('__gt__', lambda self, other: not self <= other)],
|
|
||||||
'__gt__': [('__lt__', lambda self, other: not (self > other or self == other)),
|
|
||||||
('__ge__', lambda self, other: self > other or self == other),
|
|
||||||
('__le__', lambda self, other: not self > other)],
|
|
||||||
'__ge__': [('__le__', lambda self, other: (not self >= other) or self == other),
|
|
||||||
('__gt__', lambda self, other: self >= other and not self == other),
|
|
||||||
('__lt__', lambda self, other: not self >= other)]
|
|
||||||
}
|
|
||||||
roots = set(dir(cls)) & set(convert)
|
|
||||||
if not roots:
|
|
||||||
raise ValueError('must define at least one ordering operation: < > <= >=')
|
|
||||||
root = max(roots) # prefer __lt__ to __le__ to __gt__ to __ge__
|
|
||||||
for opname, opfunc in convert[root]:
|
|
||||||
if opname not in roots:
|
|
||||||
opfunc.__name__ = opname
|
|
||||||
opfunc.__doc__ = getattr(int, opname).__doc__
|
|
||||||
setattr(cls, opname, opfunc)
|
|
||||||
return cls
|
|
||||||
|
|
||||||
def cmp_to_key(mycmp):
    """Convert a cmp= function into a key= function"""
    class K(object):
        # Wraps one value; every rich comparison delegates to mycmp and
        # tests the sign of its result, so sorted(key=cmp_to_key(f))
        # orders exactly as the old cmp-style function dictated.
        __slots__ = ['obj']
        __hash__ = None  # ordering wrappers are not meant to be hashed
        def __init__(self, obj):
            self.obj = obj
        def __eq__(self, other):
            return mycmp(self.obj, other.obj) == 0
        def __ne__(self, other):
            return mycmp(self.obj, other.obj) != 0
        def __lt__(self, other):
            return mycmp(self.obj, other.obj) < 0
        def __le__(self, other):
            return mycmp(self.obj, other.obj) <= 0
        def __gt__(self, other):
            return mycmp(self.obj, other.obj) > 0
        def __ge__(self, other):
            return mycmp(self.obj, other.obj) >= 0
    return K
|
|
||||||
|
|
||||||
# Public statistics tuple returned by f.cache_info().
_CacheInfo = namedtuple("CacheInfo", "hits misses maxsize currsize")

def lru_cache(maxsize=100):
    """Least-recently-used cache decorator.

    If *maxsize* is set to None, the LRU features are disabled and the cache
    can grow without bound.

    Arguments to the cached function must be hashable.

    View the cache statistics named tuple (hits, misses, maxsize, currsize) with
    f.cache_info(). Clear the cache and statistics with f.cache_clear().
    Access the underlying function with f.__wrapped__.

    See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used

    """
    # Users should only access the lru_cache through its public API:
    #       cache_info, cache_clear, and f.__wrapped__
    # The internals of the lru_cache are encapsulated for thread safety and
    # to allow the implementation to change (including a possible C version).

    # The builtins below are bound as keyword defaults so lookups inside
    # the hot wrapper are fast locals rather than globals.
    def decorating_function(user_function,
                tuple=tuple, sorted=sorted, len=len, KeyError=KeyError):

        # One-element lists act as mutable cells for the counters, since
        # this backport targets Python 2 where `nonlocal` is unavailable.
        hits, misses = [0], [0]
        kwd_mark = (object(),)          # separates positional and keyword args
        lock = Lock()                   # needed because OrderedDict isn't threadsafe

        if maxsize is None:
            cache = dict()              # simple cache without ordering or size limit

            @wraps(user_function)
            def wrapper(*args, **kwds):
                # Build a flat hashable key; sorting kwds makes the key
                # independent of keyword order.
                key = args
                if kwds:
                    key += kwd_mark + tuple(sorted(kwds.items()))
                try:
                    result = cache[key]
                    hits[0] += 1
                    return result
                except KeyError:
                    pass
                result = user_function(*args, **kwds)
                cache[key] = result
                misses[0] += 1
                return result
        else:
            cache = OrderedDict()       # ordered least recent to most recent
            cache_popitem = cache.popitem
            cache_renew = cache.move_to_end

            @wraps(user_function)
            def wrapper(*args, **kwds):
                key = args
                if kwds:
                    key += kwd_mark + tuple(sorted(kwds.items()))
                with lock:
                    try:
                        result = cache[key]
                        cache_renew(key)        # record recent use of this key
                        hits[0] += 1
                        return result
                    except KeyError:
                        pass
                # The user function runs OUTSIDE the lock; two concurrent
                # calls with the same key may both compute the value
                # (a benign race — the later store simply wins).
                result = user_function(*args, **kwds)
                with lock:
                    cache[key] = result         # record recent use of this key
                    misses[0] += 1
                    if len(cache) > maxsize:
                        # popitem(0) means popitem(last=False): evict from
                        # the front, i.e. the least recently used entry.
                        cache_popitem(0)        # purge least recently used cache entry
                return result

        def cache_info():
            """Report cache statistics"""
            with lock:
                return _CacheInfo(hits[0], misses[0], maxsize, len(cache))

        def cache_clear():
            """Clear the cache and cache statistics"""
            with lock:
                cache.clear()
                hits[0] = misses[0] = 0

        wrapper.cache_info = cache_info
        wrapper.cache_clear = cache_clear
        return wrapper

    return decorating_function
|
|
||||||
157
lib/spack/external/py2/functools32/reprlib32.py
vendored
157
lib/spack/external/py2/functools32/reprlib32.py
vendored
@@ -1,157 +0,0 @@
|
|||||||
"""Redo the builtin repr() (representation) but with limits on most sizes."""
|
|
||||||
|
|
||||||
__all__ = ["Repr", "repr", "recursive_repr"]
|
|
||||||
|
|
||||||
import __builtin__ as builtins
|
|
||||||
from itertools import islice
|
|
||||||
try:
|
|
||||||
from thread import get_ident
|
|
||||||
except ImportError:
|
|
||||||
from _dummy_thread32 import get_ident
|
|
||||||
|
|
||||||
def recursive_repr(fillvalue='...'):
    'Decorator to make a repr function return fillvalue for a recursive call'

    def decorating_function(user_function):
        # (object id, thread id) pairs currently being formatted; seeing
        # a pair again means we re-entered while formatting that object.
        active = set()

        def wrapper(self):
            marker = id(self), get_ident()
            if marker in active:
                return fillvalue
            active.add(marker)
            try:
                return user_function(self)
            finally:
                active.discard(marker)

        # Can't use functools.wraps() here because of bootstrap issues
        wrapper.__module__ = getattr(user_function, '__module__')
        wrapper.__doc__ = getattr(user_function, '__doc__')
        wrapper.__name__ = getattr(user_function, '__name__')
        wrapper.__annotations__ = getattr(user_function, '__annotations__', {})
        return wrapper

    return decorating_function
|
|
||||||
|
|
||||||
class Repr:
    # Size-limited replacement for the builtin repr(): repr1() dispatches
    # by type name to a repr_<typename> method, and each method truncates
    # its output according to the corresponding max* attribute below.

    def __init__(self):
        self.maxlevel = 6       # nesting depth before collapsing to '...'
        self.maxtuple = 6       # max elements shown for tuples
        self.maxlist = 6        # ... for lists
        self.maxarray = 5       # ... for array.array
        self.maxdict = 4        # max key/value pairs shown for dicts
        self.maxset = 6         # max elements shown for sets
        self.maxfrozenset = 6   # ... for frozensets
        self.maxdeque = 6       # ... for deques
        self.maxstring = 30     # max characters of a string repr
        self.maxlong = 40       # max digits of an integer repr
        self.maxother = 30      # max characters for any other object

    def repr(self, x):
        # Entry point: format x with the full depth budget.
        return self.repr1(x, self.maxlevel)

    def repr1(self, x, level):
        # Dispatch on the type name; spaces (py2 names such as
        # 'instance method') become underscores so they can form a valid
        # repr_<typename> attribute.  Unknown types fall back to
        # repr_instance.
        typename = type(x).__name__
        if ' ' in typename:
            parts = typename.split()
            typename = '_'.join(parts)
        if hasattr(self, 'repr_' + typename):
            return getattr(self, 'repr_' + typename)(x, level)
        else:
            return self.repr_instance(x, level)

    def _repr_iterable(self, x, level, left, right, maxiter, trail=''):
        # Shared helper for all sequence-like types: show at most
        # `maxiter` elements between `left` and `right` delimiters,
        # appending '...' when elements were dropped.
        n = len(x)
        # Out of depth budget: collapse a non-empty container entirely.
        if level <= 0 and n:
            s = '...'
        else:
            newlevel = level - 1
            repr1 = self.repr1
            pieces = [repr1(elem, newlevel) for elem in islice(x, maxiter)]
            if n > maxiter: pieces.append('...')
            s = ', '.join(pieces)
        # `trail` supplies the trailing comma for 1-tuples: (1,)
        if n == 1 and trail: right = trail + right
        return '%s%s%s' % (left, s, right)

    def repr_tuple(self, x, level):
        return self._repr_iterable(x, level, '(', ')', self.maxtuple, ',')

    def repr_list(self, x, level):
        return self._repr_iterable(x, level, '[', ']', self.maxlist)

    def repr_array(self, x, level):
        header = "array('%s', [" % x.typecode
        return self._repr_iterable(x, level, header, '])', self.maxarray)

    def repr_set(self, x, level):
        # Sort when possible so output is deterministic across runs.
        x = _possibly_sorted(x)
        return self._repr_iterable(x, level, 'set([', '])', self.maxset)

    def repr_frozenset(self, x, level):
        x = _possibly_sorted(x)
        return self._repr_iterable(x, level, 'frozenset([', '])',
                                   self.maxfrozenset)

    def repr_deque(self, x, level):
        return self._repr_iterable(x, level, 'deque([', '])', self.maxdeque)

    def repr_dict(self, x, level):
        # Dicts need their own loop since keys and values are formatted
        # separately; at most maxdict pairs are shown.
        n = len(x)
        if n == 0: return '{}'
        if level <= 0: return '{...}'
        newlevel = level - 1
        repr1 = self.repr1
        pieces = []
        for key in islice(_possibly_sorted(x), self.maxdict):
            keyrepr = repr1(key, newlevel)
            valrepr = repr1(x[key], newlevel)
            pieces.append('%s: %s' % (keyrepr, valrepr))
        if n > self.maxdict: pieces.append('...')
        s = ', '.join(pieces)
        return '{%s}' % (s,)

    def repr_str(self, x, level):
        # When the repr exceeds maxstring, keep the head and tail of the
        # string around an embedded '...'.
        s = builtins.repr(x[:self.maxstring])
        if len(s) > self.maxstring:
            i = max(0, (self.maxstring-3)//2)
            j = max(0, self.maxstring-3-i)
            s = builtins.repr(x[:i] + x[len(x)-j:])
            s = s[:i] + '...' + s[len(s)-j:]
        return s

    def repr_int(self, x, level):
        # Same head-'...'-tail truncation, applied to the digit string.
        s = builtins.repr(x) # XXX Hope this isn't too slow...
        if len(s) > self.maxlong:
            i = max(0, (self.maxlong-3)//2)
            j = max(0, self.maxlong-3-i)
            s = s[:i] + '...' + s[len(s)-j:]
        return s

    def repr_instance(self, x, level):
        try:
            s = builtins.repr(x)
            # Bugs in x.__repr__() can cause arbitrary
            # exceptions -- then make up something
        except Exception:
            return '<%s instance at %x>' % (x.__class__.__name__, id(x))
        if len(s) > self.maxother:
            i = max(0, (self.maxother-3)//2)
            j = max(0, self.maxother-3-i)
            s = s[:i] + '...' + s[len(s)-j:]
        return s
|
|
||||||
|
|
||||||
|
|
||||||
def _possibly_sorted(x):
|
|
||||||
# Since not all sequences of items can be sorted and comparison
|
|
||||||
# functions may raise arbitrary exceptions, return an unsorted
|
|
||||||
# sequence in that case.
|
|
||||||
try:
|
|
||||||
return sorted(x)
|
|
||||||
except Exception:
|
|
||||||
return list(x)
|
|
||||||
|
|
||||||
# Module-level convenience instance with the default limits, and a
# drop-in, size-limited replacement for the builtin repr().
aRepr = Repr()
repr = aRepr.repr
|
|
||||||
103
lib/spack/external/py2/typing.py
vendored
103
lib/spack/external/py2/typing.py
vendored
@@ -1,103 +0,0 @@
|
|||||||
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
|
||||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
|
||||||
#
|
|
||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
|
||||||
"""
|
|
||||||
This is a fake set of symbols to allow spack to import typing in python
|
|
||||||
versions where we do not support type checking (<3)
|
|
||||||
"""
|
|
||||||
from collections import defaultdict
|
|
||||||
|
|
||||||
# (1) Unparameterized types.
# Names that are only ever referenced bare (never subscripted) can all
# alias the `object` type itself — imports succeed and annotations are
# inert.
Annotated = object
Any = object
AnyStr = object
ByteString = object
Counter = object
Final = object
Hashable = object
NoReturn = object
Sized = object
SupportsAbs = object
SupportsBytes = object
SupportsComplex = object
SupportsFloat = object
SupportsIndex = object
SupportsInt = object
SupportsRound = object

# (2) Parameterized types.
# defaultdict(lambda: object) makes subscripting work: an expression
# like Dict[str, int] is a missing-key lookup, so the factory returns
# the `object` placeholder instead of raising.
AbstractSet = defaultdict(lambda: object)
AsyncContextManager = defaultdict(lambda: object)
AsyncGenerator = defaultdict(lambda: object)
AsyncIterable = defaultdict(lambda: object)
AsyncIterator = defaultdict(lambda: object)
Awaitable = defaultdict(lambda: object)
Callable = defaultdict(lambda: object)
ChainMap = defaultdict(lambda: object)
ClassVar = defaultdict(lambda: object)
Collection = defaultdict(lambda: object)
Container = defaultdict(lambda: object)
ContextManager = defaultdict(lambda: object)
Coroutine = defaultdict(lambda: object)
DefaultDict = defaultdict(lambda: object)
Deque = defaultdict(lambda: object)
Dict = defaultdict(lambda: object)
ForwardRef = defaultdict(lambda: object)
FrozenSet = defaultdict(lambda: object)
Generator = defaultdict(lambda: object)
Generic = defaultdict(lambda: object)
ItemsView = defaultdict(lambda: object)
Iterable = defaultdict(lambda: object)
Iterator = defaultdict(lambda: object)
KeysView = defaultdict(lambda: object)
List = defaultdict(lambda: object)
# NOTE(review): Literal is rebound to a plain `object` in the
# typing_extensions section below, which overrides this parameterized
# version at import time — confirm that is intended.
Literal = defaultdict(lambda: object)
Mapping = defaultdict(lambda: object)
MappingView = defaultdict(lambda: object)
MutableMapping = defaultdict(lambda: object)
MutableSequence = defaultdict(lambda: object)
MutableSet = defaultdict(lambda: object)
NamedTuple = defaultdict(lambda: object)
Optional = defaultdict(lambda: object)
OrderedDict = defaultdict(lambda: object)
Reversible = defaultdict(lambda: object)
Sequence = defaultdict(lambda: object)
Set = defaultdict(lambda: object)
Tuple = defaultdict(lambda: object)
Type = defaultdict(lambda: object)
TypedDict = defaultdict(lambda: object)
Union = defaultdict(lambda: object)
ValuesView = defaultdict(lambda: object)

# (3) Type variable declarations.
# Accepts any TypeVar(...) call signature and discards it.
TypeVar = lambda *args, **kwargs: None

# (4) Functions.
# cast() must still return its value unchanged; the introspection
# helpers are never expected to be called at runtime here.
cast = lambda _type, x: x
get_args = None
get_origin = None
get_type_hints = None
no_type_check = None
no_type_check_decorator = None

## typing_extensions
# We get a ModuleNotFoundError when attempting to import anything from typing_extensions
# if we separate this into a separate typing_extensions.py file for some reason.

# (1) Unparameterized types.
IntVar = object
Literal = object
NewType = object
Text = object

# (2) Parameterized types.
Protocol = defaultdict(lambda: object)

# (3) Macro for avoiding evaluation except during type checking.
TYPE_CHECKING = False

# (4) Decorators.
# Each decorator must hand back the decorated object unchanged.
final = lambda x: x
overload = lambda x: x
runtime_checkable = lambda x: x
|
|
||||||
@@ -7,11 +7,10 @@
|
|||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
import errno
|
import errno
|
||||||
|
import io
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
from six import StringIO
|
|
||||||
|
|
||||||
|
|
||||||
class Command(object):
|
class Command(object):
|
||||||
"""Parsed representation of a command from argparse.
|
"""Parsed representation of a command from argparse.
|
||||||
@@ -181,7 +180,7 @@ def __init__(self, prog, out=None, aliases=False, rst_levels=_rst_levels):
|
|||||||
self.rst_levels = rst_levels
|
self.rst_levels = rst_levels
|
||||||
|
|
||||||
def format(self, cmd):
|
def format(self, cmd):
|
||||||
string = StringIO()
|
string = io.StringIO()
|
||||||
string.write(self.begin_command(cmd.prog))
|
string.write(self.begin_command(cmd.prog))
|
||||||
|
|
||||||
if cmd.description:
|
if cmd.description:
|
||||||
|
|||||||
@@ -1,39 +0,0 @@
|
|||||||
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
|
||||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
|
||||||
#
|
|
||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
|
||||||
# isort: off
|
|
||||||
|
|
||||||
import sys
|
|
||||||
|
|
||||||
if sys.version_info < (3,):
|
|
||||||
from itertools import ifilter as filter
|
|
||||||
from itertools import imap as map
|
|
||||||
from itertools import izip as zip
|
|
||||||
from itertools import izip_longest as zip_longest # novm
|
|
||||||
from urllib import urlencode as urlencode
|
|
||||||
from urllib import urlopen as urlopen
|
|
||||||
else:
|
|
||||||
filter = filter
|
|
||||||
map = map
|
|
||||||
zip = zip
|
|
||||||
from itertools import zip_longest as zip_longest # novm # noqa: F401
|
|
||||||
from urllib.parse import urlencode as urlencode # novm # noqa: F401
|
|
||||||
from urllib.request import urlopen as urlopen # novm # noqa: F401
|
|
||||||
|
|
||||||
if sys.version_info >= (3, 3):
|
|
||||||
from collections.abc import Hashable as Hashable # novm
|
|
||||||
from collections.abc import Iterable as Iterable # novm
|
|
||||||
from collections.abc import Mapping as Mapping # novm
|
|
||||||
from collections.abc import MutableMapping as MutableMapping # novm
|
|
||||||
from collections.abc import MutableSequence as MutableSequence # novm
|
|
||||||
from collections.abc import MutableSet as MutableSet # novm
|
|
||||||
from collections.abc import Sequence as Sequence # novm
|
|
||||||
else:
|
|
||||||
from collections import Hashable as Hashable # noqa: F401
|
|
||||||
from collections import Iterable as Iterable # noqa: F401
|
|
||||||
from collections import Mapping as Mapping # noqa: F401
|
|
||||||
from collections import MutableMapping as MutableMapping # noqa: F401
|
|
||||||
from collections import MutableSequence as MutableSequence # noqa: F401
|
|
||||||
from collections import MutableSet as MutableSet # noqa: F401
|
|
||||||
from collections import Sequence as Sequence # noqa: F401
|
|
||||||
@@ -3,6 +3,7 @@
|
|||||||
#
|
#
|
||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
import collections
|
import collections
|
||||||
|
import collections.abc
|
||||||
import errno
|
import errno
|
||||||
import glob
|
import glob
|
||||||
import hashlib
|
import hashlib
|
||||||
@@ -17,10 +18,7 @@
|
|||||||
from contextlib import contextmanager
|
from contextlib import contextmanager
|
||||||
from sys import platform as _platform
|
from sys import platform as _platform
|
||||||
|
|
||||||
import six
|
|
||||||
|
|
||||||
from llnl.util import tty
|
from llnl.util import tty
|
||||||
from llnl.util.compat import Sequence
|
|
||||||
from llnl.util.lang import dedupe, memoized
|
from llnl.util.lang import dedupe, memoized
|
||||||
from llnl.util.symlink import islink, symlink
|
from llnl.util.symlink import islink, symlink
|
||||||
|
|
||||||
@@ -290,9 +288,7 @@ def groupid_to_group(x):
|
|||||||
shutil.copy(filename, tmp_filename)
|
shutil.copy(filename, tmp_filename)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
extra_kwargs = {}
|
extra_kwargs = {"errors": "surrogateescape"}
|
||||||
if sys.version_info > (3, 0):
|
|
||||||
extra_kwargs = {"errors": "surrogateescape"}
|
|
||||||
|
|
||||||
# Open as a text file and filter until the end of the file is
|
# Open as a text file and filter until the end of the file is
|
||||||
# reached or we found a marker in the line if it was specified
|
# reached or we found a marker in the line if it was specified
|
||||||
@@ -522,7 +518,7 @@ def chgrp(path, group, follow_symlinks=True):
|
|||||||
if is_windows:
|
if is_windows:
|
||||||
raise OSError("Function 'chgrp' is not supported on Windows")
|
raise OSError("Function 'chgrp' is not supported on Windows")
|
||||||
|
|
||||||
if isinstance(group, six.string_types):
|
if isinstance(group, str):
|
||||||
gid = grp.getgrnam(group).gr_gid
|
gid = grp.getgrnam(group).gr_gid
|
||||||
else:
|
else:
|
||||||
gid = group
|
gid = group
|
||||||
@@ -1019,7 +1015,7 @@ def open_if_filename(str_or_file, mode="r"):
|
|||||||
|
|
||||||
If it's a file object, just yields the file object.
|
If it's a file object, just yields the file object.
|
||||||
"""
|
"""
|
||||||
if isinstance(str_or_file, six.string_types):
|
if isinstance(str_or_file, str):
|
||||||
with open(str_or_file, mode) as f:
|
with open(str_or_file, mode) as f:
|
||||||
yield f
|
yield f
|
||||||
else:
|
else:
|
||||||
@@ -1309,46 +1305,34 @@ def visit_directory_tree(root, visitor, rel_path="", depth=0):
|
|||||||
depth (str): current depth from the root
|
depth (str): current depth from the root
|
||||||
"""
|
"""
|
||||||
dir = os.path.join(root, rel_path)
|
dir = os.path.join(root, rel_path)
|
||||||
|
dir_entries = sorted(os.scandir(dir), key=lambda d: d.name)
|
||||||
if sys.version_info >= (3, 5, 0):
|
|
||||||
dir_entries = sorted(os.scandir(dir), key=lambda d: d.name) # novermin
|
|
||||||
else:
|
|
||||||
dir_entries = os.listdir(dir)
|
|
||||||
dir_entries.sort()
|
|
||||||
|
|
||||||
for f in dir_entries:
|
for f in dir_entries:
|
||||||
if sys.version_info >= (3, 5, 0):
|
rel_child = os.path.join(rel_path, f.name)
|
||||||
rel_child = os.path.join(rel_path, f.name)
|
islink = f.is_symlink()
|
||||||
islink = f.is_symlink()
|
# On Windows, symlinks to directories are distinct from
|
||||||
# On Windows, symlinks to directories are distinct from
|
# symlinks to files, and it is possible to create a
|
||||||
# symlinks to files, and it is possible to create a
|
# broken symlink to a directory (e.g. using os.symlink
|
||||||
# broken symlink to a directory (e.g. using os.symlink
|
# without `target_is_directory=True`), invoking `isdir`
|
||||||
# without `target_is_directory=True`), invoking `isdir`
|
# on a symlink on Windows that is broken in this manner
|
||||||
# on a symlink on Windows that is broken in this manner
|
# will result in an error. In this case we can work around
|
||||||
# will result in an error. In this case we can work around
|
# the issue by reading the target and resolving the
|
||||||
# the issue by reading the target and resolving the
|
# directory ourselves
|
||||||
# directory ourselves
|
try:
|
||||||
try:
|
isdir = f.is_dir()
|
||||||
isdir = f.is_dir()
|
except OSError as e:
|
||||||
except OSError as e:
|
if is_windows and hasattr(e, "winerror") and e.winerror == 5 and islink:
|
||||||
if is_windows and hasattr(e, "winerror") and e.winerror == 5 and islink:
|
# if path is a symlink, determine destination and
|
||||||
# if path is a symlink, determine destination and
|
# evaluate file vs directory
|
||||||
# evaluate file vs directory
|
link_target = resolve_link_target_relative_to_the_link(f)
|
||||||
link_target = resolve_link_target_relative_to_the_link(f)
|
# link_target might be relative but
|
||||||
# link_target might be relative but
|
# resolve_link_target_relative_to_the_link
|
||||||
# resolve_link_target_relative_to_the_link
|
# will ensure that if so, that it is relative
|
||||||
# will ensure that if so, that it is relative
|
# to the CWD and therefore
|
||||||
# to the CWD and therefore
|
# makes sense
|
||||||
# makes sense
|
isdir = os.path.isdir(link_target)
|
||||||
isdir = os.path.isdir(link_target)
|
else:
|
||||||
else:
|
raise e
|
||||||
raise e
|
|
||||||
|
|
||||||
else:
|
|
||||||
rel_child = os.path.join(rel_path, f)
|
|
||||||
lexists, islink, isdir = lexists_islink_isdir(os.path.join(dir, f))
|
|
||||||
if not lexists:
|
|
||||||
continue
|
|
||||||
|
|
||||||
if not isdir and not islink:
|
if not isdir and not islink:
|
||||||
# handle non-symlink files
|
# handle non-symlink files
|
||||||
@@ -1609,14 +1593,14 @@ def find(root, files, recursive=True):
|
|||||||
|
|
||||||
Parameters:
|
Parameters:
|
||||||
root (str): The root directory to start searching from
|
root (str): The root directory to start searching from
|
||||||
files (str or Sequence): Library name(s) to search for
|
files (str or collections.abc.Sequence): Library name(s) to search for
|
||||||
recursive (bool): if False search only root folder,
|
recursive (bool): if False search only root folder,
|
||||||
if True descends top-down from the root. Defaults to True.
|
if True descends top-down from the root. Defaults to True.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
list: The files that have been found
|
list: The files that have been found
|
||||||
"""
|
"""
|
||||||
if isinstance(files, six.string_types):
|
if isinstance(files, str):
|
||||||
files = [files]
|
files = [files]
|
||||||
|
|
||||||
if recursive:
|
if recursive:
|
||||||
@@ -1673,14 +1657,14 @@ def _find_non_recursive(root, search_files):
|
|||||||
# Utilities for libraries and headers
|
# Utilities for libraries and headers
|
||||||
|
|
||||||
|
|
||||||
class FileList(Sequence):
|
class FileList(collections.abc.Sequence):
|
||||||
"""Sequence of absolute paths to files.
|
"""Sequence of absolute paths to files.
|
||||||
|
|
||||||
Provides a few convenience methods to manipulate file paths.
|
Provides a few convenience methods to manipulate file paths.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, files):
|
def __init__(self, files):
|
||||||
if isinstance(files, six.string_types):
|
if isinstance(files, str):
|
||||||
files = [files]
|
files = [files]
|
||||||
|
|
||||||
self.files = list(dedupe(files))
|
self.files = list(dedupe(files))
|
||||||
@@ -1776,7 +1760,7 @@ def directories(self):
|
|||||||
def directories(self, value):
|
def directories(self, value):
|
||||||
value = value or []
|
value = value or []
|
||||||
# Accept a single directory as input
|
# Accept a single directory as input
|
||||||
if isinstance(value, six.string_types):
|
if isinstance(value, str):
|
||||||
value = [value]
|
value = [value]
|
||||||
|
|
||||||
self._directories = [path_to_os_path(os.path.normpath(x))[0] for x in value]
|
self._directories = [path_to_os_path(os.path.normpath(x))[0] for x in value]
|
||||||
@@ -1912,9 +1896,9 @@ def find_headers(headers, root, recursive=False):
|
|||||||
Returns:
|
Returns:
|
||||||
HeaderList: The headers that have been found
|
HeaderList: The headers that have been found
|
||||||
"""
|
"""
|
||||||
if isinstance(headers, six.string_types):
|
if isinstance(headers, str):
|
||||||
headers = [headers]
|
headers = [headers]
|
||||||
elif not isinstance(headers, Sequence):
|
elif not isinstance(headers, collections.abc.Sequence):
|
||||||
message = "{0} expects a string or sequence of strings as the "
|
message = "{0} expects a string or sequence of strings as the "
|
||||||
message += "first argument [got {1} instead]"
|
message += "first argument [got {1} instead]"
|
||||||
message = message.format(find_headers.__name__, type(headers))
|
message = message.format(find_headers.__name__, type(headers))
|
||||||
@@ -2078,9 +2062,9 @@ def find_system_libraries(libraries, shared=True):
|
|||||||
Returns:
|
Returns:
|
||||||
LibraryList: The libraries that have been found
|
LibraryList: The libraries that have been found
|
||||||
"""
|
"""
|
||||||
if isinstance(libraries, six.string_types):
|
if isinstance(libraries, str):
|
||||||
libraries = [libraries]
|
libraries = [libraries]
|
||||||
elif not isinstance(libraries, Sequence):
|
elif not isinstance(libraries, collections.abc.Sequence):
|
||||||
message = "{0} expects a string or sequence of strings as the "
|
message = "{0} expects a string or sequence of strings as the "
|
||||||
message += "first argument [got {1} instead]"
|
message += "first argument [got {1} instead]"
|
||||||
message = message.format(find_system_libraries.__name__, type(libraries))
|
message = message.format(find_system_libraries.__name__, type(libraries))
|
||||||
@@ -2135,9 +2119,9 @@ def find_libraries(libraries, root, shared=True, recursive=False, runtime=True):
|
|||||||
Returns:
|
Returns:
|
||||||
LibraryList: The libraries that have been found
|
LibraryList: The libraries that have been found
|
||||||
"""
|
"""
|
||||||
if isinstance(libraries, six.string_types):
|
if isinstance(libraries, str):
|
||||||
libraries = [libraries]
|
libraries = [libraries]
|
||||||
elif not isinstance(libraries, Sequence):
|
elif not isinstance(libraries, collections.abc.Sequence):
|
||||||
message = "{0} expects a string or sequence of strings as the "
|
message = "{0} expects a string or sequence of strings as the "
|
||||||
message += "first argument [got {1} instead]"
|
message += "first argument [got {1} instead]"
|
||||||
message = message.format(find_libraries.__name__, type(libraries))
|
message = message.format(find_libraries.__name__, type(libraries))
|
||||||
|
|||||||
@@ -5,9 +5,11 @@
|
|||||||
|
|
||||||
from __future__ import division
|
from __future__ import division
|
||||||
|
|
||||||
|
import collections.abc
|
||||||
import contextlib
|
import contextlib
|
||||||
import functools
|
import functools
|
||||||
import inspect
|
import inspect
|
||||||
|
import itertools
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
@@ -15,11 +17,6 @@
|
|||||||
from datetime import datetime, timedelta
|
from datetime import datetime, timedelta
|
||||||
from typing import Any, Callable, Iterable, List, Tuple
|
from typing import Any, Callable, Iterable, List, Tuple
|
||||||
|
|
||||||
import six
|
|
||||||
from six import string_types
|
|
||||||
|
|
||||||
from llnl.util.compat import MutableMapping, MutableSequence, zip_longest
|
|
||||||
|
|
||||||
# Ignore emacs backups when listing modules
|
# Ignore emacs backups when listing modules
|
||||||
ignore_modules = [r"^\.#", "~$"]
|
ignore_modules = [r"^\.#", "~$"]
|
||||||
|
|
||||||
@@ -200,14 +197,9 @@ def _memoized_function(*args, **kwargs):
|
|||||||
return ret
|
return ret
|
||||||
except TypeError as e:
|
except TypeError as e:
|
||||||
# TypeError is raised when indexing into a dict if the key is unhashable.
|
# TypeError is raised when indexing into a dict if the key is unhashable.
|
||||||
raise six.raise_from(
|
raise UnhashableArguments(
|
||||||
UnhashableArguments(
|
"args + kwargs '{}' was not hashable for function '{}'".format(key, func.__name__),
|
||||||
"args + kwargs '{}' was not hashable for function '{}'".format(
|
) from e
|
||||||
key, func.__name__
|
|
||||||
),
|
|
||||||
),
|
|
||||||
e,
|
|
||||||
)
|
|
||||||
|
|
||||||
return _memoized_function
|
return _memoized_function
|
||||||
|
|
||||||
@@ -312,7 +304,7 @@ def lazy_eq(lseq, rseq):
|
|||||||
# zip_longest is implemented in native code, so use it for speed.
|
# zip_longest is implemented in native code, so use it for speed.
|
||||||
# use zip_longest instead of zip because it allows us to tell
|
# use zip_longest instead of zip because it allows us to tell
|
||||||
# which iterator was longer.
|
# which iterator was longer.
|
||||||
for left, right in zip_longest(liter, riter, fillvalue=done):
|
for left, right in itertools.zip_longest(liter, riter, fillvalue=done):
|
||||||
if (left is done) or (right is done):
|
if (left is done) or (right is done):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
@@ -332,7 +324,7 @@ def lazy_lt(lseq, rseq):
|
|||||||
liter = lseq()
|
liter = lseq()
|
||||||
riter = rseq()
|
riter = rseq()
|
||||||
|
|
||||||
for left, right in zip_longest(liter, riter, fillvalue=done):
|
for left, right in itertools.zip_longest(liter, riter, fillvalue=done):
|
||||||
if (left is done) or (right is done):
|
if (left is done) or (right is done):
|
||||||
return left is done # left was shorter than right
|
return left is done # left was shorter than right
|
||||||
|
|
||||||
@@ -482,7 +474,7 @@ def add_func_to_class(name, func):
|
|||||||
|
|
||||||
|
|
||||||
@lazy_lexicographic_ordering
|
@lazy_lexicographic_ordering
|
||||||
class HashableMap(MutableMapping):
|
class HashableMap(collections.abc.MutableMapping):
|
||||||
"""This is a hashable, comparable dictionary. Hash is performed on
|
"""This is a hashable, comparable dictionary. Hash is performed on
|
||||||
a tuple of the values in the dictionary."""
|
a tuple of the values in the dictionary."""
|
||||||
|
|
||||||
@@ -574,7 +566,7 @@ def match_predicate(*args):
|
|||||||
|
|
||||||
def match(string):
|
def match(string):
|
||||||
for arg in args:
|
for arg in args:
|
||||||
if isinstance(arg, string_types):
|
if isinstance(arg, str):
|
||||||
if re.search(arg, string):
|
if re.search(arg, string):
|
||||||
return True
|
return True
|
||||||
elif isinstance(arg, list) or isinstance(arg, tuple):
|
elif isinstance(arg, list) or isinstance(arg, tuple):
|
||||||
@@ -887,32 +879,28 @@ def load_module_from_file(module_name, module_path):
|
|||||||
ImportError: when the module can't be loaded
|
ImportError: when the module can't be loaded
|
||||||
FileNotFoundError: when module_path doesn't exist
|
FileNotFoundError: when module_path doesn't exist
|
||||||
"""
|
"""
|
||||||
|
import importlib.util
|
||||||
|
|
||||||
if module_name in sys.modules:
|
if module_name in sys.modules:
|
||||||
return sys.modules[module_name]
|
return sys.modules[module_name]
|
||||||
|
|
||||||
# This recipe is adapted from https://stackoverflow.com/a/67692/771663
|
# This recipe is adapted from https://stackoverflow.com/a/67692/771663
|
||||||
if sys.version_info[0] == 3 and sys.version_info[1] >= 5:
|
|
||||||
import importlib.util
|
|
||||||
|
|
||||||
spec = importlib.util.spec_from_file_location(module_name, module_path) # novm
|
spec = importlib.util.spec_from_file_location(module_name, module_path) # novm
|
||||||
module = importlib.util.module_from_spec(spec) # novm
|
module = importlib.util.module_from_spec(spec) # novm
|
||||||
# The module object needs to exist in sys.modules before the
|
# The module object needs to exist in sys.modules before the
|
||||||
# loader executes the module code.
|
# loader executes the module code.
|
||||||
#
|
#
|
||||||
# See https://docs.python.org/3/reference/import.html#loading
|
# See https://docs.python.org/3/reference/import.html#loading
|
||||||
sys.modules[spec.name] = module
|
sys.modules[spec.name] = module
|
||||||
|
try:
|
||||||
|
spec.loader.exec_module(module)
|
||||||
|
except BaseException:
|
||||||
try:
|
try:
|
||||||
spec.loader.exec_module(module)
|
del sys.modules[spec.name]
|
||||||
except BaseException:
|
except KeyError:
|
||||||
try:
|
pass
|
||||||
del sys.modules[spec.name]
|
raise
|
||||||
except KeyError:
|
|
||||||
pass
|
|
||||||
raise
|
|
||||||
elif sys.version_info[0] == 2:
|
|
||||||
import imp
|
|
||||||
|
|
||||||
module = imp.load_source(module_name, module_path)
|
|
||||||
return module
|
return module
|
||||||
|
|
||||||
|
|
||||||
@@ -1030,7 +1018,7 @@ def ensure_last(lst, *elements):
|
|||||||
lst.append(lst.pop(lst.index(elt)))
|
lst.append(lst.pop(lst.index(elt)))
|
||||||
|
|
||||||
|
|
||||||
class TypedMutableSequence(MutableSequence):
|
class TypedMutableSequence(collections.abc.MutableSequence):
|
||||||
"""Base class that behaves like a list, just with a different type.
|
"""Base class that behaves like a list, just with a different type.
|
||||||
|
|
||||||
Client code can inherit from this base class:
|
Client code can inherit from this base class:
|
||||||
|
|||||||
@@ -9,7 +9,6 @@
|
|||||||
import sys
|
import sys
|
||||||
import time
|
import time
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from typing import Dict, Tuple # novm
|
|
||||||
|
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
from llnl.util.lang import pretty_seconds
|
from llnl.util.lang import pretty_seconds
|
||||||
@@ -81,7 +80,7 @@ class OpenFileTracker(object):
|
|||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
"""Create a new ``OpenFileTracker``."""
|
"""Create a new ``OpenFileTracker``."""
|
||||||
self._descriptors = {} # type: Dict[Tuple[int, int], OpenFile]
|
self._descriptors = {}
|
||||||
|
|
||||||
def get_fh(self, path):
|
def get_fh(self, path):
|
||||||
"""Get a filehandle for a lockfile.
|
"""Get a filehandle for a lockfile.
|
||||||
@@ -103,7 +102,7 @@ def get_fh(self, path):
|
|||||||
try:
|
try:
|
||||||
# see whether we've seen this inode/pid before
|
# see whether we've seen this inode/pid before
|
||||||
stat = os.stat(path)
|
stat = os.stat(path)
|
||||||
key = (stat.st_ino, pid)
|
key = (stat.st_dev, stat.st_ino, pid)
|
||||||
open_file = self._descriptors.get(key)
|
open_file = self._descriptors.get(key)
|
||||||
|
|
||||||
except OSError as e:
|
except OSError as e:
|
||||||
@@ -129,32 +128,32 @@ def get_fh(self, path):
|
|||||||
|
|
||||||
# if we just created the file, we'll need to get its inode here
|
# if we just created the file, we'll need to get its inode here
|
||||||
if not stat:
|
if not stat:
|
||||||
inode = os.fstat(fd).st_ino
|
stat = os.fstat(fd)
|
||||||
key = (inode, pid)
|
key = (stat.st_dev, stat.st_ino, pid)
|
||||||
|
|
||||||
self._descriptors[key] = open_file
|
self._descriptors[key] = open_file
|
||||||
|
|
||||||
open_file.refs += 1
|
open_file.refs += 1
|
||||||
return open_file.fh
|
return open_file.fh
|
||||||
|
|
||||||
def release_fh(self, path):
|
def release_by_stat(self, stat):
|
||||||
"""Release a filehandle, only closing it if there are no more references."""
|
key = (stat.st_dev, stat.st_ino, os.getpid())
|
||||||
try:
|
|
||||||
inode = os.stat(path).st_ino
|
|
||||||
except OSError as e:
|
|
||||||
if e.errno != errno.ENOENT: # only handle file not found
|
|
||||||
raise
|
|
||||||
inode = None # this will not be in self._descriptors
|
|
||||||
|
|
||||||
key = (inode, os.getpid())
|
|
||||||
open_file = self._descriptors.get(key)
|
open_file = self._descriptors.get(key)
|
||||||
assert open_file, "Attempted to close non-existing lock path: %s" % path
|
assert open_file, "Attempted to close non-existing inode: %s" % stat.st_inode
|
||||||
|
|
||||||
open_file.refs -= 1
|
open_file.refs -= 1
|
||||||
if not open_file.refs:
|
if not open_file.refs:
|
||||||
del self._descriptors[key]
|
del self._descriptors[key]
|
||||||
open_file.fh.close()
|
open_file.fh.close()
|
||||||
|
|
||||||
|
def release_by_fh(self, fh):
|
||||||
|
self.release_by_stat(os.fstat(fh.fileno()))
|
||||||
|
|
||||||
|
def purge(self):
|
||||||
|
for key in list(self._descriptors.keys()):
|
||||||
|
self._descriptors[key].fh.close()
|
||||||
|
del self._descriptors[key]
|
||||||
|
|
||||||
|
|
||||||
#: Open file descriptors for locks in this process. Used to prevent one process
|
#: Open file descriptors for locks in this process. Used to prevent one process
|
||||||
#: from opening the sam file many times for different byte range locks
|
#: from opening the sam file many times for different byte range locks
|
||||||
@@ -432,8 +431,7 @@ def _unlock(self):
|
|||||||
|
|
||||||
"""
|
"""
|
||||||
fcntl.lockf(self._file, fcntl.LOCK_UN, self._length, self._start, os.SEEK_SET)
|
fcntl.lockf(self._file, fcntl.LOCK_UN, self._length, self._start, os.SEEK_SET)
|
||||||
|
file_tracker.release_by_fh(self._file)
|
||||||
file_tracker.release_fh(self.path)
|
|
||||||
self._file = None
|
self._file = None
|
||||||
self._reads = 0
|
self._reads = 0
|
||||||
self._writes = 0
|
self._writes = 0
|
||||||
|
|||||||
@@ -23,8 +23,11 @@ def symlink(real_path, link_path):
|
|||||||
|
|
||||||
On Windows, use junctions if os.symlink fails.
|
On Windows, use junctions if os.symlink fails.
|
||||||
"""
|
"""
|
||||||
if not is_windows or _win32_can_symlink():
|
if not is_windows:
|
||||||
os.symlink(real_path, link_path)
|
os.symlink(real_path, link_path)
|
||||||
|
elif _win32_can_symlink():
|
||||||
|
# Windows requires target_is_directory=True when the target is a dir.
|
||||||
|
os.symlink(real_path, link_path, target_is_directory=os.path.isdir(real_path))
|
||||||
else:
|
else:
|
||||||
try:
|
try:
|
||||||
# Try to use junctions
|
# Try to use junctions
|
||||||
|
|||||||
@@ -6,6 +6,7 @@
|
|||||||
from __future__ import unicode_literals
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
import contextlib
|
import contextlib
|
||||||
|
import io
|
||||||
import os
|
import os
|
||||||
import struct
|
import struct
|
||||||
import sys
|
import sys
|
||||||
@@ -14,10 +15,6 @@
|
|||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from sys import platform as _platform
|
from sys import platform as _platform
|
||||||
|
|
||||||
import six
|
|
||||||
from six import StringIO
|
|
||||||
from six.moves import input
|
|
||||||
|
|
||||||
if _platform != "win32":
|
if _platform != "win32":
|
||||||
import fcntl
|
import fcntl
|
||||||
import termios
|
import termios
|
||||||
@@ -183,7 +180,7 @@ def msg(message, *args, **kwargs):
|
|||||||
else:
|
else:
|
||||||
cwrite("@*b{%s==>} %s%s" % (st_text, get_timestamp(), cescape(_output_filter(message))))
|
cwrite("@*b{%s==>} %s%s" % (st_text, get_timestamp(), cescape(_output_filter(message))))
|
||||||
for arg in args:
|
for arg in args:
|
||||||
print(indent + _output_filter(six.text_type(arg)))
|
print(indent + _output_filter(str(arg)))
|
||||||
|
|
||||||
|
|
||||||
def info(message, *args, **kwargs):
|
def info(message, *args, **kwargs):
|
||||||
@@ -201,13 +198,13 @@ def info(message, *args, **kwargs):
|
|||||||
st_text = process_stacktrace(st_countback)
|
st_text = process_stacktrace(st_countback)
|
||||||
cprint(
|
cprint(
|
||||||
"@%s{%s==>} %s%s"
|
"@%s{%s==>} %s%s"
|
||||||
% (format, st_text, get_timestamp(), cescape(_output_filter(six.text_type(message)))),
|
% (format, st_text, get_timestamp(), cescape(_output_filter(str(message)))),
|
||||||
stream=stream,
|
stream=stream,
|
||||||
)
|
)
|
||||||
for arg in args:
|
for arg in args:
|
||||||
if wrap:
|
if wrap:
|
||||||
lines = textwrap.wrap(
|
lines = textwrap.wrap(
|
||||||
_output_filter(six.text_type(arg)),
|
_output_filter(str(arg)),
|
||||||
initial_indent=indent,
|
initial_indent=indent,
|
||||||
subsequent_indent=indent,
|
subsequent_indent=indent,
|
||||||
break_long_words=break_long_words,
|
break_long_words=break_long_words,
|
||||||
@@ -215,7 +212,7 @@ def info(message, *args, **kwargs):
|
|||||||
for line in lines:
|
for line in lines:
|
||||||
stream.write(line + "\n")
|
stream.write(line + "\n")
|
||||||
else:
|
else:
|
||||||
stream.write(indent + _output_filter(six.text_type(arg)) + "\n")
|
stream.write(indent + _output_filter(str(arg)) + "\n")
|
||||||
|
|
||||||
|
|
||||||
def verbose(message, *args, **kwargs):
|
def verbose(message, *args, **kwargs):
|
||||||
@@ -238,7 +235,7 @@ def error(message, *args, **kwargs):
|
|||||||
|
|
||||||
kwargs.setdefault("format", "*r")
|
kwargs.setdefault("format", "*r")
|
||||||
kwargs.setdefault("stream", sys.stderr)
|
kwargs.setdefault("stream", sys.stderr)
|
||||||
info("Error: " + six.text_type(message), *args, **kwargs)
|
info("Error: " + str(message), *args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
def warn(message, *args, **kwargs):
|
def warn(message, *args, **kwargs):
|
||||||
@@ -247,7 +244,7 @@ def warn(message, *args, **kwargs):
|
|||||||
|
|
||||||
kwargs.setdefault("format", "*Y")
|
kwargs.setdefault("format", "*Y")
|
||||||
kwargs.setdefault("stream", sys.stderr)
|
kwargs.setdefault("stream", sys.stderr)
|
||||||
info("Warning: " + six.text_type(message), *args, **kwargs)
|
info("Warning: " + str(message), *args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
def die(message, *args, **kwargs):
|
def die(message, *args, **kwargs):
|
||||||
@@ -271,7 +268,7 @@ def get_number(prompt, **kwargs):
|
|||||||
while number is None:
|
while number is None:
|
||||||
msg(prompt, newline=False)
|
msg(prompt, newline=False)
|
||||||
ans = input()
|
ans = input()
|
||||||
if ans == six.text_type(abort):
|
if ans == str(abort):
|
||||||
return None
|
return None
|
||||||
|
|
||||||
if ans:
|
if ans:
|
||||||
@@ -336,11 +333,11 @@ def hline(label=None, **kwargs):
|
|||||||
cols -= 2
|
cols -= 2
|
||||||
cols = min(max_width, cols)
|
cols = min(max_width, cols)
|
||||||
|
|
||||||
label = six.text_type(label)
|
label = str(label)
|
||||||
prefix = char * 2 + " "
|
prefix = char * 2 + " "
|
||||||
suffix = " " + (cols - len(prefix) - clen(label)) * char
|
suffix = " " + (cols - len(prefix) - clen(label)) * char
|
||||||
|
|
||||||
out = StringIO()
|
out = io.StringIO()
|
||||||
out.write(prefix)
|
out.write(prefix)
|
||||||
out.write(label)
|
out.write(label)
|
||||||
out.write(suffix)
|
out.write(suffix)
|
||||||
@@ -372,10 +369,5 @@ def ioctl_gwinsz(fd):
|
|||||||
|
|
||||||
return int(rc[0]), int(rc[1])
|
return int(rc[0]), int(rc[1])
|
||||||
else:
|
else:
|
||||||
if sys.version_info[0] < 3:
|
|
||||||
raise RuntimeError(
|
|
||||||
"Terminal size not obtainable on Windows with a\
|
|
||||||
Python version older than 3"
|
|
||||||
)
|
|
||||||
rc = (os.environ.get("LINES", 25), os.environ.get("COLUMNS", 80))
|
rc = (os.environ.get("LINES", 25), os.environ.get("COLUMNS", 80))
|
||||||
return int(rc[0]), int(rc[1])
|
return int(rc[0]), int(rc[1])
|
||||||
|
|||||||
@@ -8,11 +8,10 @@
|
|||||||
"""
|
"""
|
||||||
from __future__ import division, unicode_literals
|
from __future__ import division, unicode_literals
|
||||||
|
|
||||||
|
import io
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
from six import StringIO, text_type
|
|
||||||
|
|
||||||
from llnl.util.tty import terminal_size
|
from llnl.util.tty import terminal_size
|
||||||
from llnl.util.tty.color import cextra, clen
|
from llnl.util.tty.color import cextra, clen
|
||||||
|
|
||||||
@@ -134,7 +133,7 @@ def colify(elts, **options):
|
|||||||
)
|
)
|
||||||
|
|
||||||
# elts needs to be an array of strings so we can count the elements
|
# elts needs to be an array of strings so we can count the elements
|
||||||
elts = [text_type(elt) for elt in elts]
|
elts = [str(elt) for elt in elts]
|
||||||
if not elts:
|
if not elts:
|
||||||
return (0, ())
|
return (0, ())
|
||||||
|
|
||||||
@@ -232,7 +231,7 @@ def transpose():
|
|||||||
def colified(elts, **options):
|
def colified(elts, **options):
|
||||||
"""Invokes the ``colify()`` function but returns the result as a string
|
"""Invokes the ``colify()`` function but returns the result as a string
|
||||||
instead of writing it to an output string."""
|
instead of writing it to an output string."""
|
||||||
sio = StringIO()
|
sio = io.StringIO()
|
||||||
options["output"] = sio
|
options["output"] = sio
|
||||||
colify(elts, **options)
|
colify(elts, **options)
|
||||||
return sio.getvalue()
|
return sio.getvalue()
|
||||||
|
|||||||
@@ -65,8 +65,6 @@
|
|||||||
import sys
|
import sys
|
||||||
from contextlib import contextmanager
|
from contextlib import contextmanager
|
||||||
|
|
||||||
import six
|
|
||||||
|
|
||||||
|
|
||||||
class ColorParseError(Exception):
|
class ColorParseError(Exception):
|
||||||
"""Raised when a color format fails to parse."""
|
"""Raised when a color format fails to parse."""
|
||||||
@@ -259,7 +257,7 @@ def cescape(string):
|
|||||||
Returns:
|
Returns:
|
||||||
(str): the string with color codes escaped
|
(str): the string with color codes escaped
|
||||||
"""
|
"""
|
||||||
string = six.text_type(string)
|
string = str(string)
|
||||||
string = string.replace("@", "@@")
|
string = string.replace("@", "@@")
|
||||||
string = string.replace("}", "}}")
|
string = string.replace("}", "}}")
|
||||||
return string
|
return string
|
||||||
|
|||||||
@@ -24,8 +24,6 @@
|
|||||||
from types import ModuleType # novm
|
from types import ModuleType # novm
|
||||||
from typing import Optional # novm
|
from typing import Optional # novm
|
||||||
|
|
||||||
from six import StringIO, string_types
|
|
||||||
|
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
|
|
||||||
termios = None # type: Optional[ModuleType]
|
termios = None # type: Optional[ModuleType]
|
||||||
@@ -241,8 +239,7 @@ def __exit__(self, exc_type, exception, traceback):
|
|||||||
"""If termios was available, restore old settings."""
|
"""If termios was available, restore old settings."""
|
||||||
if self.old_cfg:
|
if self.old_cfg:
|
||||||
self._restore_default_terminal_settings()
|
self._restore_default_terminal_settings()
|
||||||
if sys.version_info >= (3,):
|
atexit.unregister(self._restore_default_terminal_settings)
|
||||||
atexit.unregister(self._restore_default_terminal_settings)
|
|
||||||
|
|
||||||
# restore SIGSTP and SIGCONT handlers
|
# restore SIGSTP and SIGCONT handlers
|
||||||
if self.old_handlers:
|
if self.old_handlers:
|
||||||
@@ -309,7 +306,7 @@ def __init__(self, file_like):
|
|||||||
|
|
||||||
self.file_like = file_like
|
self.file_like = file_like
|
||||||
|
|
||||||
if isinstance(file_like, string_types):
|
if isinstance(file_like, str):
|
||||||
self.open = True
|
self.open = True
|
||||||
elif _file_descriptors_work(file_like):
|
elif _file_descriptors_work(file_like):
|
||||||
self.open = False
|
self.open = False
|
||||||
@@ -323,12 +320,9 @@ def __init__(self, file_like):
|
|||||||
def unwrap(self):
|
def unwrap(self):
|
||||||
if self.open:
|
if self.open:
|
||||||
if self.file_like:
|
if self.file_like:
|
||||||
if sys.version_info < (3,):
|
self.file = open(self.file_like, "w", encoding="utf-8")
|
||||||
self.file = open(self.file_like, "w")
|
|
||||||
else:
|
|
||||||
self.file = open(self.file_like, "w", encoding="utf-8") # novm
|
|
||||||
else:
|
else:
|
||||||
self.file = StringIO()
|
self.file = io.StringIO()
|
||||||
return self.file
|
return self.file
|
||||||
else:
|
else:
|
||||||
# We were handed an already-open file object. In this case we also
|
# We were handed an already-open file object. In this case we also
|
||||||
@@ -699,13 +693,10 @@ def __init__(self, sys_attr):
|
|||||||
self.sys_attr = sys_attr
|
self.sys_attr = sys_attr
|
||||||
self.saved_stream = None
|
self.saved_stream = None
|
||||||
if sys.platform.startswith("win32"):
|
if sys.platform.startswith("win32"):
|
||||||
if sys.version_info < (3, 5):
|
if hasattr(sys, "gettotalrefcount"): # debug build
|
||||||
libc = ctypes.CDLL(ctypes.util.find_library("c"))
|
libc = ctypes.CDLL("ucrtbased")
|
||||||
else:
|
else:
|
||||||
if hasattr(sys, "gettotalrefcount"): # debug build
|
libc = ctypes.CDLL("api-ms-win-crt-stdio-l1-1-0")
|
||||||
libc = ctypes.CDLL("ucrtbased")
|
|
||||||
else:
|
|
||||||
libc = ctypes.CDLL("api-ms-win-crt-stdio-l1-1-0")
|
|
||||||
|
|
||||||
kernel32 = ctypes.WinDLL("kernel32")
|
kernel32 = ctypes.WinDLL("kernel32")
|
||||||
|
|
||||||
@@ -794,7 +785,7 @@ def __enter__(self):
|
|||||||
raise RuntimeError("file argument must be set by __init__ ")
|
raise RuntimeError("file argument must be set by __init__ ")
|
||||||
|
|
||||||
# Open both write and reading on logfile
|
# Open both write and reading on logfile
|
||||||
if type(self.logfile) == StringIO:
|
if type(self.logfile) == io.StringIO:
|
||||||
self._ioflag = True
|
self._ioflag = True
|
||||||
# cannot have two streams on tempfile, so we must make our own
|
# cannot have two streams on tempfile, so we must make our own
|
||||||
sys.stdout = self.logfile
|
sys.stdout = self.logfile
|
||||||
@@ -927,13 +918,10 @@ def _writer_daemon(
|
|||||||
if sys.version_info < (3, 8) or sys.platform != "darwin":
|
if sys.version_info < (3, 8) or sys.platform != "darwin":
|
||||||
os.close(write_fd)
|
os.close(write_fd)
|
||||||
|
|
||||||
# Use line buffering (3rd param = 1) since Python 3 has a bug
|
# 1. Use line buffering (3rd param = 1) since Python 3 has a bug
|
||||||
# that prevents unbuffered text I/O.
|
# that prevents unbuffered text I/O.
|
||||||
if sys.version_info < (3,):
|
# 2. Python 3.x before 3.7 does not open with UTF-8 encoding by default
|
||||||
in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1)
|
in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1, encoding="utf-8")
|
||||||
else:
|
|
||||||
# Python 3.x before 3.7 does not open with UTF-8 encoding by default
|
|
||||||
in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1, encoding="utf-8")
|
|
||||||
|
|
||||||
if stdin_multiprocess_fd:
|
if stdin_multiprocess_fd:
|
||||||
stdin = os.fdopen(stdin_multiprocess_fd.fd)
|
stdin = os.fdopen(stdin_multiprocess_fd.fd)
|
||||||
@@ -1023,7 +1011,7 @@ def _writer_daemon(
|
|||||||
|
|
||||||
finally:
|
finally:
|
||||||
# send written data back to parent if we used a StringIO
|
# send written data back to parent if we used a StringIO
|
||||||
if isinstance(log_file, StringIO):
|
if isinstance(log_file, io.StringIO):
|
||||||
control_pipe.send(log_file.getvalue())
|
control_pipe.send(log_file.getvalue())
|
||||||
log_file_wrapper.close()
|
log_file_wrapper.close()
|
||||||
close_connection_and_file(read_multiprocess_fd, in_pipe)
|
close_connection_and_file(read_multiprocess_fd, in_pipe)
|
||||||
|
|||||||
@@ -4,7 +4,7 @@
|
|||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
|
||||||
#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
|
#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
|
||||||
__version__ = "0.19.0.dev0"
|
__version__ = "0.20.0.dev0"
|
||||||
spack_version = __version__
|
spack_version = __version__
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -37,15 +37,14 @@ def _search_duplicate_compilers(error_cls):
|
|||||||
"""
|
"""
|
||||||
import ast
|
import ast
|
||||||
import collections
|
import collections
|
||||||
|
import collections.abc
|
||||||
import inspect
|
import inspect
|
||||||
import itertools
|
import itertools
|
||||||
import pickle
|
import pickle
|
||||||
import re
|
import re
|
||||||
|
from urllib.request import urlopen
|
||||||
from six.moves.urllib.request import urlopen
|
|
||||||
|
|
||||||
import llnl.util.lang
|
import llnl.util.lang
|
||||||
from llnl.util.compat import Sequence
|
|
||||||
|
|
||||||
import spack.config
|
import spack.config
|
||||||
import spack.patch
|
import spack.patch
|
||||||
@@ -81,7 +80,7 @@ def __hash__(self):
|
|||||||
return hash(value)
|
return hash(value)
|
||||||
|
|
||||||
|
|
||||||
class AuditClass(Sequence):
|
class AuditClass(collections.abc.Sequence):
|
||||||
def __init__(self, group, tag, description, kwargs):
|
def __init__(self, group, tag, description, kwargs):
|
||||||
"""Return an object that acts as a decorator to register functions
|
"""Return an object that acts as a decorator to register functions
|
||||||
associated with a specific class of sanity checks.
|
associated with a specific class of sanity checks.
|
||||||
@@ -288,7 +287,7 @@ def _check_build_test_callbacks(pkgs, error_cls):
|
|||||||
errors = []
|
errors = []
|
||||||
for pkg_name in pkgs:
|
for pkg_name in pkgs:
|
||||||
pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
|
pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
|
||||||
test_callbacks = pkg_cls.build_time_test_callbacks
|
test_callbacks = getattr(pkg_cls, "build_time_test_callbacks", None)
|
||||||
|
|
||||||
if test_callbacks and "test" in test_callbacks:
|
if test_callbacks and "test" in test_callbacks:
|
||||||
msg = '{0} package contains "test" method in ' "build_time_test_callbacks"
|
msg = '{0} package contains "test" method in ' "build_time_test_callbacks"
|
||||||
|
|||||||
@@ -17,9 +17,9 @@
|
|||||||
import traceback
|
import traceback
|
||||||
import warnings
|
import warnings
|
||||||
from contextlib import closing
|
from contextlib import closing
|
||||||
|
from urllib.error import HTTPError, URLError
|
||||||
|
|
||||||
import ruamel.yaml as yaml
|
import ruamel.yaml as yaml
|
||||||
from six.moves.urllib.error import HTTPError, URLError
|
|
||||||
|
|
||||||
import llnl.util.filesystem as fsys
|
import llnl.util.filesystem as fsys
|
||||||
import llnl.util.lang
|
import llnl.util.lang
|
||||||
@@ -914,8 +914,6 @@ def _fetch_spec_from_mirror(spec_url):
|
|||||||
return Spec.from_dict(specfile_json)
|
return Spec.from_dict(specfile_json)
|
||||||
if spec_url.endswith(".json"):
|
if spec_url.endswith(".json"):
|
||||||
return Spec.from_json(spec_file_contents)
|
return Spec.from_json(spec_file_contents)
|
||||||
if spec_url.endswith(".yaml"):
|
|
||||||
return Spec.from_yaml(spec_file_contents)
|
|
||||||
|
|
||||||
tp = multiprocessing.pool.ThreadPool(processes=concurrency)
|
tp = multiprocessing.pool.ThreadPool(processes=concurrency)
|
||||||
try:
|
try:
|
||||||
@@ -990,8 +988,6 @@ def file_read_method(file_path):
|
|||||||
"*.spec.json.sig",
|
"*.spec.json.sig",
|
||||||
"--include",
|
"--include",
|
||||||
"*.spec.json",
|
"*.spec.json",
|
||||||
"--include",
|
|
||||||
"*.spec.yaml",
|
|
||||||
cache_prefix,
|
cache_prefix,
|
||||||
tmpspecsdir,
|
tmpspecsdir,
|
||||||
]
|
]
|
||||||
@@ -1001,7 +997,7 @@ def file_read_method(file_path):
|
|||||||
"Using aws s3 sync to download specs from {0} to {1}".format(cache_prefix, tmpspecsdir)
|
"Using aws s3 sync to download specs from {0} to {1}".format(cache_prefix, tmpspecsdir)
|
||||||
)
|
)
|
||||||
aws(*sync_command_args, output=os.devnull, error=os.devnull)
|
aws(*sync_command_args, output=os.devnull, error=os.devnull)
|
||||||
file_list = fsys.find(tmpspecsdir, ["*.spec.json.sig", "*.spec.json", "*.spec.yaml"])
|
file_list = fsys.find(tmpspecsdir, ["*.spec.json.sig", "*.spec.json"])
|
||||||
read_fn = file_read_method
|
read_fn = file_read_method
|
||||||
except Exception:
|
except Exception:
|
||||||
tty.warn("Failed to use aws s3 sync to retrieve specs, falling back to parallel fetch")
|
tty.warn("Failed to use aws s3 sync to retrieve specs, falling back to parallel fetch")
|
||||||
@@ -1037,9 +1033,7 @@ def url_read_method(url):
|
|||||||
file_list = [
|
file_list = [
|
||||||
url_util.join(cache_prefix, entry)
|
url_util.join(cache_prefix, entry)
|
||||||
for entry in web_util.list_url(cache_prefix)
|
for entry in web_util.list_url(cache_prefix)
|
||||||
if entry.endswith(".yaml")
|
if entry.endswith("spec.json") or entry.endswith("spec.json.sig")
|
||||||
or entry.endswith("spec.json")
|
|
||||||
or entry.endswith("spec.json.sig")
|
|
||||||
]
|
]
|
||||||
read_fn = url_read_method
|
read_fn = url_read_method
|
||||||
except KeyError as inst:
|
except KeyError as inst:
|
||||||
@@ -1101,14 +1095,6 @@ def generate_package_index(cache_prefix, concurrency=32):
|
|||||||
tty.error("Unabled to generate package index, {0}".format(err))
|
tty.error("Unabled to generate package index, {0}".format(err))
|
||||||
return
|
return
|
||||||
|
|
||||||
if any(x.endswith(".yaml") for x in file_list):
|
|
||||||
msg = (
|
|
||||||
"The mirror in '{}' contains specs in the deprecated YAML format.\n\n\tSupport for "
|
|
||||||
"this format will be removed in v0.20, please regenerate the build cache with a "
|
|
||||||
"recent Spack\n"
|
|
||||||
).format(cache_prefix)
|
|
||||||
warnings.warn(msg)
|
|
||||||
|
|
||||||
tty.debug("Retrieving spec descriptor files from {0} to build index".format(cache_prefix))
|
tty.debug("Retrieving spec descriptor files from {0} to build index".format(cache_prefix))
|
||||||
|
|
||||||
tmpdir = tempfile.mkdtemp()
|
tmpdir = tempfile.mkdtemp()
|
||||||
@@ -1236,15 +1222,11 @@ def _build_tarball(
|
|||||||
specfile_name = tarball_name(spec, ".spec.json")
|
specfile_name = tarball_name(spec, ".spec.json")
|
||||||
specfile_path = os.path.realpath(os.path.join(cache_prefix, specfile_name))
|
specfile_path = os.path.realpath(os.path.join(cache_prefix, specfile_name))
|
||||||
signed_specfile_path = "{0}.sig".format(specfile_path)
|
signed_specfile_path = "{0}.sig".format(specfile_path)
|
||||||
deprecated_specfile_path = specfile_path.replace(".spec.json", ".spec.yaml")
|
|
||||||
|
|
||||||
remote_specfile_path = url_util.join(
|
remote_specfile_path = url_util.join(
|
||||||
outdir, os.path.relpath(specfile_path, os.path.realpath(tmpdir))
|
outdir, os.path.relpath(specfile_path, os.path.realpath(tmpdir))
|
||||||
)
|
)
|
||||||
remote_signed_specfile_path = "{0}.sig".format(remote_specfile_path)
|
remote_signed_specfile_path = "{0}.sig".format(remote_specfile_path)
|
||||||
remote_specfile_path_deprecated = url_util.join(
|
|
||||||
outdir, os.path.relpath(deprecated_specfile_path, os.path.realpath(tmpdir))
|
|
||||||
)
|
|
||||||
|
|
||||||
# If force and exists, overwrite. Otherwise raise exception on collision.
|
# If force and exists, overwrite. Otherwise raise exception on collision.
|
||||||
if force:
|
if force:
|
||||||
@@ -1252,12 +1234,8 @@ def _build_tarball(
|
|||||||
web_util.remove_url(remote_specfile_path)
|
web_util.remove_url(remote_specfile_path)
|
||||||
if web_util.url_exists(remote_signed_specfile_path):
|
if web_util.url_exists(remote_signed_specfile_path):
|
||||||
web_util.remove_url(remote_signed_specfile_path)
|
web_util.remove_url(remote_signed_specfile_path)
|
||||||
if web_util.url_exists(remote_specfile_path_deprecated):
|
elif web_util.url_exists(remote_specfile_path) or web_util.url_exists(
|
||||||
web_util.remove_url(remote_specfile_path_deprecated)
|
remote_signed_specfile_path
|
||||||
elif (
|
|
||||||
web_util.url_exists(remote_specfile_path)
|
|
||||||
or web_util.url_exists(remote_signed_specfile_path)
|
|
||||||
or web_util.url_exists(remote_specfile_path_deprecated)
|
|
||||||
):
|
):
|
||||||
raise NoOverwriteException(url_util.format(remote_specfile_path))
|
raise NoOverwriteException(url_util.format(remote_specfile_path))
|
||||||
|
|
||||||
@@ -1313,12 +1291,10 @@ def _build_tarball(
|
|||||||
|
|
||||||
with open(spec_file, "r") as inputfile:
|
with open(spec_file, "r") as inputfile:
|
||||||
content = inputfile.read()
|
content = inputfile.read()
|
||||||
if spec_file.endswith(".yaml"):
|
if spec_file.endswith(".json"):
|
||||||
spec_dict = yaml.load(content)
|
|
||||||
elif spec_file.endswith(".json"):
|
|
||||||
spec_dict = sjson.load(content)
|
spec_dict = sjson.load(content)
|
||||||
else:
|
else:
|
||||||
raise ValueError("{0} not a valid spec file type (json or yaml)".format(spec_file))
|
raise ValueError("{0} not a valid spec file type".format(spec_file))
|
||||||
spec_dict["buildcache_layout_version"] = 1
|
spec_dict["buildcache_layout_version"] = 1
|
||||||
bchecksum = {}
|
bchecksum = {}
|
||||||
bchecksum["hash_algorithm"] = "sha256"
|
bchecksum["hash_algorithm"] = "sha256"
|
||||||
@@ -1539,7 +1515,7 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
|
|||||||
# Assumes we care more about finding a spec file by preferred ext
|
# Assumes we care more about finding a spec file by preferred ext
|
||||||
# than by mirrory priority. This can be made less complicated as
|
# than by mirrory priority. This can be made less complicated as
|
||||||
# we remove support for deprecated spec formats and buildcache layouts.
|
# we remove support for deprecated spec formats and buildcache layouts.
|
||||||
for ext in ["json.sig", "json", "yaml"]:
|
for ext in ["json.sig", "json"]:
|
||||||
for mirror_to_try in mirrors_to_try:
|
for mirror_to_try in mirrors_to_try:
|
||||||
specfile_url = "{0}.{1}".format(mirror_to_try["specfile"], ext)
|
specfile_url = "{0}.{1}".format(mirror_to_try["specfile"], ext)
|
||||||
spackfile_url = mirror_to_try["spackfile"]
|
spackfile_url = mirror_to_try["spackfile"]
|
||||||
@@ -1576,13 +1552,6 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
|
|||||||
# the remaining mirrors, looking for one we can use.
|
# the remaining mirrors, looking for one we can use.
|
||||||
tarball_stage = try_fetch(spackfile_url)
|
tarball_stage = try_fetch(spackfile_url)
|
||||||
if tarball_stage:
|
if tarball_stage:
|
||||||
if ext == "yaml":
|
|
||||||
msg = (
|
|
||||||
"Reading {} from mirror.\n\n\tThe YAML format for buildcaches is "
|
|
||||||
"deprecated and will be removed in v0.20\n"
|
|
||||||
).format(spackfile_url)
|
|
||||||
warnings.warn(msg)
|
|
||||||
|
|
||||||
return {
|
return {
|
||||||
"tarball_stage": tarball_stage,
|
"tarball_stage": tarball_stage,
|
||||||
"specfile_stage": local_specfile_stage,
|
"specfile_stage": local_specfile_stage,
|
||||||
@@ -1634,7 +1603,7 @@ def make_package_relative(workdir, spec, allow_root):
|
|||||||
if "elf" in platform.binary_formats:
|
if "elf" in platform.binary_formats:
|
||||||
relocate.make_elf_binaries_relative(cur_path_names, orig_path_names, old_layout_root)
|
relocate.make_elf_binaries_relative(cur_path_names, orig_path_names, old_layout_root)
|
||||||
|
|
||||||
relocate.raise_if_not_relocatable(cur_path_names, allow_root)
|
allow_root or relocate.ensure_binaries_are_relocatable(cur_path_names)
|
||||||
orig_path_names = list()
|
orig_path_names = list()
|
||||||
cur_path_names = list()
|
cur_path_names = list()
|
||||||
for linkname in buildinfo.get("relocate_links", []):
|
for linkname in buildinfo.get("relocate_links", []):
|
||||||
@@ -1652,7 +1621,7 @@ def check_package_relocatable(workdir, spec, allow_root):
|
|||||||
cur_path_names = list()
|
cur_path_names = list()
|
||||||
for filename in buildinfo["relocate_binaries"]:
|
for filename in buildinfo["relocate_binaries"]:
|
||||||
cur_path_names.append(os.path.join(workdir, filename))
|
cur_path_names.append(os.path.join(workdir, filename))
|
||||||
relocate.raise_if_not_relocatable(cur_path_names, allow_root)
|
allow_root or relocate.ensure_binaries_are_relocatable(cur_path_names)
|
||||||
|
|
||||||
|
|
||||||
def dedupe_hardlinks_if_necessary(root, buildinfo):
|
def dedupe_hardlinks_if_necessary(root, buildinfo):
|
||||||
@@ -1826,8 +1795,6 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum
|
|||||||
spackfile_path = os.path.join(stagepath, spackfile_name)
|
spackfile_path = os.path.join(stagepath, spackfile_name)
|
||||||
tarfile_name = tarball_name(spec, ".tar.gz")
|
tarfile_name = tarball_name(spec, ".tar.gz")
|
||||||
tarfile_path = os.path.join(extract_to, tarfile_name)
|
tarfile_path = os.path.join(extract_to, tarfile_name)
|
||||||
deprecated_yaml_name = tarball_name(spec, ".spec.yaml")
|
|
||||||
deprecated_yaml_path = os.path.join(extract_to, deprecated_yaml_name)
|
|
||||||
json_name = tarball_name(spec, ".spec.json")
|
json_name = tarball_name(spec, ".spec.json")
|
||||||
json_path = os.path.join(extract_to, json_name)
|
json_path = os.path.join(extract_to, json_name)
|
||||||
with closing(tarfile.open(spackfile_path, "r")) as tar:
|
with closing(tarfile.open(spackfile_path, "r")) as tar:
|
||||||
@@ -1839,8 +1806,6 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum
|
|||||||
|
|
||||||
if os.path.exists(json_path):
|
if os.path.exists(json_path):
|
||||||
specfile_path = json_path
|
specfile_path = json_path
|
||||||
elif os.path.exists(deprecated_yaml_path):
|
|
||||||
specfile_path = deprecated_yaml_path
|
|
||||||
else:
|
else:
|
||||||
raise ValueError("Cannot find spec file for {0}.".format(extract_to))
|
raise ValueError("Cannot find spec file for {0}.".format(extract_to))
|
||||||
|
|
||||||
@@ -1887,10 +1852,8 @@ def extract_tarball(spec, download_result, allow_root=False, unsigned=False, for
|
|||||||
content = inputfile.read()
|
content = inputfile.read()
|
||||||
if specfile_path.endswith(".json.sig"):
|
if specfile_path.endswith(".json.sig"):
|
||||||
spec_dict = Spec.extract_json_from_clearsig(content)
|
spec_dict = Spec.extract_json_from_clearsig(content)
|
||||||
elif specfile_path.endswith(".json"):
|
|
||||||
spec_dict = sjson.load(content)
|
|
||||||
else:
|
else:
|
||||||
spec_dict = syaml.load(content)
|
spec_dict = sjson.load(content)
|
||||||
|
|
||||||
bchecksum = spec_dict["binary_cache_checksum"]
|
bchecksum = spec_dict["binary_cache_checksum"]
|
||||||
filename = download_result["tarball_stage"].save_filename
|
filename = download_result["tarball_stage"].save_filename
|
||||||
@@ -1902,7 +1865,7 @@ def extract_tarball(spec, download_result, allow_root=False, unsigned=False, for
|
|||||||
or int(spec_dict["buildcache_layout_version"]) < 1
|
or int(spec_dict["buildcache_layout_version"]) < 1
|
||||||
):
|
):
|
||||||
# Handle the older buildcache layout where the .spack file
|
# Handle the older buildcache layout where the .spack file
|
||||||
# contains a spec json/yaml, maybe an .asc file (signature),
|
# contains a spec json, maybe an .asc file (signature),
|
||||||
# and another tarball containing the actual install tree.
|
# and another tarball containing the actual install tree.
|
||||||
tmpdir = tempfile.mkdtemp()
|
tmpdir = tempfile.mkdtemp()
|
||||||
try:
|
try:
|
||||||
@@ -2053,17 +2016,12 @@ def try_direct_fetch(spec, mirrors=None):
|
|||||||
"""
|
"""
|
||||||
Try to find the spec directly on the configured mirrors
|
Try to find the spec directly on the configured mirrors
|
||||||
"""
|
"""
|
||||||
deprecated_specfile_name = tarball_name(spec, ".spec.yaml")
|
|
||||||
specfile_name = tarball_name(spec, ".spec.json")
|
specfile_name = tarball_name(spec, ".spec.json")
|
||||||
signed_specfile_name = tarball_name(spec, ".spec.json.sig")
|
signed_specfile_name = tarball_name(spec, ".spec.json.sig")
|
||||||
specfile_is_signed = False
|
specfile_is_signed = False
|
||||||
specfile_is_json = True
|
|
||||||
found_specs = []
|
found_specs = []
|
||||||
|
|
||||||
for mirror in spack.mirror.MirrorCollection(mirrors=mirrors).values():
|
for mirror in spack.mirror.MirrorCollection(mirrors=mirrors).values():
|
||||||
buildcache_fetch_url_yaml = url_util.join(
|
|
||||||
mirror.fetch_url, _build_cache_relative_path, deprecated_specfile_name
|
|
||||||
)
|
|
||||||
buildcache_fetch_url_json = url_util.join(
|
buildcache_fetch_url_json = url_util.join(
|
||||||
mirror.fetch_url, _build_cache_relative_path, specfile_name
|
mirror.fetch_url, _build_cache_relative_path, specfile_name
|
||||||
)
|
)
|
||||||
@@ -2077,28 +2035,19 @@ def try_direct_fetch(spec, mirrors=None):
|
|||||||
try:
|
try:
|
||||||
_, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
|
_, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
|
||||||
except (URLError, web_util.SpackWebError, HTTPError) as url_err_x:
|
except (URLError, web_util.SpackWebError, HTTPError) as url_err_x:
|
||||||
try:
|
tty.debug(
|
||||||
_, _, fs = web_util.read_from_url(buildcache_fetch_url_yaml)
|
"Did not find {0} on {1}".format(
|
||||||
specfile_is_json = False
|
specfile_name, buildcache_fetch_url_signed_json
|
||||||
except (URLError, web_util.SpackWebError, HTTPError) as url_err_y:
|
),
|
||||||
tty.debug(
|
url_err,
|
||||||
"Did not find {0} on {1}".format(
|
level=2,
|
||||||
specfile_name, buildcache_fetch_url_signed_json
|
)
|
||||||
),
|
tty.debug(
|
||||||
url_err,
|
"Did not find {0} on {1}".format(specfile_name, buildcache_fetch_url_json),
|
||||||
level=2,
|
url_err_x,
|
||||||
)
|
level=2,
|
||||||
tty.debug(
|
)
|
||||||
"Did not find {0} on {1}".format(specfile_name, buildcache_fetch_url_json),
|
continue
|
||||||
url_err_x,
|
|
||||||
level=2,
|
|
||||||
)
|
|
||||||
tty.debug(
|
|
||||||
"Did not find {0} on {1}".format(specfile_name, buildcache_fetch_url_yaml),
|
|
||||||
url_err_y,
|
|
||||||
level=2,
|
|
||||||
)
|
|
||||||
continue
|
|
||||||
specfile_contents = codecs.getreader("utf-8")(fs).read()
|
specfile_contents = codecs.getreader("utf-8")(fs).read()
|
||||||
|
|
||||||
# read the spec from the build cache file. All specs in build caches
|
# read the spec from the build cache file. All specs in build caches
|
||||||
@@ -2107,10 +2056,8 @@ def try_direct_fetch(spec, mirrors=None):
|
|||||||
if specfile_is_signed:
|
if specfile_is_signed:
|
||||||
specfile_json = Spec.extract_json_from_clearsig(specfile_contents)
|
specfile_json = Spec.extract_json_from_clearsig(specfile_contents)
|
||||||
fetched_spec = Spec.from_dict(specfile_json)
|
fetched_spec = Spec.from_dict(specfile_json)
|
||||||
elif specfile_is_json:
|
|
||||||
fetched_spec = Spec.from_json(specfile_contents)
|
|
||||||
else:
|
else:
|
||||||
fetched_spec = Spec.from_yaml(specfile_contents)
|
fetched_spec = Spec.from_json(specfile_contents)
|
||||||
fetched_spec._mark_concrete()
|
fetched_spec._mark_concrete()
|
||||||
|
|
||||||
found_specs.append(
|
found_specs.append(
|
||||||
@@ -2321,7 +2268,7 @@ def needs_rebuild(spec, mirror_url):
|
|||||||
specfile_path = os.path.join(cache_prefix, specfile_name)
|
specfile_path = os.path.join(cache_prefix, specfile_name)
|
||||||
|
|
||||||
# Only check for the presence of the json version of the spec. If the
|
# Only check for the presence of the json version of the spec. If the
|
||||||
# mirror only has the yaml version, or doesn't have the spec at all, we
|
# mirror only has the json version, or doesn't have the spec at all, we
|
||||||
# need to rebuild.
|
# need to rebuild.
|
||||||
return not web_util.url_exists(specfile_path)
|
return not web_util.url_exists(specfile_path)
|
||||||
|
|
||||||
@@ -2429,7 +2376,6 @@ def download_single_spec(concrete_spec, destination, mirror_url=None):
|
|||||||
"url": [
|
"url": [
|
||||||
tarball_name(concrete_spec, ".spec.json.sig"),
|
tarball_name(concrete_spec, ".spec.json.sig"),
|
||||||
tarball_name(concrete_spec, ".spec.json"),
|
tarball_name(concrete_spec, ".spec.json"),
|
||||||
tarball_name(concrete_spec, ".spec.yaml"),
|
|
||||||
],
|
],
|
||||||
"path": destination,
|
"path": destination,
|
||||||
"required": True,
|
"required": True,
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
25
lib/spack/spack/bootstrap/__init__.py
Normal file
25
lib/spack/spack/bootstrap/__init__.py
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||||
|
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
"""Function and classes needed to bootstrap Spack itself."""
|
||||||
|
|
||||||
|
from .config import ensure_bootstrap_configuration, is_bootstrapping
|
||||||
|
from .core import (
|
||||||
|
all_core_root_specs,
|
||||||
|
ensure_core_dependencies,
|
||||||
|
ensure_patchelf_in_path_or_raise,
|
||||||
|
)
|
||||||
|
from .environment import BootstrapEnvironment, ensure_environment_dependencies
|
||||||
|
from .status import status_message
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"is_bootstrapping",
|
||||||
|
"ensure_bootstrap_configuration",
|
||||||
|
"ensure_core_dependencies",
|
||||||
|
"ensure_patchelf_in_path_or_raise",
|
||||||
|
"all_core_root_specs",
|
||||||
|
"ensure_environment_dependencies",
|
||||||
|
"BootstrapEnvironment",
|
||||||
|
"status_message",
|
||||||
|
]
|
||||||
218
lib/spack/spack/bootstrap/_common.py
Normal file
218
lib/spack/spack/bootstrap/_common.py
Normal file
@@ -0,0 +1,218 @@
|
|||||||
|
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||||
|
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
"""Common basic functions used through the spack.bootstrap package"""
|
||||||
|
import fnmatch
|
||||||
|
import os.path
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
import sysconfig
|
||||||
|
import warnings
|
||||||
|
|
||||||
|
import archspec.cpu
|
||||||
|
|
||||||
|
import llnl.util.filesystem as fs
|
||||||
|
from llnl.util import tty
|
||||||
|
|
||||||
|
import spack.store
|
||||||
|
import spack.util.environment
|
||||||
|
import spack.util.executable
|
||||||
|
|
||||||
|
from .config import spec_for_current_python
|
||||||
|
|
||||||
|
|
||||||
|
def _python_import(module):
|
||||||
|
try:
|
||||||
|
__import__(module)
|
||||||
|
except ImportError:
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def _try_import_from_store(module, query_spec, query_info=None):
|
||||||
|
"""Return True if the module can be imported from an already
|
||||||
|
installed spec, False otherwise.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
module: Python module to be imported
|
||||||
|
query_spec: spec that may provide the module
|
||||||
|
query_info (dict or None): if a dict is passed it is populated with the
|
||||||
|
command found and the concrete spec providing it
|
||||||
|
"""
|
||||||
|
# If it is a string assume it's one of the root specs by this module
|
||||||
|
if isinstance(query_spec, str):
|
||||||
|
# We have to run as part of this python interpreter
|
||||||
|
query_spec += " ^" + spec_for_current_python()
|
||||||
|
|
||||||
|
installed_specs = spack.store.db.query(query_spec, installed=True)
|
||||||
|
|
||||||
|
for candidate_spec in installed_specs:
|
||||||
|
pkg = candidate_spec["python"].package
|
||||||
|
module_paths = [
|
||||||
|
os.path.join(candidate_spec.prefix, pkg.purelib),
|
||||||
|
os.path.join(candidate_spec.prefix, pkg.platlib),
|
||||||
|
] # type: list[str]
|
||||||
|
path_before = list(sys.path)
|
||||||
|
|
||||||
|
# NOTE: try module_paths first and last, last allows an existing version in path
|
||||||
|
# to be picked up and used, possibly depending on something in the store, first
|
||||||
|
# allows the bootstrap version to work when an incompatible version is in
|
||||||
|
# sys.path
|
||||||
|
orders = [
|
||||||
|
module_paths + sys.path,
|
||||||
|
sys.path + module_paths,
|
||||||
|
]
|
||||||
|
for path in orders:
|
||||||
|
sys.path = path
|
||||||
|
try:
|
||||||
|
_fix_ext_suffix(candidate_spec)
|
||||||
|
if _python_import(module):
|
||||||
|
msg = (
|
||||||
|
f"[BOOTSTRAP MODULE {module}] The installed spec "
|
||||||
|
f'"{query_spec}/{candidate_spec.dag_hash()}" '
|
||||||
|
f'provides the "{module}" Python module'
|
||||||
|
)
|
||||||
|
tty.debug(msg)
|
||||||
|
if query_info is not None:
|
||||||
|
query_info["spec"] = candidate_spec
|
||||||
|
return True
|
||||||
|
except Exception as exc: # pylint: disable=broad-except
|
||||||
|
msg = (
|
||||||
|
"unexpected error while trying to import module "
|
||||||
|
f'"{module}" from spec "{candidate_spec}" [error="{str(exc)}"]'
|
||||||
|
)
|
||||||
|
warnings.warn(msg)
|
||||||
|
else:
|
||||||
|
msg = "Spec {0} did not provide module {1}"
|
||||||
|
warnings.warn(msg.format(candidate_spec, module))
|
||||||
|
|
||||||
|
sys.path = path_before
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def _fix_ext_suffix(candidate_spec):
|
||||||
|
"""Fix the external suffixes of Python extensions on the fly for
|
||||||
|
platforms that may need it
|
||||||
|
|
||||||
|
Args:
|
||||||
|
candidate_spec (Spec): installed spec with a Python module
|
||||||
|
to be checked.
|
||||||
|
"""
|
||||||
|
# Here we map target families to the patterns expected
|
||||||
|
# by pristine CPython. Only architectures with known issues
|
||||||
|
# are included. Known issues:
|
||||||
|
#
|
||||||
|
# [RHEL + ppc64le]: https://github.com/spack/spack/issues/25734
|
||||||
|
#
|
||||||
|
_suffix_to_be_checked = {
|
||||||
|
"ppc64le": {
|
||||||
|
"glob": "*.cpython-*-powerpc64le-linux-gnu.so",
|
||||||
|
"re": r".cpython-[\w]*-powerpc64le-linux-gnu.so",
|
||||||
|
"fmt": r"{module}.cpython-{major}{minor}m-powerpc64le-linux-gnu.so",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
# If the current architecture is not problematic return
|
||||||
|
generic_target = archspec.cpu.host().family
|
||||||
|
if str(generic_target) not in _suffix_to_be_checked:
|
||||||
|
return
|
||||||
|
|
||||||
|
# If there's no EXT_SUFFIX (Python < 3.5) or the suffix matches
|
||||||
|
# the expectations, return since the package is surely good
|
||||||
|
ext_suffix = sysconfig.get_config_var("EXT_SUFFIX")
|
||||||
|
if ext_suffix is None:
|
||||||
|
return
|
||||||
|
|
||||||
|
expected = _suffix_to_be_checked[str(generic_target)]
|
||||||
|
if fnmatch.fnmatch(ext_suffix, expected["glob"]):
|
||||||
|
return
|
||||||
|
|
||||||
|
# If we are here it means the current interpreter expects different names
|
||||||
|
# than pristine CPython. So:
|
||||||
|
# 1. Find what we have installed
|
||||||
|
# 2. Create symbolic links for the other names, it they're not there already
|
||||||
|
|
||||||
|
# Check if standard names are installed and if we have to create
|
||||||
|
# link for this interpreter
|
||||||
|
standard_extensions = fs.find(candidate_spec.prefix, expected["glob"])
|
||||||
|
link_names = [re.sub(expected["re"], ext_suffix, s) for s in standard_extensions]
|
||||||
|
for file_name, link_name in zip(standard_extensions, link_names):
|
||||||
|
if os.path.exists(link_name):
|
||||||
|
continue
|
||||||
|
os.symlink(file_name, link_name)
|
||||||
|
|
||||||
|
# Check if this interpreter installed something and we have to create
|
||||||
|
# links for a standard CPython interpreter
|
||||||
|
non_standard_extensions = fs.find(candidate_spec.prefix, "*" + ext_suffix)
|
||||||
|
for abs_path in non_standard_extensions:
|
||||||
|
directory, filename = os.path.split(abs_path)
|
||||||
|
module = filename.split(".")[0]
|
||||||
|
link_name = os.path.join(
|
||||||
|
directory,
|
||||||
|
expected["fmt"].format(
|
||||||
|
module=module, major=sys.version_info[0], minor=sys.version_info[1]
|
||||||
|
),
|
||||||
|
)
|
||||||
|
if os.path.exists(link_name):
|
||||||
|
continue
|
||||||
|
os.symlink(abs_path, link_name)
|
||||||
|
|
||||||
|
|
||||||
|
def _executables_in_store(executables, query_spec, query_info=None):
|
||||||
|
"""Return True if at least one of the executables can be retrieved from
|
||||||
|
a spec in store, False otherwise.
|
||||||
|
|
||||||
|
The different executables must provide the same functionality and are
|
||||||
|
"alternate" to each other, i.e. the function will exit True on the first
|
||||||
|
executable found.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
executables: list of executables to be searched
|
||||||
|
query_spec: spec that may provide the executable
|
||||||
|
query_info (dict or None): if a dict is passed it is populated with the
|
||||||
|
command found and the concrete spec providing it
|
||||||
|
"""
|
||||||
|
executables_str = ", ".join(executables)
|
||||||
|
msg = "[BOOTSTRAP EXECUTABLES {0}] Try installed specs with query '{1}'"
|
||||||
|
tty.debug(msg.format(executables_str, query_spec))
|
||||||
|
installed_specs = spack.store.db.query(query_spec, installed=True)
|
||||||
|
if installed_specs:
|
||||||
|
for concrete_spec in installed_specs:
|
||||||
|
bin_dir = concrete_spec.prefix.bin
|
||||||
|
# IF we have a "bin" directory and it contains
|
||||||
|
# the executables we are looking for
|
||||||
|
if (
|
||||||
|
os.path.exists(bin_dir)
|
||||||
|
and os.path.isdir(bin_dir)
|
||||||
|
and spack.util.executable.which_string(*executables, path=bin_dir)
|
||||||
|
):
|
||||||
|
spack.util.environment.path_put_first("PATH", [bin_dir])
|
||||||
|
if query_info is not None:
|
||||||
|
query_info["command"] = spack.util.executable.which(*executables, path=bin_dir)
|
||||||
|
query_info["spec"] = concrete_spec
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def _root_spec(spec_str):
|
||||||
|
"""Add a proper compiler and target to a spec used during bootstrapping.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
spec_str (str): spec to be bootstrapped. Must be without compiler and target.
|
||||||
|
"""
|
||||||
|
# Add a proper compiler hint to the root spec. We use GCC for
|
||||||
|
# everything but MacOS and Windows.
|
||||||
|
if str(spack.platforms.host()) == "darwin":
|
||||||
|
spec_str += " %apple-clang"
|
||||||
|
elif str(spack.platforms.host()) == "windows":
|
||||||
|
spec_str += " %msvc"
|
||||||
|
else:
|
||||||
|
spec_str += " %gcc"
|
||||||
|
|
||||||
|
target = archspec.cpu.host().family
|
||||||
|
spec_str += f" target={target}"
|
||||||
|
|
||||||
|
tty.debug(f"[BOOTSTRAP ROOT SPEC] {spec_str}")
|
||||||
|
return spec_str
|
||||||
169
lib/spack/spack/bootstrap/config.py
Normal file
169
lib/spack/spack/bootstrap/config.py
Normal file
@@ -0,0 +1,169 @@
|
|||||||
|
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||||
|
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
"""Manage configuration swapping for bootstrapping purposes"""
|
||||||
|
|
||||||
|
import contextlib
|
||||||
|
import os.path
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from llnl.util import tty
|
||||||
|
|
||||||
|
import spack.compilers
|
||||||
|
import spack.config
|
||||||
|
import spack.environment
|
||||||
|
import spack.paths
|
||||||
|
import spack.platforms
|
||||||
|
import spack.repo
|
||||||
|
import spack.spec
|
||||||
|
import spack.store
|
||||||
|
import spack.util.path
|
||||||
|
|
||||||
|
#: Reference counter for the bootstrapping configuration context manager
|
||||||
|
_REF_COUNT = 0
|
||||||
|
|
||||||
|
|
||||||
|
def is_bootstrapping():
|
||||||
|
"""Return True if we are in a bootstrapping context, False otherwise."""
|
||||||
|
return _REF_COUNT > 0
|
||||||
|
|
||||||
|
|
||||||
|
def spec_for_current_python():
|
||||||
|
"""For bootstrapping purposes we are just interested in the Python
|
||||||
|
minor version (all patches are ABI compatible with the same minor).
|
||||||
|
|
||||||
|
See:
|
||||||
|
https://www.python.org/dev/peps/pep-0513/
|
||||||
|
https://stackoverflow.com/a/35801395/771663
|
||||||
|
"""
|
||||||
|
version_str = ".".join(str(x) for x in sys.version_info[:2])
|
||||||
|
return f"python@{version_str}"
|
||||||
|
|
||||||
|
|
||||||
|
def root_path():
|
||||||
|
"""Root of all the bootstrap related folders"""
|
||||||
|
return spack.util.path.canonicalize_path(
|
||||||
|
spack.config.get("bootstrap:root", spack.paths.default_user_bootstrap_path)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def store_path():
|
||||||
|
"""Path to the store used for bootstrapped software"""
|
||||||
|
enabled = spack.config.get("bootstrap:enable", True)
|
||||||
|
if not enabled:
|
||||||
|
msg = 'bootstrapping is currently disabled. Use "spack bootstrap enable" to enable it'
|
||||||
|
raise RuntimeError(msg)
|
||||||
|
|
||||||
|
return _store_path()
|
||||||
|
|
||||||
|
|
||||||
|
@contextlib.contextmanager
|
||||||
|
def spack_python_interpreter():
|
||||||
|
"""Override the current configuration to set the interpreter under
|
||||||
|
which Spack is currently running as the only Python external spec
|
||||||
|
available.
|
||||||
|
"""
|
||||||
|
python_prefix = sys.exec_prefix
|
||||||
|
external_python = spec_for_current_python()
|
||||||
|
|
||||||
|
entry = {
|
||||||
|
"buildable": False,
|
||||||
|
"externals": [{"prefix": python_prefix, "spec": str(external_python)}],
|
||||||
|
}
|
||||||
|
|
||||||
|
with spack.config.override("packages:python::", entry):
|
||||||
|
yield
|
||||||
|
|
||||||
|
|
||||||
|
def _store_path():
|
||||||
|
bootstrap_root_path = root_path()
|
||||||
|
return spack.util.path.canonicalize_path(os.path.join(bootstrap_root_path, "store"))
|
||||||
|
|
||||||
|
|
||||||
|
def _config_path():
|
||||||
|
bootstrap_root_path = root_path()
|
||||||
|
return spack.util.path.canonicalize_path(os.path.join(bootstrap_root_path, "config"))
|
||||||
|
|
||||||
|
|
||||||
|
@contextlib.contextmanager
|
||||||
|
def ensure_bootstrap_configuration():
|
||||||
|
"""Swap the current configuration for the one used to bootstrap Spack.
|
||||||
|
|
||||||
|
The context manager is reference counted to ensure we don't swap multiple
|
||||||
|
times if there's nested use of it in the stack. One compelling use case
|
||||||
|
is bootstrapping patchelf during the bootstrap of clingo.
|
||||||
|
"""
|
||||||
|
global _REF_COUNT # pylint: disable=global-statement
|
||||||
|
already_swapped = bool(_REF_COUNT)
|
||||||
|
_REF_COUNT += 1
|
||||||
|
try:
|
||||||
|
if already_swapped:
|
||||||
|
yield
|
||||||
|
else:
|
||||||
|
with _ensure_bootstrap_configuration():
|
||||||
|
yield
|
||||||
|
finally:
|
||||||
|
_REF_COUNT -= 1
|
||||||
|
|
||||||
|
|
||||||
|
def _read_and_sanitize_configuration():
|
||||||
|
"""Read the user configuration that needs to be reused for bootstrapping
|
||||||
|
and remove the entries that should not be copied over.
|
||||||
|
"""
|
||||||
|
# Read the "config" section but pop the install tree (the entry will not be
|
||||||
|
# considered due to the use_store context manager, so it will be confusing
|
||||||
|
# to have it in the configuration).
|
||||||
|
config_yaml = spack.config.get("config")
|
||||||
|
config_yaml.pop("install_tree", None)
|
||||||
|
user_configuration = {"bootstrap": spack.config.get("bootstrap"), "config": config_yaml}
|
||||||
|
return user_configuration
|
||||||
|
|
||||||
|
|
||||||
|
def _bootstrap_config_scopes():
|
||||||
|
tty.debug("[BOOTSTRAP CONFIG SCOPE] name=_builtin")
|
||||||
|
config_scopes = [spack.config.InternalConfigScope("_builtin", spack.config.config_defaults)]
|
||||||
|
configuration_paths = (spack.config.configuration_defaults_path, ("bootstrap", _config_path()))
|
||||||
|
for name, path in configuration_paths:
|
||||||
|
platform = spack.platforms.host().name
|
||||||
|
platform_scope = spack.config.ConfigScope(
|
||||||
|
"/".join([name, platform]), os.path.join(path, platform)
|
||||||
|
)
|
||||||
|
generic_scope = spack.config.ConfigScope(name, path)
|
||||||
|
config_scopes.extend([generic_scope, platform_scope])
|
||||||
|
msg = "[BOOTSTRAP CONFIG SCOPE] name={0}, path={1}"
|
||||||
|
tty.debug(msg.format(generic_scope.name, generic_scope.path))
|
||||||
|
tty.debug(msg.format(platform_scope.name, platform_scope.path))
|
||||||
|
return config_scopes
|
||||||
|
|
||||||
|
|
||||||
|
def _add_compilers_if_missing():
    """Search for compilers and add them to the configuration, if none is
    currently configured for the front-end architecture.
    """
    arch = spack.spec.ArchSpec.frontend_arch()
    # Nothing to do if compilers are already configured for this arch
    if spack.compilers.compilers_for_arch(arch):
        return
    detected_compilers = spack.compilers.find_new_compilers()
    if detected_compilers:
        spack.compilers.add_compilers_to_config(detected_compilers, init_config=False)
||||||
|
|
||||||
|
|
||||||
|
@contextlib.contextmanager
def _ensure_bootstrap_configuration():
    """Swap the current Spack configuration for the bootstrap one.

    Within this context manager Spack runs with: no active environment, the
    real host platform, the builtin package repository, the bootstrap store,
    the bootstrap configuration scopes (with the user's "bootstrap" and
    "config" sections re-applied on top), modules disabled, and the current
    Python as interpreter.
    """
    bootstrap_store_path = store_path()
    # Read the user's "bootstrap"/"config" sections *before* swapping scopes,
    # so they can be restored on top of the bootstrap configuration below.
    user_configuration = _read_and_sanitize_configuration()
    with spack.environment.no_active_environment():
        with spack.platforms.prevent_cray_detection(), spack.platforms.use_platform(
            spack.platforms.real_host()
        ), spack.repo.use_repositories(spack.paths.packages_path), spack.store.use_store(
            bootstrap_store_path
        ):
            # Default configuration scopes excluding command line
            # and builtin but accounting for platform specific scopes
            config_scopes = _bootstrap_config_scopes()
            with spack.config.use_configuration(*config_scopes):
                # We may need to compile code from sources, so ensure we
                # have compilers for the current platform
                _add_compilers_if_missing()
                spack.config.set("bootstrap", user_configuration["bootstrap"])
                spack.config.set("config", user_configuration["config"])
                with spack.modules.disable_modules():
                    with spack_python_interpreter():
                        yield
||||||
574
lib/spack/spack/bootstrap/core.py
Normal file
574
lib/spack/spack/bootstrap/core.py
Normal file
@@ -0,0 +1,574 @@
|
|||||||
|
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||||
|
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
"""Bootstrap Spack core dependencies from binaries.
|
||||||
|
|
||||||
|
This module contains logic to bootstrap software required by Spack from binaries served in the
|
||||||
|
bootstrapping mirrors. The logic is quite different from an installation done from a Spack user,
|
||||||
|
because of the following reasons:
|
||||||
|
|
||||||
|
1. The binaries are all compiled on the same OS for a given platform (e.g. they are compiled on
|
||||||
|
``centos7`` on ``linux``), but they will be installed and used on the host OS. They are also
|
||||||
|
targeted at the most generic architecture possible. That makes the binaries difficult to reuse
|
||||||
|
with other specs in an environment without ad-hoc logic.
|
||||||
|
2. Bootstrapping has a fallback procedure where we try to install software by default from the
|
||||||
|
most recent binaries, and proceed to older versions of the mirror, until we try building from
|
||||||
|
sources as a last resort. This allows us not to be blocked on architectures where we don't
|
||||||
|
have binaries readily available, but is also not compatible with the working of environments
|
||||||
|
(they don't have fallback procedures).
|
||||||
|
3. Among the binaries we have clingo, so we can't concretize that with clingo :-)
|
||||||
|
4. clingo, GnuPG and patchelf binaries need to be verified by sha256 sum (all the other binaries
|
||||||
|
we might add on top of that in principle can be verified with GPG signatures).
|
||||||
|
"""
|
||||||
|
|
||||||
|
import copy
|
||||||
|
import functools
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import os.path
|
||||||
|
import sys
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
from llnl.util import tty
|
||||||
|
from llnl.util.lang import GroupedExceptionHandler
|
||||||
|
|
||||||
|
import spack.binary_distribution
|
||||||
|
import spack.config
|
||||||
|
import spack.detection
|
||||||
|
import spack.environment
|
||||||
|
import spack.modules
|
||||||
|
import spack.paths
|
||||||
|
import spack.platforms
|
||||||
|
import spack.platforms.linux
|
||||||
|
import spack.repo
|
||||||
|
import spack.spec
|
||||||
|
import spack.store
|
||||||
|
import spack.user_environment
|
||||||
|
import spack.util.environment
|
||||||
|
import spack.util.executable
|
||||||
|
import spack.util.path
|
||||||
|
import spack.util.spack_yaml
|
||||||
|
import spack.util.url
|
||||||
|
import spack.version
|
||||||
|
|
||||||
|
from ._common import (
|
||||||
|
_executables_in_store,
|
||||||
|
_python_import,
|
||||||
|
_root_spec,
|
||||||
|
_try_import_from_store,
|
||||||
|
)
|
||||||
|
from .config import spack_python_interpreter, spec_for_current_python
|
||||||
|
|
||||||
|
#: Name of the file containing metadata about the bootstrapping source
|
||||||
|
METADATA_YAML_FILENAME = "metadata.yaml"
|
||||||
|
|
||||||
|
#: Whether the current platform is Windows
|
||||||
|
IS_WINDOWS = sys.platform == "win32"
|
||||||
|
|
||||||
|
#: Map a bootstrapper type to the corresponding class
|
||||||
|
_bootstrap_methods = {}
|
||||||
|
|
||||||
|
|
||||||
|
def bootstrapper(bootstrapper_type):
|
||||||
|
"""Decorator to register classes implementing bootstrapping
|
||||||
|
methods.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
bootstrapper_type (str): string identifying the class
|
||||||
|
"""
|
||||||
|
|
||||||
|
def _register(cls):
|
||||||
|
_bootstrap_methods[bootstrapper_type] = cls
|
||||||
|
return cls
|
||||||
|
|
||||||
|
return _register
|
||||||
|
|
||||||
|
|
||||||
|
class Bootstrapper:
    """Interface for "core" software bootstrappers"""

    # Name of the internal config scope pushed while this bootstrapper is in
    # use; subclasses set a unique value in __init__.
    config_scope_name = ""

    def __init__(self, conf):
        self.conf = conf
        self.name = conf["name"]
        self.url = conf["info"]["url"]
        # Directory holding metadata.yaml and per-package JSON files
        self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])

    @property
    def mirror_url(self) -> str:
        """Mirror url associated with this bootstrapper"""
        # Absolute paths
        if os.path.isabs(self.url):
            return spack.util.url.format(self.url)

        # Check for :// and assume it's an url if we find it
        if "://" in self.url:
            return self.url

        # Otherwise, it's a relative path: resolve it against the metadata dir
        return spack.util.url.format(os.path.join(self.metadata_dir, self.url))

    @property
    def mirror_scope(self):
        """Mirror scope to be pushed onto the bootstrapping configuration when using
        this bootstrapper.
        """
        # NOTE: the trailing colon in "mirrors:" is intentional config syntax
        return spack.config.InternalConfigScope(
            self.config_scope_name, {"mirrors:": {self.name: self.mirror_url}}
        )

    def try_import(self, module: str, abstract_spec_str: str):  # pylint: disable=unused-argument
        """Try to import a Python module from a spec satisfying the abstract spec
        passed as argument.

        Args:
            module (str): Python module name to try importing
            abstract_spec_str (str): abstract spec that can provide the Python module

        Return:
            True if the Python module could be imported, False otherwise
        """
        # Base class implementation: never succeeds; subclasses override
        return False

    def try_search_path(self, executables, abstract_spec_str):  # pylint: disable=unused-argument
        """Try to search some executables in the prefix of specs satisfying the abstract
        spec passed as argument.

        Args:
            executables (list of str): executables to be found
            abstract_spec_str (str): abstract spec that can provide the Python module

        Return:
            True if the executables are found, False otherwise
        """
        # Base class implementation: never succeeds; subclasses override
        return False
||||||
|
|
||||||
|
|
||||||
|
@bootstrapper(bootstrapper_type="buildcache")
class BuildcacheBootstrapper(Bootstrapper):
    """Install the software needed during bootstrapping from a buildcache."""

    def __init__(self, conf):
        super().__init__(conf)
        # Query info of the last successful search (set in _install_and_test
        # and try_search_path); None until a search has succeeded.
        self.last_search = None
        # Unique scope name so concurrent/multiple instances don't clash
        self.config_scope_name = f"bootstrap_buildcache-{uuid.uuid4()}"

    @staticmethod
    def _spec_and_platform(abstract_spec_str):
        """Return the spec object and platform we need to use when
        querying the buildcache.

        Args:
            abstract_spec_str: abstract spec string we are looking for
        """
        # Try to install from an unsigned binary cache
        abstract_spec = spack.spec.Spec(abstract_spec_str)
        # On Cray we want to use Linux binaries if available from mirrors
        bincache_platform = spack.platforms.real_host()
        return abstract_spec, bincache_platform

    def _read_metadata(self, package_name):
        """Return metadata about the given package, read from the
        ``<package_name>.json`` file in the metadata directory.
        """
        json_filename = f"{package_name}.json"
        json_dir = self.metadata_dir
        json_path = os.path.join(json_dir, json_filename)
        with open(json_path, encoding="utf-8") as stream:
            data = json.load(stream)
        return data

    def _install_by_hash(self, pkg_hash, pkg_sha256, index, bincache_platform):
        # Install a single node from the buildcache, identified by its DAG
        # hash, verifying the tarball against the expected sha256.
        index_spec = next(x for x in index if x.dag_hash() == pkg_hash)
        # Reconstruct the compiler that we need to use for bootstrapping
        # (paths point at /dev/null: the compiler is never actually invoked,
        # it only needs to exist in the configuration)
        compiler_entry = {
            "modules": [],
            "operating_system": str(index_spec.os),
            "paths": {
                "cc": "/dev/null",
                "cxx": "/dev/null",
                "f77": "/dev/null",
                "fc": "/dev/null",
            },
            "spec": str(index_spec.compiler),
            "target": str(index_spec.target.family),
        }
        with spack.platforms.use_platform(bincache_platform):
            with spack.config.override("compilers", [{"compiler": compiler_entry}]):
                # "/<hash>" is Spack's syntax for a lookup by DAG hash
                spec_str = "/" + pkg_hash
                query = spack.binary_distribution.BinaryCacheQuery(all_architectures=True)
                matches = spack.store.find([spec_str], multiple=False, query_fn=query)
                for match in matches:
                    spack.binary_distribution.install_root_node(
                        match, allow_root=True, unsigned=True, force=True, sha256=pkg_sha256
                    )

    def _install_and_test(self, abstract_spec, bincache_platform, bincache_data, test_fn):
        # Ensure we see only the buildcache being used to bootstrap
        with spack.config.override(self.mirror_scope):
            # This index is currently needed to get the compiler used to build some
            # specs that we know by dag hash.
            spack.binary_distribution.binary_index.regenerate_spec_cache()
            index = spack.binary_distribution.update_cache_and_get_specs()

            if not index:
                raise RuntimeError("The binary index is empty")

            for item in bincache_data["verified"]:
                candidate_spec = item["spec"]
                # This will be None for things that don't depend on python
                python_spec = item.get("python", None)
                # Skip specs which are not compatible
                if not abstract_spec.satisfies(candidate_spec):
                    continue

                if python_spec is not None and python_spec not in abstract_spec:
                    continue

                # Install every binary of the candidate entry, then test it
                for _, pkg_hash, pkg_sha256 in item["binaries"]:
                    self._install_by_hash(pkg_hash, pkg_sha256, index, bincache_platform)

                info = {}
                if test_fn(query_spec=abstract_spec, query_info=info):
                    self.last_search = info
                    return True
        return False

    def try_import(self, module, abstract_spec_str):
        test_fn, info = functools.partial(_try_import_from_store, module), {}
        # NOTE(review): last_search is not updated on this early-success path,
        # unlike try_search_path below — confirm this is intentional.
        if test_fn(query_spec=abstract_spec_str, query_info=info):
            return True

        tty.debug(f"Bootstrapping {module} from pre-built binaries")
        # Constrain the spec to depend on the currently running Python
        abstract_spec, bincache_platform = self._spec_and_platform(
            abstract_spec_str + " ^" + spec_for_current_python()
        )
        data = self._read_metadata(module)
        return self._install_and_test(abstract_spec, bincache_platform, data, test_fn)

    def try_search_path(self, executables, abstract_spec_str):
        test_fn, info = functools.partial(_executables_in_store, executables), {}
        if test_fn(query_spec=abstract_spec_str, query_info=info):
            self.last_search = info
            return True

        abstract_spec, bincache_platform = self._spec_and_platform(abstract_spec_str)
        tty.debug(f"Bootstrapping {abstract_spec.name} from pre-built binaries")
        data = self._read_metadata(abstract_spec.name)
        return self._install_and_test(abstract_spec, bincache_platform, data, test_fn)
||||||
|
|
||||||
|
|
||||||
|
@bootstrapper(bootstrapper_type="install")
class SourceBootstrapper(Bootstrapper):
    """Install the software needed during bootstrapping from sources."""

    def __init__(self, conf):
        super().__init__(conf)
        # Query info of the last successful search; None until one succeeds
        self.last_search = None
        # Unique scope name so multiple instances don't clash
        self.config_scope_name = f"bootstrap_source-{uuid.uuid4()}"

    def try_import(self, module, abstract_spec_str):
        info = {}
        if _try_import_from_store(module, abstract_spec_str, query_info=info):
            self.last_search = info
            return True

        tty.debug(f"Bootstrapping {module} from sources")

        # If we compile code from sources detecting a few build tools
        # might reduce compilation time by a fair amount
        _add_externals_if_missing()

        # Try to build and install from sources
        with spack_python_interpreter():
            # Add hint to use frontend operating system on Cray
            concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python())

            if module == "clingo":
                # TODO: remove when the old concretizer is deprecated  # pylint: disable=fixme
                # clingo cannot be concretized with clingo, so fall back to
                # the original concretizer here
                concrete_spec._old_concretize(  # pylint: disable=protected-access
                    deprecation_warning=False
                )
            else:
                concrete_spec.concretize()

        msg = "[BOOTSTRAP MODULE {0}] Try installing '{1}' from sources"
        tty.debug(msg.format(module, abstract_spec_str))

        # Install the spec that should make the module importable
        with spack.config.override(self.mirror_scope):
            concrete_spec.package.do_install(fail_fast=True)

        if _try_import_from_store(module, query_spec=concrete_spec, query_info=info):
            self.last_search = info
            return True
        return False

    def try_search_path(self, executables, abstract_spec_str):
        info = {}
        if _executables_in_store(executables, abstract_spec_str, query_info=info):
            self.last_search = info
            return True

        tty.debug(f"Bootstrapping {abstract_spec_str} from sources")

        # If we compile code from sources detecting a few build tools
        # might reduce compilation time by a fair amount
        _add_externals_if_missing()

        concrete_spec = spack.spec.Spec(abstract_spec_str)
        if concrete_spec.name == "patchelf":
            # patchelf is needed before clingo is available, so the old
            # concretizer must be used here as well
            concrete_spec._old_concretize(  # pylint: disable=protected-access
                deprecation_warning=False
            )
        else:
            concrete_spec.concretize()

        msg = "[BOOTSTRAP] Try installing '{0}' from sources"
        tty.debug(msg.format(abstract_spec_str))
        with spack.config.override(self.mirror_scope):
            concrete_spec.package.do_install()
        if _executables_in_store(executables, concrete_spec, query_info=info):
            self.last_search = info
            return True
        return False
||||||
|
|
||||||
|
|
||||||
|
def create_bootstrapper(conf):
    """Return a bootstrap object built according to the configuration argument"""
    # Dispatch on the "type" key to the class registered via @bootstrapper
    return _bootstrap_methods[conf["type"]](conf)
||||||
|
|
||||||
|
|
||||||
|
def source_is_enabled_or_raise(conf):
    """Raise ValueError if the source is not enabled for bootstrapping"""
    name = conf["name"]
    trusted = spack.config.get("bootstrap:trusted")
    # A source is enabled only when it is explicitly trusted in configuration
    if not trusted.get(name, False):
        raise ValueError("source is not trusted")
||||||
|
|
||||||
|
|
||||||
|
def ensure_module_importable_or_raise(module, abstract_spec=None):
    """Make the requested module available for import, or raise.

    This function tries to import a Python module in the current interpreter
    using, in order, the methods configured in bootstrap.yaml.

    If none of the methods succeed, an exception is raised. The function exits
    on first success.

    Args:
        module (str): module to be imported in the current interpreter
        abstract_spec (str): abstract spec that might provide the module. If not
            given it defaults to "module"

    Raises:
        ImportError: if the module couldn't be imported
    """
    # If we can import it already, that's great
    tty.debug(f"[BOOTSTRAP MODULE {module}] Try importing from Python")
    if _python_import(module):
        return

    abstract_spec = abstract_spec or module
    handler = GroupedExceptionHandler()

    # Try each configured source in order, collecting failures as we go
    for source in bootstrapping_sources():
        with handler.forward(source["name"]):
            source_is_enabled_or_raise(source)
            if create_bootstrapper(source).try_import(module, abstract_spec):
                return

    assert handler, (
        f"expected at least one exception to have been raised at this point: "
        f"while bootstrapping {module}"
    )
    msg = f'cannot bootstrap the "{module}" Python module '
    if abstract_spec:
        msg += f'from spec "{abstract_spec}" '
    # Include tracebacks only in debug mode, to keep the default output short
    msg += handler.grouped_message(with_tracebacks=tty.is_debug())
    msg += "\nRun `spack --debug ...` for more detailed errors"
    raise ImportError(msg)
||||||
|
|
||||||
|
|
||||||
|
def ensure_executables_in_path_or_raise(executables, abstract_spec, cmd_check=None):
    """Ensure that some executables are in path or raise.

    Args:
        executables (list): list of executables to be searched in the PATH,
            in order. The function exits on the first one found.
        abstract_spec (str): abstract spec that provides the executables
        cmd_check (object): callable predicate that takes a
            ``spack.util.executable.Executable`` command and validate it. Should return
            ``True`` if the executable is acceptable, ``False`` otherwise.
            Can be used to, e.g., ensure a suitable version of the command before
            accepting for bootstrapping.

    Raises:
        RuntimeError: if the executables cannot be ensured to be in PATH

    Return:
        Executable object

    """
    # First try whatever is already on the PATH
    cmd = spack.util.executable.which(*executables)
    if cmd:
        if not cmd_check or cmd_check(cmd):
            return cmd

    executables_str = ", ".join(executables)

    exception_handler = GroupedExceptionHandler()

    # Otherwise, try each configured bootstrapping source in order
    for current_config in bootstrapping_sources():
        with exception_handler.forward(current_config["name"]):
            source_is_enabled_or_raise(current_config)
            current_bootstrapper = create_bootstrapper(current_config)
            if current_bootstrapper.try_search_path(executables, abstract_spec):
                # Additional environment variables needed
                concrete_spec, cmd = (
                    current_bootstrapper.last_search["spec"],
                    current_bootstrapper.last_search["command"],
                )
                # Accumulate env modifications from the spec and its link/run
                # dependencies, so the command runs with a complete environment
                env_mods = spack.util.environment.EnvironmentModifications()
                for dep in concrete_spec.traverse(
                    root=True, order="post", deptype=("link", "run")
                ):
                    env_mods.extend(
                        spack.user_environment.environment_modifications_for_spec(
                            dep, set_package_py_globals=False
                        )
                    )
                cmd.add_default_envmod(env_mods)
                return cmd

    assert exception_handler, (
        f"expected at least one exception to have been raised at this point: "
        f"while bootstrapping {executables_str}"
    )
    msg = f"cannot bootstrap any of the {executables_str} executables "
    if abstract_spec:
        msg += f'from spec "{abstract_spec}" '
    # Include tracebacks only in debug mode, to keep the default output short
    if tty.is_debug():
        msg += exception_handler.grouped_message(with_tracebacks=True)
    else:
        msg += exception_handler.grouped_message(with_tracebacks=False)
    msg += "\nRun `spack --debug ...` for more detailed errors"
    raise RuntimeError(msg)
||||||
|
|
||||||
|
|
||||||
|
def _add_externals_if_missing():
    """Detect a few common build tools on the host and register them as
    external packages in the bootstrap scope, to speed up source builds.
    """
    # cmake and bison are needed by clingo; gawk is needed by GnuPG
    search_list = [
        spack.repo.path.get_pkg_class(pkg_name) for pkg_name in ("cmake", "bison", "gawk")
    ]
    if IS_WINDOWS:
        search_list.append(spack.repo.path.get_pkg_class("winbison"))
    detected_packages = spack.detection.by_executable(search_list)
    spack.detection.update_configuration(detected_packages, scope="bootstrap")
||||||
|
|
||||||
|
|
||||||
|
def clingo_root_spec():
    """Return the root spec used to bootstrap clingo"""
    # Use the dedicated clingo-bootstrap package, with Python bindings enabled
    spec_str = "clingo-bootstrap@spack+python"
    return _root_spec(spec_str)
||||||
|
|
||||||
|
|
||||||
|
def ensure_clingo_importable_or_raise():
    """Ensure that the clingo module is available for import."""
    abstract_spec = clingo_root_spec()
    ensure_module_importable_or_raise(module="clingo", abstract_spec=abstract_spec)
||||||
|
|
||||||
|
|
||||||
|
def gnupg_root_spec():
    """Return the root spec used to bootstrap GnuPG"""
    spec_str = "gnupg@2.3:"
    return _root_spec(spec_str)
||||||
|
|
||||||
|
|
||||||
|
def ensure_gpg_in_path_or_raise():
    """Ensure gpg or gpg2 are in the PATH or raise."""
    abstract_spec = gnupg_root_spec()
    return ensure_executables_in_path_or_raise(
        executables=["gpg2", "gpg"], abstract_spec=abstract_spec
    )
||||||
|
|
||||||
|
|
||||||
|
def patchelf_root_spec():
    """Return the root spec used to bootstrap patchelf"""
    # 0.13.1 is the last version not to require C++17.
    spec_str = "patchelf@0.13.1:"
    return _root_spec(spec_str)
||||||
|
|
||||||
|
|
||||||
|
def verify_patchelf(patchelf):
    """Return True if the patchelf executable is recent enough (>= 0.13.1).

    Older patchelf versions can produce broken binaries, so we verify the
    version here.

    Arguments:

        patchelf (spack.util.executable.Executable): patchelf executable
    """
    out = patchelf("--version", output=str, error=os.devnull, fail_on_error=False).strip()
    if patchelf.returncode != 0:
        return False
    # Expected output shape: "patchelf <version>"
    tokens = out.split(" ")
    if len(tokens) < 2:
        return False
    try:
        detected_version = spack.version.Version(tokens[1])
    except ValueError:
        # Unparseable version string: reject the executable
        return False
    return detected_version >= spack.version.Version("0.13.1")
||||||
|
|
||||||
|
|
||||||
|
def ensure_patchelf_in_path_or_raise():
    """Ensure patchelf is in the PATH or raise."""
    # The old concretizer is not smart and we're doing its job: if the latest
    # patchelf does not concretize because the compiler doesn't support C++17,
    # we try to concretize again with an upperbound @:13.
    try:
        abstract_spec = patchelf_root_spec()
        return ensure_executables_in_path_or_raise(
            executables=["patchelf"], abstract_spec=abstract_spec, cmd_check=verify_patchelf
        )
    except RuntimeError:
        fallback_spec = _root_spec("patchelf@0.13.1:0.13")
        return ensure_executables_in_path_or_raise(
            executables=["patchelf"], abstract_spec=fallback_spec, cmd_check=verify_patchelf
        )
||||||
|
|
||||||
|
|
||||||
|
def ensure_core_dependencies():
    """Ensure the presence of all the core dependencies."""
    # patchelf is bootstrapped only on Linux, before clingo, since the clingo
    # binaries may need relocation
    if sys.platform.lower() == "linux":
        ensure_patchelf_in_path_or_raise()
    ensure_clingo_importable_or_raise()
    ensure_gpg_in_path_or_raise()
||||||
|
|
||||||
|
|
||||||
|
def all_core_root_specs():
    """Return a list of all the core root specs that may be used to bootstrap Spack"""
    return [
        clingo_root_spec(),
        gnupg_root_spec(),
        patchelf_root_spec(),
    ]
||||||
|
|
||||||
|
|
||||||
|
def bootstrapping_sources(scope=None):
    """Return the list of configured sources of software for bootstrapping Spack

    Args:
        scope (str or None): if a valid configuration scope is given, return the
            list only from that scope
    """
    configured = spack.config.get("bootstrap:sources", default=None, scope=scope) or []
    sources = []
    for entry in configured:
        # Shallow-copy the entry, then overlay the contents of metadata.yaml
        source = copy.copy(entry)
        metadata_dir = spack.util.path.canonicalize_path(entry["metadata"])
        metadata_yaml = os.path.join(metadata_dir, METADATA_YAML_FILENAME)
        with open(metadata_yaml, encoding="utf-8") as stream:
            source.update(spack.util.spack_yaml.load(stream))
        sources.append(source)
    return sources
||||||
191
lib/spack/spack/bootstrap/environment.py
Normal file
191
lib/spack/spack/bootstrap/environment.py
Normal file
@@ -0,0 +1,191 @@
|
|||||||
|
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||||
|
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
"""Bootstrap non-core Spack dependencies from an environment."""
|
||||||
|
import glob
|
||||||
|
import hashlib
|
||||||
|
import os
|
||||||
|
import pathlib
|
||||||
|
import sys
|
||||||
|
import warnings
|
||||||
|
|
||||||
|
import archspec.cpu
|
||||||
|
|
||||||
|
from llnl.util import tty
|
||||||
|
|
||||||
|
import spack.build_environment
|
||||||
|
import spack.environment
|
||||||
|
import spack.tengine
|
||||||
|
import spack.util.executable
|
||||||
|
|
||||||
|
from ._common import _root_spec
|
||||||
|
from .config import root_path, spec_for_current_python, store_path
|
||||||
|
|
||||||
|
|
||||||
|
class BootstrapEnvironment(spack.environment.Environment):
|
||||||
|
"""Environment to install dependencies of Spack for a given interpreter and architecture"""
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def spack_dev_requirements(cls):
|
||||||
|
"""Spack development requirements"""
|
||||||
|
return [
|
||||||
|
isort_root_spec(),
|
||||||
|
mypy_root_spec(),
|
||||||
|
black_root_spec(),
|
||||||
|
flake8_root_spec(),
|
||||||
|
pytest_root_spec(),
|
||||||
|
]
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def environment_root(cls):
|
||||||
|
"""Environment root directory"""
|
||||||
|
bootstrap_root_path = root_path()
|
||||||
|
python_part = spec_for_current_python().replace("@", "")
|
||||||
|
arch_part = archspec.cpu.host().family
|
||||||
|
interpreter_part = hashlib.md5(sys.exec_prefix.encode()).hexdigest()[:5]
|
||||||
|
environment_dir = f"{python_part}-{arch_part}-{interpreter_part}"
|
||||||
|
return pathlib.Path(
|
||||||
|
spack.util.path.canonicalize_path(
|
||||||
|
os.path.join(bootstrap_root_path, "environments", environment_dir)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def view_root(cls):
|
||||||
|
"""Location of the view"""
|
||||||
|
return cls.environment_root().joinpath("view")
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def pythonpaths(cls):
|
||||||
|
"""Paths to be added to sys.path or PYTHONPATH"""
|
||||||
|
python_dir_part = f"python{'.'.join(str(x) for x in sys.version_info[:2])}"
|
||||||
|
glob_expr = str(cls.view_root().joinpath("**", python_dir_part, "**"))
|
||||||
|
result = glob.glob(glob_expr)
|
||||||
|
if not result:
|
||||||
|
msg = f"Cannot find any Python path in {cls.view_root()}"
|
||||||
|
warnings.warn(msg)
|
||||||
|
return result
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def bin_dirs(cls):
|
||||||
|
"""Paths to be added to PATH"""
|
||||||
|
return [cls.view_root().joinpath("bin")]
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def spack_yaml(cls):
|
||||||
|
"""Environment spack.yaml file"""
|
||||||
|
return cls.environment_root().joinpath("spack.yaml")
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
if not self.spack_yaml().exists():
|
||||||
|
self._write_spack_yaml_file()
|
||||||
|
super().__init__(self.environment_root())
|
||||||
|
|
||||||
|
def update_installations(self):
|
||||||
|
"""Update the installations of this environment.
|
||||||
|
|
||||||
|
The update is done using a depfile on Linux and macOS, and using the ``install_all``
|
||||||
|
method of environments on Windows.
|
||||||
|
"""
|
||||||
|
with tty.SuppressOutput(msg_enabled=False, warn_enabled=False):
|
||||||
|
specs = self.concretize()
|
||||||
|
if specs:
|
||||||
|
colorized_specs = [
|
||||||
|
spack.spec.Spec(x).cformat("{name}{@version}")
|
||||||
|
for x in self.spack_dev_requirements()
|
||||||
|
]
|
||||||
|
tty.msg(f"[BOOTSTRAPPING] Installing dependencies ({', '.join(colorized_specs)})")
|
||||||
|
self.write(regenerate=False)
|
||||||
|
if sys.platform == "win32":
|
||||||
|
self.install_all()
|
||||||
|
else:
|
||||||
|
self._install_with_depfile()
|
||||||
|
self.write(regenerate=True)
|
||||||
|
|
||||||
|
def update_syspath_and_environ(self):
|
||||||
|
"""Update ``sys.path`` and the PATH, PYTHONPATH environment variables to point to
|
||||||
|
the environment view.
|
||||||
|
"""
|
||||||
|
# Do minimal modifications to sys.path and environment variables. In particular, pay
|
||||||
|
# attention to have the smallest PYTHONPATH / sys.path possible, since that may impact
|
||||||
|
# the performance of the current interpreter
|
||||||
|
sys.path.extend(self.pythonpaths())
|
||||||
|
os.environ["PATH"] = os.pathsep.join(
|
||||||
|
[str(x) for x in self.bin_dirs()] + os.environ.get("PATH", "").split(os.pathsep)
|
||||||
|
)
|
||||||
|
os.environ["PYTHONPATH"] = os.pathsep.join(
|
||||||
|
os.environ.get("PYTHONPATH", "").split(os.pathsep)
|
||||||
|
+ [str(x) for x in self.pythonpaths()]
|
||||||
|
)
|
||||||
|
|
||||||
|
def _install_with_depfile(self):
|
||||||
|
spackcmd = spack.util.executable.which("spack")
|
||||||
|
spackcmd(
|
||||||
|
"-e",
|
||||||
|
str(self.environment_root()),
|
||||||
|
"env",
|
||||||
|
"depfile",
|
||||||
|
"-o",
|
||||||
|
str(self.environment_root().joinpath("Makefile")),
|
||||||
|
)
|
||||||
|
make = spack.util.executable.which("make")
|
||||||
|
kwargs = {}
|
||||||
|
if not tty.is_debug():
|
||||||
|
kwargs = {"output": os.devnull, "error": os.devnull}
|
||||||
|
make(
|
||||||
|
"-C",
|
||||||
|
str(self.environment_root()),
|
||||||
|
"-j",
|
||||||
|
str(spack.build_environment.determine_number_of_jobs(parallel=True)),
|
||||||
|
**kwargs,
|
||||||
|
)
|
||||||
|
|
||||||
|
def _write_spack_yaml_file(self):
    """Render the bootstrap spack.yaml template and write it under the environment root."""
    tty.msg(
        "[BOOTSTRAPPING] Spack has missing dependencies, creating a bootstrapping environment"
    )
    template = spack.tengine.make_environment().get_template("bootstrap/spack.yaml")
    # Values interpolated into the spack.yaml template
    context = {
        "python_spec": spec_for_current_python(),
        "python_prefix": sys.exec_prefix,
        "architecture": archspec.cpu.host().family,
        "environment_path": self.environment_root(),
        "environment_specs": self.spack_dev_requirements(),
        "store_path": store_path(),
    }
    self.environment_root().mkdir(parents=True, exist_ok=True)
    self.spack_yaml().write_text(template.render(context), encoding="utf-8")
|
||||||
|
|
||||||
|
|
||||||
|
def isort_root_spec():
    """Return the root spec used to bootstrap isort"""
    spec_str = "py-isort@4.3.5:"
    return _root_spec(spec_str)
|
||||||
|
|
||||||
|
|
||||||
|
def mypy_root_spec():
    """Return the root spec used to bootstrap mypy"""
    spec_str = "py-mypy@0.900:"
    return _root_spec(spec_str)
|
||||||
|
|
||||||
|
|
||||||
|
def black_root_spec():
    """Return the root spec used to bootstrap black"""
    spec_str = "py-black"
    return _root_spec(spec_str)
|
||||||
|
|
||||||
|
|
||||||
|
def flake8_root_spec():
    """Return the root spec used to bootstrap flake8"""
    spec_str = "py-flake8"
    return _root_spec(spec_str)
|
||||||
|
|
||||||
|
|
||||||
|
def pytest_root_spec():
    """Return the root spec used to bootstrap pytest"""
    # Fixed copy-paste error: the docstring previously claimed this bootstraps flake8
    return _root_spec("py-pytest")
|
||||||
|
|
||||||
|
|
||||||
|
def ensure_environment_dependencies():
    """Install the Spack dependencies tracked by the bootstrap environment, and make
    them available to the running interpreter.
    """
    with BootstrapEnvironment() as bootstrap_env:
        bootstrap_env.update_installations()
        bootstrap_env.update_syspath_and_environ()
|
||||||
169
lib/spack/spack/bootstrap/status.py
Normal file
169
lib/spack/spack/bootstrap/status.py
Normal file
@@ -0,0 +1,169 @@
|
|||||||
|
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
|
||||||
|
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
"""Query the status of bootstrapping on this machine"""
|
||||||
|
import platform
|
||||||
|
|
||||||
|
import spack.util.executable
|
||||||
|
|
||||||
|
from ._common import _executables_in_store, _python_import, _try_import_from_store
|
||||||
|
from .config import ensure_bootstrap_configuration
|
||||||
|
from .core import clingo_root_spec, patchelf_root_spec
|
||||||
|
from .environment import (
|
||||||
|
BootstrapEnvironment,
|
||||||
|
black_root_spec,
|
||||||
|
flake8_root_spec,
|
||||||
|
isort_root_spec,
|
||||||
|
mypy_root_spec,
|
||||||
|
pytest_root_spec,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _required_system_executable(exes, msg):
    """Search for an executable in the system path only.

    Returns a ``(found, error_message)`` tuple; ``error_message`` is None on success.
    """
    candidates = (exes,) if isinstance(exes, str) else exes
    found = bool(spack.util.executable.which_string(*candidates))
    return (True, None) if found else (False, msg)
|
||||||
|
|
||||||
|
|
||||||
|
def _required_executable(exes, query_spec, msg):
    """Search for an executable in the system path or in the bootstrap store.

    Returns a ``(found, error_message)`` tuple; ``error_message`` is None on success.
    """
    candidates = (exes,) if isinstance(exes, str) else exes
    # Only query the bootstrap store if nothing turned up on the system path
    if spack.util.executable.which_string(*candidates) or _executables_in_store(
        candidates, query_spec
    ):
        return True, None
    return False, msg
|
||||||
|
|
||||||
|
|
||||||
|
def _required_python_module(module, query_spec, msg):
    """Check whether a Python module is importable in the current interpreter,
    or whether it can be loaded from the bootstrap store.

    Returns a ``(found, error_message)`` tuple; ``error_message`` is None on success.
    """
    if _python_import(module):
        return True, None
    if _try_import_from_store(module, query_spec):
        return True, None
    return False, msg
|
||||||
|
|
||||||
|
|
||||||
|
def _missing(name, purpose, system_only=True):
|
||||||
|
"""Message to be printed if an executable is not found"""
|
||||||
|
msg = '[{2}] MISSING "{0}": {1}'
|
||||||
|
if not system_only:
|
||||||
|
return msg.format(name, purpose, "@*y{{B}}")
|
||||||
|
return msg.format(name, purpose, "@*y{{-}}")
|
||||||
|
|
||||||
|
|
||||||
|
def _core_requirements():
    """Return the requirement checks for Spack's core functionality."""
    # Executables that are not bootstrapped yet, so they must come from the system
    system_exes = {
        "make": _missing("make", "required to build software from sources"),
        "patch": _missing("patch", "required to patch source code before building"),
        "bash": _missing("bash", "required for Spack compiler wrapper"),
        "tar": _missing("tar", "required to manage code archives"),
        "gzip": _missing("gzip", "required to compress/decompress code archives"),
        "unzip": _missing("unzip", "required to compress/decompress code archives"),
        "bzip2": _missing("bzip2", "required to compress/decompress code archives"),
        "git": _missing("git", "required to fetch/manage git repositories"),
    }
    if platform.system().lower() == "linux":
        system_exes["xz"] = _missing("xz", "required to compress/decompress code archives")

    checks = [_required_system_executable(exe, msg) for exe, msg in system_exes.items()]
    # Python modules, which may instead be loaded from the bootstrap store
    checks.append(
        _required_python_module(
            "clingo", clingo_root_spec(), _missing("clingo", "required to concretize specs", False)
        )
    )
    return checks
|
||||||
|
|
||||||
|
|
||||||
|
def _buildcache_requirements():
    """Return the requirement checks for creating and extracting binary packages."""
    buildcache_exes = {
        "file": _missing("file", "required to analyze files for buildcaches"),
        ("gpg2", "gpg"): _missing("gpg2", "required to sign/verify buildcaches", False),
    }
    if platform.system().lower() == "darwin":
        buildcache_exes["otool"] = _missing("otool", "required to relocate binaries")

    # Executables that are not bootstrapped yet
    checks = [_required_system_executable(exe, msg) for exe, msg in buildcache_exes.items()]

    # On Linux relocation additionally needs patchelf, which can be bootstrapped
    if platform.system().lower() == "linux":
        checks.append(
            _required_executable(
                "patchelf",
                patchelf_root_spec(),
                _missing("patchelf", "required to relocate binaries", False),
            )
        )

    return checks
|
||||||
|
|
||||||
|
|
||||||
|
def _optional_requirements():
    """Return the requirement checks for optional Spack features."""
    # Executables that are not bootstrapped yet
    optional_exes = {
        "zstd": _missing("zstd", "required to compress/decompress code archives"),
        "svn": _missing("svn", "required to manage subversion repositories"),
        "hg": _missing("hg", "required to manage mercurial repositories"),
    }
    return [_required_system_executable(exe, msg) for exe, msg in optional_exes.items()]
|
||||||
|
|
||||||
|
|
||||||
|
def _development_requirements():
    """Return the requirement checks for Spack development tools."""
    # If a bootstrap environment exists, trigger its environment modifications
    # first so the checks below can see its executables and Python modules
    if BootstrapEnvironment.spack_yaml().exists():
        with BootstrapEnvironment() as bootstrap_env:
            bootstrap_env.update_syspath_and_environ()

    checks = [
        _required_executable(
            "isort", isort_root_spec(), _missing("isort", "required for style checks", False)
        ),
        _required_executable(
            "mypy", mypy_root_spec(), _missing("mypy", "required for style checks", False)
        ),
        _required_executable(
            "flake8", flake8_root_spec(), _missing("flake8", "required for style checks", False)
        ),
        _required_executable(
            "black", black_root_spec(), _missing("black", "required for code formatting", False)
        ),
        _required_python_module(
            "pytest", pytest_root_spec(), _missing("pytest", "required to run unit-test", False)
        ),
    ]
    return checks
|
||||||
|
|
||||||
|
|
||||||
|
def status_message(section):
    """Return a status message for a section of Spack functionality, plus a bool
    that is True if the section has missing dependencies.

    Args:
        section (str): either 'core' or 'buildcache' or 'optional' or 'develop'
    """
    pass_token, fail_token = "@*g{[PASS]}", "@*r{[FAIL]}"

    # Each section maps to its header (with a {0} placeholder for the pass/fail
    # token) and the callable producing its list of requirement checks
    spack_sections = {
        "core": ("{0} @*{{Core Functionalities}}", _core_requirements),
        "buildcache": ("{0} @*{{Binary packages}}", _buildcache_requirements),
        "optional": ("{0} @*{{Optional Features}}", _optional_requirements),
        "develop": ("{0} @*{{Development Dependencies}}", _development_requirements),
    }
    msg, required_software = spack_sections[section]

    with ensure_bootstrap_configuration():
        missing_software = False
        for found, err_msg in required_software():
            if not found:
                missing_software = True
                msg += "\n " + err_msg
        msg += "\n"
        # Second format pass: fills the {0} header token and collapses the
        # doubled braces produced by _missing()
        msg = msg.format(fail_token if missing_software else pass_token)
    return msg, missing_software
|
||||||
@@ -33,6 +33,7 @@
|
|||||||
calls you can make from within the install() function.
|
calls you can make from within the install() function.
|
||||||
"""
|
"""
|
||||||
import inspect
|
import inspect
|
||||||
|
import io
|
||||||
import multiprocessing
|
import multiprocessing
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
@@ -40,8 +41,7 @@
|
|||||||
import sys
|
import sys
|
||||||
import traceback
|
import traceback
|
||||||
import types
|
import types
|
||||||
|
from typing import List, Tuple
|
||||||
from six import StringIO
|
|
||||||
|
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
from llnl.util.filesystem import install, install_tree, mkdirp
|
from llnl.util.filesystem import install, install_tree, mkdirp
|
||||||
@@ -285,6 +285,23 @@ def clean_environment():
|
|||||||
return env
|
return env
|
||||||
|
|
||||||
|
|
||||||
|
def _add_werror_handling(keep_werror, env):
|
||||||
|
keep_flags = set()
|
||||||
|
# set of pairs
|
||||||
|
replace_flags: List[Tuple[str, str]] = []
|
||||||
|
if keep_werror == "all":
|
||||||
|
keep_flags.add("-Werror*")
|
||||||
|
else:
|
||||||
|
if keep_werror == "specific":
|
||||||
|
keep_flags.add("-Werror-*")
|
||||||
|
keep_flags.add("-Werror=*")
|
||||||
|
# This extra case is to handle -Werror-implicit-function-declaration
|
||||||
|
replace_flags.append(("-Werror-", "-Wno-error="))
|
||||||
|
replace_flags.append(("-Werror", "-Wno-error"))
|
||||||
|
env.set("SPACK_COMPILER_FLAGS_KEEP", "|".join(keep_flags))
|
||||||
|
env.set("SPACK_COMPILER_FLAGS_REPLACE", " ".join(["|".join(item) for item in replace_flags]))
|
||||||
|
|
||||||
|
|
||||||
def set_compiler_environment_variables(pkg, env):
|
def set_compiler_environment_variables(pkg, env):
|
||||||
assert pkg.spec.concrete
|
assert pkg.spec.concrete
|
||||||
compiler = pkg.compiler
|
compiler = pkg.compiler
|
||||||
@@ -331,6 +348,13 @@ def set_compiler_environment_variables(pkg, env):
|
|||||||
env.set("SPACK_DTAGS_TO_STRIP", compiler.disable_new_dtags)
|
env.set("SPACK_DTAGS_TO_STRIP", compiler.disable_new_dtags)
|
||||||
env.set("SPACK_DTAGS_TO_ADD", compiler.enable_new_dtags)
|
env.set("SPACK_DTAGS_TO_ADD", compiler.enable_new_dtags)
|
||||||
|
|
||||||
|
if pkg.keep_werror is not None:
|
||||||
|
keep_werror = pkg.keep_werror
|
||||||
|
else:
|
||||||
|
keep_werror = spack.config.get("config:flags:keep_werror")
|
||||||
|
|
||||||
|
_add_werror_handling(keep_werror, env)
|
||||||
|
|
||||||
# Set the target parameters that the compiler will add
|
# Set the target parameters that the compiler will add
|
||||||
# Don't set on cray platform because the targeting module handles this
|
# Don't set on cray platform because the targeting module handles this
|
||||||
if spec.satisfies("platform=cray"):
|
if spec.satisfies("platform=cray"):
|
||||||
@@ -353,10 +377,8 @@ def set_compiler_environment_variables(pkg, env):
|
|||||||
if isinstance(pkg.flag_handler, types.FunctionType):
|
if isinstance(pkg.flag_handler, types.FunctionType):
|
||||||
handler = pkg.flag_handler
|
handler = pkg.flag_handler
|
||||||
else:
|
else:
|
||||||
if sys.version_info >= (3, 0):
|
handler = pkg.flag_handler.__func__
|
||||||
handler = pkg.flag_handler.__func__
|
|
||||||
else:
|
|
||||||
handler = pkg.flag_handler.im_func
|
|
||||||
injf, envf, bsf = handler(pkg, flag, spec.compiler_flags[flag][:])
|
injf, envf, bsf = handler(pkg, flag, spec.compiler_flags[flag][:])
|
||||||
inject_flags[flag] = injf or []
|
inject_flags[flag] = injf or []
|
||||||
env_flags[flag] = envf or []
|
env_flags[flag] = envf or []
|
||||||
@@ -542,14 +564,18 @@ def determine_number_of_jobs(
|
|||||||
return min(max_cpus, config_default)
|
return min(max_cpus, config_default)
|
||||||
|
|
||||||
|
|
||||||
def _set_variables_for_single_module(pkg, module):
|
def set_module_variables_for_package(pkg):
|
||||||
"""Helper function to set module variables for single module."""
|
"""Populate the Python module of a package with some useful global names.
|
||||||
|
This makes things easier for package writers.
|
||||||
|
"""
|
||||||
# Put a marker on this module so that it won't execute the body of this
|
# Put a marker on this module so that it won't execute the body of this
|
||||||
# function again, since it is not needed
|
# function again, since it is not needed
|
||||||
marker = "_set_run_already_called"
|
marker = "_set_run_already_called"
|
||||||
if getattr(module, marker, False):
|
if getattr(pkg.module, marker, False):
|
||||||
return
|
return
|
||||||
|
|
||||||
|
module = ModuleChangePropagator(pkg)
|
||||||
|
|
||||||
jobs = determine_number_of_jobs(parallel=pkg.parallel)
|
jobs = determine_number_of_jobs(parallel=pkg.parallel)
|
||||||
|
|
||||||
m = module
|
m = module
|
||||||
@@ -569,6 +595,7 @@ def _set_variables_for_single_module(pkg, module):
|
|||||||
|
|
||||||
if sys.platform == "win32":
|
if sys.platform == "win32":
|
||||||
m.nmake = Executable("nmake")
|
m.nmake = Executable("nmake")
|
||||||
|
m.msbuild = Executable("msbuild")
|
||||||
# Standard CMake arguments
|
# Standard CMake arguments
|
||||||
m.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
|
m.std_cmake_args = spack.build_systems.cmake.CMakeBuilder.std_args(pkg)
|
||||||
m.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
|
m.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
|
||||||
@@ -616,20 +643,7 @@ def static_to_shared_library(static_lib, shared_lib=None, **kwargs):
|
|||||||
# Put a marker on this module so that it won't execute the body of this
|
# Put a marker on this module so that it won't execute the body of this
|
||||||
# function again, since it is not needed
|
# function again, since it is not needed
|
||||||
setattr(m, marker, True)
|
setattr(m, marker, True)
|
||||||
|
module.propagate_changes_to_mro()
|
||||||
|
|
||||||
def set_module_variables_for_package(pkg):
|
|
||||||
"""Populate the module scope of install() with some useful functions.
|
|
||||||
This makes things easier for package writers.
|
|
||||||
"""
|
|
||||||
# If a user makes their own package repo, e.g.
|
|
||||||
# spack.pkg.mystuff.libelf.Libelf, and they inherit from an existing class
|
|
||||||
# like spack.pkg.original.libelf.Libelf, then set the module variables
|
|
||||||
# for both classes so the parent class can still use them if it gets
|
|
||||||
# called. parent_class_modules includes pkg.module.
|
|
||||||
modules = parent_class_modules(pkg.__class__)
|
|
||||||
for mod in modules:
|
|
||||||
_set_variables_for_single_module(pkg, mod)
|
|
||||||
|
|
||||||
|
|
||||||
def _static_to_shared_library(arch, compiler, static_lib, shared_lib=None, **kwargs):
|
def _static_to_shared_library(arch, compiler, static_lib, shared_lib=None, **kwargs):
|
||||||
@@ -739,25 +753,6 @@ def get_rpaths(pkg):
|
|||||||
return list(dedupe(filter_system_paths(rpaths)))
|
return list(dedupe(filter_system_paths(rpaths)))
|
||||||
|
|
||||||
|
|
||||||
def parent_class_modules(cls):
|
|
||||||
"""
|
|
||||||
Get list of superclass modules that descend from spack.package_base.PackageBase
|
|
||||||
|
|
||||||
Includes cls.__module__
|
|
||||||
"""
|
|
||||||
if not issubclass(cls, spack.package_base.PackageBase) or issubclass(
|
|
||||||
spack.package_base.PackageBase, cls
|
|
||||||
):
|
|
||||||
return []
|
|
||||||
result = []
|
|
||||||
module = sys.modules.get(cls.__module__)
|
|
||||||
if module:
|
|
||||||
result = [module]
|
|
||||||
for c in cls.__bases__:
|
|
||||||
result.extend(parent_class_modules(c))
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
def load_external_modules(pkg):
|
def load_external_modules(pkg):
|
||||||
"""Traverse a package's spec DAG and load any external modules.
|
"""Traverse a package's spec DAG and load any external modules.
|
||||||
|
|
||||||
@@ -978,22 +973,9 @@ def add_modifications_for_dep(dep):
|
|||||||
if set_package_py_globals:
|
if set_package_py_globals:
|
||||||
set_module_variables_for_package(dpkg)
|
set_module_variables_for_package(dpkg)
|
||||||
|
|
||||||
# Allow dependencies to modify the module
|
current_module = ModuleChangePropagator(spec.package)
|
||||||
# Get list of modules that may need updating
|
dpkg.setup_dependent_package(current_module, spec)
|
||||||
modules = []
|
current_module.propagate_changes_to_mro()
|
||||||
for cls in inspect.getmro(type(spec.package)):
|
|
||||||
module = cls.module
|
|
||||||
if module == spack.package_base:
|
|
||||||
break
|
|
||||||
modules.append(module)
|
|
||||||
|
|
||||||
# Execute changes as if on a single module
|
|
||||||
# copy dict to ensure prior changes are available
|
|
||||||
changes = spack.util.pattern.Bunch()
|
|
||||||
dpkg.setup_dependent_package(changes, spec)
|
|
||||||
|
|
||||||
for module in modules:
|
|
||||||
module.__dict__.update(changes.__dict__)
|
|
||||||
|
|
||||||
if context == "build":
|
if context == "build":
|
||||||
builder = spack.builder.create(dpkg)
|
builder = spack.builder.create(dpkg)
|
||||||
@@ -1271,6 +1253,8 @@ def make_stack(tb, stack=None):
|
|||||||
obj = frame.f_locals["self"]
|
obj = frame.f_locals["self"]
|
||||||
if isinstance(obj, spack.package_base.PackageBase):
|
if isinstance(obj, spack.package_base.PackageBase):
|
||||||
break
|
break
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
# We found obj, the Package implementation we care about.
|
# We found obj, the Package implementation we care about.
|
||||||
# Point out the location in the install method where we failed.
|
# Point out the location in the install method where we failed.
|
||||||
@@ -1352,7 +1336,7 @@ def __init__(self, msg, module, classname, traceback_string, log_name, log_type,
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def long_message(self):
|
def long_message(self):
|
||||||
out = StringIO()
|
out = io.StringIO()
|
||||||
out.write(self._long_message if self._long_message else "")
|
out.write(self._long_message if self._long_message else "")
|
||||||
|
|
||||||
have_log = self.log_name and os.path.exists(self.log_name)
|
have_log = self.log_name and os.path.exists(self.log_name)
|
||||||
@@ -1437,3 +1421,51 @@ def write_log_summary(out, log_type, log, last=None):
|
|||||||
# If no errors are found but warnings are, display warnings
|
# If no errors are found but warnings are, display warnings
|
||||||
out.write("\n%s found in %s log:\n" % (plural(nwar, "warning"), log_type))
|
out.write("\n%s found in %s log:\n" % (plural(nwar, "warning"), log_type))
|
||||||
out.write(make_log_context(warnings))
|
out.write(make_log_context(warnings))
|
||||||
|
|
||||||
|
|
||||||
|
class ModuleChangePropagator:
    """Wrapper class to accept changes to a package.py Python module, and propagate them in the
    MRO of the package.

    It is mainly used as a substitute of the ``package.py`` module, when calling the
    "setup_dependent_package" function during build environment setup.
    """

    #: Attribute names reserved for the propagator's own bookkeeping; they must
    #: never be forwarded to the wrapped module by ``__setattr__``
    _PROTECTED_NAMES = ("package", "current_module", "modules_in_mro", "_set_attributes")

    def __init__(self, package):
        self._set_self_attributes("package", package)
        self._set_self_attributes("current_module", package.module)

        # Modules for the classes in the MRO up to PackageBase
        # NOTE(review): assumes every class before spack.package_base in the MRO
        # exposes a ``module`` attribute — confirm for non-package mixins
        modules_in_mro = []
        for cls in inspect.getmro(type(package)):
            module = cls.module

            if module == self.current_module:
                continue

            if module == spack.package_base:
                break

            modules_in_mro.append(module)
        self._set_self_attributes("modules_in_mro", modules_in_mro)
        self._set_self_attributes("_set_attributes", {})

    def _set_self_attributes(self, key, value):
        # Bypass our own __setattr__, which forwards assignments to the wrapped module
        super().__setattr__(key, value)

    def __getattr__(self, item):
        # Attribute reads fall through to the package's own module
        return getattr(self.current_module, item)

    def __setattr__(self, key, value):
        if key in ModuleChangePropagator._PROTECTED_NAMES:
            # Bug fix: this used to ``return AttributeError(msg)``, which silently
            # discarded both the exception and the assignment; raise it instead.
            # The message also named the wrong class ("ModuleMonkeyPatcher").
            raise AttributeError(f'Cannot set attribute "{key}" in ModuleChangePropagator')

        # Apply the change to the current module and record it, so it can later
        # be replayed on the other modules of the MRO
        setattr(self.current_module, key, value)
        self._set_attributes[key] = value

    def propagate_changes_to_mro(self):
        """Replay every recorded assignment on the MRO modules collected at init time."""
        for module_in_mro in self.modules_in_mro:
            module_in_mro.__dict__.update(self._set_attributes)
|
||||||
|
|||||||
@@ -4,8 +4,6 @@
|
|||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
import os
|
import os
|
||||||
|
|
||||||
import six
|
|
||||||
|
|
||||||
import llnl.util.lang
|
import llnl.util.lang
|
||||||
|
|
||||||
import spack.builder
|
import spack.builder
|
||||||
@@ -26,7 +24,7 @@ def sanity_check_prefix(builder):
|
|||||||
pkg = builder.pkg
|
pkg = builder.pkg
|
||||||
|
|
||||||
def check_paths(path_list, filetype, predicate):
|
def check_paths(path_list, filetype, predicate):
|
||||||
if isinstance(path_list, six.string_types):
|
if isinstance(path_list, str):
|
||||||
path_list = [path_list]
|
path_list = [path_list]
|
||||||
|
|
||||||
for path in path_list:
|
for path in path_list:
|
||||||
@@ -89,11 +87,11 @@ def ensure_build_dependencies_or_raise(spec, dependencies, error_msg):
|
|||||||
)
|
)
|
||||||
|
|
||||||
for dep in missing_deps:
|
for dep in missing_deps:
|
||||||
msg += " depends_on('{0}', type='build', when='@{1} {2}')\n".format(
|
msg += ' depends_on("{0}", type="build", when="@{1} {2}")\n'.format(
|
||||||
dep, spec.version, "build_system=autotools"
|
dep, spec.version, "build_system=autotools"
|
||||||
)
|
)
|
||||||
|
|
||||||
msg += "\nUpdate the version (when='@{0}') as needed.".format(spec.version)
|
msg += '\nUpdate the version (when="@{0}") as needed.'.format(spec.version)
|
||||||
raise RuntimeError(msg)
|
raise RuntimeError(msg)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -7,7 +7,7 @@
|
|||||||
import os.path
|
import os.path
|
||||||
import stat
|
import stat
|
||||||
import subprocess
|
import subprocess
|
||||||
from typing import List # novm
|
from typing import List
|
||||||
|
|
||||||
import llnl.util.filesystem as fs
|
import llnl.util.filesystem as fs
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
@@ -138,7 +138,7 @@ class AutotoolsBuilder(BaseBuilder):
|
|||||||
patch_libtool = True
|
patch_libtool = True
|
||||||
|
|
||||||
#: Targets for ``make`` during the :py:meth:`~.AutotoolsBuilder.build` phase
|
#: Targets for ``make`` during the :py:meth:`~.AutotoolsBuilder.build` phase
|
||||||
build_targets = [] # type: List[str]
|
build_targets: List[str] = []
|
||||||
#: Targets for ``make`` during the :py:meth:`~.AutotoolsBuilder.install` phase
|
#: Targets for ``make`` during the :py:meth:`~.AutotoolsBuilder.install` phase
|
||||||
install_targets = ["install"]
|
install_targets = ["install"]
|
||||||
|
|
||||||
@@ -152,7 +152,7 @@ class AutotoolsBuilder(BaseBuilder):
|
|||||||
force_autoreconf = False
|
force_autoreconf = False
|
||||||
|
|
||||||
#: Options to be passed to autoreconf when using the default implementation
|
#: Options to be passed to autoreconf when using the default implementation
|
||||||
autoreconf_extra_args = [] # type: List[str]
|
autoreconf_extra_args: List[str] = []
|
||||||
|
|
||||||
#: If False deletes all the .la files in the prefix folder after the installation.
|
#: If False deletes all the .la files in the prefix folder after the installation.
|
||||||
#: If True instead it installs them.
|
#: If True instead it installs them.
|
||||||
|
|||||||
@@ -34,22 +34,22 @@ class CachedCMakeBuilder(CMakeBuilder):
|
|||||||
|
|
||||||
#: Phases of a Cached CMake package
|
#: Phases of a Cached CMake package
|
||||||
#: Note: the initconfig phase is used for developer builds as a final phase to stop on
|
#: Note: the initconfig phase is used for developer builds as a final phase to stop on
|
||||||
phases = ("initconfig", "cmake", "build", "install") # type: Tuple[str, ...]
|
phases: Tuple[str, ...] = ("initconfig", "cmake", "build", "install")
|
||||||
|
|
||||||
#: Names associated with package methods in the old build-system format
|
#: Names associated with package methods in the old build-system format
|
||||||
legacy_methods = CMakeBuilder.legacy_methods + (
|
legacy_methods: Tuple[str, ...] = CMakeBuilder.legacy_methods + (
|
||||||
"initconfig_compiler_entries",
|
"initconfig_compiler_entries",
|
||||||
"initconfig_mpi_entries",
|
"initconfig_mpi_entries",
|
||||||
"initconfig_hardware_entries",
|
"initconfig_hardware_entries",
|
||||||
"std_initconfig_entries",
|
"std_initconfig_entries",
|
||||||
"initconfig_package_entries",
|
"initconfig_package_entries",
|
||||||
) # type: Tuple[str, ...]
|
)
|
||||||
|
|
||||||
#: Names associated with package attributes in the old build-system format
|
#: Names associated with package attributes in the old build-system format
|
||||||
legacy_attributes = CMakeBuilder.legacy_attributes + (
|
legacy_attributes: Tuple[str, ...] = CMakeBuilder.legacy_attributes + (
|
||||||
"cache_name",
|
"cache_name",
|
||||||
"cache_path",
|
"cache_path",
|
||||||
) # type: Tuple[str, ...]
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def cache_name(self):
|
def cache_name(self):
|
||||||
|
|||||||
@@ -2,6 +2,7 @@
|
|||||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||||
#
|
#
|
||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
import collections.abc
|
||||||
import inspect
|
import inspect
|
||||||
import os
|
import os
|
||||||
import platform
|
import platform
|
||||||
@@ -9,10 +10,7 @@
|
|||||||
import sys
|
import sys
|
||||||
from typing import List, Tuple
|
from typing import List, Tuple
|
||||||
|
|
||||||
import six
|
|
||||||
|
|
||||||
import llnl.util.filesystem as fs
|
import llnl.util.filesystem as fs
|
||||||
from llnl.util.compat import Sequence
|
|
||||||
|
|
||||||
import spack.build_environment
|
import spack.build_environment
|
||||||
import spack.builder
|
import spack.builder
|
||||||
@@ -155,13 +153,13 @@ class CMakeBuilder(BaseBuilder):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
#: Phases of a CMake package
|
#: Phases of a CMake package
|
||||||
phases = ("cmake", "build", "install") # type: Tuple[str, ...]
|
phases: Tuple[str, ...] = ("cmake", "build", "install")
|
||||||
|
|
||||||
#: Names associated with package methods in the old build-system format
|
#: Names associated with package methods in the old build-system format
|
||||||
legacy_methods = ("cmake_args", "check") # type: Tuple[str, ...]
|
legacy_methods: Tuple[str, ...] = ("cmake_args", "check")
|
||||||
|
|
||||||
#: Names associated with package attributes in the old build-system format
|
#: Names associated with package attributes in the old build-system format
|
||||||
legacy_attributes = (
|
legacy_attributes: Tuple[str, ...] = (
|
||||||
"generator",
|
"generator",
|
||||||
"build_targets",
|
"build_targets",
|
||||||
"install_targets",
|
"install_targets",
|
||||||
@@ -171,7 +169,7 @@ class CMakeBuilder(BaseBuilder):
|
|||||||
"std_cmake_args",
|
"std_cmake_args",
|
||||||
"build_dirname",
|
"build_dirname",
|
||||||
"build_directory",
|
"build_directory",
|
||||||
) # type: Tuple[str, ...]
|
)
|
||||||
|
|
||||||
#: The build system generator to use.
|
#: The build system generator to use.
|
||||||
#:
|
#:
|
||||||
@@ -184,7 +182,7 @@ class CMakeBuilder(BaseBuilder):
|
|||||||
generator = "Ninja" if sys.platform == "win32" else "Unix Makefiles"
|
generator = "Ninja" if sys.platform == "win32" else "Unix Makefiles"
|
||||||
|
|
||||||
#: Targets to be used during the build phase
|
#: Targets to be used during the build phase
|
||||||
build_targets = [] # type: List[str]
|
build_targets: List[str] = []
|
||||||
#: Targets to be used during the install phase
|
#: Targets to be used during the install phase
|
||||||
install_targets = ["install"]
|
install_targets = ["install"]
|
||||||
#: Callback names for build-time test
|
#: Callback names for build-time test
|
||||||
@@ -302,7 +300,7 @@ def define(cmake_var, value):
|
|||||||
value = "ON" if value else "OFF"
|
value = "ON" if value else "OFF"
|
||||||
else:
|
else:
|
||||||
kind = "STRING"
|
kind = "STRING"
|
||||||
if isinstance(value, Sequence) and not isinstance(value, six.string_types):
|
if isinstance(value, collections.abc.Sequence) and not isinstance(value, str):
|
||||||
value = ";".join(str(v) for v in value)
|
value = ";".join(str(v) for v in value)
|
||||||
else:
|
else:
|
||||||
value = str(value)
|
value = str(value)
|
||||||
|
|||||||
@@ -35,10 +35,10 @@ class GenericBuilder(BaseBuilder):
|
|||||||
phases = ("install",)
|
phases = ("install",)
|
||||||
|
|
||||||
#: Names associated with package methods in the old build-system format
|
#: Names associated with package methods in the old build-system format
|
||||||
legacy_methods = () # type: Tuple[str, ...]
|
legacy_methods: Tuple[str, ...] = ()
|
||||||
|
|
||||||
#: Names associated with package attributes in the old build-system format
|
#: Names associated with package attributes in the old build-system format
|
||||||
legacy_attributes = ("archive_files",) # type: Tuple[str, ...]
|
legacy_attributes: Tuple[str, ...] = ("archive_files",)
|
||||||
|
|
||||||
# On macOS, force rpaths for shared library IDs and remove duplicate rpaths
|
# On macOS, force rpaths for shared library IDs and remove duplicate rpaths
|
||||||
spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
|
spack.builder.run_after("install", when="platform=darwin")(apply_macos_rpath_fixups)
|
||||||
|
|||||||
@@ -13,7 +13,7 @@ class GNUMirrorPackage(spack.package_base.PackageBase):
|
|||||||
"""Mixin that takes care of setting url and mirrors for GNU packages."""
|
"""Mixin that takes care of setting url and mirrors for GNU packages."""
|
||||||
|
|
||||||
#: Path of the package in a GNU mirror
|
#: Path of the package in a GNU mirror
|
||||||
gnu_mirror_path = None # type: Optional[str]
|
gnu_mirror_path: Optional[str] = None
|
||||||
|
|
||||||
#: List of GNU mirrors used by Spack
|
#: List of GNU mirrors used by Spack
|
||||||
base_mirrors = [
|
base_mirrors = [
|
||||||
|
|||||||
@@ -3,7 +3,7 @@
|
|||||||
#
|
#
|
||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
import inspect
|
import inspect
|
||||||
from typing import List # novm
|
from typing import List
|
||||||
|
|
||||||
import llnl.util.filesystem as fs
|
import llnl.util.filesystem as fs
|
||||||
|
|
||||||
@@ -77,7 +77,7 @@ class MakefileBuilder(BaseBuilder):
|
|||||||
)
|
)
|
||||||
|
|
||||||
#: Targets for ``make`` during the :py:meth:`~.MakefileBuilder.build` phase
|
#: Targets for ``make`` during the :py:meth:`~.MakefileBuilder.build` phase
|
||||||
build_targets = [] # type: List[str]
|
build_targets: List[str] = []
|
||||||
#: Targets for ``make`` during the :py:meth:`~.MakefileBuilder.install` phase
|
#: Targets for ``make`` during the :py:meth:`~.MakefileBuilder.install` phase
|
||||||
install_targets = ["install"]
|
install_targets = ["install"]
|
||||||
|
|
||||||
|
|||||||
@@ -4,7 +4,7 @@
|
|||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
import inspect
|
import inspect
|
||||||
import os
|
import os
|
||||||
from typing import List # novm
|
from typing import List
|
||||||
|
|
||||||
import llnl.util.filesystem as fs
|
import llnl.util.filesystem as fs
|
||||||
|
|
||||||
@@ -95,7 +95,7 @@ class MesonBuilder(BaseBuilder):
|
|||||||
"build_directory",
|
"build_directory",
|
||||||
)
|
)
|
||||||
|
|
||||||
build_targets = [] # type: List[str]
|
build_targets: List[str] = []
|
||||||
install_targets = ["install"]
|
install_targets = ["install"]
|
||||||
|
|
||||||
build_time_test_callbacks = ["check"]
|
build_time_test_callbacks = ["check"]
|
||||||
|
|||||||
@@ -3,7 +3,7 @@
|
|||||||
#
|
#
|
||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
import inspect
|
import inspect
|
||||||
from typing import List # novm
|
from typing import List
|
||||||
|
|
||||||
import llnl.util.filesystem as fs
|
import llnl.util.filesystem as fs
|
||||||
|
|
||||||
@@ -72,7 +72,7 @@ class NMakeBuilder(BaseBuilder):
|
|||||||
)
|
)
|
||||||
|
|
||||||
#: Targets for ``make`` during the :py:meth:`~.NMakeBuilder.build` phase
|
#: Targets for ``make`` during the :py:meth:`~.NMakeBuilder.build` phase
|
||||||
build_targets = [] # type: List[str]
|
build_targets: List[str] = []
|
||||||
#: Targets for ``make`` during the :py:meth:`~.NMakeBuilder.install` phase
|
#: Targets for ``make`` during the :py:meth:`~.NMakeBuilder.install` phase
|
||||||
install_targets = ["install"]
|
install_targets = ["install"]
|
||||||
|
|
||||||
|
|||||||
@@ -177,7 +177,7 @@ class PythonPackage(PythonExtension):
|
|||||||
"""Specialized class for packages that are built using pip."""
|
"""Specialized class for packages that are built using pip."""
|
||||||
|
|
||||||
#: Package name, version, and extension on PyPI
|
#: Package name, version, and extension on PyPI
|
||||||
pypi = None # type: Optional[str]
|
pypi: Optional[str] = None
|
||||||
|
|
||||||
maintainers = ["adamjstewart", "pradyunsg"]
|
maintainers = ["adamjstewart", "pradyunsg"]
|
||||||
|
|
||||||
@@ -200,7 +200,7 @@ class PythonPackage(PythonExtension):
|
|||||||
# package manually
|
# package manually
|
||||||
depends_on("py-wheel", type="build")
|
depends_on("py-wheel", type="build")
|
||||||
|
|
||||||
py_namespace = None # type: Optional[str]
|
py_namespace: Optional[str] = None
|
||||||
|
|
||||||
@lang.classproperty
|
@lang.classproperty
|
||||||
def homepage(cls):
|
def homepage(cls):
|
||||||
|
|||||||
@@ -22,10 +22,10 @@ class RBuilder(GenericBuilder):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
#: Names associated with package methods in the old build-system format
|
#: Names associated with package methods in the old build-system format
|
||||||
legacy_methods = (
|
legacy_methods: Tuple[str, ...] = (
|
||||||
"configure_args",
|
"configure_args",
|
||||||
"configure_vars",
|
"configure_vars",
|
||||||
) + GenericBuilder.legacy_methods # type: Tuple[str, ...]
|
) + GenericBuilder.legacy_methods
|
||||||
|
|
||||||
def configure_args(self):
|
def configure_args(self):
|
||||||
"""Arguments to pass to install via ``--configure-args``."""
|
"""Arguments to pass to install via ``--configure-args``."""
|
||||||
@@ -64,10 +64,10 @@ class RPackage(Package):
|
|||||||
# package attributes that can be expanded to set the homepage, url,
|
# package attributes that can be expanded to set the homepage, url,
|
||||||
# list_url, and git values
|
# list_url, and git values
|
||||||
# For CRAN packages
|
# For CRAN packages
|
||||||
cran = None # type: Optional[str]
|
cran: Optional[str] = None
|
||||||
|
|
||||||
# For Bioconductor packages
|
# For Bioconductor packages
|
||||||
bioc = None # type: Optional[str]
|
bioc: Optional[str] = None
|
||||||
|
|
||||||
GenericBuilder = RBuilder
|
GenericBuilder = RBuilder
|
||||||
|
|
||||||
|
|||||||
@@ -34,7 +34,7 @@ class RacketPackage(PackageBase):
|
|||||||
|
|
||||||
extends("racket", when="build_system=racket")
|
extends("racket", when="build_system=racket")
|
||||||
|
|
||||||
racket_name = None # type: Optional[str]
|
racket_name: Optional[str] = None
|
||||||
parallel = True
|
parallel = True
|
||||||
|
|
||||||
@lang.classproperty
|
@lang.classproperty
|
||||||
@@ -51,7 +51,7 @@ class RacketBuilder(spack.builder.Builder):
|
|||||||
phases = ("install",)
|
phases = ("install",)
|
||||||
|
|
||||||
#: Names associated with package methods in the old build-system format
|
#: Names associated with package methods in the old build-system format
|
||||||
legacy_methods = tuple() # type: Tuple[str, ...]
|
legacy_methods: Tuple[str, ...] = tuple()
|
||||||
|
|
||||||
#: Names associated with package attributes in the old build-system format
|
#: Names associated with package attributes in the old build-system format
|
||||||
legacy_attributes = ("build_directory", "build_time_test_callbacks", "subdirectory")
|
legacy_attributes = ("build_directory", "build_time_test_callbacks", "subdirectory")
|
||||||
@@ -59,7 +59,7 @@ class RacketBuilder(spack.builder.Builder):
|
|||||||
#: Callback names for build-time test
|
#: Callback names for build-time test
|
||||||
build_time_test_callbacks = ["check"]
|
build_time_test_callbacks = ["check"]
|
||||||
|
|
||||||
racket_name = None # type: Optional[str]
|
racket_name: Optional[str] = None
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def subdirectory(self):
|
def subdirectory(self):
|
||||||
|
|||||||
@@ -96,18 +96,33 @@ class ROCmPackage(PackageBase):
|
|||||||
"gfx803",
|
"gfx803",
|
||||||
"gfx900",
|
"gfx900",
|
||||||
"gfx900:xnack-",
|
"gfx900:xnack-",
|
||||||
|
"gfx902",
|
||||||
|
"gfx904",
|
||||||
"gfx906",
|
"gfx906",
|
||||||
"gfx908",
|
|
||||||
"gfx90a",
|
|
||||||
"gfx906:xnack-",
|
"gfx906:xnack-",
|
||||||
|
"gfx908",
|
||||||
"gfx908:xnack-",
|
"gfx908:xnack-",
|
||||||
|
"gfx909",
|
||||||
|
"gfx90a",
|
||||||
"gfx90a:xnack-",
|
"gfx90a:xnack-",
|
||||||
"gfx90a:xnack+",
|
"gfx90a:xnack+",
|
||||||
|
"gfx90c",
|
||||||
|
"gfx940",
|
||||||
"gfx1010",
|
"gfx1010",
|
||||||
"gfx1011",
|
"gfx1011",
|
||||||
"gfx1012",
|
"gfx1012",
|
||||||
|
"gfx1013",
|
||||||
"gfx1030",
|
"gfx1030",
|
||||||
"gfx1031",
|
"gfx1031",
|
||||||
|
"gfx1032",
|
||||||
|
"gfx1033",
|
||||||
|
"gfx1034",
|
||||||
|
"gfx1035",
|
||||||
|
"gfx1036",
|
||||||
|
"gfx1100",
|
||||||
|
"gfx1101",
|
||||||
|
"gfx1102",
|
||||||
|
"gfx1103",
|
||||||
)
|
)
|
||||||
|
|
||||||
variant("rocm", default=False, description="Enable ROCm support")
|
variant("rocm", default=False, description="Enable ROCm support")
|
||||||
@@ -144,6 +159,29 @@ def hip_flags(amdgpu_target):
|
|||||||
# depends_on('hip@:6.0', when='amdgpu_target=gfx701')
|
# depends_on('hip@:6.0', when='amdgpu_target=gfx701')
|
||||||
# to indicate minimum version for each architecture.
|
# to indicate minimum version for each architecture.
|
||||||
|
|
||||||
|
# Add compiler minimum versions based on the first release where the
|
||||||
|
# processor is included in llvm/lib/Support/TargetParser.cpp
|
||||||
|
depends_on("llvm-amdgpu@4.1.0:", when="amdgpu_target=gfx900:xnack-")
|
||||||
|
depends_on("llvm-amdgpu@4.1.0:", when="amdgpu_target=gfx906:xnack-")
|
||||||
|
depends_on("llvm-amdgpu@4.1.0:", when="amdgpu_target=gfx908:xnack-")
|
||||||
|
depends_on("llvm-amdgpu@4.1.0:", when="amdgpu_target=gfx90c")
|
||||||
|
depends_on("llvm-amdgpu@4.3.0:", when="amdgpu_target=gfx90a")
|
||||||
|
depends_on("llvm-amdgpu@4.3.0:", when="amdgpu_target=gfx90a:xnack-")
|
||||||
|
depends_on("llvm-amdgpu@4.3.0:", when="amdgpu_target=gfx90a:xnack+")
|
||||||
|
depends_on("llvm-amdgpu@5.2.0:", when="amdgpu_target=gfx940")
|
||||||
|
depends_on("llvm-amdgpu@4.5.0:", when="amdgpu_target=gfx1013")
|
||||||
|
depends_on("llvm-amdgpu@3.8.0:", when="amdgpu_target=gfx1030")
|
||||||
|
depends_on("llvm-amdgpu@3.9.0:", when="amdgpu_target=gfx1031")
|
||||||
|
depends_on("llvm-amdgpu@4.1.0:", when="amdgpu_target=gfx1032")
|
||||||
|
depends_on("llvm-amdgpu@4.1.0:", when="amdgpu_target=gfx1033")
|
||||||
|
depends_on("llvm-amdgpu@4.3.0:", when="amdgpu_target=gfx1034")
|
||||||
|
depends_on("llvm-amdgpu@4.5.0:", when="amdgpu_target=gfx1035")
|
||||||
|
depends_on("llvm-amdgpu@5.2.0:", when="amdgpu_target=gfx1036")
|
||||||
|
depends_on("llvm-amdgpu@5.3.0:", when="amdgpu_target=gfx1100")
|
||||||
|
depends_on("llvm-amdgpu@5.3.0:", when="amdgpu_target=gfx1101")
|
||||||
|
depends_on("llvm-amdgpu@5.3.0:", when="amdgpu_target=gfx1102")
|
||||||
|
depends_on("llvm-amdgpu@5.3.0:", when="amdgpu_target=gfx1103")
|
||||||
|
|
||||||
# Compiler conflicts
|
# Compiler conflicts
|
||||||
|
|
||||||
# TODO: add conflicts statements along the lines of
|
# TODO: add conflicts statements along the lines of
|
||||||
|
|||||||
@@ -157,7 +157,7 @@ def configure(self, pkg, spec, prefix):
|
|||||||
]
|
]
|
||||||
)
|
)
|
||||||
|
|
||||||
self.python(configure, *args)
|
self.pkg.python(configure, *args)
|
||||||
|
|
||||||
def configure_args(self):
|
def configure_args(self):
|
||||||
"""Arguments to pass to configure."""
|
"""Arguments to pass to configure."""
|
||||||
|
|||||||
@@ -14,7 +14,7 @@ class SourceforgePackage(spack.package_base.PackageBase):
|
|||||||
packages."""
|
packages."""
|
||||||
|
|
||||||
#: Path of the package in a Sourceforge mirror
|
#: Path of the package in a Sourceforge mirror
|
||||||
sourceforge_mirror_path = None # type: Optional[str]
|
sourceforge_mirror_path: Optional[str] = None
|
||||||
|
|
||||||
#: List of Sourceforge mirrors used by Spack
|
#: List of Sourceforge mirrors used by Spack
|
||||||
base_mirrors = [
|
base_mirrors = [
|
||||||
|
|||||||
@@ -13,7 +13,7 @@ class SourcewarePackage(spack.package_base.PackageBase):
|
|||||||
packages."""
|
packages."""
|
||||||
|
|
||||||
#: Path of the package in a Sourceware mirror
|
#: Path of the package in a Sourceware mirror
|
||||||
sourceware_mirror_path = None # type: Optional[str]
|
sourceware_mirror_path: Optional[str] = None
|
||||||
|
|
||||||
#: List of Sourceware mirrors used by Spack
|
#: List of Sourceware mirrors used by Spack
|
||||||
base_mirrors = [
|
base_mirrors = [
|
||||||
|
|||||||
@@ -72,9 +72,9 @@ class WafBuilder(BaseBuilder):
|
|||||||
|
|
||||||
#: Names associated with package attributes in the old build-system format
|
#: Names associated with package attributes in the old build-system format
|
||||||
legacy_attributes = (
|
legacy_attributes = (
|
||||||
"build_time_test_callbacks",
|
|
||||||
"build_time_test_callbacks",
|
"build_time_test_callbacks",
|
||||||
"build_directory",
|
"build_directory",
|
||||||
|
"install_time_test_callbacks",
|
||||||
)
|
)
|
||||||
|
|
||||||
# Callback names for build-time test
|
# Callback names for build-time test
|
||||||
|
|||||||
@@ -14,7 +14,7 @@ class XorgPackage(spack.package_base.PackageBase):
|
|||||||
packages."""
|
packages."""
|
||||||
|
|
||||||
#: Path of the package in a x.org mirror
|
#: Path of the package in a x.org mirror
|
||||||
xorg_mirror_path = None # type: Optional[str]
|
xorg_mirror_path: Optional[str] = None
|
||||||
|
|
||||||
#: List of x.org mirrors used by Spack
|
#: List of x.org mirrors used by Spack
|
||||||
# Note: x.org mirrors are a bit tricky, since many are out-of-sync or off.
|
# Note: x.org mirrors are a bit tricky, since many are out-of-sync or off.
|
||||||
|
|||||||
@@ -3,15 +3,12 @@
|
|||||||
#
|
#
|
||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
import collections
|
import collections
|
||||||
|
import collections.abc
|
||||||
import copy
|
import copy
|
||||||
import functools
|
import functools
|
||||||
import inspect
|
import inspect
|
||||||
from typing import List, Optional, Tuple
|
from typing import List, Optional, Tuple
|
||||||
|
|
||||||
import six
|
|
||||||
|
|
||||||
import llnl.util.compat
|
|
||||||
|
|
||||||
import spack.build_environment
|
import spack.build_environment
|
||||||
|
|
||||||
#: Builder classes, as registered by the "builder" decorator
|
#: Builder classes, as registered by the "builder" decorator
|
||||||
@@ -168,7 +165,7 @@ def __forward(self):
|
|||||||
property(forward_property_to_getattr(attribute_name)),
|
property(forward_property_to_getattr(attribute_name)),
|
||||||
)
|
)
|
||||||
|
|
||||||
class Adapter(six.with_metaclass(_PackageAdapterMeta, base_cls)):
|
class Adapter(base_cls, metaclass=_PackageAdapterMeta):
|
||||||
def __init__(self, pkg):
|
def __init__(self, pkg):
|
||||||
# Deal with custom phases in packages here
|
# Deal with custom phases in packages here
|
||||||
if hasattr(pkg, "phases"):
|
if hasattr(pkg, "phases"):
|
||||||
@@ -280,7 +277,7 @@ def _decorator(fn):
|
|||||||
return _decorator
|
return _decorator
|
||||||
|
|
||||||
|
|
||||||
class BuilderMeta(PhaseCallbacksMeta, type(llnl.util.compat.Sequence)): # type: ignore
|
class BuilderMeta(PhaseCallbacksMeta, type(collections.abc.Sequence)): # type: ignore
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
@@ -457,7 +454,7 @@ def copy(self):
|
|||||||
return copy.deepcopy(self)
|
return copy.deepcopy(self)
|
||||||
|
|
||||||
|
|
||||||
class Builder(six.with_metaclass(BuilderMeta, llnl.util.compat.Sequence)):
|
class Builder(collections.abc.Sequence, metaclass=BuilderMeta):
|
||||||
"""A builder is a class that, given a package object (i.e. associated with
|
"""A builder is a class that, given a package object (i.e. associated with
|
||||||
concrete spec), knows how to install it.
|
concrete spec), knows how to install it.
|
||||||
|
|
||||||
@@ -469,19 +466,19 @@ class Builder(six.with_metaclass(BuilderMeta, llnl.util.compat.Sequence)):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
#: Sequence of phases. Must be defined in derived classes
|
#: Sequence of phases. Must be defined in derived classes
|
||||||
phases = () # type: Tuple[str, ...]
|
phases: Tuple[str, ...] = ()
|
||||||
#: Build system name. Must also be defined in derived classes.
|
#: Build system name. Must also be defined in derived classes.
|
||||||
build_system = None # type: Optional[str]
|
build_system: Optional[str] = None
|
||||||
|
|
||||||
legacy_methods = () # type: Tuple[str, ...]
|
legacy_methods: Tuple[str, ...] = ()
|
||||||
legacy_attributes = () # type: Tuple[str, ...]
|
legacy_attributes: Tuple[str, ...] = ()
|
||||||
|
|
||||||
#: List of glob expressions. Each expression must either be
|
#: List of glob expressions. Each expression must either be
|
||||||
#: absolute or relative to the package source path.
|
#: absolute or relative to the package source path.
|
||||||
#: Matching artifacts found at the end of the build process will be
|
#: Matching artifacts found at the end of the build process will be
|
||||||
#: copied in the same directory tree as _spack_build_logfile and
|
#: copied in the same directory tree as _spack_build_logfile and
|
||||||
#: _spack_build_envfile.
|
#: _spack_build_envfile.
|
||||||
archive_files = [] # type: List[str]
|
archive_files: List[str] = []
|
||||||
|
|
||||||
def __init__(self, pkg):
|
def __init__(self, pkg):
|
||||||
self.pkg = pkg
|
self.pkg = pkg
|
||||||
|
|||||||
@@ -16,11 +16,9 @@
|
|||||||
import tempfile
|
import tempfile
|
||||||
import time
|
import time
|
||||||
import zipfile
|
import zipfile
|
||||||
|
from urllib.error import HTTPError, URLError
|
||||||
from six import iteritems, string_types
|
from urllib.parse import urlencode
|
||||||
from six.moves.urllib.error import HTTPError, URLError
|
from urllib.request import HTTPHandler, Request, build_opener
|
||||||
from six.moves.urllib.parse import urlencode
|
|
||||||
from six.moves.urllib.request import HTTPHandler, Request, build_opener
|
|
||||||
|
|
||||||
import llnl.util.filesystem as fs
|
import llnl.util.filesystem as fs
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
@@ -51,6 +49,7 @@
|
|||||||
|
|
||||||
TEMP_STORAGE_MIRROR_NAME = "ci_temporary_mirror"
|
TEMP_STORAGE_MIRROR_NAME = "ci_temporary_mirror"
|
||||||
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
|
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
|
||||||
|
SHARED_PR_MIRROR_URL = "s3://spack-binaries-prs/shared_pr_mirror"
|
||||||
|
|
||||||
spack_gpg = spack.main.SpackCommand("gpg")
|
spack_gpg = spack.main.SpackCommand("gpg")
|
||||||
spack_compiler = spack.main.SpackCommand("compiler")
|
spack_compiler = spack.main.SpackCommand("compiler")
|
||||||
@@ -215,7 +214,7 @@ def stage_spec_jobs(specs, check_index_only=False, mirrors_to_check=None):
|
|||||||
def _remove_satisfied_deps(deps, satisfied_list):
|
def _remove_satisfied_deps(deps, satisfied_list):
|
||||||
new_deps = {}
|
new_deps = {}
|
||||||
|
|
||||||
for key, value in iteritems(deps):
|
for key, value in deps.items():
|
||||||
new_value = set([v for v in value if v not in satisfied_list])
|
new_value = set([v for v in value if v not in satisfied_list])
|
||||||
if new_value:
|
if new_value:
|
||||||
new_deps[key] = new_value
|
new_deps[key] = new_value
|
||||||
@@ -731,6 +730,12 @@ def generate_gitlab_ci_yaml(
|
|||||||
# won't fetch its index and include in our local cache.
|
# won't fetch its index and include in our local cache.
|
||||||
spack.mirror.add("ci_pr_mirror", remote_mirror_override, cfg.default_modify_scope())
|
spack.mirror.add("ci_pr_mirror", remote_mirror_override, cfg.default_modify_scope())
|
||||||
|
|
||||||
|
shared_pr_mirror = None
|
||||||
|
if spack_pipeline_type == "spack_pull_request":
|
||||||
|
stack_name = os.environ.get("SPACK_CI_STACK_NAME", "")
|
||||||
|
shared_pr_mirror = url_util.join(SHARED_PR_MIRROR_URL, stack_name)
|
||||||
|
spack.mirror.add("ci_shared_pr_mirror", shared_pr_mirror, cfg.default_modify_scope())
|
||||||
|
|
||||||
pipeline_artifacts_dir = artifacts_root
|
pipeline_artifacts_dir = artifacts_root
|
||||||
if not pipeline_artifacts_dir:
|
if not pipeline_artifacts_dir:
|
||||||
proj_dir = os.environ.get("CI_PROJECT_DIR", os.getcwd())
|
proj_dir = os.environ.get("CI_PROJECT_DIR", os.getcwd())
|
||||||
@@ -805,6 +810,8 @@ def generate_gitlab_ci_yaml(
|
|||||||
# Clean up remote mirror override if enabled
|
# Clean up remote mirror override if enabled
|
||||||
if remote_mirror_override:
|
if remote_mirror_override:
|
||||||
spack.mirror.remove("ci_pr_mirror", cfg.default_modify_scope())
|
spack.mirror.remove("ci_pr_mirror", cfg.default_modify_scope())
|
||||||
|
if spack_pipeline_type == "spack_pull_request":
|
||||||
|
spack.mirror.remove("ci_shared_pr_mirror", cfg.default_modify_scope())
|
||||||
|
|
||||||
all_job_names = []
|
all_job_names = []
|
||||||
output_object = {}
|
output_object = {}
|
||||||
@@ -1294,6 +1301,7 @@ def generate_gitlab_ci_yaml(
|
|||||||
"SPACK_LOCAL_MIRROR_DIR": rel_local_mirror_dir,
|
"SPACK_LOCAL_MIRROR_DIR": rel_local_mirror_dir,
|
||||||
"SPACK_PIPELINE_TYPE": str(spack_pipeline_type),
|
"SPACK_PIPELINE_TYPE": str(spack_pipeline_type),
|
||||||
"SPACK_CI_STACK_NAME": os.environ.get("SPACK_CI_STACK_NAME", "None"),
|
"SPACK_CI_STACK_NAME": os.environ.get("SPACK_CI_STACK_NAME", "None"),
|
||||||
|
"SPACK_CI_SHARED_PR_MIRROR_URL": shared_pr_mirror or "None",
|
||||||
"SPACK_REBUILD_CHECK_UP_TO_DATE": str(prune_dag),
|
"SPACK_REBUILD_CHECK_UP_TO_DATE": str(prune_dag),
|
||||||
"SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
|
"SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
|
||||||
}
|
}
|
||||||
@@ -1769,9 +1777,9 @@ def reproduce_ci_job(url, work_dir):
|
|||||||
download_and_extract_artifacts(url, work_dir)
|
download_and_extract_artifacts(url, work_dir)
|
||||||
|
|
||||||
lock_file = fs.find(work_dir, "spack.lock")[0]
|
lock_file = fs.find(work_dir, "spack.lock")[0]
|
||||||
concrete_env_dir = os.path.dirname(lock_file)
|
repro_lock_dir = os.path.dirname(lock_file)
|
||||||
|
|
||||||
tty.debug("Concrete environment directory: {0}".format(concrete_env_dir))
|
tty.debug("Found lock file in: {0}".format(repro_lock_dir))
|
||||||
|
|
||||||
yaml_files = fs.find(work_dir, ["*.yaml", "*.yml"])
|
yaml_files = fs.find(work_dir, ["*.yaml", "*.yml"])
|
||||||
|
|
||||||
@@ -1794,6 +1802,20 @@ def reproduce_ci_job(url, work_dir):
|
|||||||
if pipeline_yaml:
|
if pipeline_yaml:
|
||||||
tty.debug("\n{0} is likely your pipeline file".format(yf))
|
tty.debug("\n{0} is likely your pipeline file".format(yf))
|
||||||
|
|
||||||
|
relative_concrete_env_dir = pipeline_yaml["variables"]["SPACK_CONCRETE_ENV_DIR"]
|
||||||
|
tty.debug("Relative environment path used by cloud job: {0}".format(relative_concrete_env_dir))
|
||||||
|
|
||||||
|
# Using the relative concrete environment path found in the generated
|
||||||
|
# pipeline variable above, copy the spack environment files so they'll
|
||||||
|
# be found in the same location as when the job ran in the cloud.
|
||||||
|
concrete_env_dir = os.path.join(work_dir, relative_concrete_env_dir)
|
||||||
|
os.makedirs(concrete_env_dir, exist_ok=True)
|
||||||
|
copy_lock_path = os.path.join(concrete_env_dir, "spack.lock")
|
||||||
|
orig_yaml_path = os.path.join(repro_lock_dir, "spack.yaml")
|
||||||
|
copy_yaml_path = os.path.join(concrete_env_dir, "spack.yaml")
|
||||||
|
shutil.copyfile(lock_file, copy_lock_path)
|
||||||
|
shutil.copyfile(orig_yaml_path, copy_yaml_path)
|
||||||
|
|
||||||
# Find the install script in the unzipped artifacts and make it executable
|
# Find the install script in the unzipped artifacts and make it executable
|
||||||
install_script = fs.find(work_dir, "install.sh")[0]
|
install_script = fs.find(work_dir, "install.sh")[0]
|
||||||
st = os.stat(install_script)
|
st = os.stat(install_script)
|
||||||
@@ -1849,6 +1871,7 @@ def reproduce_ci_job(url, work_dir):
|
|||||||
if repro_details:
|
if repro_details:
|
||||||
mount_as_dir = repro_details["ci_project_dir"]
|
mount_as_dir = repro_details["ci_project_dir"]
|
||||||
mounted_repro_dir = os.path.join(mount_as_dir, rel_repro_dir)
|
mounted_repro_dir = os.path.join(mount_as_dir, rel_repro_dir)
|
||||||
|
mounted_env_dir = os.path.join(mount_as_dir, relative_concrete_env_dir)
|
||||||
|
|
||||||
# We will also try to clone spack from your local checkout and
|
# We will also try to clone spack from your local checkout and
|
||||||
# reproduce the state present during the CI build, and put that into
|
# reproduce the state present during the CI build, and put that into
|
||||||
@@ -1932,7 +1955,7 @@ def reproduce_ci_job(url, work_dir):
|
|||||||
inst_list.append(" $ source {0}/share/spack/setup-env.sh\n".format(spack_root))
|
inst_list.append(" $ source {0}/share/spack/setup-env.sh\n".format(spack_root))
|
||||||
inst_list.append(
|
inst_list.append(
|
||||||
" $ spack env activate --without-view {0}\n\n".format(
|
" $ spack env activate --without-view {0}\n\n".format(
|
||||||
mounted_repro_dir if job_image else repro_dir
|
mounted_env_dir if job_image else repro_dir
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
inst_list.append(" - Run the install script\n\n")
|
inst_list.append(" - Run the install script\n\n")
|
||||||
@@ -1960,7 +1983,7 @@ def process_command(name, commands, repro_dir):
|
|||||||
"""
|
"""
|
||||||
tty.debug("spack {0} arguments: {1}".format(name, commands))
|
tty.debug("spack {0} arguments: {1}".format(name, commands))
|
||||||
|
|
||||||
if len(commands) == 0 or isinstance(commands[0], string_types):
|
if len(commands) == 0 or isinstance(commands[0], str):
|
||||||
commands = [commands]
|
commands = [commands]
|
||||||
|
|
||||||
# Create a string [command 1] && [command 2] && ... && [command n] with commands
|
# Create a string [command 1] && [command 2] && ... && [command n] with commands
|
||||||
|
|||||||
@@ -2,12 +2,11 @@
|
|||||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||||
#
|
#
|
||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
import collections.abc
|
||||||
from llnl.util.compat import Mapping
|
|
||||||
|
|
||||||
get_job_name = lambda needs_entry: (
|
get_job_name = lambda needs_entry: (
|
||||||
needs_entry.get("job")
|
needs_entry.get("job")
|
||||||
if (isinstance(needs_entry, Mapping) and needs_entry.get("artifacts", True))
|
if (isinstance(needs_entry, collections.abc.Mapping) and needs_entry.get("artifacts", True))
|
||||||
else needs_entry
|
else needs_entry
|
||||||
if isinstance(needs_entry, str)
|
if isinstance(needs_entry, str)
|
||||||
else None
|
else None
|
||||||
@@ -15,7 +14,7 @@
|
|||||||
|
|
||||||
|
|
||||||
def convert_job(job_entry):
|
def convert_job(job_entry):
|
||||||
if not isinstance(job_entry, Mapping):
|
if not isinstance(job_entry, collections.abc.Mapping):
|
||||||
return job_entry
|
return job_entry
|
||||||
|
|
||||||
needs = job_entry.get("needs")
|
needs = job_entry.get("needs")
|
||||||
|
|||||||
@@ -2,23 +2,21 @@
|
|||||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||||
#
|
#
|
||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
import collections
|
||||||
|
import collections.abc
|
||||||
import copy
|
import copy
|
||||||
import hashlib
|
import hashlib
|
||||||
from collections import defaultdict
|
|
||||||
|
|
||||||
from llnl.util.compat import Mapping, Sequence
|
|
||||||
|
|
||||||
import spack.util.spack_yaml as syaml
|
import spack.util.spack_yaml as syaml
|
||||||
|
|
||||||
|
|
||||||
def sort_yaml_obj(obj):
|
def sort_yaml_obj(obj):
|
||||||
if isinstance(obj, Mapping):
|
if isinstance(obj, collections.abc.Mapping):
|
||||||
return syaml.syaml_dict(
|
return syaml.syaml_dict(
|
||||||
(k, sort_yaml_obj(v)) for k, v in sorted(obj.items(), key=(lambda item: str(item[0])))
|
(k, sort_yaml_obj(v)) for k, v in sorted(obj.items(), key=(lambda item: str(item[0])))
|
||||||
)
|
)
|
||||||
|
|
||||||
if isinstance(obj, Sequence) and not isinstance(obj, str):
|
if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
|
||||||
return syaml.syaml_list(sort_yaml_obj(x) for x in obj)
|
return syaml.syaml_list(sort_yaml_obj(x) for x in obj)
|
||||||
|
|
||||||
return obj
|
return obj
|
||||||
@@ -38,15 +36,15 @@ def matches(obj, proto):
|
|||||||
|
|
||||||
Precondition: proto must not have any reference cycles
|
Precondition: proto must not have any reference cycles
|
||||||
"""
|
"""
|
||||||
if isinstance(obj, Mapping):
|
if isinstance(obj, collections.abc.Mapping):
|
||||||
if not isinstance(proto, Mapping):
|
if not isinstance(proto, collections.abc.Mapping):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
return all((key in obj and matches(obj[key], val)) for key, val in proto.items())
|
return all((key in obj and matches(obj[key], val)) for key, val in proto.items())
|
||||||
|
|
||||||
if isinstance(obj, Sequence) and not isinstance(obj, str):
|
if isinstance(obj, collections.abc.Sequence) and not isinstance(obj, str):
|
||||||
|
|
||||||
if not (isinstance(proto, Sequence) and not isinstance(proto, str)):
|
if not (isinstance(proto, collections.abc.Sequence) and not isinstance(proto, str)):
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if len(obj) != len(proto):
|
if len(obj) != len(proto):
|
||||||
@@ -76,7 +74,9 @@ def subkeys(obj, proto):
|
|||||||
|
|
||||||
Otherwise, obj is returned.
|
Otherwise, obj is returned.
|
||||||
"""
|
"""
|
||||||
if not (isinstance(obj, Mapping) and isinstance(proto, Mapping)):
|
if not (
|
||||||
|
isinstance(obj, collections.abc.Mapping) and isinstance(proto, collections.abc.Mapping)
|
||||||
|
):
|
||||||
return obj
|
return obj
|
||||||
|
|
||||||
new_obj = {}
|
new_obj = {}
|
||||||
@@ -88,7 +88,7 @@ def subkeys(obj, proto):
|
|||||||
if matches(value, proto[key]) and matches(proto[key], value):
|
if matches(value, proto[key]) and matches(proto[key], value):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if isinstance(value, Mapping):
|
if isinstance(value, collections.abc.Mapping):
|
||||||
new_obj[key] = subkeys(value, proto[key])
|
new_obj[key] = subkeys(value, proto[key])
|
||||||
continue
|
continue
|
||||||
|
|
||||||
@@ -116,7 +116,7 @@ def add_extends(yaml, key):
|
|||||||
has_key = "extends" in yaml
|
has_key = "extends" in yaml
|
||||||
extends = yaml.get("extends")
|
extends = yaml.get("extends")
|
||||||
|
|
||||||
if has_key and not isinstance(extends, (str, Sequence)):
|
if has_key and not isinstance(extends, (str, collections.abc.Sequence)):
|
||||||
return
|
return
|
||||||
|
|
||||||
if extends is None:
|
if extends is None:
|
||||||
@@ -261,7 +261,7 @@ def build_histogram(iterator, key):
|
|||||||
The list is sorted in descending order by count, yielding the most
|
The list is sorted in descending order by count, yielding the most
|
||||||
frequently occuring hashes first.
|
frequently occuring hashes first.
|
||||||
"""
|
"""
|
||||||
buckets = defaultdict(int)
|
buckets = collections.defaultdict(int)
|
||||||
values = {}
|
values = {}
|
||||||
|
|
||||||
num_objects = 0
|
num_objects = 0
|
||||||
|
|||||||
@@ -11,10 +11,9 @@
|
|||||||
import shlex
|
import shlex
|
||||||
import sys
|
import sys
|
||||||
from textwrap import dedent
|
from textwrap import dedent
|
||||||
from typing import List, Tuple
|
from typing import List, Match, Tuple
|
||||||
|
|
||||||
import ruamel.yaml as yaml
|
import ruamel.yaml as yaml
|
||||||
import six
|
|
||||||
from ruamel.yaml.error import MarkedYAMLError
|
from ruamel.yaml.error import MarkedYAMLError
|
||||||
|
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
@@ -27,6 +26,7 @@
|
|||||||
import spack.environment as ev
|
import spack.environment as ev
|
||||||
import spack.error
|
import spack.error
|
||||||
import spack.extensions
|
import spack.extensions
|
||||||
|
import spack.parser
|
||||||
import spack.paths
|
import spack.paths
|
||||||
import spack.spec
|
import spack.spec
|
||||||
import spack.store
|
import spack.store
|
||||||
@@ -166,18 +166,15 @@ class _UnquotedFlags(object):
|
|||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
def __init__(self, all_unquoted_flag_pairs):
|
def __init__(self, all_unquoted_flag_pairs: List[Tuple[Match[str], str]]):
|
||||||
# type: (List[Tuple[re.Match, str]]) -> None
|
|
||||||
self._flag_pairs = all_unquoted_flag_pairs
|
self._flag_pairs = all_unquoted_flag_pairs
|
||||||
|
|
||||||
def __bool__(self):
|
def __bool__(self) -> bool:
|
||||||
# type: () -> bool
|
|
||||||
return bool(self._flag_pairs)
|
return bool(self._flag_pairs)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def extract(cls, sargs):
|
def extract(cls, sargs: str) -> "_UnquotedFlags":
|
||||||
# type: (str) -> _UnquotedFlags
|
all_unquoted_flag_pairs: List[Tuple[Match[str], str]] = []
|
||||||
all_unquoted_flag_pairs = [] # type: List[Tuple[re.Match, str]]
|
|
||||||
prev_flags_arg = None
|
prev_flags_arg = None
|
||||||
for arg in shlex.split(sargs):
|
for arg in shlex.split(sargs):
|
||||||
if prev_flags_arg is not None:
|
if prev_flags_arg is not None:
|
||||||
@@ -185,8 +182,7 @@ def extract(cls, sargs):
|
|||||||
prev_flags_arg = cls.flags_arg_pattern.match(arg)
|
prev_flags_arg = cls.flags_arg_pattern.match(arg)
|
||||||
return cls(all_unquoted_flag_pairs)
|
return cls(all_unquoted_flag_pairs)
|
||||||
|
|
||||||
def report(self):
|
def report(self) -> str:
|
||||||
# type: () -> str
|
|
||||||
single_errors = [
|
single_errors = [
|
||||||
"({0}) {1} {2} => {3}".format(
|
"({0}) {1} {2} => {3}".format(
|
||||||
i + 1,
|
i + 1,
|
||||||
@@ -217,12 +213,12 @@ def parse_specs(args, **kwargs):
|
|||||||
tests = kwargs.get("tests", False)
|
tests = kwargs.get("tests", False)
|
||||||
|
|
||||||
sargs = args
|
sargs = args
|
||||||
if not isinstance(args, six.string_types):
|
if not isinstance(args, str):
|
||||||
sargs = " ".join(args)
|
sargs = " ".join(args)
|
||||||
unquoted_flags = _UnquotedFlags.extract(sargs)
|
unquoted_flags = _UnquotedFlags.extract(sargs)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
specs = spack.spec.parse(sargs)
|
specs = spack.parser.parse(sargs)
|
||||||
for spec in specs:
|
for spec in specs:
|
||||||
if concretize:
|
if concretize:
|
||||||
spec.concretize(tests=tests) # implies normalize
|
spec.concretize(tests=tests) # implies normalize
|
||||||
|
|||||||
@@ -5,10 +5,8 @@
|
|||||||
from __future__ import print_function
|
from __future__ import print_function
|
||||||
|
|
||||||
import os.path
|
import os.path
|
||||||
import platform
|
|
||||||
import shutil
|
import shutil
|
||||||
import tempfile
|
import tempfile
|
||||||
import warnings
|
|
||||||
|
|
||||||
import llnl.util.filesystem
|
import llnl.util.filesystem
|
||||||
import llnl.util.tty
|
import llnl.util.tty
|
||||||
@@ -16,6 +14,8 @@
|
|||||||
|
|
||||||
import spack
|
import spack
|
||||||
import spack.bootstrap
|
import spack.bootstrap
|
||||||
|
import spack.bootstrap.config
|
||||||
|
import spack.bootstrap.core
|
||||||
import spack.cmd.common.arguments
|
import spack.cmd.common.arguments
|
||||||
import spack.config
|
import spack.config
|
||||||
import spack.main
|
import spack.main
|
||||||
@@ -76,7 +76,8 @@ def _add_scope_option(parser):
|
|||||||
def setup_parser(subparser):
|
def setup_parser(subparser):
|
||||||
sp = subparser.add_subparsers(dest="subcommand")
|
sp = subparser.add_subparsers(dest="subcommand")
|
||||||
|
|
||||||
sp.add_parser("now", help="Spack ready, right now!")
|
now = sp.add_parser("now", help="Spack ready, right now!")
|
||||||
|
now.add_argument("--dev", action="store_true", help="bootstrap dev dependencies too")
|
||||||
|
|
||||||
status = sp.add_parser("status", help="get the status of Spack")
|
status = sp.add_parser("status", help="get the status of Spack")
|
||||||
status.add_argument(
|
status.add_argument(
|
||||||
@@ -112,18 +113,10 @@ def setup_parser(subparser):
|
|||||||
list = sp.add_parser("list", help="list all the sources of software to bootstrap Spack")
|
list = sp.add_parser("list", help="list all the sources of software to bootstrap Spack")
|
||||||
_add_scope_option(list)
|
_add_scope_option(list)
|
||||||
|
|
||||||
trust = sp.add_parser("trust", help="(DEPRECATED) trust a bootstrapping source")
|
|
||||||
_add_scope_option(trust)
|
|
||||||
trust.add_argument("name", help="name of the source to be trusted")
|
|
||||||
|
|
||||||
untrust = sp.add_parser("untrust", help="(DEPRECATED) untrust a bootstrapping source")
|
|
||||||
_add_scope_option(untrust)
|
|
||||||
untrust.add_argument("name", help="name of the source to be untrusted")
|
|
||||||
|
|
||||||
add = sp.add_parser("add", help="add a new source for bootstrapping")
|
add = sp.add_parser("add", help="add a new source for bootstrapping")
|
||||||
_add_scope_option(add)
|
_add_scope_option(add)
|
||||||
add.add_argument(
|
add.add_argument(
|
||||||
"--trust", action="store_true", help="trust the source immediately upon addition"
|
"--trust", action="store_true", help="enable the source immediately upon addition"
|
||||||
)
|
)
|
||||||
add.add_argument("name", help="name of the new source of software")
|
add.add_argument("name", help="name of the new source of software")
|
||||||
add.add_argument("metadata_dir", help="directory where to find metadata files")
|
add.add_argument("metadata_dir", help="directory where to find metadata files")
|
||||||
@@ -156,9 +149,9 @@ def _enable_or_disable(args):
|
|||||||
return
|
return
|
||||||
|
|
||||||
if value is True:
|
if value is True:
|
||||||
_trust(args)
|
_enable_source(args)
|
||||||
else:
|
else:
|
||||||
_untrust(args)
|
_disable_source(args)
|
||||||
|
|
||||||
|
|
||||||
def _reset(args):
|
def _reset(args):
|
||||||
@@ -203,7 +196,7 @@ def _root(args):
|
|||||||
|
|
||||||
|
|
||||||
def _list(args):
|
def _list(args):
|
||||||
sources = spack.bootstrap.bootstrapping_sources(scope=args.scope)
|
sources = spack.bootstrap.core.bootstrapping_sources(scope=args.scope)
|
||||||
if not sources:
|
if not sources:
|
||||||
llnl.util.tty.msg("No method available for bootstrapping Spack's dependencies")
|
llnl.util.tty.msg("No method available for bootstrapping Spack's dependencies")
|
||||||
return
|
return
|
||||||
@@ -254,8 +247,14 @@ def sort_fn(x):
|
|||||||
_print_method(s, trusted.get(s["name"], None))
|
_print_method(s, trusted.get(s["name"], None))
|
||||||
|
|
||||||
|
|
||||||
def _write_trust_state(args, value):
|
def _write_bootstrapping_source_status(name, enabled, scope=None):
|
||||||
name = args.name
|
"""Write if a bootstrapping source is enable or disabled to config file.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
name (str): name of the bootstrapping source.
|
||||||
|
enabled (bool): True if the source is enabled, False if it is disabled.
|
||||||
|
scope (None or str): configuration scope to modify. If none use the default scope.
|
||||||
|
"""
|
||||||
sources = spack.config.get("bootstrap:sources")
|
sources = spack.config.get("bootstrap:sources")
|
||||||
|
|
||||||
matches = [s for s in sources if s["name"] == name]
|
matches = [s for s in sources if s["name"] == name]
|
||||||
@@ -277,30 +276,18 @@ def _write_trust_state(args, value):
|
|||||||
|
|
||||||
# Setting the scope explicitly is needed to not copy over to a new scope
|
# Setting the scope explicitly is needed to not copy over to a new scope
|
||||||
# the entire default configuration for bootstrap.yaml
|
# the entire default configuration for bootstrap.yaml
|
||||||
scope = args.scope or spack.config.default_modify_scope("bootstrap")
|
scope = scope or spack.config.default_modify_scope("bootstrap")
|
||||||
spack.config.add("bootstrap:trusted:{0}:{1}".format(name, str(value)), scope=scope)
|
spack.config.add("bootstrap:trusted:{0}:{1}".format(name, str(enabled)), scope=scope)
|
||||||
|
|
||||||
|
|
||||||
def _deprecate_command(deprecated_cmd, suggested_cmd):
|
def _enable_source(args):
|
||||||
msg = (
|
_write_bootstrapping_source_status(args.name, enabled=True, scope=args.scope)
|
||||||
"the 'spack bootstrap {} ...' command is deprecated and will be "
|
|
||||||
"removed in v0.20, use 'spack bootstrap {} ...' instead"
|
|
||||||
)
|
|
||||||
warnings.warn(msg.format(deprecated_cmd, suggested_cmd))
|
|
||||||
|
|
||||||
|
|
||||||
def _trust(args):
|
|
||||||
if args.subcommand == "trust":
|
|
||||||
_deprecate_command("trust", "enable")
|
|
||||||
_write_trust_state(args, value=True)
|
|
||||||
msg = '"{0}" is now enabled for bootstrapping'
|
msg = '"{0}" is now enabled for bootstrapping'
|
||||||
llnl.util.tty.msg(msg.format(args.name))
|
llnl.util.tty.msg(msg.format(args.name))
|
||||||
|
|
||||||
|
|
||||||
def _untrust(args):
|
def _disable_source(args):
|
||||||
if args.subcommand == "untrust":
|
_write_bootstrapping_source_status(args.name, enabled=False, scope=args.scope)
|
||||||
_deprecate_command("untrust", "disable")
|
|
||||||
_write_trust_state(args, value=False)
|
|
||||||
msg = '"{0}" is now disabled and will not be used for bootstrapping'
|
msg = '"{0}" is now disabled and will not be used for bootstrapping'
|
||||||
llnl.util.tty.msg(msg.format(args.name))
|
llnl.util.tty.msg(msg.format(args.name))
|
||||||
|
|
||||||
@@ -313,7 +300,7 @@ def _status(args):
|
|||||||
sections.append("develop")
|
sections.append("develop")
|
||||||
|
|
||||||
header = "@*b{{Spack v{0} - {1}}}".format(
|
header = "@*b{{Spack v{0} - {1}}}".format(
|
||||||
spack.spack_version, spack.bootstrap.spec_for_current_python()
|
spack.spack_version, spack.bootstrap.config.spec_for_current_python()
|
||||||
)
|
)
|
||||||
print(llnl.util.tty.color.colorize(header))
|
print(llnl.util.tty.color.colorize(header))
|
||||||
print()
|
print()
|
||||||
@@ -338,7 +325,7 @@ def _status(args):
|
|||||||
|
|
||||||
|
|
||||||
def _add(args):
|
def _add(args):
|
||||||
initial_sources = spack.bootstrap.bootstrapping_sources()
|
initial_sources = spack.bootstrap.core.bootstrapping_sources()
|
||||||
names = [s["name"] for s in initial_sources]
|
names = [s["name"] for s in initial_sources]
|
||||||
|
|
||||||
# If the name is already used error out
|
# If the name is already used error out
|
||||||
@@ -364,11 +351,11 @@ def _add(args):
|
|||||||
msg = 'New bootstrapping source "{0}" added in the "{1}" configuration scope'
|
msg = 'New bootstrapping source "{0}" added in the "{1}" configuration scope'
|
||||||
llnl.util.tty.msg(msg.format(args.name, write_scope))
|
llnl.util.tty.msg(msg.format(args.name, write_scope))
|
||||||
if args.trust:
|
if args.trust:
|
||||||
_trust(args)
|
_enable_source(args)
|
||||||
|
|
||||||
|
|
||||||
def _remove(args):
|
def _remove(args):
|
||||||
initial_sources = spack.bootstrap.bootstrapping_sources()
|
initial_sources = spack.bootstrap.core.bootstrapping_sources()
|
||||||
names = [s["name"] for s in initial_sources]
|
names = [s["name"] for s in initial_sources]
|
||||||
if args.name not in names:
|
if args.name not in names:
|
||||||
msg = (
|
msg = (
|
||||||
@@ -401,7 +388,10 @@ def _mirror(args):
|
|||||||
# TODO: Here we are adding gnuconfig manually, but this can be fixed
|
# TODO: Here we are adding gnuconfig manually, but this can be fixed
|
||||||
# TODO: as soon as we have an option to add to a mirror all the possible
|
# TODO: as soon as we have an option to add to a mirror all the possible
|
||||||
# TODO: dependencies of a spec
|
# TODO: dependencies of a spec
|
||||||
root_specs = spack.bootstrap.all_root_specs(development=args.dev) + ["gnuconfig"]
|
root_specs = spack.bootstrap.all_core_root_specs() + ["gnuconfig"]
|
||||||
|
if args.dev:
|
||||||
|
root_specs += spack.bootstrap.BootstrapEnvironment.spack_dev_requirements()
|
||||||
|
|
||||||
for spec_str in root_specs:
|
for spec_str in root_specs:
|
||||||
msg = 'Adding "{0}" and dependencies to the mirror at {1}'
|
msg = 'Adding "{0}" and dependencies to the mirror at {1}'
|
||||||
llnl.util.tty.msg(msg.format(spec_str, mirror_dir))
|
llnl.util.tty.msg(msg.format(spec_str, mirror_dir))
|
||||||
@@ -451,10 +441,9 @@ def write_metadata(subdir, metadata):
|
|||||||
|
|
||||||
def _now(args):
|
def _now(args):
|
||||||
with spack.bootstrap.ensure_bootstrap_configuration():
|
with spack.bootstrap.ensure_bootstrap_configuration():
|
||||||
if platform.system().lower() == "linux":
|
spack.bootstrap.ensure_core_dependencies()
|
||||||
spack.bootstrap.ensure_patchelf_in_path_or_raise()
|
if args.dev:
|
||||||
spack.bootstrap.ensure_clingo_importable_or_raise()
|
spack.bootstrap.ensure_environment_dependencies()
|
||||||
spack.bootstrap.ensure_gpg_in_path_or_raise()
|
|
||||||
|
|
||||||
|
|
||||||
def bootstrap(parser, args):
|
def bootstrap(parser, args):
|
||||||
@@ -465,8 +454,6 @@ def bootstrap(parser, args):
|
|||||||
"reset": _reset,
|
"reset": _reset,
|
||||||
"root": _root,
|
"root": _root,
|
||||||
"list": _list,
|
"list": _list,
|
||||||
"trust": _trust,
|
|
||||||
"untrust": _untrust,
|
|
||||||
"add": _add,
|
"add": _add,
|
||||||
"remove": _remove,
|
"remove": _remove,
|
||||||
"mirror": _mirror,
|
"mirror": _mirror,
|
||||||
|
|||||||
@@ -454,7 +454,7 @@ def check_fn(args):
|
|||||||
|
|
||||||
if not specs:
|
if not specs:
|
||||||
tty.msg("No specs provided, exiting.")
|
tty.msg("No specs provided, exiting.")
|
||||||
sys.exit(0)
|
return
|
||||||
|
|
||||||
for spec in specs:
|
for spec in specs:
|
||||||
spec.concretize()
|
spec.concretize()
|
||||||
@@ -467,9 +467,10 @@ def check_fn(args):
|
|||||||
|
|
||||||
if not configured_mirrors:
|
if not configured_mirrors:
|
||||||
tty.msg("No mirrors provided, exiting.")
|
tty.msg("No mirrors provided, exiting.")
|
||||||
sys.exit(0)
|
return
|
||||||
|
|
||||||
sys.exit(bindist.check_specs_against_mirrors(configured_mirrors, specs, args.output_file))
|
if bindist.check_specs_against_mirrors(configured_mirrors, specs, args.output_file) == 1:
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
def download_fn(args):
|
def download_fn(args):
|
||||||
@@ -479,11 +480,11 @@ def download_fn(args):
|
|||||||
least one of the required buildcache components."""
|
least one of the required buildcache components."""
|
||||||
if not args.spec and not args.spec_file:
|
if not args.spec and not args.spec_file:
|
||||||
tty.msg("No specs provided, exiting.")
|
tty.msg("No specs provided, exiting.")
|
||||||
sys.exit(0)
|
return
|
||||||
|
|
||||||
if not args.path:
|
if not args.path:
|
||||||
tty.msg("No download path provided, exiting")
|
tty.msg("No download path provided, exiting")
|
||||||
sys.exit(0)
|
return
|
||||||
|
|
||||||
spec = _concrete_spec_from_args(args)
|
spec = _concrete_spec_from_args(args)
|
||||||
result = bindist.download_single_spec(spec, args.path)
|
result = bindist.download_single_spec(spec, args.path)
|
||||||
@@ -532,8 +533,6 @@ def save_specfile_fn(args):
|
|||||||
root_spec_as_json, args.specfile_dir, args.specs.split(), spec_format
|
root_spec_as_json, args.specfile_dir, args.specs.split(), spec_format
|
||||||
)
|
)
|
||||||
|
|
||||||
sys.exit(0)
|
|
||||||
|
|
||||||
|
|
||||||
def copy_buildcache_file(src_url, dest_url, local_path=None):
|
def copy_buildcache_file(src_url, dest_url, local_path=None):
|
||||||
"""Copy from source url to destination url"""
|
"""Copy from source url to destination url"""
|
||||||
|
|||||||
@@ -6,6 +6,7 @@
|
|||||||
from __future__ import print_function
|
from __future__ import print_function
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
|
import sys
|
||||||
|
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
|
|
||||||
@@ -16,6 +17,7 @@
|
|||||||
import spack.stage
|
import spack.stage
|
||||||
import spack.util.crypto
|
import spack.util.crypto
|
||||||
from spack.package_base import deprecated_version, preferred_version
|
from spack.package_base import deprecated_version, preferred_version
|
||||||
|
from spack.util.editor import editor
|
||||||
from spack.util.naming import valid_fully_qualified_module_name
|
from spack.util.naming import valid_fully_qualified_module_name
|
||||||
from spack.version import VersionBase, ver
|
from spack.version import VersionBase, ver
|
||||||
|
|
||||||
@@ -53,6 +55,13 @@ def setup_parser(subparser):
|
|||||||
default=False,
|
default=False,
|
||||||
help="checksum the preferred version only",
|
help="checksum the preferred version only",
|
||||||
)
|
)
|
||||||
|
subparser.add_argument(
|
||||||
|
"-a",
|
||||||
|
"--add-to-package",
|
||||||
|
action="store_true",
|
||||||
|
default=False,
|
||||||
|
help="add new versions to package",
|
||||||
|
)
|
||||||
arguments.add_common_arguments(subparser, ["package"])
|
arguments.add_common_arguments(subparser, ["package"])
|
||||||
subparser.add_argument(
|
subparser.add_argument(
|
||||||
"versions", nargs=argparse.REMAINDER, help="versions to generate checksums for"
|
"versions", nargs=argparse.REMAINDER, help="versions to generate checksums for"
|
||||||
@@ -118,3 +127,46 @@ def checksum(parser, args):
|
|||||||
print()
|
print()
|
||||||
print(version_lines)
|
print(version_lines)
|
||||||
print()
|
print()
|
||||||
|
|
||||||
|
if args.add_to_package:
|
||||||
|
filename = spack.repo.path.filename_for_package_name(pkg.name)
|
||||||
|
# Make sure we also have a newline after the last version
|
||||||
|
versions = [v + "\n" for v in version_lines.splitlines()]
|
||||||
|
versions.append("\n")
|
||||||
|
# We need to insert the versions in reversed order
|
||||||
|
versions.reverse()
|
||||||
|
versions.append(" # FIXME: Added by `spack checksum`\n")
|
||||||
|
version_line = None
|
||||||
|
|
||||||
|
with open(filename, "r") as f:
|
||||||
|
lines = f.readlines()
|
||||||
|
for i in range(len(lines)):
|
||||||
|
# Black is drunk, so this is what it looks like for now
|
||||||
|
# See https://github.com/psf/black/issues/2156 for more information
|
||||||
|
if lines[i].startswith(" # FIXME: Added by `spack checksum`") or lines[
|
||||||
|
i
|
||||||
|
].startswith(" version("):
|
||||||
|
version_line = i
|
||||||
|
break
|
||||||
|
|
||||||
|
if version_line is not None:
|
||||||
|
for v in versions:
|
||||||
|
lines.insert(version_line, v)
|
||||||
|
|
||||||
|
with open(filename, "w") as f:
|
||||||
|
f.writelines(lines)
|
||||||
|
|
||||||
|
msg = "opening editor to verify"
|
||||||
|
|
||||||
|
if not sys.stdout.isatty():
|
||||||
|
msg = "please verify"
|
||||||
|
|
||||||
|
tty.info(
|
||||||
|
"Added {0} new versions to {1}, "
|
||||||
|
"{2}.".format(len(versions) - 2, args.package, msg)
|
||||||
|
)
|
||||||
|
|
||||||
|
if sys.stdout.isatty():
|
||||||
|
editor(filename)
|
||||||
|
else:
|
||||||
|
tty.warn("Could not add new versions to {0}.".format(args.package))
|
||||||
|
|||||||
@@ -6,7 +6,6 @@
|
|||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
import shutil
|
import shutil
|
||||||
import sys
|
|
||||||
|
|
||||||
import llnl.util.filesystem as fs
|
import llnl.util.filesystem as fs
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
@@ -285,6 +284,7 @@ def ci_rebuild(args):
|
|||||||
remote_mirror_override = get_env_var("SPACK_REMOTE_MIRROR_OVERRIDE")
|
remote_mirror_override = get_env_var("SPACK_REMOTE_MIRROR_OVERRIDE")
|
||||||
remote_mirror_url = get_env_var("SPACK_REMOTE_MIRROR_URL")
|
remote_mirror_url = get_env_var("SPACK_REMOTE_MIRROR_URL")
|
||||||
spack_ci_stack_name = get_env_var("SPACK_CI_STACK_NAME")
|
spack_ci_stack_name = get_env_var("SPACK_CI_STACK_NAME")
|
||||||
|
shared_pr_mirror_url = get_env_var("SPACK_CI_SHARED_PR_MIRROR_URL")
|
||||||
rebuild_everything = get_env_var("SPACK_REBUILD_EVERYTHING")
|
rebuild_everything = get_env_var("SPACK_REBUILD_EVERYTHING")
|
||||||
|
|
||||||
# Construct absolute paths relative to current $CI_PROJECT_DIR
|
# Construct absolute paths relative to current $CI_PROJECT_DIR
|
||||||
@@ -472,6 +472,10 @@ def ci_rebuild(args):
|
|||||||
spack.mirror.add("mirror_override", remote_mirror_override, cfg.default_modify_scope())
|
spack.mirror.add("mirror_override", remote_mirror_override, cfg.default_modify_scope())
|
||||||
pipeline_mirrors.append(remote_mirror_override)
|
pipeline_mirrors.append(remote_mirror_override)
|
||||||
|
|
||||||
|
if spack_pipeline_type == "spack_pull_request":
|
||||||
|
if shared_pr_mirror_url != "None":
|
||||||
|
pipeline_mirrors.append(shared_pr_mirror_url)
|
||||||
|
|
||||||
matches = (
|
matches = (
|
||||||
None
|
None
|
||||||
if full_rebuild
|
if full_rebuild
|
||||||
@@ -498,7 +502,7 @@ def ci_rebuild(args):
|
|||||||
bindist.download_single_spec(job_spec, build_cache_dir, mirror_url=matching_mirror)
|
bindist.download_single_spec(job_spec, build_cache_dir, mirror_url=matching_mirror)
|
||||||
|
|
||||||
# Now we are done and successful
|
# Now we are done and successful
|
||||||
sys.exit(0)
|
return 0
|
||||||
|
|
||||||
# Before beginning the install, if this is a "rebuild everything" pipeline, we
|
# Before beginning the install, if this is a "rebuild everything" pipeline, we
|
||||||
# only want to keep the mirror being used by the current pipeline as it's binary
|
# only want to keep the mirror being used by the current pipeline as it's binary
|
||||||
@@ -566,8 +570,6 @@ def ci_rebuild(args):
|
|||||||
"-o",
|
"-o",
|
||||||
"Makefile",
|
"Makefile",
|
||||||
"--use-buildcache=package:never,dependencies:only",
|
"--use-buildcache=package:never,dependencies:only",
|
||||||
"--make-target-prefix",
|
|
||||||
"ci",
|
|
||||||
slash_hash, # limit to spec we're building
|
slash_hash, # limit to spec we're building
|
||||||
],
|
],
|
||||||
[
|
[
|
||||||
@@ -584,7 +586,7 @@ def ci_rebuild(args):
|
|||||||
"SPACK_COLOR=always",
|
"SPACK_COLOR=always",
|
||||||
"SPACK_INSTALL_FLAGS={}".format(args_to_string(deps_install_args)),
|
"SPACK_INSTALL_FLAGS={}".format(args_to_string(deps_install_args)),
|
||||||
"-j$(nproc)",
|
"-j$(nproc)",
|
||||||
"ci/.install-deps/{}".format(job_spec.dag_hash()),
|
"install-deps/{}".format(job_spec.format("{name}-{version}-{hash}")),
|
||||||
],
|
],
|
||||||
spack_cmd + ["install"] + root_install_args,
|
spack_cmd + ["install"] + root_install_args,
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -8,8 +8,6 @@
|
|||||||
import argparse
|
import argparse
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
from six import iteritems
|
|
||||||
|
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
from llnl.util.lang import index_by
|
from llnl.util.lang import index_by
|
||||||
from llnl.util.tty.colify import colify
|
from llnl.util.tty.colify import colify
|
||||||
@@ -138,13 +136,13 @@ def compiler_info(args):
|
|||||||
print("\t\t%s = %s" % (cpath, getattr(c, cpath, None)))
|
print("\t\t%s = %s" % (cpath, getattr(c, cpath, None)))
|
||||||
if c.flags:
|
if c.flags:
|
||||||
print("\tflags:")
|
print("\tflags:")
|
||||||
for flag, flag_value in iteritems(c.flags):
|
for flag, flag_value in c.flags.items():
|
||||||
print("\t\t%s = %s" % (flag, flag_value))
|
print("\t\t%s = %s" % (flag, flag_value))
|
||||||
if len(c.environment) != 0:
|
if len(c.environment) != 0:
|
||||||
if len(c.environment.get("set", {})) != 0:
|
if len(c.environment.get("set", {})) != 0:
|
||||||
print("\tenvironment:")
|
print("\tenvironment:")
|
||||||
print("\t set:")
|
print("\t set:")
|
||||||
for key, value in iteritems(c.environment["set"]):
|
for key, value in c.environment["set"].items():
|
||||||
print("\t %s = %s" % (key, value))
|
print("\t %s = %s" % (key, value))
|
||||||
if c.extra_rpaths:
|
if c.extra_rpaths:
|
||||||
print("\tExtra rpaths:")
|
print("\tExtra rpaths:")
|
||||||
|
|||||||
@@ -46,6 +46,14 @@ def setup_parser(subparser):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def shift(asp_function):
|
||||||
|
"""Transforms ``attr("foo", "bar")`` into ``foo("bar")``."""
|
||||||
|
if not asp_function.args:
|
||||||
|
raise ValueError(f"Can't shift ASP function with no arguments: {str(asp_function)}")
|
||||||
|
first, *rest = asp_function.args
|
||||||
|
return asp.AspFunction(first, rest)
|
||||||
|
|
||||||
|
|
||||||
def compare_specs(a, b, to_string=False, color=None):
|
def compare_specs(a, b, to_string=False, color=None):
|
||||||
"""
|
"""
|
||||||
Generate a comparison, including diffs (for each side) and an intersection.
|
Generate a comparison, including diffs (for each side) and an intersection.
|
||||||
@@ -71,22 +79,24 @@ def compare_specs(a, b, to_string=False, color=None):
|
|||||||
# get facts for specs, making sure to include build dependencies of concrete
|
# get facts for specs, making sure to include build dependencies of concrete
|
||||||
# specs and to descend into dependency hashes so we include all facts.
|
# specs and to descend into dependency hashes so we include all facts.
|
||||||
a_facts = set(
|
a_facts = set(
|
||||||
t
|
shift(func)
|
||||||
for t in setup.spec_clauses(
|
for func in setup.spec_clauses(
|
||||||
a,
|
a,
|
||||||
body=True,
|
body=True,
|
||||||
expand_hashes=True,
|
expand_hashes=True,
|
||||||
concrete_build_deps=True,
|
concrete_build_deps=True,
|
||||||
)
|
)
|
||||||
|
if func.name == "attr"
|
||||||
)
|
)
|
||||||
b_facts = set(
|
b_facts = set(
|
||||||
t
|
shift(func)
|
||||||
for t in setup.spec_clauses(
|
for func in setup.spec_clauses(
|
||||||
b,
|
b,
|
||||||
body=True,
|
body=True,
|
||||||
expand_hashes=True,
|
expand_hashes=True,
|
||||||
concrete_build_deps=True,
|
concrete_build_deps=True,
|
||||||
)
|
)
|
||||||
|
if func.name == "attr"
|
||||||
)
|
)
|
||||||
|
|
||||||
# We want to present them to the user as simple key: values
|
# We want to present them to the user as simple key: values
|
||||||
|
|||||||
@@ -4,13 +4,12 @@
|
|||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
|
import io
|
||||||
import os
|
import os
|
||||||
import shutil
|
import shutil
|
||||||
import sys
|
import sys
|
||||||
import tempfile
|
import tempfile
|
||||||
|
|
||||||
import six
|
|
||||||
|
|
||||||
import llnl.util.filesystem as fs
|
import llnl.util.filesystem as fs
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
from llnl.util.tty.colify import colify
|
from llnl.util.tty.colify import colify
|
||||||
@@ -674,13 +673,16 @@ def build_cache_flag(self, depth):
|
|||||||
return ""
|
return ""
|
||||||
|
|
||||||
def accept(self, node):
|
def accept(self, node):
|
||||||
dag_hash = node.edge.spec.dag_hash()
|
fmt = "{name}-{version}-{hash}"
|
||||||
|
tgt = node.edge.spec.format(fmt)
|
||||||
spec_str = node.edge.spec.format(
|
spec_str = node.edge.spec.format(
|
||||||
"{name}{@version}{%compiler}{variants}{arch=architecture}"
|
"{name}{@version}{%compiler}{variants}{arch=architecture}"
|
||||||
)
|
)
|
||||||
buildcache_flag = self.build_cache_flag(node.depth)
|
buildcache_flag = self.build_cache_flag(node.depth)
|
||||||
prereqs = " ".join([self.target(dep.spec.dag_hash()) for dep in self.neighbors(node)])
|
prereqs = " ".join([self.target(dep.spec.format(fmt)) for dep in self.neighbors(node)])
|
||||||
self.adjacency_list.append((dag_hash, spec_str, buildcache_flag, prereqs))
|
self.adjacency_list.append(
|
||||||
|
(tgt, prereqs, node.edge.spec.dag_hash(), spec_str, buildcache_flag)
|
||||||
|
)
|
||||||
|
|
||||||
# We already accepted this
|
# We already accepted this
|
||||||
return True
|
return True
|
||||||
@@ -691,6 +693,8 @@ def env_depfile(args):
|
|||||||
spack.cmd.require_active_env(cmd_name="env depfile")
|
spack.cmd.require_active_env(cmd_name="env depfile")
|
||||||
env = ev.active_environment()
|
env = ev.active_environment()
|
||||||
|
|
||||||
|
# Special make targets are useful when including a makefile in another, and you
|
||||||
|
# need to "namespace" the targets to avoid conflicts.
|
||||||
if args.make_target_prefix is None:
|
if args.make_target_prefix is None:
|
||||||
target_prefix = os.path.join(env.env_subdir_path, "makedeps")
|
target_prefix = os.path.join(env.env_subdir_path, "makedeps")
|
||||||
else:
|
else:
|
||||||
@@ -707,10 +711,10 @@ def get_target(name):
|
|||||||
return os.path.join(target_prefix, name)
|
return os.path.join(target_prefix, name)
|
||||||
|
|
||||||
def get_install_target(name):
|
def get_install_target(name):
|
||||||
return os.path.join(target_prefix, ".install", name)
|
return os.path.join(target_prefix, "install", name)
|
||||||
|
|
||||||
def get_install_deps_target(name):
|
def get_install_deps_target(name):
|
||||||
return os.path.join(target_prefix, ".install-deps", name)
|
return os.path.join(target_prefix, "install-deps", name)
|
||||||
|
|
||||||
# What things do we build when running make? By default, we build the
|
# What things do we build when running make? By default, we build the
|
||||||
# root specs. If specific specs are provided as input, we build those.
|
# root specs. If specific specs are provided as input, we build those.
|
||||||
@@ -729,15 +733,24 @@ def get_install_deps_target(name):
|
|||||||
)
|
)
|
||||||
|
|
||||||
# Root specs without deps are the prereqs for the environment target
|
# Root specs without deps are the prereqs for the environment target
|
||||||
root_install_targets = [get_install_target(h.dag_hash()) for h in roots]
|
root_install_targets = [get_install_target(h.format("{name}-{version}-{hash}")) for h in roots]
|
||||||
|
|
||||||
# Cleanable targets...
|
# All install and install-deps targets
|
||||||
cleanable_targets = [get_install_target(h) for h, _, _, _ in make_targets.adjacency_list]
|
all_install_related_targets = []
|
||||||
cleanable_targets.extend(
|
|
||||||
[get_install_deps_target(h) for h, _, _, _ in make_targets.adjacency_list]
|
|
||||||
)
|
|
||||||
|
|
||||||
buf = six.StringIO()
|
# Convenience shortcuts: ensure that `make install/pkg-version-hash` triggers
|
||||||
|
# <absolute path to env>/.spack-env/makedeps/install/pkg-version-hash in case
|
||||||
|
# we don't have a custom make target prefix.
|
||||||
|
phony_convenience_targets = []
|
||||||
|
|
||||||
|
for tgt, _, _, _, _ in make_targets.adjacency_list:
|
||||||
|
all_install_related_targets.append(get_install_target(tgt))
|
||||||
|
all_install_related_targets.append(get_install_deps_target(tgt))
|
||||||
|
if args.make_target_prefix is None:
|
||||||
|
phony_convenience_targets.append(os.path.join("install", tgt))
|
||||||
|
phony_convenience_targets.append(os.path.join("install-deps", tgt))
|
||||||
|
|
||||||
|
buf = io.StringIO()
|
||||||
|
|
||||||
template = spack.tengine.make_environment().get_template(os.path.join("depfile", "Makefile"))
|
template = spack.tengine.make_environment().get_template(os.path.join("depfile", "Makefile"))
|
||||||
|
|
||||||
@@ -746,15 +759,17 @@ def get_install_deps_target(name):
|
|||||||
"all_target": get_target("all"),
|
"all_target": get_target("all"),
|
||||||
"env_target": get_target("env"),
|
"env_target": get_target("env"),
|
||||||
"clean_target": get_target("clean"),
|
"clean_target": get_target("clean"),
|
||||||
"cleanable_targets": " ".join(cleanable_targets),
|
"all_install_related_targets": " ".join(all_install_related_targets),
|
||||||
"root_install_targets": " ".join(root_install_targets),
|
"root_install_targets": " ".join(root_install_targets),
|
||||||
"dirs_target": get_target("dirs"),
|
"dirs_target": get_target("dirs"),
|
||||||
"environment": env.path,
|
"environment": env.path,
|
||||||
"install_target": get_target(".install"),
|
"install_target": get_target("install"),
|
||||||
"install_deps_target": get_target(".install-deps"),
|
"install_deps_target": get_target("install-deps"),
|
||||||
"any_hash_target": get_target("%"),
|
"any_hash_target": get_target("%"),
|
||||||
"jobserver_support": "+" if args.jobserver else "",
|
"jobserver_support": "+" if args.jobserver else "",
|
||||||
"adjacency_list": make_targets.adjacency_list,
|
"adjacency_list": make_targets.adjacency_list,
|
||||||
|
"phony_convenience_targets": " ".join(phony_convenience_targets),
|
||||||
|
"target_prefix": target_prefix,
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|||||||
@@ -140,13 +140,6 @@ def setup_parser(subparser):
|
|||||||
|
|
||||||
subparser.add_argument("--start-date", help="earliest date of installation [YYYY-MM-DD]")
|
subparser.add_argument("--start-date", help="earliest date of installation [YYYY-MM-DD]")
|
||||||
subparser.add_argument("--end-date", help="latest date of installation [YYYY-MM-DD]")
|
subparser.add_argument("--end-date", help="latest date of installation [YYYY-MM-DD]")
|
||||||
subparser.add_argument(
|
|
||||||
"-b",
|
|
||||||
"--bootstrap",
|
|
||||||
action="store_true",
|
|
||||||
help="show software in the internal bootstrap store",
|
|
||||||
)
|
|
||||||
|
|
||||||
arguments.add_common_arguments(subparser, ["constraint"])
|
arguments.add_common_arguments(subparser, ["constraint"])
|
||||||
|
|
||||||
|
|
||||||
@@ -251,23 +244,6 @@ def display_env(env, args, decorator, results):
|
|||||||
|
|
||||||
|
|
||||||
def find(parser, args):
|
def find(parser, args):
|
||||||
if args.bootstrap:
|
|
||||||
tty.warn(
|
|
||||||
"`spack find --bootstrap` is deprecated and will be removed in v0.19.",
|
|
||||||
"Use `spack --bootstrap find` instead.",
|
|
||||||
)
|
|
||||||
|
|
||||||
if args.bootstrap:
|
|
||||||
bootstrap_store_path = spack.bootstrap.store_path()
|
|
||||||
with spack.bootstrap.ensure_bootstrap_configuration():
|
|
||||||
msg = 'Showing internal bootstrap store at "{0}"'
|
|
||||||
tty.msg(msg.format(bootstrap_store_path))
|
|
||||||
_find(parser, args)
|
|
||||||
return
|
|
||||||
_find(parser, args)
|
|
||||||
|
|
||||||
|
|
||||||
def _find(parser, args):
|
|
||||||
q_args = query_arguments(args)
|
q_args = query_arguments(args)
|
||||||
results = args.specs(**q_args)
|
results = args.specs(**q_args)
|
||||||
|
|
||||||
|
|||||||
@@ -43,4 +43,4 @@ def gc(parser, args):
|
|||||||
if not args.yes_to_all:
|
if not args.yes_to_all:
|
||||||
spack.cmd.uninstall.confirm_removal(specs)
|
spack.cmd.uninstall.confirm_removal(specs)
|
||||||
|
|
||||||
spack.cmd.uninstall.do_uninstall(None, specs, force=False)
|
spack.cmd.uninstall.do_uninstall(specs, force=False)
|
||||||
|
|||||||
@@ -7,8 +7,7 @@
|
|||||||
|
|
||||||
import inspect
|
import inspect
|
||||||
import textwrap
|
import textwrap
|
||||||
|
from itertools import zip_longest
|
||||||
from six.moves import zip_longest
|
|
||||||
|
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
import llnl.util.tty.color as color
|
import llnl.util.tty.color as color
|
||||||
@@ -242,8 +241,8 @@ def print_tests(pkg):
|
|||||||
# So the presence of a callback in Spack does not necessarily correspond
|
# So the presence of a callback in Spack does not necessarily correspond
|
||||||
# to the actual presence of built-time tests for a package.
|
# to the actual presence of built-time tests for a package.
|
||||||
for callbacks, phase in [
|
for callbacks, phase in [
|
||||||
(pkg.build_time_test_callbacks, "Build"),
|
(getattr(pkg, "build_time_test_callbacks", None), "Build"),
|
||||||
(pkg.install_time_test_callbacks, "Install"),
|
(getattr(pkg, "install_time_test_callbacks", None), "Install"),
|
||||||
]:
|
]:
|
||||||
color.cprint("")
|
color.cprint("")
|
||||||
color.cprint(section_title("Available {0} Phase Test Methods:".format(phase)))
|
color.cprint(section_title("Available {0} Phase Test Methods:".format(phase)))
|
||||||
|
|||||||
@@ -12,6 +12,7 @@
|
|||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
|
from html import escape
|
||||||
|
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
from llnl.util.tty.colify import colify
|
from llnl.util.tty.colify import colify
|
||||||
@@ -21,11 +22,6 @@
|
|||||||
import spack.repo
|
import spack.repo
|
||||||
from spack.version import VersionList
|
from spack.version import VersionList
|
||||||
|
|
||||||
if sys.version_info > (3, 1):
|
|
||||||
from html import escape # novm
|
|
||||||
else:
|
|
||||||
from cgi import escape
|
|
||||||
|
|
||||||
description = "list and search available packages"
|
description = "list and search available packages"
|
||||||
section = "basic"
|
section = "basic"
|
||||||
level = "short"
|
level = "short"
|
||||||
|
|||||||
@@ -3,7 +3,7 @@
|
|||||||
#
|
#
|
||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
|
||||||
from typing import Callable, Dict # novm
|
from typing import Callable, Dict
|
||||||
|
|
||||||
import spack.cmd.modules.lmod
|
import spack.cmd.modules.lmod
|
||||||
import spack.cmd.modules.tcl
|
import spack.cmd.modules.tcl
|
||||||
@@ -13,7 +13,7 @@
|
|||||||
level = "short"
|
level = "short"
|
||||||
|
|
||||||
|
|
||||||
_subcommands = {} # type: Dict[str, Callable]
|
_subcommands: Dict[str, Callable] = {}
|
||||||
|
|
||||||
|
|
||||||
def setup_parser(subparser):
|
def setup_parser(subparser):
|
||||||
|
|||||||
@@ -16,6 +16,7 @@
|
|||||||
|
|
||||||
def setup_parser(subparser):
|
def setup_parser(subparser):
|
||||||
arguments.add_common_arguments(subparser, ["no_checksum", "deprecated", "specs"])
|
arguments.add_common_arguments(subparser, ["no_checksum", "deprecated", "specs"])
|
||||||
|
arguments.add_concretizer_args(subparser)
|
||||||
|
|
||||||
|
|
||||||
def patch(parser, args):
|
def patch(parser, args):
|
||||||
|
|||||||
@@ -3,10 +3,9 @@
|
|||||||
#
|
#
|
||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
|
||||||
|
import io
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
import six
|
|
||||||
|
|
||||||
import llnl.util.tty.colify as colify
|
import llnl.util.tty.colify as colify
|
||||||
|
|
||||||
import spack.cmd
|
import spack.cmd
|
||||||
@@ -29,7 +28,7 @@ def setup_parser(subparser):
|
|||||||
def providers(parser, args):
|
def providers(parser, args):
|
||||||
valid_virtuals = sorted(spack.repo.path.provider_index.providers.keys())
|
valid_virtuals = sorted(spack.repo.path.provider_index.providers.keys())
|
||||||
|
|
||||||
buffer = six.StringIO()
|
buffer = io.StringIO()
|
||||||
isatty = sys.stdout.isatty()
|
isatty = sys.stdout.isatty()
|
||||||
if isatty:
|
if isatty:
|
||||||
buffer.write("Virtual packages:\n")
|
buffer.write("Virtual packages:\n")
|
||||||
|
|||||||
@@ -2,37 +2,26 @@
|
|||||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||||
#
|
#
|
||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
|
||||||
from __future__ import print_function
|
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
|
from itertools import zip_longest
|
||||||
|
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
import llnl.util.tty.color as color
|
import llnl.util.tty.color as color
|
||||||
from llnl.util.filesystem import working_dir
|
from llnl.util.filesystem import working_dir
|
||||||
|
|
||||||
import spack.bootstrap
|
|
||||||
import spack.paths
|
import spack.paths
|
||||||
from spack.util.executable import which
|
from spack.util.executable import which
|
||||||
|
|
||||||
if sys.version_info < (3, 0):
|
|
||||||
from itertools import izip_longest # novm
|
|
||||||
|
|
||||||
zip_longest = izip_longest
|
|
||||||
else:
|
|
||||||
from itertools import zip_longest # novm
|
|
||||||
|
|
||||||
|
|
||||||
description = "runs source code style checks on spack"
|
description = "runs source code style checks on spack"
|
||||||
section = "developer"
|
section = "developer"
|
||||||
level = "long"
|
level = "long"
|
||||||
|
|
||||||
|
|
||||||
def grouper(iterable, n, fillvalue=None):
|
def grouper(iterable, n, fillvalue=None):
|
||||||
"Collect data into fixed-length chunks or blocks"
|
"""Collect data into fixed-length chunks or blocks"""
|
||||||
# grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx"
|
# grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx"
|
||||||
args = [iter(iterable)] * n
|
args = [iter(iterable)] * n
|
||||||
for group in zip_longest(*args, fillvalue=fillvalue):
|
for group in zip_longest(*args, fillvalue=fillvalue):
|
||||||
@@ -48,16 +37,13 @@ def grouper(iterable, n, fillvalue=None):
|
|||||||
#: double-check the results of other tools (if, e.g., --fix was provided)
|
#: double-check the results of other tools (if, e.g., --fix was provided)
|
||||||
#: The list maps an executable name to a method to ensure the tool is
|
#: The list maps an executable name to a method to ensure the tool is
|
||||||
#: bootstrapped or present in the environment.
|
#: bootstrapped or present in the environment.
|
||||||
tool_order = [
|
tool_names = [
|
||||||
("isort", spack.bootstrap.ensure_isort_in_path_or_raise),
|
"isort",
|
||||||
("mypy", spack.bootstrap.ensure_mypy_in_path_or_raise),
|
"mypy",
|
||||||
("black", spack.bootstrap.ensure_black_in_path_or_raise),
|
"black",
|
||||||
("flake8", spack.bootstrap.ensure_flake8_in_path_or_raise),
|
"flake8",
|
||||||
]
|
]
|
||||||
|
|
||||||
#: list of just the tool names -- for argparse
|
|
||||||
tool_names = [k for k, _ in tool_order]
|
|
||||||
|
|
||||||
#: tools we run in spack style
|
#: tools we run in spack style
|
||||||
tools = {}
|
tools = {}
|
||||||
|
|
||||||
@@ -229,10 +215,8 @@ def translate(match):
|
|||||||
print(line)
|
print(line)
|
||||||
|
|
||||||
|
|
||||||
def print_style_header(file_list, args, selected):
|
def print_style_header(file_list, args, tools_to_run):
|
||||||
tools = [tool for tool in tool_names if tool in selected]
|
tty.msg("Running style checks on spack", "selected: " + ", ".join(tools_to_run))
|
||||||
tty.msg("Running style checks on spack", "selected: " + ", ".join(tools))
|
|
||||||
|
|
||||||
# translate modified paths to cwd_relative if needed
|
# translate modified paths to cwd_relative if needed
|
||||||
paths = [filename.strip() for filename in file_list]
|
paths = [filename.strip() for filename in file_list]
|
||||||
if not args.root_relative:
|
if not args.root_relative:
|
||||||
@@ -267,7 +251,7 @@ def run_flake8(flake8_cmd, file_list, args):
|
|||||||
"--config=%s" % os.path.join(spack.paths.prefix, ".flake8"),
|
"--config=%s" % os.path.join(spack.paths.prefix, ".flake8"),
|
||||||
*chunk,
|
*chunk,
|
||||||
fail_on_error=False,
|
fail_on_error=False,
|
||||||
output=str
|
output=str,
|
||||||
)
|
)
|
||||||
returncode |= flake8_cmd.returncode
|
returncode |= flake8_cmd.returncode
|
||||||
|
|
||||||
@@ -375,14 +359,6 @@ def run_black(black_cmd, file_list, args):
|
|||||||
packed_args = black_args + tuple(chunk)
|
packed_args = black_args + tuple(chunk)
|
||||||
output = black_cmd(*packed_args, fail_on_error=False, output=str, error=str)
|
output = black_cmd(*packed_args, fail_on_error=False, output=str, error=str)
|
||||||
returncode |= black_cmd.returncode
|
returncode |= black_cmd.returncode
|
||||||
|
|
||||||
# ignore Python 2.7 deprecation error because we already know it's deprecated.
|
|
||||||
output = "\n".join(
|
|
||||||
line
|
|
||||||
for line in output.split("\n")
|
|
||||||
if "DEPRECATION: Python 2 support will be removed" not in line
|
|
||||||
)
|
|
||||||
|
|
||||||
rewrite_and_print_output(output, args, pat, replacement)
|
rewrite_and_print_output(output, args, pat, replacement)
|
||||||
|
|
||||||
print_tool_result("black", returncode)
|
print_tool_result("black", returncode)
|
||||||
@@ -399,11 +375,18 @@ def validate_toolset(arg_value):
|
|||||||
return tools
|
return tools
|
||||||
|
|
||||||
|
|
||||||
def style(parser, args):
|
def missing_tools(tools_to_run):
|
||||||
# ensure python version is new enough
|
return [t for t in tools_to_run if which(t) is None]
|
||||||
if sys.version_info < (3, 6):
|
|
||||||
tty.die("spack style requires Python 3.6 or later.")
|
|
||||||
|
|
||||||
|
|
||||||
|
def _bootstrap_dev_dependencies():
|
||||||
|
import spack.bootstrap
|
||||||
|
|
||||||
|
with spack.bootstrap.ensure_bootstrap_configuration():
|
||||||
|
spack.bootstrap.ensure_environment_dependencies()
|
||||||
|
|
||||||
|
|
||||||
|
def style(parser, args):
|
||||||
# save initial working directory for relativizing paths later
|
# save initial working directory for relativizing paths later
|
||||||
args.initial_working_dir = os.getcwd()
|
args.initial_working_dir = os.getcwd()
|
||||||
|
|
||||||
@@ -437,25 +420,20 @@ def prefix_relative(path):
|
|||||||
tty.msg("Nothing to run.")
|
tty.msg("Nothing to run.")
|
||||||
return
|
return
|
||||||
|
|
||||||
|
tools_to_run = [t for t in tool_names if t in selected]
|
||||||
|
if missing_tools(tools_to_run):
|
||||||
|
_bootstrap_dev_dependencies()
|
||||||
|
|
||||||
return_code = 0
|
return_code = 0
|
||||||
with working_dir(args.root):
|
with working_dir(args.root):
|
||||||
if not file_list:
|
if not file_list:
|
||||||
file_list = changed_files(args.base, args.untracked, args.all)
|
file_list = changed_files(args.base, args.untracked, args.all)
|
||||||
|
|
||||||
print_style_header(file_list, args, selected)
|
print_style_header(file_list, args, tools_to_run)
|
||||||
|
for tool_name in tools_to_run:
|
||||||
tools_to_run = [(tool, fn) for tool, fn in tool_order if tool in selected]
|
run_function, required = tools[tool_name]
|
||||||
commands = {}
|
print_tool_header(tool_name)
|
||||||
with spack.bootstrap.ensure_bootstrap_configuration():
|
return_code |= run_function(which(tool_name), file_list, args)
|
||||||
# bootstrap everything first to get commands
|
|
||||||
for tool_name, bootstrap_fn in tools_to_run:
|
|
||||||
commands[tool_name] = bootstrap_fn()
|
|
||||||
|
|
||||||
# run tools once bootstrapping is done
|
|
||||||
for tool_name, bootstrap_fn in tools_to_run:
|
|
||||||
run_function, required = tools[tool_name]
|
|
||||||
print_tool_header(tool_name)
|
|
||||||
return_code |= run_function(commands[tool_name], file_list, args)
|
|
||||||
|
|
||||||
if return_code == 0:
|
if return_code == 0:
|
||||||
tty.msg(color.colorize("@*{spack style checks were clean}"))
|
tty.msg(color.colorize("@*{spack style checks were clean}"))
|
||||||
|
|||||||
@@ -2,11 +2,9 @@
|
|||||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||||
#
|
#
|
||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
import io
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
import six
|
|
||||||
|
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
import llnl.util.tty.colify as colify
|
import llnl.util.tty.colify as colify
|
||||||
|
|
||||||
@@ -20,7 +18,7 @@
|
|||||||
|
|
||||||
|
|
||||||
def report_tags(category, tags):
|
def report_tags(category, tags):
|
||||||
buffer = six.StringIO()
|
buffer = io.StringIO()
|
||||||
isatty = sys.stdout.isatty()
|
isatty = sys.stdout.isatty()
|
||||||
|
|
||||||
if isatty:
|
if isatty:
|
||||||
@@ -88,7 +86,7 @@ def tags(parser, args):
|
|||||||
return
|
return
|
||||||
|
|
||||||
# Report packages associated with tags
|
# Report packages associated with tags
|
||||||
buffer = six.StringIO()
|
buffer = io.StringIO()
|
||||||
isatty = sys.stdout.isatty()
|
isatty = sys.stdout.isatty()
|
||||||
|
|
||||||
tags = args.tag if args.tag else available_tags
|
tags = args.tag if args.tag else available_tags
|
||||||
|
|||||||
@@ -11,6 +11,7 @@
|
|||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
from llnl.util.filesystem import working_dir
|
from llnl.util.filesystem import working_dir
|
||||||
|
|
||||||
|
import spack
|
||||||
import spack.cmd.common.arguments as arguments
|
import spack.cmd.common.arguments as arguments
|
||||||
import spack.config
|
import spack.config
|
||||||
import spack.paths
|
import spack.paths
|
||||||
@@ -24,7 +25,7 @@
|
|||||||
|
|
||||||
|
|
||||||
# tutorial configuration parameters
|
# tutorial configuration parameters
|
||||||
tutorial_branch = "releases/v0.18"
|
tutorial_branch = "releases/v0.19"
|
||||||
tutorial_mirror = "file:///mirror"
|
tutorial_mirror = "file:///mirror"
|
||||||
tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")
|
tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")
|
||||||
|
|
||||||
|
|||||||
@@ -17,6 +17,7 @@
|
|||||||
import spack.package_base
|
import spack.package_base
|
||||||
import spack.repo
|
import spack.repo
|
||||||
import spack.store
|
import spack.store
|
||||||
|
import spack.traverse as traverse
|
||||||
from spack.database import InstallStatuses
|
from spack.database import InstallStatuses
|
||||||
|
|
||||||
description = "remove installed packages"
|
description = "remove installed packages"
|
||||||
@@ -144,11 +145,7 @@ def installed_dependents(specs, env):
|
|||||||
active environment, and one from specs to dependent installs outside of
|
active environment, and one from specs to dependent installs outside of
|
||||||
the active environment.
|
the active environment.
|
||||||
|
|
||||||
Any of the input specs may appear in both mappings (if there are
|
Every installed dependent spec is listed once.
|
||||||
dependents both inside and outside the current environment).
|
|
||||||
|
|
||||||
If a dependent spec is used both by the active environment and by
|
|
||||||
an inactive environment, it will only appear in the first mapping.
|
|
||||||
|
|
||||||
If there is not current active environment, the first mapping will be
|
If there is not current active environment, the first mapping will be
|
||||||
empty.
|
empty.
|
||||||
@@ -158,19 +155,27 @@ def installed_dependents(specs, env):
|
|||||||
|
|
||||||
env_hashes = set(env.all_hashes()) if env else set()
|
env_hashes = set(env.all_hashes()) if env else set()
|
||||||
|
|
||||||
all_specs_in_db = spack.store.db.query()
|
# Ensure we stop traversal at input specs.
|
||||||
|
visited = set(s.dag_hash() for s in specs)
|
||||||
|
|
||||||
for spec in specs:
|
for spec in specs:
|
||||||
installed = [x for x in all_specs_in_db if spec in x]
|
for dpt in traverse.traverse_nodes(
|
||||||
|
spec.dependents(deptype="all"),
|
||||||
# separate installed dependents into dpts in this environment and
|
direction="parents",
|
||||||
# dpts that are outside this environment
|
visited=visited,
|
||||||
for dpt in installed:
|
deptype="all",
|
||||||
if dpt not in specs:
|
root=True,
|
||||||
if dpt.dag_hash() in env_hashes:
|
key=lambda s: s.dag_hash(),
|
||||||
active_dpts.setdefault(spec, set()).add(dpt)
|
):
|
||||||
else:
|
hash = dpt.dag_hash()
|
||||||
outside_dpts.setdefault(spec, set()).add(dpt)
|
# Ensure that all the specs we get are installed
|
||||||
|
record = spack.store.db.query_local_by_spec_hash(hash)
|
||||||
|
if record is None or not record.installed:
|
||||||
|
continue
|
||||||
|
if hash in env_hashes:
|
||||||
|
active_dpts.setdefault(spec, set()).add(dpt)
|
||||||
|
else:
|
||||||
|
outside_dpts.setdefault(spec, set()).add(dpt)
|
||||||
|
|
||||||
return active_dpts, outside_dpts
|
return active_dpts, outside_dpts
|
||||||
|
|
||||||
@@ -225,54 +230,21 @@ def _remove_from_env(spec, env):
|
|||||||
pass # ignore non-root specs
|
pass # ignore non-root specs
|
||||||
|
|
||||||
|
|
||||||
def do_uninstall(env, specs, force):
|
def do_uninstall(specs, force=False):
|
||||||
"""Uninstalls all the specs in a list.
|
# TODO: get rid of the call-sites that use this function,
|
||||||
|
# so that we don't have to do a dance of list -> set -> list -> set
|
||||||
|
hashes_to_remove = set(s.dag_hash() for s in specs)
|
||||||
|
|
||||||
Args:
|
for s in traverse.traverse_nodes(
|
||||||
env (spack.environment.Environment or None): active environment, or ``None``
|
specs,
|
||||||
if there is not one
|
order="topo",
|
||||||
specs (list): list of specs to be uninstalled
|
direction="children",
|
||||||
force (bool): force uninstallation (boolean)
|
root=True,
|
||||||
"""
|
cover="nodes",
|
||||||
packages = []
|
deptype="all",
|
||||||
for item in specs:
|
):
|
||||||
try:
|
if s.dag_hash() in hashes_to_remove:
|
||||||
# should work if package is known to spack
|
spack.package_base.PackageBase.uninstall_by_spec(s, force=force)
|
||||||
packages.append(item.package)
|
|
||||||
except spack.repo.UnknownEntityError:
|
|
||||||
# The package.py file has gone away -- but still
|
|
||||||
# want to uninstall.
|
|
||||||
spack.package_base.PackageBase.uninstall_by_spec(item, force=True)
|
|
||||||
|
|
||||||
# A package is ready to be uninstalled when nothing else references it,
|
|
||||||
# unless we are requested to force uninstall it.
|
|
||||||
def is_ready(dag_hash):
|
|
||||||
if force:
|
|
||||||
return True
|
|
||||||
|
|
||||||
_, record = spack.store.db.query_by_spec_hash(dag_hash)
|
|
||||||
if not record.ref_count:
|
|
||||||
return True
|
|
||||||
|
|
||||||
# If this spec is only used as a build dependency, we can uninstall
|
|
||||||
return all(
|
|
||||||
dspec.deptypes == ("build",) or not dspec.parent.installed
|
|
||||||
for dspec in record.spec.edges_from_dependents()
|
|
||||||
)
|
|
||||||
|
|
||||||
while packages:
|
|
||||||
ready = [x for x in packages if is_ready(x.spec.dag_hash())]
|
|
||||||
if not ready:
|
|
||||||
msg = (
|
|
||||||
"unexpected error [cannot proceed uninstalling specs with"
|
|
||||||
" remaining link or run dependents {0}]"
|
|
||||||
)
|
|
||||||
msg = msg.format(", ".join(x.name for x in packages))
|
|
||||||
raise spack.error.SpackError(msg)
|
|
||||||
|
|
||||||
packages = [x for x in packages if x not in ready]
|
|
||||||
for item in ready:
|
|
||||||
item.do_uninstall(force=force)
|
|
||||||
|
|
||||||
|
|
||||||
def get_uninstall_list(args, specs, env):
|
def get_uninstall_list(args, specs, env):
|
||||||
@@ -414,7 +386,7 @@ def uninstall_specs(args, specs):
|
|||||||
confirm_removal(uninstall_list)
|
confirm_removal(uninstall_list)
|
||||||
|
|
||||||
# Uninstall everything on the list
|
# Uninstall everything on the list
|
||||||
do_uninstall(env, uninstall_list, args.force)
|
do_uninstall(uninstall_list, args.force)
|
||||||
|
|
||||||
if env:
|
if env:
|
||||||
with env.write_transaction():
|
with env.write_transaction():
|
||||||
|
|||||||
@@ -7,6 +7,7 @@
|
|||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
import collections
|
import collections
|
||||||
|
import io
|
||||||
import os.path
|
import os.path
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
@@ -16,13 +17,10 @@
|
|||||||
except ImportError:
|
except ImportError:
|
||||||
pytest = None # type: ignore
|
pytest = None # type: ignore
|
||||||
|
|
||||||
from six import StringIO
|
|
||||||
|
|
||||||
import llnl.util.filesystem
|
import llnl.util.filesystem
|
||||||
import llnl.util.tty.color as color
|
import llnl.util.tty.color as color
|
||||||
from llnl.util.tty.colify import colify
|
from llnl.util.tty.colify import colify
|
||||||
|
|
||||||
import spack.bootstrap
|
|
||||||
import spack.paths
|
import spack.paths
|
||||||
|
|
||||||
description = "run spack's unit tests (wrapper around pytest)"
|
description = "run spack's unit tests (wrapper around pytest)"
|
||||||
@@ -126,7 +124,7 @@ def colorize(c, prefix):
|
|||||||
|
|
||||||
old_output = sys.stdout
|
old_output = sys.stdout
|
||||||
try:
|
try:
|
||||||
sys.stdout = output = StringIO()
|
sys.stdout = output = io.StringIO()
|
||||||
pytest.main(["--collect-only"] + extra_args)
|
pytest.main(["--collect-only"] + extra_args)
|
||||||
finally:
|
finally:
|
||||||
sys.stdout = old_output
|
sys.stdout = old_output
|
||||||
@@ -208,6 +206,7 @@ def add_back_pytest_args(args, unknown_args):
|
|||||||
|
|
||||||
def unit_test(parser, args, unknown_args):
|
def unit_test(parser, args, unknown_args):
|
||||||
global pytest
|
global pytest
|
||||||
|
import spack.bootstrap
|
||||||
|
|
||||||
# Ensure clingo is available before switching to the
|
# Ensure clingo is available before switching to the
|
||||||
# mock configuration used by unit tests
|
# mock configuration used by unit tests
|
||||||
@@ -215,12 +214,10 @@ def unit_test(parser, args, unknown_args):
|
|||||||
# clingo is wholly unsupported from bootstrap
|
# clingo is wholly unsupported from bootstrap
|
||||||
if not is_windows:
|
if not is_windows:
|
||||||
with spack.bootstrap.ensure_bootstrap_configuration():
|
with spack.bootstrap.ensure_bootstrap_configuration():
|
||||||
spack.bootstrap.ensure_clingo_importable_or_raise()
|
spack.bootstrap.ensure_core_dependencies()
|
||||||
|
if pytest is None:
|
||||||
if pytest is None:
|
spack.bootstrap.ensure_environment_dependencies()
|
||||||
vendored_pytest_dir = os.path.join(spack.paths.external_path, "pytest-fallback")
|
import pytest
|
||||||
sys.path.append(vendored_pytest_dir)
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
if args.pytest_help:
|
if args.pytest_help:
|
||||||
# make the pytest.main help output more accurate
|
# make the pytest.main help output more accurate
|
||||||
|
|||||||
@@ -5,10 +5,9 @@
|
|||||||
|
|
||||||
from __future__ import division, print_function
|
from __future__ import division, print_function
|
||||||
|
|
||||||
|
import urllib.parse
|
||||||
from collections import defaultdict
|
from collections import defaultdict
|
||||||
|
|
||||||
import six.moves.urllib.parse as urllib_parse
|
|
||||||
|
|
||||||
import llnl.util.tty.color as color
|
import llnl.util.tty.color as color
|
||||||
from llnl.util import tty
|
from llnl.util import tty
|
||||||
|
|
||||||
@@ -323,7 +322,7 @@ def add(self, pkg_name, fetcher):
|
|||||||
md5_hashes[pkg_name].append(fetcher.url)
|
md5_hashes[pkg_name].append(fetcher.url)
|
||||||
|
|
||||||
# parse out the URL scheme (https/http/ftp/etc.)
|
# parse out the URL scheme (https/http/ftp/etc.)
|
||||||
urlinfo = urllib_parse.urlparse(fetcher.url)
|
urlinfo = urllib.parse.urlparse(fetcher.url)
|
||||||
self.schemes[urlinfo.scheme] += 1
|
self.schemes[urlinfo.scheme] += 1
|
||||||
|
|
||||||
if urlinfo.scheme == "http":
|
if urlinfo.scheme == "http":
|
||||||
|
|||||||
@@ -10,7 +10,7 @@
|
|||||||
import re
|
import re
|
||||||
import shutil
|
import shutil
|
||||||
import tempfile
|
import tempfile
|
||||||
from typing import List, Sequence # novm
|
from typing import List, Optional, Sequence
|
||||||
|
|
||||||
import llnl.util.lang
|
import llnl.util.lang
|
||||||
import llnl.util.tty as tty
|
import llnl.util.tty as tty
|
||||||
@@ -195,20 +195,20 @@ class Compiler(object):
|
|||||||
and how to identify the particular type of compiler."""
|
and how to identify the particular type of compiler."""
|
||||||
|
|
||||||
# Subclasses use possible names of C compiler
|
# Subclasses use possible names of C compiler
|
||||||
cc_names = [] # type: List[str]
|
cc_names: List[str] = []
|
||||||
|
|
||||||
# Subclasses use possible names of C++ compiler
|
# Subclasses use possible names of C++ compiler
|
||||||
cxx_names = [] # type: List[str]
|
cxx_names: List[str] = []
|
||||||
|
|
||||||
# Subclasses use possible names of Fortran 77 compiler
|
# Subclasses use possible names of Fortran 77 compiler
|
||||||
f77_names = [] # type: List[str]
|
f77_names: List[str] = []
|
||||||
|
|
||||||
# Subclasses use possible names of Fortran 90 compiler
|
# Subclasses use possible names of Fortran 90 compiler
|
||||||
fc_names = [] # type: List[str]
|
fc_names: List[str] = []
|
||||||
|
|
||||||
# Optional prefix regexes for searching for this type of compiler.
|
# Optional prefix regexes for searching for this type of compiler.
|
||||||
# Prefixes are sometimes used for toolchains
|
# Prefixes are sometimes used for toolchains
|
||||||
prefixes = [] # type: List[str]
|
prefixes: List[str] = []
|
||||||
|
|
||||||
# Optional suffix regexes for searching for this type of compiler.
|
# Optional suffix regexes for searching for this type of compiler.
|
||||||
# Suffixes are used by some frameworks, e.g. macports uses an '-mp-X.Y'
|
# Suffixes are used by some frameworks, e.g. macports uses an '-mp-X.Y'
|
||||||
@@ -219,7 +219,7 @@ class Compiler(object):
|
|||||||
version_argument = "-dumpversion"
|
version_argument = "-dumpversion"
|
||||||
|
|
||||||
#: Return values to ignore when invoking the compiler to get its version
|
#: Return values to ignore when invoking the compiler to get its version
|
||||||
ignore_version_errors = () # type: Sequence[int]
|
ignore_version_errors: Sequence[int] = ()
|
||||||
|
|
||||||
#: Regex used to extract version from compiler's output
|
#: Regex used to extract version from compiler's output
|
||||||
version_regex = "(.*)"
|
version_regex = "(.*)"
|
||||||
@@ -271,9 +271,9 @@ def opt_flags(self):
|
|||||||
return ["-O", "-O0", "-O1", "-O2", "-O3"]
|
return ["-O", "-O0", "-O1", "-O2", "-O3"]
|
||||||
|
|
||||||
# Cray PrgEnv name that can be used to load this compiler
|
# Cray PrgEnv name that can be used to load this compiler
|
||||||
PrgEnv = None # type: str
|
PrgEnv: Optional[str] = None
|
||||||
# Name of module used to switch versions of this compiler
|
# Name of module used to switch versions of this compiler
|
||||||
PrgEnv_compiler = None # type: str
|
PrgEnv_compiler: Optional[str] = None
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
@@ -286,7 +286,7 @@ def __init__(
|
|||||||
environment=None,
|
environment=None,
|
||||||
extra_rpaths=None,
|
extra_rpaths=None,
|
||||||
enable_implicit_rpaths=None,
|
enable_implicit_rpaths=None,
|
||||||
**kwargs
|
**kwargs,
|
||||||
):
|
):
|
||||||
self.spec = cspec
|
self.spec = cspec
|
||||||
self.operating_system = str(operating_system)
|
self.operating_system = str(operating_system)
|
||||||
|
|||||||
@@ -10,9 +10,7 @@
|
|||||||
import itertools
|
import itertools
|
||||||
import multiprocessing.pool
|
import multiprocessing.pool
|
||||||
import os
|
import os
|
||||||
from typing import Dict # novm
|
from typing import Dict
|
||||||
|
|
||||||
import six
|
|
||||||
|
|
||||||
import archspec.cpu
|
import archspec.cpu
|
||||||
|
|
||||||
@@ -43,7 +41,7 @@
|
|||||||
# TODO: Caches at module level make it difficult to mock configurations in
|
# TODO: Caches at module level make it difficult to mock configurations in
|
||||||
# TODO: unit tests. It might be worth reworking their implementation.
|
# TODO: unit tests. It might be worth reworking their implementation.
|
||||||
#: cache of compilers constructed from config data, keyed by config entry id.
|
#: cache of compilers constructed from config data, keyed by config entry id.
|
||||||
_compiler_cache = {} # type: Dict[str, spack.compiler.Compiler]
|
_compiler_cache: Dict[str, "spack.compiler.Compiler"] = {}
|
||||||
|
|
||||||
_compiler_to_pkg = {
|
_compiler_to_pkg = {
|
||||||
"clang": "llvm+clang",
|
"clang": "llvm+clang",
|
||||||
@@ -427,7 +425,7 @@ def compiler_from_dict(items):
|
|||||||
environment,
|
environment,
|
||||||
extra_rpaths,
|
extra_rpaths,
|
||||||
enable_implicit_rpaths=implicit_rpaths,
|
enable_implicit_rpaths=implicit_rpaths,
|
||||||
**compiler_flags
|
**compiler_flags,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -677,18 +675,18 @@ def _default(fn_args):
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
version = callback(path)
|
version = callback(path)
|
||||||
if version and six.text_type(version).strip() and version != "unknown":
|
if version and str(version).strip() and version != "unknown":
|
||||||
value = fn_args._replace(id=compiler_id._replace(version=version))
|
value = fn_args._replace(id=compiler_id._replace(version=version))
|
||||||
return value, None
|
return value, None
|
||||||
|
|
||||||
error = "Couldn't get version for compiler {0}".format(path)
|
error = "Couldn't get version for compiler {0}".format(path)
|
||||||
except spack.util.executable.ProcessError as e:
|
except spack.util.executable.ProcessError as e:
|
||||||
error = "Couldn't get version for compiler {0}\n".format(path) + six.text_type(e)
|
error = "Couldn't get version for compiler {0}\n".format(path) + str(e)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
# Catching "Exception" here is fine because it just
|
# Catching "Exception" here is fine because it just
|
||||||
# means something went wrong running a candidate executable.
|
# means something went wrong running a candidate executable.
|
||||||
error = "Error while executing candidate compiler {0}" "\n{1}: {2}".format(
|
error = "Error while executing candidate compiler {0}" "\n{1}: {2}".format(
|
||||||
path, e.__class__.__name__, six.text_type(e)
|
path, e.__class__.__name__, str(e)
|
||||||
)
|
)
|
||||||
return None, error
|
return None, error
|
||||||
|
|
||||||
|
|||||||
@@ -8,15 +8,18 @@
|
|||||||
import subprocess
|
import subprocess
|
||||||
import sys
|
import sys
|
||||||
from distutils.version import StrictVersion
|
from distutils.version import StrictVersion
|
||||||
from typing import Dict, List, Set # novm
|
from typing import Dict, List, Set
|
||||||
|
|
||||||
|
import spack.compiler
|
||||||
import spack.operating_systems.windows_os
|
import spack.operating_systems.windows_os
|
||||||
|
import spack.platforms
|
||||||
import spack.util.executable
|
import spack.util.executable
|
||||||
from spack.compiler import Compiler
|
from spack.compiler import Compiler
|
||||||
from spack.error import SpackError
|
from spack.error import SpackError
|
||||||
|
from spack.version import Version
|
||||||
|
|
||||||
avail_fc_version = set() # type: Set[str]
|
avail_fc_version: Set[str] = set()
|
||||||
fc_path = dict() # type: Dict[str, str]
|
fc_path: Dict[str, str] = dict()
|
||||||
|
|
||||||
fortran_mapping = {
|
fortran_mapping = {
|
||||||
"2021.3.0": "19.29.30133",
|
"2021.3.0": "19.29.30133",
|
||||||
@@ -39,16 +42,16 @@ def get_valid_fortran_pth(comp_ver):
|
|||||||
|
|
||||||
class Msvc(Compiler):
|
class Msvc(Compiler):
|
||||||
# Subclasses use possible names of C compiler
|
# Subclasses use possible names of C compiler
|
||||||
cc_names = ["cl.exe"]
|
cc_names: List[str] = ["cl.exe"]
|
||||||
|
|
||||||
# Subclasses use possible names of C++ compiler
|
# Subclasses use possible names of C++ compiler
|
||||||
cxx_names = ["cl.exe"]
|
cxx_names: List[str] = ["cl.exe"]
|
||||||
|
|
||||||
# Subclasses use possible names of Fortran 77 compiler
|
# Subclasses use possible names of Fortran 77 compiler
|
||||||
f77_names = ["ifx.exe"] # type: List[str]
|
f77_names: List[str] = ["ifx.exe"]
|
||||||
|
|
||||||
# Subclasses use possible names of Fortran 90 compiler
|
# Subclasses use possible names of Fortran 90 compiler
|
||||||
fc_names = ["ifx.exe"] # type: List[str]
|
fc_names: List[str] = ["ifx.exe"]
|
||||||
|
|
||||||
# Named wrapper links within build_env_path
|
# Named wrapper links within build_env_path
|
||||||
# Due to the challenges of supporting compiler wrappers
|
# Due to the challenges of supporting compiler wrappers
|
||||||
@@ -91,45 +94,67 @@ def __init__(self, *args, **kwargs):
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def msvc_version(self):
|
def msvc_version(self):
|
||||||
ver = re.search(Msvc.version_regex, self.cc).group(1)
|
"""This is the VCToolset version *NOT* the actual version of the cl compiler
|
||||||
ver = "".join(ver.split(".")[:2])[:-1]
|
For CL version, query `Msvc.cl_version`"""
|
||||||
|
return Version(re.search(Msvc.version_regex, self.cc).group(1))
|
||||||
|
|
||||||
|
@property
|
||||||
|
def short_msvc_version(self):
|
||||||
|
"""
|
||||||
|
This is the shorthand VCToolset version of form
|
||||||
|
MSVC<short-ver> *NOT* the full version, for that see
|
||||||
|
Msvc.msvc_version
|
||||||
|
"""
|
||||||
|
ver = self.msvc_version[:2].joined.string[:3]
|
||||||
return "MSVC" + ver
|
return "MSVC" + ver
|
||||||
|
|
||||||
|
@property
|
||||||
|
def cl_version(self):
|
||||||
|
"""Cl toolset version"""
|
||||||
|
return spack.compiler.get_compiler_version_output(self.cc)
|
||||||
|
|
||||||
def setup_custom_environment(self, pkg, env):
|
def setup_custom_environment(self, pkg, env):
|
||||||
"""Set environment variables for MSVC using the
|
"""Set environment variables for MSVC using the
|
||||||
Microsoft-provided script."""
|
Microsoft-provided script."""
|
||||||
if sys.version_info[:2] > (2, 6):
|
# Set the build environment variables for spack. Just using
|
||||||
# Set the build environment variables for spack. Just using
|
# subprocess.call() doesn't work since that operates in its own
|
||||||
# subprocess.call() doesn't work since that operates in its own
|
# environment which is destroyed (along with the adjusted variables)
|
||||||
# environment which is destroyed (along with the adjusted variables)
|
# once the process terminates. So go the long way around: examine
|
||||||
# once the process terminates. So go the long way around: examine
|
# output, sort into dictionary, use that to make the build
|
||||||
# output, sort into dictionary, use that to make the build
|
# environment.
|
||||||
# environment.
|
|
||||||
out = subprocess.check_output( # novermin
|
|
||||||
'cmd /u /c "{}" {} && set'.format(self.setvarsfile, "amd64"),
|
|
||||||
stderr=subprocess.STDOUT,
|
|
||||||
)
|
|
||||||
if sys.version_info[0] >= 3:
|
|
||||||
out = out.decode("utf-16le", errors="replace") # novermin
|
|
||||||
|
|
||||||
int_env = dict(
|
# get current platform architecture and format for vcvars argument
|
||||||
(key.lower(), value)
|
arch = spack.platforms.real_host().default.lower()
|
||||||
for key, _, value in (line.partition("=") for line in out.splitlines())
|
arch = arch.replace("-", "_")
|
||||||
if key and value
|
# vcvars can target specific sdk versions, force it to pick up concretized sdk
|
||||||
)
|
# version, if needed by spec
|
||||||
|
sdk_ver = "" if "win-sdk" not in pkg.spec else pkg.spec["win-sdk"].version.string + ".0"
|
||||||
|
# provide vcvars with msvc version selected by concretization,
|
||||||
|
# not whatever it happens to pick up on the system (highest available version)
|
||||||
|
out = subprocess.check_output( # novermin
|
||||||
|
'cmd /u /c "{}" {} {} {} && set'.format(
|
||||||
|
self.setvarsfile, arch, sdk_ver, "-vcvars_ver=%s" % self.msvc_version
|
||||||
|
),
|
||||||
|
stderr=subprocess.STDOUT,
|
||||||
|
)
|
||||||
|
if sys.version_info[0] >= 3:
|
||||||
|
out = out.decode("utf-16le", errors="replace") # novermin
|
||||||
|
|
||||||
if "path" in int_env:
|
int_env = dict(
|
||||||
env.set_path("PATH", int_env["path"].split(";"))
|
(key.lower(), value)
|
||||||
env.set_path("INCLUDE", int_env.get("include", "").split(";"))
|
for key, _, value in (line.partition("=") for line in out.splitlines())
|
||||||
env.set_path("LIB", int_env.get("lib", "").split(";"))
|
if key and value
|
||||||
|
)
|
||||||
|
|
||||||
env.set("CC", self.cc)
|
if "path" in int_env:
|
||||||
env.set("CXX", self.cxx)
|
env.set_path("PATH", int_env["path"].split(";"))
|
||||||
env.set("FC", self.fc)
|
env.set_path("INCLUDE", int_env.get("include", "").split(";"))
|
||||||
env.set("F77", self.f77)
|
env.set_path("LIB", int_env.get("lib", "").split(";"))
|
||||||
else:
|
|
||||||
# Should not this be an exception?
|
env.set("CC", self.cc)
|
||||||
print("Cannot pull msvc compiler information in Python 2.6 or below")
|
env.set("CXX", self.cxx)
|
||||||
|
env.set("FC", self.fc)
|
||||||
|
env.set("F77", self.f77)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def fc_version(cls, fc):
|
def fc_version(cls, fc):
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user