Compare commits
188 Commits
deprecatio
...
packages/p
.github/workflows/audit.yaml (vendored): 2 changed lines
@@ -66,7 +66,7 @@ jobs:
|
|||||||
./share/spack/qa/validate_last_exit.ps1
|
./share/spack/qa/validate_last_exit.ps1
|
||||||
spack -d audit externals
|
spack -d audit externals
|
||||||
./share/spack/qa/validate_last_exit.ps1
|
./share/spack/qa/validate_last_exit.ps1
|
||||||
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
|
- uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
|
||||||
if: ${{ inputs.with_coverage == 'true' && runner.os != 'Windows' }}
|
if: ${{ inputs.with_coverage == 'true' && runner.os != 'Windows' }}
|
||||||
with:
|
with:
|
||||||
name: coverage-audits-${{ matrix.system.os }}
|
name: coverage-audits-${{ matrix.system.os }}
|
||||||
|
.github/workflows/build-containers.yml (vendored): 6 changed lines
@@ -94,7 +94,7 @@ jobs:
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
- name: Upload Dockerfile
|
- name: Upload Dockerfile
|
||||||
uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
|
uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
|
||||||
with:
|
with:
|
||||||
name: dockerfiles_${{ matrix.dockerfile[0] }}
|
name: dockerfiles_${{ matrix.dockerfile[0] }}
|
||||||
path: dockerfiles
|
path: dockerfiles
|
||||||
@@ -103,7 +103,7 @@ jobs:
|
|||||||
uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf
|
uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
- name: Set up Docker Buildx
|
||||||
uses: docker/setup-buildx-action@c47758b77c9736f4b2ef4073d4d51994fabfe349
|
uses: docker/setup-buildx-action@6524bf65af31da8d45b59e8c27de4bd072b392f5
|
||||||
|
|
||||||
- name: Log in to GitHub Container Registry
|
- name: Log in to GitHub Container Registry
|
||||||
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
|
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
|
||||||
@@ -133,7 +133,7 @@ jobs:
|
|||||||
needs: deploy-images
|
needs: deploy-images
|
||||||
steps:
|
steps:
|
||||||
- name: Merge Artifacts
|
- name: Merge Artifacts
|
||||||
uses: actions/upload-artifact/merge@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
|
uses: actions/upload-artifact/merge@6f51ac03b9356f520e9adb1b1b7802705f340c2b
|
||||||
with:
|
with:
|
||||||
name: dockerfiles
|
name: dockerfiles
|
||||||
pattern: dockerfiles_*
|
pattern: dockerfiles_*
|
||||||
|
.github/workflows/coverage.yml (vendored): 2 changed lines
@@ -32,4 +32,4 @@ jobs:
|
|||||||
uses: codecov/codecov-action@05f5a9cfad807516dbbef9929c4a42df3eb78766
|
uses: codecov/codecov-action@05f5a9cfad807516dbbef9929c4a42df3eb78766
|
||||||
with:
|
with:
|
||||||
verbose: true
|
verbose: true
|
||||||
fail_ci_if_error: true
|
fail_ci_if_error: false
|
||||||
|
.github/workflows/unit_tests.yaml (vendored): 36 changed lines
@@ -15,17 +15,17 @@ jobs:
|
|||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
os: [ubuntu-latest]
|
os: [ubuntu-latest]
|
||||||
python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12']
|
python-version: ['3.8', '3.9', '3.10', '3.11', '3.12']
|
||||||
on_develop:
|
on_develop:
|
||||||
- ${{ github.ref == 'refs/heads/develop' }}
|
- ${{ github.ref == 'refs/heads/develop' }}
|
||||||
include:
|
include:
|
||||||
- python-version: '3.6'
|
- python-version: '3.6'
|
||||||
os: ubuntu-20.04
|
os: ubuntu-20.04
|
||||||
on_develop: ${{ github.ref == 'refs/heads/develop' }}
|
on_develop: ${{ github.ref == 'refs/heads/develop' }}
|
||||||
exclude:
|
|
||||||
- python-version: '3.7'
|
- python-version: '3.7'
|
||||||
os: ubuntu-latest
|
os: ubuntu-22.04
|
||||||
on_develop: false
|
on_develop: ${{ github.ref == 'refs/heads/develop' }}
|
||||||
|
exclude:
|
||||||
- python-version: '3.8'
|
- python-version: '3.8'
|
||||||
os: ubuntu-latest
|
os: ubuntu-latest
|
||||||
on_develop: false
|
on_develop: false
|
||||||
@@ -52,7 +52,13 @@ jobs:
|
|||||||
# Needed for unit tests
|
# Needed for unit tests
|
||||||
sudo apt-get -y install \
|
sudo apt-get -y install \
|
||||||
coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build \
|
coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build \
|
||||||
cmake bison libbison-dev kcov
|
cmake bison libbison-dev subversion
|
||||||
|
# On ubuntu 24.04, kcov was removed. It may come back in some future Ubuntu
|
||||||
|
- name: Set up Homebrew
|
||||||
|
id: set-up-homebrew
|
||||||
|
uses: Homebrew/actions/setup-homebrew@40e9946c182a64b3db1bf51be0dcb915f7802aa9
|
||||||
|
- name: Install kcov with brew
|
||||||
|
run: "brew install kcov"
|
||||||
- name: Install Python packages
|
- name: Install Python packages
|
||||||
run: |
|
run: |
|
||||||
pip install --upgrade pip setuptools pytest pytest-xdist pytest-cov
|
pip install --upgrade pip setuptools pytest pytest-xdist pytest-cov
|
||||||
@@ -80,7 +86,7 @@ jobs:
|
|||||||
UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
|
UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
|
||||||
run: |
|
run: |
|
||||||
share/spack/qa/run-unit-tests
|
share/spack/qa/run-unit-tests
|
||||||
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
|
- uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
|
||||||
with:
|
with:
|
||||||
name: coverage-${{ matrix.os }}-python${{ matrix.python-version }}
|
name: coverage-${{ matrix.os }}-python${{ matrix.python-version }}
|
||||||
path: coverage
|
path: coverage
|
||||||
@@ -99,7 +105,13 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
sudo apt-get -y update
|
sudo apt-get -y update
|
||||||
# Needed for shell tests
|
# Needed for shell tests
|
||||||
sudo apt-get install -y coreutils kcov csh zsh tcsh fish dash bash
|
sudo apt-get install -y coreutils csh zsh tcsh fish dash bash subversion
|
||||||
|
# On ubuntu 24.04, kcov was removed. It may come back in some future Ubuntu
|
||||||
|
- name: Set up Homebrew
|
||||||
|
id: set-up-homebrew
|
||||||
|
uses: Homebrew/actions/setup-homebrew@40e9946c182a64b3db1bf51be0dcb915f7802aa9
|
||||||
|
- name: Install kcov with brew
|
||||||
|
run: "brew install kcov"
|
||||||
- name: Install Python packages
|
- name: Install Python packages
|
||||||
run: |
|
run: |
|
||||||
pip install --upgrade pip setuptools pytest coverage[toml] pytest-xdist
|
pip install --upgrade pip setuptools pytest coverage[toml] pytest-xdist
|
||||||
@@ -113,7 +125,7 @@ jobs:
|
|||||||
COVERAGE: true
|
COVERAGE: true
|
||||||
run: |
|
run: |
|
||||||
share/spack/qa/run-shell-tests
|
share/spack/qa/run-shell-tests
|
||||||
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
|
- uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
|
||||||
with:
|
with:
|
||||||
name: coverage-shell
|
name: coverage-shell
|
||||||
path: coverage
|
path: coverage
|
||||||
@@ -134,7 +146,7 @@ jobs:
|
|||||||
- name: Setup repo and non-root user
|
- name: Setup repo and non-root user
|
||||||
run: |
|
run: |
|
||||||
git --version
|
git --version
|
||||||
git config --global --add safe.directory /__w/spack/spack
|
git config --global --add safe.directory '*'
|
||||||
git fetch --unshallow
|
git fetch --unshallow
|
||||||
. .github/workflows/bin/setup_git.sh
|
. .github/workflows/bin/setup_git.sh
|
||||||
useradd spack-test
|
useradd spack-test
|
||||||
@@ -175,7 +187,7 @@ jobs:
|
|||||||
spack bootstrap status
|
spack bootstrap status
|
||||||
spack solve zlib
|
spack solve zlib
|
||||||
spack unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml lib/spack/spack/test/concretization/core.py
|
spack unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml lib/spack/spack/test/concretization/core.py
|
||||||
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
|
- uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
|
||||||
with:
|
with:
|
||||||
name: coverage-clingo-cffi
|
name: coverage-clingo-cffi
|
||||||
path: coverage
|
path: coverage
|
||||||
@@ -213,7 +225,7 @@ jobs:
|
|||||||
$(which spack) solve zlib
|
$(which spack) solve zlib
|
||||||
common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
|
common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
|
||||||
$(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
|
$(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
|
||||||
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
|
- uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
|
||||||
with:
|
with:
|
||||||
name: coverage-${{ matrix.os }}-python${{ matrix.python-version }}
|
name: coverage-${{ matrix.os }}-python${{ matrix.python-version }}
|
||||||
path: coverage
|
path: coverage
|
||||||
@@ -244,7 +256,7 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
spack unit-test -x --verbose --cov --cov-config=pyproject.toml
|
spack unit-test -x --verbose --cov --cov-config=pyproject.toml
|
||||||
./share/spack/qa/validate_last_exit.ps1
|
./share/spack/qa/validate_last_exit.ps1
|
||||||
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
|
- uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
|
||||||
with:
|
with:
|
||||||
name: coverage-windows
|
name: coverage-windows
|
||||||
path: coverage
|
path: coverage
|
||||||
|
.github/workflows/valid-style.yml (vendored): 42 changed lines
@@ -13,8 +13,7 @@ concurrency:
|
|||||||
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
# Validate that the code can be run on all the Python versions
|
# Validate that the code can be run on all the Python versions supported by Spack
|
||||||
# supported by Spack
|
|
||||||
validate:
|
validate:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
@@ -74,7 +73,7 @@ jobs:
|
|||||||
- name: Setup repo and non-root user
|
- name: Setup repo and non-root user
|
||||||
run: |
|
run: |
|
||||||
git --version
|
git --version
|
||||||
git config --global --add safe.directory /__w/spack/spack
|
git config --global --add safe.directory '*'
|
||||||
git fetch --unshallow
|
git fetch --unshallow
|
||||||
. .github/workflows/bin/setup_git.sh
|
. .github/workflows/bin/setup_git.sh
|
||||||
useradd spack-test
|
useradd spack-test
|
||||||
@@ -87,6 +86,7 @@ jobs:
|
|||||||
spack -d bootstrap now --dev
|
spack -d bootstrap now --dev
|
||||||
spack -d style -t black
|
spack -d style -t black
|
||||||
spack unit-test -V
|
spack unit-test -V
|
||||||
|
# Check we don't make the situation with circular imports worse
|
||||||
import-check:
|
import-check:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
@@ -121,28 +121,46 @@ jobs:
|
|||||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||||
with:
|
with:
|
||||||
repository: haampie/circular-import-fighter
|
repository: haampie/circular-import-fighter
|
||||||
ref: 9f60f51bc7134e0be73f27623f1b0357d1718427
|
ref: b5d6ce9be35f602cca7d5a6aa0259fca10639cca
|
||||||
path: circular-import-fighter
|
path: circular-import-fighter
|
||||||
- name: Install dependencies
|
- name: Install dependencies
|
||||||
working-directory: circular-import-fighter
|
working-directory: circular-import-fighter
|
||||||
run: make -j dependencies
|
run: make -j dependencies
|
||||||
- name: Import cycles before
|
- name: Problematic imports before
|
||||||
working-directory: circular-import-fighter
|
working-directory: circular-import-fighter
|
||||||
run: make SPACK_ROOT=../old && cp solution solution.old
|
run: make SPACK_ROOT=../old SUFFIX=.old
|
||||||
- name: Import cycles after
|
- name: Problematic imports after
|
||||||
working-directory: circular-import-fighter
|
working-directory: circular-import-fighter
|
||||||
run: make clean-graph && make SPACK_ROOT=../new && cp solution solution.new
|
run: make SPACK_ROOT=../new SUFFIX=.new
|
||||||
- name: Compare import cycles
|
- name: Compare import cycles
|
||||||
working-directory: circular-import-fighter
|
working-directory: circular-import-fighter
|
||||||
run: |
|
run: |
|
||||||
edges_before="$(grep -oP 'edges to delete: \K\d+' solution.old)"
|
edges_before="$(head -n1 solution.old)"
|
||||||
edges_after="$(grep -oP 'edges to delete: \K\d+' solution.new)"
|
edges_after="$(head -n1 solution.new)"
|
||||||
if [ "$edges_after" -gt "$edges_before" ]; then
|
if [ "$edges_after" -gt "$edges_before" ]; then
|
||||||
printf '\033[1;31mImport check failed: %s imports need to be deleted, ' "$edges_after"
|
printf '\033[1;31mImport check failed: %s imports need to be deleted, ' "$edges_after"
|
||||||
printf 'previously this was %s\033[0m\n' "$edges_before"
|
printf 'previously this was %s\033[0m\n' "$edges_before"
|
||||||
printf 'Compare \033[1;97m"Import cycles before"\033[0m and '
|
printf 'Compare \033[1;97m"Problematic imports before"\033[0m and '
|
||||||
printf '\033[1;97m"Import cycles after"\033[0m to see problematic imports.\n'
|
printf '\033[1;97m"Problematic imports after"\033[0m.\n'
|
||||||
exit 1
|
exit 1
|
||||||
else
|
else
|
||||||
printf '\033[1;32mImport check passed: %s <= %s\033[0m\n' "$edges_after" "$edges_before"
|
printf '\033[1;32mImport check passed: %s <= %s\033[0m\n' "$edges_after" "$edges_before"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
# Further style checks from pylint
|
||||||
|
pylint:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
- uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
|
||||||
|
with:
|
||||||
|
python-version: '3.13'
|
||||||
|
cache: 'pip'
|
||||||
|
- name: Install Python packages
|
||||||
|
run: |
|
||||||
|
pip install --upgrade pip setuptools pylint
|
||||||
|
- name: Pylint (Spack Core)
|
||||||
|
run: |
|
||||||
|
pylint -j 4 --disable=all --enable=unspecified-encoding --ignore-paths=lib/spack/external lib
|
||||||
|
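The new pylint job above runs with every check disabled except `unspecified-encoding` (pylint's W1514), which flags `open()` calls that omit an explicit `encoding` argument. A minimal sketch of the pattern it complains about and the accepted form (the function names and file contents are illustrative, not taken from the Spack sources):

```python
# Flagged by W1514: no explicit encoding, so the result depends on the platform locale.
def read_config_bad(path):
    with open(path) as f:  # would be reported as unspecified-encoding (W1514)
        return f.read()


# Accepted: the encoding is stated explicitly, matching the changes elsewhere in this
# diff that add encoding arguments to open() and os.fdopen() calls.
def read_config_good(path):
    with open(path, encoding="utf-8") as f:
        return f.read()
```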
@@ -102,6 +102,6 @@ PackageName: sbang
|
|||||||
PackageHomePage: https://github.com/spack/sbang
|
PackageHomePage: https://github.com/spack/sbang
|
||||||
PackageLicenseDeclared: Apache-2.0 OR MIT
|
PackageLicenseDeclared: Apache-2.0 OR MIT
|
||||||
|
|
||||||
PackageName: six
|
PackageName: typing_extensions
|
||||||
PackageHomePage: https://pypi.python.org/pypi/six
|
PackageHomePage: https://pypi.org/project/typing-extensions/
|
||||||
PackageLicenseDeclared: MIT
|
PackageLicenseDeclared: Python-2.0
|
||||||
|
@@ -194,6 +194,12 @@ config:
|
|||||||
# executables with many dependencies, in particular on slow filesystems.
|
# executables with many dependencies, in particular on slow filesystems.
|
||||||
bind: false
|
bind: false
|
||||||
|
|
||||||
|
# Controls the handling of missing dynamic libraries after installation.
|
||||||
|
# Options are ignore (default), warn, or error. If set to error, the
|
||||||
|
# installation fails if installed binaries reference dynamic libraries that
|
||||||
|
# are not found in their specified rpaths.
|
||||||
|
missing_library_policy: ignore
|
||||||
|
|
||||||
|
|
||||||
# Set to 'false' to allow installation on filesystems that doesn't allow setgid bit
|
# Set to 'false' to allow installation on filesystems that doesn't allow setgid bit
|
||||||
# manipulation by unprivileged user (e.g. AFS)
|
# manipulation by unprivileged user (e.g. AFS)
|
||||||
|
@@ -265,25 +265,30 @@ infrastructure, or to cache Spack built binaries in Github Actions and
|
|||||||
GitLab CI.
|
GitLab CI.
|
||||||
|
|
||||||
To get started, configure an OCI mirror using ``oci://`` as the scheme,
|
To get started, configure an OCI mirror using ``oci://`` as the scheme,
|
||||||
and optionally specify a username and password (or personal access token):
|
and optionally specify variables that hold the username and password (or
|
||||||
|
personal access token) for the registry:
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ spack mirror add --oci-username username --oci-password password my_registry oci://example.com/my_image
|
$ spack mirror add --oci-username-variable REGISTRY_USER \
|
||||||
|
--oci-password-variable REGISTRY_TOKEN \
|
||||||
|
my_registry oci://example.com/my_image
|
||||||
|
|
||||||
Spack follows the naming conventions of Docker, with Dockerhub as the default
|
Spack follows the naming conventions of Docker, with Dockerhub as the default
|
||||||
registry. To use Dockerhub, you can omit the registry domain:
|
registry. To use Dockerhub, you can omit the registry domain:
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
$ spack mirror add --oci-username username --oci-password password my_registry oci://username/my_image
|
$ spack mirror add ... my_registry oci://username/my_image
|
||||||
|
|
||||||
From here, you can use the mirror as any other build cache:
|
From here, you can use the mirror as any other build cache:
|
||||||
|
|
||||||
.. code-block:: console
|
.. code-block:: console
|
||||||
|
|
||||||
|
$ export REGISTRY_USER=...
|
||||||
|
$ export REGISTRY_TOKEN=...
|
||||||
$ spack buildcache push my_registry <specs...> # push to the registry
|
$ spack buildcache push my_registry <specs...> # push to the registry
|
||||||
$ spack install <specs...> # install from the registry
|
$ spack install <specs...> # or install from the registry
|
||||||
|
|
||||||
A unique feature of buildcaches on top of OCI registries is that it's incredibly
|
A unique feature of buildcaches on top of OCI registries is that it's incredibly
|
||||||
easy to generate get a runnable container image with the binaries installed. This
|
easy to generate get a runnable container image with the binaries installed. This
|
||||||
|
@@ -25,6 +25,14 @@ QMake does not appear to have a standardized way of specifying
|
|||||||
the installation directory, so you may have to set environment
|
the installation directory, so you may have to set environment
|
||||||
variables or edit ``*.pro`` files to get things working properly.
|
variables or edit ``*.pro`` files to get things working properly.
|
||||||
|
|
||||||
|
QMake packages will depend on the virtual ``qmake`` package which
|
||||||
|
is provided by multiple versions of Qt: ``qt`` provides Qt up to
|
||||||
|
Qt5, and ``qt-base`` provides Qt from version Qt6 onwards. This
|
||||||
|
split was motivated by the desire to split the single Qt package
|
||||||
|
into its components to allow for more fine-grained installation.
|
||||||
|
To depend on a specific version, refer to the documentation on
|
||||||
|
:ref:`virtual-dependencies`.
|
||||||
|
|
||||||
^^^^^^
|
^^^^^^
|
||||||
Phases
|
Phases
|
||||||
^^^^^^
|
^^^^^^
|
||||||
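For illustration, a hypothetical QMake-based package might express this dependency as sketched below. The package name and URL are made up, and the provider names follow the documentation above; depending on the virtual `qmake` package lets the concretizer choose the provider, while depending on `qt` or `qt-base` directly pins one.

```python
# Hypothetical package (not in the Spack repository) illustrating a dependency
# on the virtual `qmake` package described above.
from spack.package import *


class MyQtApp(QMakePackage):
    """Illustrative QMake-based package."""

    homepage = "https://example.com/my-qt-app"

    # Any provider of the virtual `qmake` package satisfies this dependency;
    # per the docs above, `qt` provides it up to Qt5 and `qt-base` from Qt6 on.
    depends_on("qmake", type="build")

    # To require a particular provider, depend on it directly instead, e.g.:
    # depends_on("qt-base", type="build")
```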
|
@@ -38,9 +38,11 @@ just have to configure and OCI registry and run ``spack buildcache push``.
|
|||||||
spack -e . install
|
spack -e . install
|
||||||
|
|
||||||
# Configure the registry
|
# Configure the registry
|
||||||
spack -e . mirror add --oci-username ... --oci-password ... container-registry oci://example.com/name/image
|
spack -e . mirror add --oci-username-variable REGISTRY_USER \
|
||||||
|
--oci-password-variable REGISTRY_TOKEN \
|
||||||
|
container-registry oci://example.com/name/image
|
||||||
|
|
||||||
# Push the image
|
# Push the image (do set REGISTRY_USER and REGISTRY_TOKEN)
|
||||||
spack -e . buildcache push --update-index --base-image ubuntu:22.04 --tag my_env container-registry
|
spack -e . buildcache push --update-index --base-image ubuntu:22.04 --tag my_env container-registry
|
||||||
|
|
||||||
The resulting container image can then be run as follows:
|
The resulting container image can then be run as follows:
|
||||||
|
@@ -178,8 +178,8 @@ Spec-related modules
|
|||||||
Contains :class:`~spack.spec.Spec`. Also implements most of the logic for concretization
|
Contains :class:`~spack.spec.Spec`. Also implements most of the logic for concretization
|
||||||
of specs.
|
of specs.
|
||||||
|
|
||||||
:mod:`spack.parser`
|
:mod:`spack.spec_parser`
|
||||||
Contains :class:`~spack.parser.SpecParser` and functions related to parsing specs.
|
Contains :class:`~spack.spec_parser.SpecParser` and functions related to parsing specs.
|
||||||
|
|
||||||
:mod:`spack.version`
|
:mod:`spack.version`
|
||||||
Implements a simple :class:`~spack.version.Version` class with simple
|
Implements a simple :class:`~spack.version.Version` class with simple
|
||||||
|
@@ -5137,7 +5137,7 @@ other checks.
|
|||||||
- Not applicable
|
- Not applicable
|
||||||
* - :ref:`PythonPackage <pythonpackage>`
|
* - :ref:`PythonPackage <pythonpackage>`
|
||||||
- Not applicable
|
- Not applicable
|
||||||
- ``test`` (module imports)
|
- ``test_imports`` (module imports)
|
||||||
* - :ref:`QMakePackage <qmakepackage>`
|
* - :ref:`QMakePackage <qmakepackage>`
|
||||||
- ``check`` (``make check``)
|
- ``check`` (``make check``)
|
||||||
- Not applicable
|
- Not applicable
|
||||||
@@ -5146,7 +5146,7 @@ other checks.
|
|||||||
- Not applicable
|
- Not applicable
|
||||||
* - :ref:`SIPPackage <sippackage>`
|
* - :ref:`SIPPackage <sippackage>`
|
||||||
- Not applicable
|
- Not applicable
|
||||||
- ``test`` (module imports)
|
- ``test_imports`` (module imports)
|
||||||
* - :ref:`WafPackage <wafpackage>`
|
* - :ref:`WafPackage <wafpackage>`
|
||||||
- ``build_test`` (must be overridden)
|
- ``build_test`` (must be overridden)
|
||||||
- ``install_test`` (must be overridden)
|
- ``install_test`` (must be overridden)
|
||||||
|
@@ -1,5 +1,5 @@
|
|||||||
sphinx==8.1.3
|
sphinx==8.1.3
|
||||||
sphinxcontrib-programoutput==0.17
|
sphinxcontrib-programoutput==0.18
|
||||||
sphinx_design==0.6.1
|
sphinx_design==0.6.1
|
||||||
sphinx-rtd-theme==3.0.2
|
sphinx-rtd-theme==3.0.2
|
||||||
python-levenshtein==0.26.1
|
python-levenshtein==0.26.1
|
||||||
|
lib/spack/external/_vendoring/typing_extensions.LICENSE (vendored, new file): 254 added lines
@@ -0,0 +1,254 @@
|
|||||||
|
A. HISTORY OF THE SOFTWARE
|
||||||
|
==========================
|
||||||
|
|
||||||
|
Python was created in the early 1990s by Guido van Rossum at Stichting
|
||||||
|
Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
|
||||||
|
as a successor of a language called ABC. Guido remains Python's
|
||||||
|
principal author, although it includes many contributions from others.
|
||||||
|
|
||||||
|
In 1995, Guido continued his work on Python at the Corporation for
|
||||||
|
National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
|
||||||
|
in Reston, Virginia where he released several versions of the
|
||||||
|
software.
|
||||||
|
|
||||||
|
In May 2000, Guido and the Python core development team moved to
|
||||||
|
BeOpen.com to form the BeOpen PythonLabs team. In October of the same
|
||||||
|
year, the PythonLabs team moved to Digital Creations (now Zope
|
||||||
|
Corporation, see http://www.zope.com). In 2001, the Python Software
|
||||||
|
Foundation (PSF, see http://www.python.org/psf/) was formed, a
|
||||||
|
non-profit organization created specifically to own Python-related
|
||||||
|
Intellectual Property. Zope Corporation is a sponsoring member of
|
||||||
|
the PSF.
|
||||||
|
|
||||||
|
All Python releases are Open Source (see http://www.opensource.org for
|
||||||
|
the Open Source Definition). Historically, most, but not all, Python
|
||||||
|
releases have also been GPL-compatible; the table below summarizes
|
||||||
|
the various releases.
|
||||||
|
|
||||||
|
Release Derived Year Owner GPL-
|
||||||
|
from compatible? (1)
|
||||||
|
|
||||||
|
0.9.0 thru 1.2 1991-1995 CWI yes
|
||||||
|
1.3 thru 1.5.2 1.2 1995-1999 CNRI yes
|
||||||
|
1.6 1.5.2 2000 CNRI no
|
||||||
|
2.0 1.6 2000 BeOpen.com no
|
||||||
|
1.6.1 1.6 2001 CNRI yes (2)
|
||||||
|
2.1 2.0+1.6.1 2001 PSF no
|
||||||
|
2.0.1 2.0+1.6.1 2001 PSF yes
|
||||||
|
2.1.1 2.1+2.0.1 2001 PSF yes
|
||||||
|
2.1.2 2.1.1 2002 PSF yes
|
||||||
|
2.1.3 2.1.2 2002 PSF yes
|
||||||
|
2.2 and above 2.1.1 2001-now PSF yes
|
||||||
|
|
||||||
|
Footnotes:
|
||||||
|
|
||||||
|
(1) GPL-compatible doesn't mean that we're distributing Python under
|
||||||
|
the GPL. All Python licenses, unlike the GPL, let you distribute
|
||||||
|
a modified version without making your changes open source. The
|
||||||
|
GPL-compatible licenses make it possible to combine Python with
|
||||||
|
other software that is released under the GPL; the others don't.
|
||||||
|
|
||||||
|
(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
|
||||||
|
because its license has a choice of law clause. According to
|
||||||
|
CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
|
||||||
|
is "not incompatible" with the GPL.
|
||||||
|
|
||||||
|
Thanks to the many outside volunteers who have worked under Guido's
|
||||||
|
direction to make these releases possible.
|
||||||
|
|
||||||
|
|
||||||
|
B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
|
||||||
|
===============================================================
|
||||||
|
|
||||||
|
PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
|
||||||
|
--------------------------------------------
|
||||||
|
|
||||||
|
1. This LICENSE AGREEMENT is between the Python Software Foundation
|
||||||
|
("PSF"), and the Individual or Organization ("Licensee") accessing and
|
||||||
|
otherwise using this software ("Python") in source or binary form and
|
||||||
|
its associated documentation.
|
||||||
|
|
||||||
|
2. Subject to the terms and conditions of this License Agreement, PSF hereby
|
||||||
|
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
|
||||||
|
analyze, test, perform and/or display publicly, prepare derivative works,
|
||||||
|
distribute, and otherwise use Python alone or in any derivative version,
|
||||||
|
provided, however, that PSF's License Agreement and PSF's notice of copyright,
|
||||||
|
i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
|
||||||
|
2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved" are
|
||||||
|
retained in Python alone or in any derivative version prepared by Licensee.
|
||||||
|
|
||||||
|
3. In the event Licensee prepares a derivative work that is based on
|
||||||
|
or incorporates Python or any part thereof, and wants to make
|
||||||
|
the derivative work available to others as provided herein, then
|
||||||
|
Licensee hereby agrees to include in any such work a brief summary of
|
||||||
|
the changes made to Python.
|
||||||
|
|
||||||
|
4. PSF is making Python available to Licensee on an "AS IS"
|
||||||
|
basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
||||||
|
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
|
||||||
|
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
||||||
|
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
|
||||||
|
INFRINGE ANY THIRD PARTY RIGHTS.
|
||||||
|
|
||||||
|
5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
|
||||||
|
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
|
||||||
|
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
|
||||||
|
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
||||||
|
|
||||||
|
6. This License Agreement will automatically terminate upon a material
|
||||||
|
breach of its terms and conditions.
|
||||||
|
|
||||||
|
7. Nothing in this License Agreement shall be deemed to create any
|
||||||
|
relationship of agency, partnership, or joint venture between PSF and
|
||||||
|
Licensee. This License Agreement does not grant permission to use PSF
|
||||||
|
trademarks or trade name in a trademark sense to endorse or promote
|
||||||
|
products or services of Licensee, or any third party.
|
||||||
|
|
||||||
|
8. By copying, installing or otherwise using Python, Licensee
|
||||||
|
agrees to be bound by the terms and conditions of this License
|
||||||
|
Agreement.
|
||||||
|
|
||||||
|
|
||||||
|
BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
|
||||||
|
-------------------------------------------
|
||||||
|
|
||||||
|
BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
|
||||||
|
|
||||||
|
1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
|
||||||
|
office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
|
||||||
|
Individual or Organization ("Licensee") accessing and otherwise using
|
||||||
|
this software in source or binary form and its associated
|
||||||
|
documentation ("the Software").
|
||||||
|
|
||||||
|
2. Subject to the terms and conditions of this BeOpen Python License
|
||||||
|
Agreement, BeOpen hereby grants Licensee a non-exclusive,
|
||||||
|
royalty-free, world-wide license to reproduce, analyze, test, perform
|
||||||
|
and/or display publicly, prepare derivative works, distribute, and
|
||||||
|
otherwise use the Software alone or in any derivative version,
|
||||||
|
provided, however, that the BeOpen Python License is retained in the
|
||||||
|
Software, alone or in any derivative version prepared by Licensee.
|
||||||
|
|
||||||
|
3. BeOpen is making the Software available to Licensee on an "AS IS"
|
||||||
|
basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
||||||
|
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
|
||||||
|
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
||||||
|
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
|
||||||
|
INFRINGE ANY THIRD PARTY RIGHTS.
|
||||||
|
|
||||||
|
4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
|
||||||
|
SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
|
||||||
|
AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
|
||||||
|
DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
||||||
|
|
||||||
|
5. This License Agreement will automatically terminate upon a material
|
||||||
|
breach of its terms and conditions.
|
||||||
|
|
||||||
|
6. This License Agreement shall be governed by and interpreted in all
|
||||||
|
respects by the law of the State of California, excluding conflict of
|
||||||
|
law provisions. Nothing in this License Agreement shall be deemed to
|
||||||
|
create any relationship of agency, partnership, or joint venture
|
||||||
|
between BeOpen and Licensee. This License Agreement does not grant
|
||||||
|
permission to use BeOpen trademarks or trade names in a trademark
|
||||||
|
sense to endorse or promote products or services of Licensee, or any
|
||||||
|
third party. As an exception, the "BeOpen Python" logos available at
|
||||||
|
http://www.pythonlabs.com/logos.html may be used according to the
|
||||||
|
permissions granted on that web page.
|
||||||
|
|
||||||
|
7. By copying, installing or otherwise using the software, Licensee
|
||||||
|
agrees to be bound by the terms and conditions of this License
|
||||||
|
Agreement.
|
||||||
|
|
||||||
|
|
||||||
|
CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
|
||||||
|
---------------------------------------
|
||||||
|
|
||||||
|
1. This LICENSE AGREEMENT is between the Corporation for National
|
||||||
|
Research Initiatives, having an office at 1895 Preston White Drive,
|
||||||
|
Reston, VA 20191 ("CNRI"), and the Individual or Organization
|
||||||
|
("Licensee") accessing and otherwise using Python 1.6.1 software in
|
||||||
|
source or binary form and its associated documentation.
|
||||||
|
|
||||||
|
2. Subject to the terms and conditions of this License Agreement, CNRI
|
||||||
|
hereby grants Licensee a nonexclusive, royalty-free, world-wide
|
||||||
|
license to reproduce, analyze, test, perform and/or display publicly,
|
||||||
|
prepare derivative works, distribute, and otherwise use Python 1.6.1
|
||||||
|
alone or in any derivative version, provided, however, that CNRI's
|
||||||
|
License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
|
||||||
|
1995-2001 Corporation for National Research Initiatives; All Rights
|
||||||
|
Reserved" are retained in Python 1.6.1 alone or in any derivative
|
||||||
|
version prepared by Licensee. Alternately, in lieu of CNRI's License
|
||||||
|
Agreement, Licensee may substitute the following text (omitting the
|
||||||
|
quotes): "Python 1.6.1 is made available subject to the terms and
|
||||||
|
conditions in CNRI's License Agreement. This Agreement together with
|
||||||
|
Python 1.6.1 may be located on the Internet using the following
|
||||||
|
unique, persistent identifier (known as a handle): 1895.22/1013. This
|
||||||
|
Agreement may also be obtained from a proxy server on the Internet
|
||||||
|
using the following URL: http://hdl.handle.net/1895.22/1013".
|
||||||
|
|
||||||
|
3. In the event Licensee prepares a derivative work that is based on
|
||||||
|
or incorporates Python 1.6.1 or any part thereof, and wants to make
|
||||||
|
the derivative work available to others as provided herein, then
|
||||||
|
Licensee hereby agrees to include in any such work a brief summary of
|
||||||
|
the changes made to Python 1.6.1.
|
||||||
|
|
||||||
|
4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
|
||||||
|
basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
|
||||||
|
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
|
||||||
|
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
|
||||||
|
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
|
||||||
|
INFRINGE ANY THIRD PARTY RIGHTS.
|
||||||
|
|
||||||
|
5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
|
||||||
|
1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
|
||||||
|
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
|
||||||
|
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
|
||||||
|
|
||||||
|
6. This License Agreement will automatically terminate upon a material
|
||||||
|
breach of its terms and conditions.
|
||||||
|
|
||||||
|
7. This License Agreement shall be governed by the federal
|
||||||
|
intellectual property law of the United States, including without
|
||||||
|
limitation the federal copyright law, and, to the extent such
|
||||||
|
U.S. federal law does not apply, by the law of the Commonwealth of
|
||||||
|
Virginia, excluding Virginia's conflict of law provisions.
|
||||||
|
Notwithstanding the foregoing, with regard to derivative works based
|
||||||
|
on Python 1.6.1 that incorporate non-separable material that was
|
||||||
|
previously distributed under the GNU General Public License (GPL), the
|
||||||
|
law of the Commonwealth of Virginia shall govern this License
|
||||||
|
Agreement only as to issues arising under or with respect to
|
||||||
|
Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
|
||||||
|
License Agreement shall be deemed to create any relationship of
|
||||||
|
agency, partnership, or joint venture between CNRI and Licensee. This
|
||||||
|
License Agreement does not grant permission to use CNRI trademarks or
|
||||||
|
trade name in a trademark sense to endorse or promote products or
|
||||||
|
services of Licensee, or any third party.
|
||||||
|
|
||||||
|
8. By clicking on the "ACCEPT" button where indicated, or by copying,
|
||||||
|
installing or otherwise using Python 1.6.1, Licensee agrees to be
|
||||||
|
bound by the terms and conditions of this License Agreement.
|
||||||
|
|
||||||
|
ACCEPT
|
||||||
|
|
||||||
|
|
||||||
|
CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
|
||||||
|
--------------------------------------------------
|
||||||
|
|
||||||
|
Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
|
||||||
|
The Netherlands. All rights reserved.
|
||||||
|
|
||||||
|
Permission to use, copy, modify, and distribute this software and its
|
||||||
|
documentation for any purpose and without fee is hereby granted,
|
||||||
|
provided that the above copyright notice appear in all copies and that
|
||||||
|
both that copyright notice and this permission notice appear in
|
||||||
|
supporting documentation, and that the name of Stichting Mathematisch
|
||||||
|
Centrum or CWI not be used in advertising or publicity pertaining to
|
||||||
|
distribution of the software without specific, written prior
|
||||||
|
permission.
|
||||||
|
|
||||||
|
STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
|
||||||
|
THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||||
|
FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
|
||||||
|
FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||||
|
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||||
|
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
|
||||||
|
OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
lib/spack/external/_vendoring/typing_extensions.py (vendored, new file): 2908 added lines — file diff suppressed because it is too large
lib/spack/external/_vendoring/typing_extensions.pyi (vendored, new file): 1 added line
@@ -0,0 +1 @@
|
|||||||
|
from typing_extensions import *
|
lib/spack/external/vendor.txt (vendored): 1 changed line
@@ -8,3 +8,4 @@ six==1.16.0
|
|||||||
macholib==1.16.2
|
macholib==1.16.2
|
||||||
altgraph==0.17.3
|
altgraph==0.17.3
|
||||||
ruamel.yaml==0.17.21
|
ruamel.yaml==0.17.21
|
||||||
|
typing_extensions==4.1.1
|
||||||
|
@@ -66,7 +66,7 @@ def _is_url(path_or_url: str) -> bool:
|
|||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
def system_path_filter(_func=None, arg_slice: Optional[slice] = None):
|
def _system_path_filter(_func=None, arg_slice: Optional[slice] = None):
|
||||||
"""Filters function arguments to account for platform path separators.
|
"""Filters function arguments to account for platform path separators.
|
||||||
Optional slicing range can be specified to select specific arguments
|
Optional slicing range can be specified to select specific arguments
|
||||||
|
|
||||||
@@ -100,6 +100,16 @@ def path_filter_caller(*args, **kwargs):
|
|||||||
return holder_func
|
return holder_func
|
||||||
|
|
||||||
|
|
||||||
|
def _noop_decorator(_func=None, arg_slice: Optional[slice] = None):
|
||||||
|
return _func if _func else lambda x: x
|
||||||
|
|
||||||
|
|
||||||
|
if sys.platform == "win32":
|
||||||
|
system_path_filter = _system_path_filter
|
||||||
|
else:
|
||||||
|
system_path_filter = _noop_decorator
|
||||||
|
|
||||||
|
|
||||||
def sanitize_win_longpath(path: str) -> str:
|
def sanitize_win_longpath(path: str) -> str:
|
||||||
"""Strip Windows extended path prefix from strings
|
"""Strip Windows extended path prefix from strings
|
||||||
Returns sanitized string.
|
Returns sanitized string.
|
||||||
|
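The hunk above makes path filtering a no-op on non-Windows platforms by choosing the decorator once, at import time, instead of checking the platform on every call. A generic sketch of that pattern, assuming nothing about Spack internals beyond what the diff shows (the filtering decorator body and the decorated function are placeholders):

```python
import sys
from typing import Optional


def _path_filter(_func=None, arg_slice: Optional[slice] = None):
    """Real decorator: wraps the function to rewrite its path arguments (body elided)."""
    def decorator(func):
        def wrapper(*args, **kwargs):
            # ... rewrite platform path separators in args/kwargs here ...
            return func(*args, **kwargs)
        return wrapper
    return decorator(_func) if _func else decorator


def _noop_decorator(_func=None, arg_slice: Optional[slice] = None):
    """No-op stand-in: returns the function (or a pass-through decorator) unchanged."""
    return _func if _func else lambda x: x


# Select the implementation once, at import time, so call sites pay no per-call cost
# on platforms where filtering is unnecessary.
if sys.platform == "win32":
    system_path_filter = _path_filter
else:
    system_path_filter = _noop_decorator


@system_path_filter
def join_paths(a: str, b: str) -> str:
    return a + "/" + b
```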
@@ -301,35 +301,32 @@ def filter_file(
|
|||||||
ignore_absent: bool = False,
|
ignore_absent: bool = False,
|
||||||
start_at: Optional[str] = None,
|
start_at: Optional[str] = None,
|
||||||
stop_at: Optional[str] = None,
|
stop_at: Optional[str] = None,
|
||||||
|
encoding: Optional[str] = "utf-8",
|
||||||
) -> None:
|
) -> None:
|
||||||
r"""Like sed, but uses python regular expressions.
|
r"""Like sed, but uses python regular expressions.
|
||||||
|
|
||||||
Filters every line of each file through regex and replaces the file
|
Filters every line of each file through regex and replaces the file with a filtered version.
|
||||||
with a filtered version. Preserves mode of filtered files.
|
Preserves mode of filtered files.
|
||||||
|
|
||||||
As with re.sub, ``repl`` can be either a string or a callable.
|
As with re.sub, ``repl`` can be either a string or a callable. If it is a callable, it is
|
||||||
If it is a callable, it is passed the match object and should
|
passed the match object and should return a suitable replacement string. If it is a string, it
|
||||||
return a suitable replacement string. If it is a string, it
|
can contain ``\1``, ``\2``, etc. to represent back-substitution as sed would allow.
|
||||||
can contain ``\1``, ``\2``, etc. to represent back-substitution
|
|
||||||
as sed would allow.
|
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
regex (str): The regular expression to search for
|
regex: The regular expression to search for
|
||||||
repl (str): The string to replace matches with
|
repl: The string to replace matches with
|
||||||
*filenames: One or more files to search and replace
|
*filenames: One or more files to search and replace string: Treat regex as a plain string.
|
||||||
string (bool): Treat regex as a plain string. Default it False
|
Default it False backup: Make backup file(s) suffixed with ``~``. Default is False
|
||||||
backup (bool): Make backup file(s) suffixed with ``~``. Default is False
|
ignore_absent: Ignore any files that don't exist. Default is False
|
||||||
ignore_absent (bool): Ignore any files that don't exist.
|
start_at: Marker used to start applying the replacements. If a text line matches this
|
||||||
Default is False
|
marker filtering is started at the next line. All contents before the marker and the
|
||||||
start_at (str): Marker used to start applying the replacements. If a
|
marker itself are copied verbatim. Default is to start filtering from the first line of
|
||||||
text line matches this marker filtering is started at the next line.
|
the file.
|
||||||
All contents before the marker and the marker itself are copied
|
stop_at: Marker used to stop scanning the file further. If a text line matches this marker
|
||||||
verbatim. Default is to start filtering from the first line of the
|
filtering is stopped and the rest of the file is copied verbatim. Default is to filter
|
||||||
file.
|
until the end of the file.
|
||||||
stop_at (str): Marker used to stop scanning the file further. If a text
|
encoding: The encoding to use when reading and writing the files. Default is None, which
|
||||||
line matches this marker filtering is stopped and the rest of the
|
uses the system's default encoding.
|
||||||
file is copied verbatim. Default is to filter until the end of the
|
|
||||||
file.
|
|
||||||
"""
|
"""
|
||||||
# Allow strings to use \1, \2, etc. for replacement, like sed
|
# Allow strings to use \1, \2, etc. for replacement, like sed
|
||||||
if not callable(repl):
|
if not callable(repl):
|
||||||
@@ -345,72 +342,56 @@ def groupid_to_group(x):
|
|||||||
|
|
||||||
if string:
|
if string:
|
||||||
regex = re.escape(regex)
|
regex = re.escape(regex)
|
||||||
for filename in path_to_os_path(*filenames):
|
regex_compiled = re.compile(regex)
|
||||||
msg = 'FILTER FILE: {0} [replacing "{1}"]'
|
for path in path_to_os_path(*filenames):
|
||||||
tty.debug(msg.format(filename, regex))
|
if ignore_absent and not os.path.exists(path):
|
||||||
|
tty.debug(f'FILTER FILE: file "{path}" not found. Skipping to next file.')
|
||||||
backup_filename = filename + "~"
|
|
||||||
tmp_filename = filename + ".spack~"
|
|
||||||
|
|
||||||
if ignore_absent and not os.path.exists(filename):
|
|
||||||
msg = 'FILTER FILE: file "{0}" not found. Skipping to next file.'
|
|
||||||
tty.debug(msg.format(filename))
|
|
||||||
continue
|
continue
|
||||||
|
else:
|
||||||
|
tty.debug(f'FILTER FILE: {path} [replacing "{regex}"]')
|
||||||
|
|
||||||
# Create backup file. Don't overwrite an existing backup
|
fd, temp_path = tempfile.mkstemp(
|
||||||
# file in case this file is being filtered multiple times.
|
prefix=f"{os.path.basename(path)}.", dir=os.path.dirname(path)
|
||||||
if not os.path.exists(backup_filename):
|
)
|
||||||
shutil.copy(filename, backup_filename)
|
os.close(fd)
|
||||||
|
|
||||||
# Create a temporary file to read from. We cannot use backup_filename
|
shutil.copy(path, temp_path)
|
||||||
# in case filter_file is invoked multiple times on the same file.
|
errored = False
|
||||||
shutil.copy(filename, tmp_filename)
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
# Open as a text file and filter until the end of the file is
|
# Open as a text file and filter until the end of the file is reached, or we found a
|
||||||
# reached, or we found a marker in the line if it was specified
|
# marker in the line if it was specified. To avoid translating line endings (\n to
|
||||||
#
|
# \r\n and vice-versa) use newline="".
|
||||||
# To avoid translating line endings (\n to \r\n and vice-versa)
|
with open(
|
||||||
# we force os.open to ignore translations and use the line endings
|
temp_path, mode="r", errors="surrogateescape", newline="", encoding=encoding
|
||||||
# the file comes with
|
) as input_file, open(
|
||||||
with open(tmp_filename, mode="r", errors="surrogateescape", newline="") as input_file:
|
path, mode="w", errors="surrogateescape", newline="", encoding=encoding
|
||||||
with open(filename, mode="w", errors="surrogateescape", newline="") as output_file:
|
) as output_file:
|
||||||
do_filtering = start_at is None
|
if start_at is None and stop_at is None: # common case, avoids branching in loop
|
||||||
# Using iter and readline is a workaround needed not to
|
for line in input_file:
|
||||||
# disable input_file.tell(), which will happen if we call
|
output_file.write(re.sub(regex_compiled, repl, line))
|
||||||
# input_file.next() implicitly via the for loop
|
else:
|
||||||
for line in iter(input_file.readline, ""):
|
# state is -1 before start_at; 0 between; 1 after stop_at
|
||||||
if stop_at is not None:
|
state = 0 if start_at is None else -1
|
||||||
current_position = input_file.tell()
|
for line in input_file:
|
||||||
|
if state == 0:
|
||||||
if stop_at == line.strip():
|
if stop_at == line.strip():
|
||||||
output_file.write(line)
|
state = 1
|
||||||
break
|
else:
|
||||||
if do_filtering:
|
line = re.sub(regex_compiled, repl, line)
|
||||||
filtered_line = re.sub(regex, repl, line)
|
elif state == -1 and start_at == line.strip():
|
||||||
output_file.write(filtered_line)
|
state = 0
|
||||||
else:
|
output_file.write(line)
|
||||||
do_filtering = start_at == line.strip()
|
|
||||||
output_file.write(line)
|
|
||||||
else:
|
|
||||||
current_position = None
|
|
||||||
|
|
||||||
# If we stopped filtering at some point, reopen the file in
|
|
||||||
# binary mode and copy verbatim the remaining part
|
|
||||||
if current_position and stop_at:
|
|
||||||
with open(tmp_filename, mode="rb") as input_binary_buffer:
|
|
||||||
input_binary_buffer.seek(current_position)
|
|
||||||
with open(filename, mode="ab") as output_binary_buffer:
|
|
||||||
output_binary_buffer.writelines(input_binary_buffer.readlines())
|
|
||||||
|
|
||||||
except BaseException:
|
except BaseException:
|
||||||
# clean up the original file on failure.
|
# restore the original file
|
||||||
shutil.move(backup_filename, filename)
|
os.rename(temp_path, path)
|
||||||
|
errored = True
|
||||||
raise
|
raise
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
os.remove(tmp_filename)
|
if not errored and not backup:
|
||||||
if not backup and os.path.exists(backup_filename):
|
os.unlink(temp_path)
|
||||||
os.remove(backup_filename)
|
|
||||||
|
|
||||||
|
|
||||||
class FileFilter:
|
class FileFilter:
|
||||||
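A short usage sketch of the rewritten `filter_file`. The paths and patterns are illustrative, the import assumes the usual `llnl.util.filesystem` location of this helper, and the keywords follow the signature shown in the diff above, including the new `encoding` parameter:

```python
from llnl.util.filesystem import filter_file

# Replace a hard-coded compiler with a generic one in a generated Makefile.
filter_file(r"^CC\s*=.*$", "CC = cc", "Makefile", backup=False)

# Only rewrite lines between two marker lines; everything outside is copied verbatim.
filter_file(
    "https://example.com", "https://mirror.example.org", "config.ini",
    string=True,             # treat the pattern as a plain string, not a regex
    start_at="[downloads]",  # begin filtering on the line after this marker
    stop_at="[advanced]",    # stop filtering once this marker line is seen
    encoding="utf-8",        # new in this change: explicit file encoding
)
```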
@@ -1115,12 +1096,12 @@ def hash_directory(directory, ignore=[]):
|
|||||||
|
|
||||||
@contextmanager
|
@contextmanager
|
||||||
@system_path_filter
|
@system_path_filter
|
||||||
def write_tmp_and_move(filename):
|
def write_tmp_and_move(filename: str, *, encoding: Optional[str] = None):
|
||||||
"""Write to a temporary file, then move into place."""
|
"""Write to a temporary file, then move into place."""
|
||||||
dirname = os.path.dirname(filename)
|
dirname = os.path.dirname(filename)
|
||||||
basename = os.path.basename(filename)
|
basename = os.path.basename(filename)
|
||||||
tmp = os.path.join(dirname, ".%s.tmp" % basename)
|
tmp = os.path.join(dirname, ".%s.tmp" % basename)
|
||||||
with open(tmp, "w") as f:
|
with open(tmp, "w", encoding=encoding) as f:
|
||||||
yield f
|
yield f
|
||||||
shutil.move(tmp, filename)
|
shutil.move(tmp, filename)
|
||||||
|
|
||||||
|
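A usage sketch of `write_tmp_and_move` with the new keyword-only `encoding` argument (the target file is illustrative; the import assumes the helper lives in `llnl.util.filesystem` as in the hunk above):

```python
from llnl.util.filesystem import write_tmp_and_move

# Contents are written to a hidden ".spack.yaml.tmp" file in the same directory
# and moved into place only after the with-block exits, so readers never see a
# partially written file.
with write_tmp_and_move("spack.yaml", encoding="utf-8") as f:
    f.write("spack:\n  specs: [zlib]\n")
```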
@@ -96,8 +96,8 @@ def get_fh(self, path: str) -> IO:
|
|||||||
Arguments:
|
Arguments:
|
||||||
path: path to lock file we want a filehandle for
|
path: path to lock file we want a filehandle for
|
||||||
"""
|
"""
|
||||||
# Open writable files as 'r+' so we can upgrade to write later
|
# Open writable files as rb+ so we can upgrade to write later
|
||||||
os_mode, fh_mode = (os.O_RDWR | os.O_CREAT), "r+"
|
os_mode, fh_mode = (os.O_RDWR | os.O_CREAT), "rb+"
|
||||||
|
|
||||||
pid = os.getpid()
|
pid = os.getpid()
|
||||||
open_file = None # OpenFile object, if there is one
|
open_file = None # OpenFile object, if there is one
|
||||||
@@ -124,7 +124,7 @@ def get_fh(self, path: str) -> IO:
|
|||||||
# we know path exists but not if it's writable. If it's read-only,
|
# we know path exists but not if it's writable. If it's read-only,
|
||||||
# only open the file for reading (and fail if we're trying to get
|
# only open the file for reading (and fail if we're trying to get
|
||||||
# an exclusive (write) lock on it)
|
# an exclusive (write) lock on it)
|
||||||
os_mode, fh_mode = os.O_RDONLY, "r"
|
os_mode, fh_mode = os.O_RDONLY, "rb"
|
||||||
|
|
||||||
fd = os.open(path, os_mode)
|
fd = os.open(path, os_mode)
|
||||||
fh = os.fdopen(fd, fh_mode)
|
fh = os.fdopen(fd, fh_mode)
|
||||||
@@ -243,7 +243,7 @@ def __init__(
|
|||||||
helpful for distinguishing between different Spack locks.
|
helpful for distinguishing between different Spack locks.
|
||||||
"""
|
"""
|
||||||
self.path = path
|
self.path = path
|
||||||
self._file: Optional[IO] = None
|
self._file: Optional[IO[bytes]] = None
|
||||||
self._reads = 0
|
self._reads = 0
|
||||||
self._writes = 0
|
self._writes = 0
|
||||||
|
|
||||||
@@ -329,9 +329,9 @@ def _lock(self, op: int, timeout: Optional[float] = None) -> Tuple[float, int]:
|
|||||||
self._ensure_parent_directory()
|
self._ensure_parent_directory()
|
||||||
self._file = FILE_TRACKER.get_fh(self.path)
|
self._file = FILE_TRACKER.get_fh(self.path)
|
||||||
|
|
||||||
if LockType.to_module(op) == fcntl.LOCK_EX and self._file.mode == "r":
|
if LockType.to_module(op) == fcntl.LOCK_EX and self._file.mode == "rb":
|
||||||
# Attempt to upgrade to write lock w/a read-only file.
|
# Attempt to upgrade to write lock w/a read-only file.
|
||||||
# If the file were writable, we'd have opened it 'r+'
|
# If the file were writable, we'd have opened it rb+
|
||||||
raise LockROFileError(self.path)
|
raise LockROFileError(self.path)
|
||||||
|
|
||||||
self._log_debug(
|
self._log_debug(
|
||||||
@@ -426,7 +426,7 @@ def _read_log_debug_data(self) -> None:
|
|||||||
|
|
||||||
line = self._file.read()
|
line = self._file.read()
|
||||||
if line:
|
if line:
|
||||||
pid, host = line.strip().split(",")
|
pid, host = line.decode("utf-8").strip().split(",")
|
||||||
_, _, pid = pid.rpartition("=")
|
_, _, pid = pid.rpartition("=")
|
||||||
_, _, self.host = host.rpartition("=")
|
_, _, self.host = host.rpartition("=")
|
||||||
self.pid = int(pid)
|
self.pid = int(pid)
|
||||||
@@ -442,7 +442,7 @@ def _write_log_debug_data(self) -> None:
|
|||||||
|
|
||||||
# write pid, host to disk to sync over FS
|
# write pid, host to disk to sync over FS
|
||||||
self._file.seek(0)
|
self._file.seek(0)
|
||||||
self._file.write("pid=%s,host=%s" % (self.pid, self.host))
|
self._file.write(f"pid={self.pid},host={self.host}".encode("utf-8"))
|
||||||
self._file.truncate()
|
self._file.truncate()
|
||||||
self._file.flush()
|
self._file.flush()
|
||||||
os.fsync(self._file.fileno())
|
os.fsync(self._file.fileno())
|
||||||
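Because the lock file is now opened in binary mode (`rb+`), the pid/host debug line must be explicitly encoded on write and decoded on read. A minimal sketch of that round-trip, independent of the `Lock` class itself (the host name is an illustrative value):

```python
import os

pid, host = os.getpid(), "node001"  # illustrative values

# Write side: the file handle is binary, so the payload must be bytes.
payload = f"pid={pid},host={host}".encode("utf-8")

# Read side: decode back to str before parsing the fields, as in the diff above.
line = payload.decode("utf-8").strip()
pid_field, host_field = line.split(",")
_, _, parsed_pid = pid_field.rpartition("=")
_, _, parsed_host = host_field.rpartition("=")
assert int(parsed_pid) == pid and parsed_host == host
```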
@@ -161,7 +161,7 @@ def _err_check(result, func, args):
 )
 # Use conout$ here to handle a redirectired stdout/get active console associated
 # with spack
-with open(r"\\.\CONOUT$", "w") as conout:
+with open(r"\\.\CONOUT$", "w", encoding="utf-8") as conout:
 # Link above would use kernel32.GetStdHandle(-11) however this would not handle
 # a redirected stdout appropriately, so we always refer to the current CONSOLE out
 # which is defined as conout$ on Windows.
@@ -762,7 +762,7 @@ def __enter__(self):
 self.reader = open(self.logfile, mode="rb+")

 # Dup stdout so we can still write to it after redirection
-self.echo_writer = open(os.dup(sys.stdout.fileno()), "w")
+self.echo_writer = open(os.dup(sys.stdout.fileno()), "w", encoding=sys.stdout.encoding)
 # Redirect stdout and stderr to write to logfile
 self.stderr.redirect_stream(self.writer.fileno())
 self.stdout.redirect_stream(self.writer.fileno())
@@ -879,10 +879,13 @@ def _writer_daemon(
 write_fd.close()

 # 1. Use line buffering (3rd param = 1) since Python 3 has a bug
-# that prevents unbuffered text I/O.
-# 2. Python 3.x before 3.7 does not open with UTF-8 encoding by default
+# that prevents unbuffered text I/O. [needs citation]
+# 2. Enforce a UTF-8 interpretation of build process output with errors replaced by '?'.
+# The downside is that the log file will not contain the exact output of the build process.
 # 3. closefd=False because Connection has "ownership"
-read_file = os.fdopen(read_fd.fileno(), "r", 1, encoding="utf-8", closefd=False)
+read_file = os.fdopen(
+read_fd.fileno(), "r", 1, encoding="utf-8", errors="replace", closefd=False
+)

 if stdin_fd:
 stdin_file = os.fdopen(stdin_fd.fileno(), closefd=False)
@@ -928,11 +931,7 @@ def _writer_daemon(
 try:
 while line_count < 100:
 # Handle output from the calling process.
-try:
-line = _retry(read_file.readline)()
-except UnicodeDecodeError:
-# installs like --test=root gpgme produce non-UTF8 logs
-line = "<line lost: output was not encoded as UTF-8>\n"
+line = _retry(read_file.readline)()

 if not line:
 return
@@ -946,6 +945,13 @@
 output_line = clean_line
 if filter_fn:
 output_line = filter_fn(clean_line)
+enc = sys.stdout.encoding
+if enc != "utf-8":
+# On Python 3.6 and 3.7-3.14 with non-{utf-8,C} locale stdout
+# may not be able to handle utf-8 output. We do an inefficient
+# dance of re-encoding with errors replaced, so stdout.write
+# does not raise.
+output_line = output_line.encode(enc, "replace").decode(enc)
 sys.stdout.write(output_line)

 # Stripped output to log file.
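The added lines above guard `sys.stdout.write` against terminals whose encoding cannot represent the build output. A small sketch of the same re-encoding step in isolation (illustrative helper, not the daemon itself):

```python
import sys


def safe_echo(line: str) -> None:
    enc = sys.stdout.encoding
    if enc != "utf-8":
        # characters the target encoding cannot represent are replaced with "?"
        line = line.encode(enc, "replace").decode(enc)
    sys.stdout.write(line)
```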
@@ -656,7 +656,7 @@ def _ensure_docstring_and_no_fixme(pkgs, error_cls):
 for pkg_name in pkgs:
 details = []
 filename = spack.repo.PATH.filename_for_package_name(pkg_name)
-with open(filename, "r") as package_file:
+with open(filename, "r", encoding="utf-8") as package_file:
 for i, line in enumerate(package_file):
 pattern = next((r for r in fixme_regexes if r.search(line)), None)
 if pattern:
@@ -809,7 +809,7 @@ def _uses_deprecated_globals(pkgs, error_cls):
 continue

 file = spack.repo.PATH.filename_for_package_name(pkg_name)
-tree = ast.parse(open(file).read())
+tree = ast.parse(open(file, "rb").read())
 visitor = DeprecatedMagicGlobals(("std_cmake_args", "std_meson_args", "std_pip_args"))
 visitor.visit(tree)
 if visitor.references_to_globals:
@@ -1009,20 +1009,6 @@ def _issues_in_depends_on_directive(pkgs, error_cls):

 for when, deps_by_name in pkg_cls.dependencies.items():
 for dep_name, dep in deps_by_name.items():
-# Check if there are nested dependencies declared. We don't want directives like:
-#
-# depends_on('foo+bar ^fee+baz')
-#
-# but we'd like to have two dependencies listed instead.
-nested_dependencies = dep.spec.dependencies()
-if nested_dependencies:
-summary = f"{pkg_name}: nested dependency declaration '{dep.spec}'"
-ndir = len(nested_dependencies) + 1
-details = [
-f"split depends_on('{dep.spec}', when='{when}') into {ndir} directives",
-f"in {filename}",
-]
-errors.append(error_cls(summary=summary, details=details))

 def check_virtual_with_variants(spec, msg):
 if not spec.virtual or not spec.variants:
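For context on the `DeprecatedMagicGlobals` visitor used above (its real definition is elsewhere in the audit module and not shown in this diff), here is a rough, assumed sketch of how such an `ast.NodeVisitor` can collect references to deprecated globals; only the constructor arguments and the `references_to_globals` attribute are taken from the hunk itself:

```python
import ast
from typing import Iterable, Set


class DeprecatedGlobalVisitor(ast.NodeVisitor):
    """Record every Name node that refers to one of the deprecated globals."""

    def __init__(self, deprecated: Iterable[str]) -> None:
        self.deprecated = set(deprecated)
        self.references_to_globals: Set[str] = set()

    def visit_Name(self, node: ast.Name) -> None:
        if node.id in self.deprecated:
            self.references_to_globals.add(node.id)
        self.generic_visit(node)


# Usage mirroring the hunk above: parse the package file as bytes, then visit it.
# tree = ast.parse(open(file, "rb").read())
# visitor = DeprecatedGlobalVisitor(("std_cmake_args", "std_meson_args", "std_pip_args"))
# visitor.visit(tree)
```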
@@ -69,10 +69,8 @@
 Digest,
 ImageReference,
 default_config,
-default_index_tag,
 default_manifest,
-default_tag,
-tag_is_spec,
+ensure_valid_tag,
 )
 from spack.oci.oci import (
 copy_missing_layers_with_retry,
@@ -83,7 +81,6 @@
 )
 from spack.package_prefs import get_package_dir_permissions, get_package_group
 from spack.relocate_text import utf8_paths_to_single_binary_regex
-from spack.spec import Spec
 from spack.stage import Stage
 from spack.util.executable import which

@@ -586,7 +583,7 @@ def buildinfo_file_name(prefix):

 def read_buildinfo_file(prefix):
 """Read buildinfo file"""
-with open(buildinfo_file_name(prefix), "r") as f:
+with open(buildinfo_file_name(prefix), "r", encoding="utf-8") as f:
 return syaml.load(f)


@@ -827,10 +824,10 @@ def _read_specs_and_push_index(
 contents = read_method(file)
 # Need full spec.json name or this gets confused with index.json.
 if file.endswith(".json.sig"):
-specfile_json = Spec.extract_json_from_clearsig(contents)
-fetched_spec = Spec.from_dict(specfile_json)
+specfile_json = spack.spec.Spec.extract_json_from_clearsig(contents)
+fetched_spec = spack.spec.Spec.from_dict(specfile_json)
 elif file.endswith(".json"):
-fetched_spec = Spec.from_json(contents)
+fetched_spec = spack.spec.Spec.from_json(contents)
 else:
 continue

@@ -840,17 +837,17 @@ def _read_specs_and_push_index(
 # Now generate the index, compute its hash, and push the two files to
 # the mirror.
 index_json_path = os.path.join(temp_dir, "index.json")
-with open(index_json_path, "w") as f:
+with open(index_json_path, "w", encoding="utf-8") as f:
 db._write_to_file(f)

 # Read the index back in and compute its hash
-with open(index_json_path) as f:
+with open(index_json_path, encoding="utf-8") as f:
 index_string = f.read()
 index_hash = compute_hash(index_string)

 # Write the hash out to a local file
 index_hash_path = os.path.join(temp_dir, "index.json.hash")
-with open(index_hash_path, "w") as f:
+with open(index_hash_path, "w", encoding="utf-8") as f:
 f.write(index_hash)

 # Push the index itself
@@ -884,7 +881,7 @@ def _specs_from_cache_aws_cli(cache_prefix):
 aws = which("aws")

 def file_read_method(file_path):
-with open(file_path) as fd:
+with open(file_path, encoding="utf-8") as fd:
 return fd.read()

 tmpspecsdir = tempfile.mkdtemp()
@@ -1029,7 +1026,7 @@ def generate_key_index(key_prefix: str, tmpdir: str) -> None:
 target = os.path.join(tmpdir, "index.json")

 index = {"keys": dict((fingerprint, {}) for fingerprint in sorted(set(fingerprints)))}
-with open(target, "w") as f:
+with open(target, "w", encoding="utf-8") as f:
 sjson.dump(index, f)

 try:
@@ -1100,7 +1097,7 @@ class ExistsInBuildcache(NamedTuple):


 class BuildcacheFiles:
-def __init__(self, spec: Spec, local: str, remote: str):
+def __init__(self, spec: spack.spec.Spec, local: str, remote: str):
 """
 Args:
 spec: The spec whose tarball and specfile are being managed.
@@ -1130,7 +1127,7 @@ def local_tarball(self) -> str:
 return os.path.join(self.local, f"{self.spec.dag_hash()}.tar.gz")


-def _exists_in_buildcache(spec: Spec, tmpdir: str, out_url: str) -> ExistsInBuildcache:
+def _exists_in_buildcache(spec: spack.spec.Spec, tmpdir: str, out_url: str) -> ExistsInBuildcache:
 """returns a tuple of bools (signed, unsigned, tarball) indicating whether specfiles/tarballs
 exist in the buildcache"""
 files = BuildcacheFiles(spec, tmpdir, out_url)
@@ -1141,7 +1138,11 @@ def _exists_in_buildcache(spec: Spec, tmpdir: str, out_url: str) -> ExistsInBuil


 def _url_upload_tarball_and_specfile(
-spec: Spec, tmpdir: str, out_url: str, exists: ExistsInBuildcache, signing_key: Optional[str]
+spec: spack.spec.Spec,
+tmpdir: str,
+out_url: str,
+exists: ExistsInBuildcache,
+signing_key: Optional[str],
 ):
 files = BuildcacheFiles(spec, tmpdir, out_url)
 tarball = files.local_tarball()
@@ -1159,7 +1160,7 @@ def _url_upload_tarball_and_specfile(
 web_util.push_to_url(tarball, files.remote_tarball(), keep_original=False)

 specfile = files.local_specfile()
-with open(specfile, "w") as f:
+with open(specfile, "w", encoding="utf-8") as f:
 # Note: when using gpg clear sign, we need to avoid long lines (19995 chars).
 # If lines are longer, they are truncated without error. Thanks GPG!
 # So, here we still add newlines, but no indent, so save on file size and
@@ -1314,7 +1315,7 @@ def make_uploader(
 )


-def _format_spec(spec: Spec) -> str:
+def _format_spec(spec: spack.spec.Spec) -> str:
 return spec.cformat("{name}{@version}{/hash:7}")


@@ -1337,7 +1338,7 @@ def _progress(self):
 return f"[{self.n:{digits}}/{self.total}] "
 return ""

-def start(self, spec: Spec, running: bool) -> None:
+def start(self, spec: spack.spec.Spec, running: bool) -> None:
 self.n += 1
 self.running = running
 self.pre = self._progress()
@@ -1356,18 +1357,18 @@ def fail(self) -> None:


 def _url_push(
-specs: List[Spec],
+specs: List[spack.spec.Spec],
 out_url: str,
 signing_key: Optional[str],
 force: bool,
 update_index: bool,
 tmpdir: str,
 executor: concurrent.futures.Executor,
-) -> Tuple[List[Spec], List[Tuple[Spec, BaseException]]]:
+) -> Tuple[List[spack.spec.Spec], List[Tuple[spack.spec.Spec, BaseException]]]:
 """Pushes to the provided build cache, and returns a list of skipped specs that were already
 present (when force=False), and a list of errors. Does not raise on error."""
-skipped: List[Spec] = []
-errors: List[Tuple[Spec, BaseException]] = []
+skipped: List[spack.spec.Spec] = []
+errors: List[Tuple[spack.spec.Spec, BaseException]] = []

 exists_futures = [
 executor.submit(_exists_in_buildcache, spec, tmpdir, out_url) for spec in specs
@@ -1440,7 +1441,7 @@ def _url_push(
 return skipped, errors


-def _oci_upload_success_msg(spec: Spec, digest: Digest, size: int, elapsed: float):
+def _oci_upload_success_msg(spec: spack.spec.Spec, digest: Digest, size: int, elapsed: float):
 elapsed = max(elapsed, 0.001)  # guard against division by zero
 return (
 f"Pushed {_format_spec(spec)}: {digest} ({elapsed:.2f}s, "
@@ -1526,7 +1527,7 @@ def _oci_put_manifest(
 ):
 architecture = _oci_archspec_to_gooarch(specs[0])

-expected_blobs: List[Spec] = [
+expected_blobs: List[spack.spec.Spec] = [
 s
 for s in traverse.traverse_nodes(specs, order="topo", deptype=("link", "run"), root=True)
 if not s.external
@@ -1570,7 +1571,7 @@ def _oci_put_manifest(

 config_file = os.path.join(tmpdir, f"{specs[0].dag_hash()}.config.json")

-with open(config_file, "w") as f:
+with open(config_file, "w", encoding="utf-8") as f:
 json.dump(config, f, separators=(",", ":"))

 config_file_checksum = Digest.from_sha256(
@@ -1640,19 +1641,33 @@ def _oci_update_base_images(
 )


+def _oci_default_tag(spec: spack.spec.Spec) -> str:
+"""Return a valid, default image tag for a spec."""
+return ensure_valid_tag(f"{spec.name}-{spec.version}-{spec.dag_hash()}.spack")
+
+
+#: Default OCI index tag
+default_index_tag = "index.spack"
+
+
+def tag_is_spec(tag: str) -> bool:
+"""Check if a tag is likely a Spec"""
+return tag.endswith(".spack") and tag != default_index_tag
+
+
 def _oci_push(
 *,
 target_image: ImageReference,
 base_image: Optional[ImageReference],
-installed_specs_with_deps: List[Spec],
+installed_specs_with_deps: List[spack.spec.Spec],
 tmpdir: str,
 executor: concurrent.futures.Executor,
 force: bool = False,
 ) -> Tuple[
-List[Spec],
+List[spack.spec.Spec],
 Dict[str, Tuple[dict, dict]],
 Dict[str, spack.oci.oci.Blob],
-List[Tuple[Spec, BaseException]],
+List[Tuple[spack.spec.Spec, BaseException]],
 ]:
 # Spec dag hash -> blob
 checksums: Dict[str, spack.oci.oci.Blob] = {}
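The `_oci_default_tag` and `tag_is_spec` helpers added above replace the `default_tag` and `default_index_tag` names that were dropped from the `spack.oci.image` imports. A short usage sketch, with the spec name, version, and hash invented purely for illustration:

```python
# Illustrative only; "zlib", "1.3.1" and the short hash are made up.
# tag = _oci_default_tag(spec)      # -> "zlib-1.3.1-abcdef7....spack", sanitized by ensure_valid_tag()
# target_image.with_tag(tag)        # image reference used when pushing the spec's blob/manifest
# tag_is_spec(tag)                  # True: ends with ".spack" and is not the reserved index tag
# tag_is_spec(default_index_tag)    # False: "index.spack" is reserved for the buildcache index
```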
@@ -1661,13 +1676,15 @@ def _oci_push(
 base_images: Dict[str, Tuple[dict, dict]] = {}

 # Specs not uploaded because they already exist
-skipped: List[Spec] = []
+skipped: List[spack.spec.Spec] = []

 if not force:
 tty.info("Checking for existing specs in the buildcache")
 blobs_to_upload = []

-tags_to_check = (target_image.with_tag(default_tag(s)) for s in installed_specs_with_deps)
+tags_to_check = (
+target_image.with_tag(_oci_default_tag(s)) for s in installed_specs_with_deps
+)
 available_blobs = executor.map(_oci_get_blob_info, tags_to_check)

 for spec, maybe_blob in zip(installed_specs_with_deps, available_blobs):
@@ -1695,8 +1712,8 @@ def _oci_push(
 executor.submit(_oci_push_pkg_blob, target_image, spec, tmpdir) for spec in blobs_to_upload
 ]

-manifests_to_upload: List[Spec] = []
-errors: List[Tuple[Spec, BaseException]] = []
+manifests_to_upload: List[spack.spec.Spec] = []
+errors: List[Tuple[spack.spec.Spec, BaseException]] = []

 # And update the spec to blob mapping for successful uploads
 for spec, blob_future in zip(blobs_to_upload, blob_futures):
@@ -1722,7 +1739,7 @@ def _oci_push(
 base_image_cache=base_images,
 )

-def extra_config(spec: Spec):
+def extra_config(spec: spack.spec.Spec):
 spec_dict = spec.to_dict(hash=ht.dag_hash)
 spec_dict["buildcache_layout_version"] = CURRENT_BUILD_CACHE_LAYOUT_VERSION
 spec_dict["binary_cache_checksum"] = {
@@ -1738,7 +1755,7 @@ def extra_config(spec: Spec):
 _oci_put_manifest,
 base_images,
 checksums,
-target_image.with_tag(default_tag(spec)),
+target_image.with_tag(_oci_default_tag(spec)),
 tmpdir,
 extra_config(spec),
 {"org.opencontainers.image.description": spec.format()},
@@ -1755,7 +1772,7 @@ def extra_config(spec: Spec):
 manifest_progress.start(spec, manifest_future.running())
 if error is None:
 manifest_progress.ok(
-f"Tagged {_format_spec(spec)} as {target_image.with_tag(default_tag(spec))}"
+f"Tagged {_format_spec(spec)} as {target_image.with_tag(_oci_default_tag(spec))}"
 )
 else:
 manifest_progress.fail()
@@ -1790,13 +1807,13 @@ def _oci_update_index(
 db = BuildCacheDatabase(db_root_dir)

 for spec_dict in spec_dicts:
-spec = Spec.from_dict(spec_dict)
+spec = spack.spec.Spec.from_dict(spec_dict)
 db.add(spec)
 db.mark(spec, "in_buildcache", True)

 # Create the index.json file
 index_json_path = os.path.join(tmpdir, "index.json")
-with open(index_json_path, "w") as f:
+with open(index_json_path, "w", encoding="utf-8") as f:
 db._write_to_file(f)

 # Create an empty config.json file
@@ -1905,7 +1922,7 @@ def _get_valid_spec_file(path: str, max_supported_layout: int) -> Tuple[Dict, in
 try:
 as_string = binary_content.decode("utf-8")
 if path.endswith(".json.sig"):
-spec_dict = Spec.extract_json_from_clearsig(as_string)
+spec_dict = spack.spec.Spec.extract_json_from_clearsig(as_string)
 else:
 spec_dict = json.loads(as_string)
 except Exception as e:
@@ -2001,7 +2018,7 @@ def fetch_url_to_mirror(url):
 if fetch_url.startswith("oci://"):
 ref = spack.oci.image.ImageReference.from_string(
 fetch_url[len("oci://") :]
-).with_tag(spack.oci.image.default_tag(spec))
+).with_tag(_oci_default_tag(spec))

 # Fetch the manifest
 try:
@@ -2245,7 +2262,8 @@ def relocate_package(spec):
 ]
 if analogs:
 # Prefer same-name analogs and prefer higher versions
-# This matches the preferences in Spec.splice, so we will find same node
+# This matches the preferences in spack.spec.Spec.splice, so we
+# will find same node
 analog = max(analogs, key=lambda a: (a.name == s.name, a.version))

 lookup_dag_hash = analog.dag_hash()
@@ -2681,10 +2699,10 @@ def try_direct_fetch(spec, mirrors=None):
 # are concrete (as they are built) so we need to mark this spec
 # concrete on read-in.
 if specfile_is_signed:
-specfile_json = Spec.extract_json_from_clearsig(specfile_contents)
-fetched_spec = Spec.from_dict(specfile_json)
+specfile_json = spack.spec.Spec.extract_json_from_clearsig(specfile_contents)
+fetched_spec = spack.spec.Spec.from_dict(specfile_json)
 else:
-fetched_spec = Spec.from_json(specfile_contents)
+fetched_spec = spack.spec.Spec.from_json(specfile_contents)
 fetched_spec._mark_concrete()

 found_specs.append({"mirror_url": mirror.fetch_url, "spec": fetched_spec})
@@ -2889,7 +2907,7 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None):
 }

 if output_file:
-with open(output_file, "w") as outf:
+with open(output_file, "w", encoding="utf-8") as outf:
 outf.write(json.dumps(rebuilds))

 return 1 if rebuilds else 0
@@ -2983,7 +3001,7 @@ def __init__(self, all_architectures):

 self.possible_specs = specs

-def __call__(self, spec: Spec, **kwargs):
+def __call__(self, spec: spack.spec.Spec, **kwargs):
 """
 Args:
 spec: The spec being searched for
@@ -3121,7 +3139,7 @@ def __init__(self, url: str, local_hash, urlopen=None) -> None:

 def conditional_fetch(self) -> FetchIndexResult:
 """Download an index from an OCI registry type mirror."""
-url_manifest = self.ref.with_tag(spack.oci.image.default_index_tag).manifest_url()
+url_manifest = self.ref.with_tag(default_index_tag).manifest_url()
 try:
 response = self.urlopen(
 urllib.request.Request(
@@ -35,7 +35,6 @@
 from llnl.util.lang import GroupedExceptionHandler

 import spack.binary_distribution
-import spack.concretize
 import spack.config
 import spack.detection
 import spack.mirrors.mirror
@@ -272,10 +271,10 @@ def try_import(self, module: str, abstract_spec_str: str) -> bool:
 bootstrapper = ClingoBootstrapConcretizer(configuration=spack.config.CONFIG)
 concrete_spec = bootstrapper.concretize()
 else:
-abstract_spec = spack.spec.Spec(
+concrete_spec = spack.spec.Spec(
 abstract_spec_str + " ^" + spec_for_current_python()
 )
-concrete_spec = spack.concretize.concretized(abstract_spec)
+concrete_spec.concretize()

 msg = "[BOOTSTRAP MODULE {0}] Try installing '{1}' from sources"
 tty.debug(msg.format(module, abstract_spec_str))
@@ -301,7 +300,7 @@ def try_search_path(self, executables: Tuple[str], abstract_spec_str: str) -> bo
 # might reduce compilation time by a fair amount
 _add_externals_if_missing()

-concrete_spec = spack.concretize.concretized(spack.spec.Spec(abstract_spec_str))
+concrete_spec = spack.spec.Spec(abstract_spec_str).concretized()
 msg = "[BOOTSTRAP] Try installing '{0}' from sources"
 tty.debug(msg.format(abstract_spec_str))
 with spack.config.override(self.mirror_scope):
@@ -182,10 +182,7 @@ def patch_config_files(self) -> bool:
 @property
 def _removed_la_files_log(self) -> str:
 """File containing the list of removed libtool archives"""
-build_dir = self.build_directory
-if not os.path.isabs(self.build_directory):
-build_dir = os.path.join(self.pkg.stage.path, build_dir)
-return os.path.join(build_dir, "removed_la_files.txt")
+return os.path.join(self.build_directory, "removed_la_files.txt")

 @property
 def archive_files(self) -> List[str]:
@@ -523,7 +520,12 @@ def configure_abs_path(self) -> str:
 @property
 def build_directory(self) -> str:
 """Override to provide another place to build the package"""
-return self.configure_directory
+# Handle the case where the configure directory is set to a non-absolute path
+# Non-absolute paths are always relative to the staging source path
+build_dir = self.configure_directory
+if not os.path.isabs(build_dir):
+build_dir = os.path.join(self.pkg.stage.source_path, build_dir)
+return build_dir

 @spack.phase_callbacks.run_before("autoreconf")
 def delete_configure_to_force_update(self) -> None:
@@ -836,7 +838,7 @@ def remove_libtool_archives(self) -> None:
 libtool_files = fs.find(str(self.pkg.prefix), "*.la", recursive=True)
 with fs.safe_remove(*libtool_files):
 fs.mkdirp(os.path.dirname(self._removed_la_files_log))
-with open(self._removed_la_files_log, mode="w") as f:
+with open(self._removed_la_files_log, mode="w", encoding="utf-8") as f:
 f.write("\n".join(libtool_files))

 def setup_build_environment(self, env):
@@ -324,7 +324,7 @@ def initconfig(self, pkg, spec, prefix):
 + self.initconfig_package_entries()
 )

-with open(self.cache_name, "w") as f:
+with open(self.cache_name, "w", encoding="utf-8") as f:
 for entry in cache_entries:
 f.write("%s\n" % entry)
 f.write("\n")
@@ -1153,7 +1153,7 @@ def _determine_license_type(self):
 # The file will have been created upon self.license_required AND
 # self.license_files having been populated, so the "if" is usually
 # true by the time the present function runs; ../hooks/licensing.py
-with open(f) as fh:
+with open(f, encoding="utf-8") as fh:
 if re.search(r"^[ \t]*[^" + self.license_comment + "\n]", fh.read(), re.MULTILINE):
 license_type = {
 "ACTIVATION_TYPE": "license_file",
@@ -1185,7 +1185,7 @@ def configure(self):
 # our configuration accordingly. We can do this because the tokens are
 # quite long and specific.

-validator_code = open("pset/check.awk", "r").read()
+validator_code = open("pset/check.awk", "r", encoding="utf-8").read()
 # Let's go a little further and distill the tokens (plus some noise).
 tokenlike_words = set(re.findall(r"[A-Z_]{4,}", validator_code))

@@ -1222,7 +1222,7 @@ def configure(self):
 config_draft.update(self._determine_license_type)

 # Write sorted *by token* so the file looks less like a hash dump.
-f = open("silent.cfg", "w")
+f = open("silent.cfg", "w", encoding="utf-8")
 for token, value in sorted(config_draft.items()):
 if token in tokenlike_words:
 f.write("%s=%s\n" % (token, value))
@@ -1273,7 +1273,7 @@ def configure_rpath(self):
 raise InstallError("Cannot find compiler command to configure rpath:\n\t" + f)

 compiler_cfg = os.path.abspath(f + ".cfg")
-with open(compiler_cfg, "w") as fh:
+with open(compiler_cfg, "w", encoding="utf-8") as fh:
 fh.write("-Xlinker -rpath={0}\n".format(compilers_lib_dir))

 @spack.phase_callbacks.run_after("install")
@@ -1297,7 +1297,7 @@ def configure_auto_dispatch(self):
 ad.append(x)

 compiler_cfg = os.path.abspath(f + ".cfg")
-with open(compiler_cfg, "a") as fh:
+with open(compiler_cfg, "a", encoding="utf-8") as fh:
 fh.write("-ax{0}\n".format(",".join(ad)))

 @spack.phase_callbacks.run_after("install")
@@ -75,7 +75,7 @@ def generate_luarocks_config(self, pkg, spec, prefix):
 table_entries.append(self._generate_tree_line(d.name, d.prefix))

 path = self._luarocks_config_path()
-with open(path, "w") as config:
+with open(path, "w", encoding="utf-8") as config:
 config.write(
 """
 deps_mode="all"
@@ -32,6 +32,9 @@ class IntelOneApiPackage(Package):
 # organization (e.g. University/Company).
 redistribute(source=False, binary=False)

+# contains precompiled binaries without rpaths
+unresolved_libraries = ["*"]
+
 for c in [
 "target=ppc64:",
 "target=ppc64le:",
(File diff suppressed because it is too large)

lib/spack/spack/ci/README.md (new file, 41 lines)
@@ -0,0 +1,41 @@
+# Spack CI generators
+
+This document describes how the ci module can be extended to provide novel
+ci generators. The module currently has only a single generator for gitlab.
+The unit-tests for the ci module define a small custom generator for testing
+purposes as well.
+
+The process of generating a pipeline involves creating a ci-enabled spack
+environment, activating it, and running `spack ci generate`, possibly with
+arguments describing things like where the output should be written.
+
+Internally pipeline generation is broken into two components: general and
+ci platform specific.
+
+## General pipeline functionality
+
+General pipeline functionality includes building a pipeline graph (really,
+a forest), pruning it in a variety of ways, and gathering attributes for all
+the generated spec build jobs from the spack configuration.
+
+All of the above functionality is defined in the `__init__.py` of the top-level
+ci module, and should be roughly the same for pipelines generated for any
+platform.
+
+## CI platform specific functionality
+
+Functionality specific to CI platforms (e.g. gitlab, gha, etc.) should be
+defined in a dedicated module. In order to define a generator for a new
+platform, there are only a few requirements:
+
+1. add a file under `ci` in which you define a generator method decorated with
+the `@generator` attribute.
+
+1. import it from `lib/spack/spack/ci/__init__.py`, so that your new generator
+is registered.
+
+1. the generator method must take as arguments PipelineDag, SpackCIConfig,
+and PipelineOptions objects, in that order.
+
+1. the generator method must produce an output file containing the
+generated pipeline.
lib/spack/spack/ci/__init__.py (new file, 1285 lines; diff suppressed because it is too large)

lib/spack/spack/ci/common.py (new file, 825 lines)
@@ -0,0 +1,825 @@
|
|||||||
|
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
||||||
|
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
||||||
|
#
|
||||||
|
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
||||||
|
import codecs
|
||||||
|
import copy
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import ssl
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
from collections import deque
|
||||||
|
from enum import Enum
|
||||||
|
from typing import Dict, Generator, List, Optional, Set, Tuple
|
||||||
|
from urllib.parse import quote, urlencode, urlparse
|
||||||
|
from urllib.request import HTTPHandler, HTTPSHandler, Request, build_opener
|
||||||
|
|
||||||
|
import llnl.util.filesystem as fs
|
||||||
|
import llnl.util.tty as tty
|
||||||
|
from llnl.util.lang import Singleton, memoized
|
||||||
|
|
||||||
|
import spack.binary_distribution as bindist
|
||||||
|
import spack.config as cfg
|
||||||
|
import spack.deptypes as dt
|
||||||
|
import spack.environment as ev
|
||||||
|
import spack.error
|
||||||
|
import spack.mirrors.mirror
|
||||||
|
import spack.schema
|
||||||
|
import spack.spec
|
||||||
|
import spack.util.spack_yaml as syaml
|
||||||
|
import spack.util.url as url_util
|
||||||
|
import spack.util.web as web_util
|
||||||
|
from spack import traverse
|
||||||
|
from spack.reporters import CDash, CDashConfiguration
|
||||||
|
from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
|
||||||
|
from spack.reporters.cdash import build_stamp as cdash_build_stamp
|
||||||
|
|
||||||
|
|
||||||
|
def _urlopen():
|
||||||
|
error_handler = web_util.SpackHTTPDefaultErrorHandler()
|
||||||
|
|
||||||
|
# One opener with HTTPS ssl enabled
|
||||||
|
with_ssl = build_opener(
|
||||||
|
HTTPHandler(), HTTPSHandler(context=web_util.ssl_create_default_context()), error_handler
|
||||||
|
)
|
||||||
|
|
||||||
|
# One opener with HTTPS ssl disabled
|
||||||
|
without_ssl = build_opener(
|
||||||
|
HTTPHandler(), HTTPSHandler(context=ssl._create_unverified_context()), error_handler
|
||||||
|
)
|
||||||
|
|
||||||
|
# And dynamically dispatch based on the config:verify_ssl.
|
||||||
|
def dispatch_open(fullurl, data=None, timeout=None, verify_ssl=True):
|
||||||
|
opener = with_ssl if verify_ssl else without_ssl
|
||||||
|
timeout = timeout or cfg.get("config:connect_timeout", 1)
|
||||||
|
return opener.open(fullurl, data, timeout)
|
||||||
|
|
||||||
|
return dispatch_open
|
||||||
|
|
||||||
|
|
||||||
|
IS_WINDOWS = sys.platform == "win32"
|
||||||
|
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
|
||||||
|
_dyn_mapping_urlopener = Singleton(_urlopen)
|
||||||
|
|
||||||
|
|
||||||
|
def copy_files_to_artifacts(src, artifacts_dir):
|
||||||
|
"""
|
||||||
|
Copy file(s) to the given artifacts directory
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
src (str): the glob-friendly path expression for the file(s) to copy
|
||||||
|
artifacts_dir (str): the destination directory
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
fs.copy(src, artifacts_dir)
|
||||||
|
except Exception as err:
|
||||||
|
msg = (
|
||||||
|
f"Unable to copy files ({src}) to artifacts {artifacts_dir} due to "
|
||||||
|
f"exception: {str(err)}"
|
||||||
|
)
|
||||||
|
tty.warn(msg)
|
||||||
|
|
||||||
|
|
||||||
|
def win_quote(quote_str: str) -> str:
|
||||||
|
if IS_WINDOWS:
|
||||||
|
quote_str = f'"{quote_str}"'
|
||||||
|
return quote_str
|
||||||
|
|
||||||
|
|
||||||
|
def _spec_matches(spec, match_string):
|
||||||
|
return spec.intersects(match_string)
|
||||||
|
|
||||||
|
|
||||||
|
def _noop(x):
|
||||||
|
return x
|
||||||
|
|
||||||
|
|
||||||
|
def unpack_script(script_section, op=_noop):
|
||||||
|
script = []
|
||||||
|
for cmd in script_section:
|
||||||
|
if isinstance(cmd, list):
|
||||||
|
for subcmd in cmd:
|
||||||
|
script.append(op(subcmd))
|
||||||
|
else:
|
||||||
|
script.append(op(cmd))
|
||||||
|
|
||||||
|
return script
|
||||||
|
|
||||||
|
|
||||||
|
def ensure_expected_target_path(path: str) -> str:
|
||||||
|
"""Returns passed paths with all Windows path separators exchanged
|
||||||
|
for posix separators
|
||||||
|
|
||||||
|
TODO (johnwparent): Refactor config + cli read/write to deal only in posix style paths
|
||||||
|
"""
|
||||||
|
if path:
|
||||||
|
return path.replace("\\", "/")
|
||||||
|
return path
|
||||||
|
|
||||||
|
|
||||||
|
def update_env_scopes(
|
||||||
|
env: ev.Environment,
|
||||||
|
cli_scopes: List[str],
|
||||||
|
output_file: str,
|
||||||
|
transform_windows_paths: bool = False,
|
||||||
|
) -> None:
|
||||||
|
"""Add any config scopes from cli_scopes which aren't already included in the
|
||||||
|
environment, by reading the yaml, adding the missing includes, and writing the
|
||||||
|
updated yaml back to the same location.
|
||||||
|
"""
|
||||||
|
with open(env.manifest_path, "r", encoding="utf-8") as env_fd:
|
||||||
|
env_yaml_root = syaml.load(env_fd)
|
||||||
|
|
||||||
|
# Add config scopes to environment
|
||||||
|
env_includes = env_yaml_root["spack"].get("include", [])
|
||||||
|
include_scopes: List[str] = []
|
||||||
|
for scope in cli_scopes:
|
||||||
|
if scope not in include_scopes and scope not in env_includes:
|
||||||
|
include_scopes.insert(0, scope)
|
||||||
|
env_includes.extend(include_scopes)
|
||||||
|
env_yaml_root["spack"]["include"] = [
|
||||||
|
ensure_expected_target_path(i) if transform_windows_paths else i for i in env_includes
|
||||||
|
]
|
||||||
|
|
||||||
|
with open(output_file, "w", encoding="utf-8") as fd:
|
||||||
|
syaml.dump_config(env_yaml_root, fd, default_flow_style=False)
|
||||||
|
|
||||||
|
|
||||||
|
def write_pipeline_manifest(specs, src_prefix, dest_prefix, output_file):
|
||||||
|
"""Write out the file describing specs that should be copied"""
|
||||||
|
buildcache_copies = {}
|
||||||
|
|
||||||
|
for release_spec in specs:
|
||||||
|
release_spec_dag_hash = release_spec.dag_hash()
|
||||||
|
# TODO: This assumes signed version of the spec
|
||||||
|
buildcache_copies[release_spec_dag_hash] = [
|
||||||
|
{
|
||||||
|
"src": url_util.join(
|
||||||
|
src_prefix,
|
||||||
|
bindist.build_cache_relative_path(),
|
||||||
|
bindist.tarball_name(release_spec, ".spec.json.sig"),
|
||||||
|
),
|
||||||
|
"dest": url_util.join(
|
||||||
|
dest_prefix,
|
||||||
|
bindist.build_cache_relative_path(),
|
||||||
|
bindist.tarball_name(release_spec, ".spec.json.sig"),
|
||||||
|
),
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"src": url_util.join(
|
||||||
|
src_prefix,
|
||||||
|
bindist.build_cache_relative_path(),
|
||||||
|
bindist.tarball_path_name(release_spec, ".spack"),
|
||||||
|
),
|
||||||
|
"dest": url_util.join(
|
||||||
|
dest_prefix,
|
||||||
|
bindist.build_cache_relative_path(),
|
||||||
|
bindist.tarball_path_name(release_spec, ".spack"),
|
||||||
|
),
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
target_dir = os.path.dirname(output_file)
|
||||||
|
|
||||||
|
if not os.path.exists(target_dir):
|
||||||
|
os.makedirs(target_dir)
|
||||||
|
|
||||||
|
with open(output_file, "w", encoding="utf-8") as fd:
|
||||||
|
fd.write(json.dumps(buildcache_copies))
|
||||||
|
|
||||||
|
|
||||||
|
class CDashHandler:
|
||||||
|
"""
|
||||||
|
Class for managing CDash data and processing.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, ci_cdash):
|
||||||
|
# start with the gitlab ci configuration
|
||||||
|
self.url = ci_cdash.get("url")
|
||||||
|
self.build_group = ci_cdash.get("build-group")
|
||||||
|
self.project = ci_cdash.get("project")
|
||||||
|
self.site = ci_cdash.get("site")
|
||||||
|
|
||||||
|
# grab the authorization token when available
|
||||||
|
self.auth_token = os.environ.get("SPACK_CDASH_AUTH_TOKEN")
|
||||||
|
if self.auth_token:
|
||||||
|
tty.verbose("Using CDash auth token from environment")
|
||||||
|
|
||||||
|
# append runner description to the site if available
|
||||||
|
runner = os.environ.get("CI_RUNNER_DESCRIPTION")
|
||||||
|
if runner:
|
||||||
|
self.site += f" ({runner})"
|
||||||
|
|
||||||
|
def args(self):
|
||||||
|
return [
|
||||||
|
"--cdash-upload-url",
|
||||||
|
win_quote(self.upload_url),
|
||||||
|
"--cdash-build",
|
||||||
|
win_quote(self.build_name()),
|
||||||
|
"--cdash-site",
|
||||||
|
win_quote(self.site),
|
||||||
|
"--cdash-buildstamp",
|
||||||
|
win_quote(self.build_stamp),
|
||||||
|
]
|
||||||
|
|
||||||
|
def build_name(self, spec: Optional[spack.spec.Spec] = None) -> Optional[str]:
|
||||||
|
"""Returns the CDash build name.
|
||||||
|
|
||||||
|
A name will be generated if the `spec` is provided,
|
||||||
|
otherwise, the value will be retrieved from the environment
|
||||||
|
through the `SPACK_CDASH_BUILD_NAME` variable.
|
||||||
|
|
||||||
|
Returns: (str) given spec's CDash build name."""
|
||||||
|
if spec:
|
||||||
|
build_name = f"{spec.name}@{spec.version}%{spec.compiler} \
|
||||||
|
hash={spec.dag_hash()} arch={spec.architecture} ({self.build_group})"
|
||||||
|
tty.debug(f"Generated CDash build name ({build_name}) from the {spec.name}")
|
||||||
|
return build_name
|
||||||
|
|
||||||
|
env_build_name = os.environ.get("SPACK_CDASH_BUILD_NAME")
|
||||||
|
tty.debug(f"Using CDash build name ({env_build_name}) from the environment")
|
||||||
|
return env_build_name
|
||||||
|
|
||||||
|
@property # type: ignore
|
||||||
|
def build_stamp(self):
|
||||||
|
"""Returns the CDash build stamp.
|
||||||
|
|
||||||
|
The one defined by SPACK_CDASH_BUILD_STAMP environment variable
|
||||||
|
is preferred due to the representation of timestamps; otherwise,
|
||||||
|
one will be built.
|
||||||
|
|
||||||
|
Returns: (str) current CDash build stamp"""
|
||||||
|
build_stamp = os.environ.get("SPACK_CDASH_BUILD_STAMP")
|
||||||
|
if build_stamp:
|
||||||
|
tty.debug(f"Using build stamp ({build_stamp}) from the environment")
|
||||||
|
return build_stamp
|
||||||
|
|
||||||
|
build_stamp = cdash_build_stamp(self.build_group, time.time())
|
||||||
|
tty.debug(f"Generated new build stamp ({build_stamp})")
|
||||||
|
return build_stamp
|
||||||
|
|
||||||
|
@property # type: ignore
|
||||||
|
@memoized
|
||||||
|
def project_enc(self):
|
||||||
|
tty.debug(f"Encoding project ({type(self.project)}): {self.project})")
|
||||||
|
encode = urlencode({"project": self.project})
|
||||||
|
index = encode.find("=") + 1
|
||||||
|
return encode[index:]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def upload_url(self):
|
||||||
|
url_format = f"{self.url}/submit.php?project={self.project_enc}"
|
||||||
|
return url_format
|
||||||
|
|
||||||
|
def copy_test_results(self, source, dest):
|
||||||
|
"""Copy test results to artifacts directory."""
|
||||||
|
reports = fs.join_path(source, "*_Test*.xml")
|
||||||
|
copy_files_to_artifacts(reports, dest)
|
||||||
|
|
||||||
|
def create_buildgroup(self, opener, headers, url, group_name, group_type):
|
||||||
|
data = {"newbuildgroup": group_name, "project": self.project, "type": group_type}
|
||||||
|
|
||||||
|
enc_data = json.dumps(data).encode("utf-8")
|
||||||
|
|
||||||
|
request = Request(url, data=enc_data, headers=headers)
|
||||||
|
|
||||||
|
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
|
||||||
|
response_code = response.getcode()
|
||||||
|
|
||||||
|
if response_code not in [200, 201]:
|
||||||
|
msg = f"Creating buildgroup failed (response code = {response_code})"
|
||||||
|
tty.warn(msg)
|
||||||
|
return None
|
||||||
|
|
||||||
|
response_text = response.read()
|
||||||
|
response_json = json.loads(response_text)
|
||||||
|
build_group_id = response_json["id"]
|
||||||
|
|
||||||
|
return build_group_id
|
||||||
|
|
||||||
|
def populate_buildgroup(self, job_names):
|
||||||
|
url = f"{self.url}/api/v1/buildgroup.php"
|
||||||
|
|
||||||
|
headers = {
|
||||||
|
"Authorization": f"Bearer {self.auth_token}",
|
||||||
|
"Content-Type": "application/json",
|
||||||
|
}
|
||||||
|
|
||||||
|
opener = build_opener(HTTPHandler)
|
||||||
|
|
||||||
|
parent_group_id = self.create_buildgroup(opener, headers, url, self.build_group, "Daily")
|
||||||
|
group_id = self.create_buildgroup(
|
||||||
|
opener, headers, url, f"Latest {self.build_group}", "Latest"
|
||||||
|
)
|
||||||
|
|
||||||
|
if not parent_group_id or not group_id:
|
||||||
|
msg = f"Failed to create or retrieve buildgroups for {self.build_group}"
|
||||||
|
tty.warn(msg)
|
||||||
|
return
|
||||||
|
|
||||||
|
data = {
|
||||||
|
"dynamiclist": [
|
||||||
|
{"match": name, "parentgroupid": parent_group_id, "site": self.site}
|
||||||
|
for name in job_names
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
enc_data = json.dumps(data).encode("utf-8")
|
||||||
|
|
||||||
|
request = Request(url, data=enc_data, headers=headers)
|
||||||
|
request.get_method = lambda: "PUT"
|
||||||
|
|
||||||
|
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
|
||||||
|
response_code = response.getcode()
|
||||||
|
|
||||||
|
if response_code != 200:
|
||||||
|
msg = f"Error response code ({response_code}) in populate_buildgroup"
|
||||||
|
tty.warn(msg)
|
||||||
|
|
||||||
|
def report_skipped(self, spec: spack.spec.Spec, report_dir: str, reason: Optional[str]):
|
||||||
|
"""Explicitly report skipping testing of a spec (e.g., it's CI
|
||||||
|
configuration identifies it as known to have broken tests or
|
||||||
|
the CI installation failed).
|
||||||
|
|
||||||
|
Args:
|
||||||
|
spec: spec being tested
|
||||||
|
report_dir: directory where the report will be written
|
||||||
|
reason: reason the test is being skipped
|
||||||
|
"""
|
||||||
|
configuration = CDashConfiguration(
|
||||||
|
upload_url=self.upload_url,
|
||||||
|
packages=[spec.name],
|
||||||
|
build=self.build_name(),
|
||||||
|
site=self.site,
|
||||||
|
buildstamp=self.build_stamp,
|
||||||
|
track=None,
|
||||||
|
)
|
||||||
|
reporter = CDash(configuration=configuration)
|
||||||
|
reporter.test_skipped_report(report_dir, spec, reason)
|
||||||
|
|
||||||
|
|
||||||
|
class PipelineType(Enum):
|
||||||
|
COPY_ONLY = 1
|
||||||
|
spack_copy_only = 1
|
||||||
|
PROTECTED_BRANCH = 2
|
||||||
|
spack_protected_branch = 2
|
||||||
|
PULL_REQUEST = 3
|
||||||
|
spack_pull_request = 3
|
||||||
|
|
||||||
|
|
||||||
|
class PipelineOptions:
    """A container for all pipeline options that can be specified (whether
    via cli, config/yaml, or environment variables)"""

    def __init__(
        self,
        env: ev.Environment,
        buildcache_destination: spack.mirrors.mirror.Mirror,
        artifacts_root: str = "jobs_scratch_dir",
        print_summary: bool = True,
        output_file: Optional[str] = None,
        check_index_only: bool = False,
        broken_specs_url: Optional[str] = None,
        rebuild_index: bool = True,
        untouched_pruning_dependent_depth: Optional[int] = None,
        prune_untouched: bool = False,
        prune_up_to_date: bool = True,
        prune_external: bool = True,
        stack_name: Optional[str] = None,
        pipeline_type: Optional[PipelineType] = None,
        require_signing: bool = False,
        cdash_handler: Optional["CDashHandler"] = None,
    ):
        """
        Args:
            env: Active spack environment
            buildcache_destination: The mirror where built binaries should be pushed
            artifacts_root: Path to location where artifacts should be stored
            print_summary: Print a summary of the scheduled pipeline
            output_file: Path where output file should be written
            check_index_only: Only fetch the index or fetch all spec files
            broken_specs_url: URL where broken specs (on develop) should be reported
            rebuild_index: Generate a job to rebuild mirror index after rebuilds
            untouched_pruning_dependent_depth: How many parents to traverse from changed pkg specs
            prune_untouched: Prune jobs for specs that were unchanged in git history
            prune_up_to_date: Prune specs from pipeline if binary exists on the mirror
            prune_external: Prune specs from pipeline if they are external
            stack_name: Name of spack stack
            pipeline_type: Type of pipeline running (optional)
            require_signing: Require buildcache to be signed (fail w/out signing key)
            cdash_handler: Object for communicating build information with CDash
        """
        self.env = env
        self.buildcache_destination = buildcache_destination
        self.artifacts_root = artifacts_root
        self.print_summary = print_summary
        self.output_file = output_file
        self.check_index_only = check_index_only
        self.broken_specs_url = broken_specs_url
        self.rebuild_index = rebuild_index
        self.untouched_pruning_dependent_depth = untouched_pruning_dependent_depth
        self.prune_untouched = prune_untouched
        self.prune_up_to_date = prune_up_to_date
        self.prune_external = prune_external
        self.stack_name = stack_name
        self.pipeline_type = pipeline_type
        self.require_signing = require_signing
        self.cdash_handler = cdash_handler

class PipelineNode:
    spec: spack.spec.Spec
    parents: Set[str]
    children: Set[str]

    def __init__(self, spec: spack.spec.Spec):
        self.spec = spec
        self.parents = set()
        self.children = set()

    @property
    def key(self):
        """Return key of the stored spec"""
        return PipelineDag.key(self.spec)

class PipelineDag:
    """Turn a list of specs into a simple directed graph, that doesn't keep track
    of edge types."""

    @classmethod
    def key(cls, spec: spack.spec.Spec) -> str:
        return spec.dag_hash()

    def __init__(self, specs: List[spack.spec.Spec]) -> None:
        # Build dictionary of nodes
        self.nodes: Dict[str, PipelineNode] = {
            PipelineDag.key(s): PipelineNode(s)
            for s in traverse.traverse_nodes(specs, deptype=dt.ALL_TYPES, root=True)
        }

        # Create edges
        for edge in traverse.traverse_edges(
            specs, deptype=dt.ALL_TYPES, root=False, cover="edges"
        ):
            parent_key = PipelineDag.key(edge.parent)
            child_key = PipelineDag.key(edge.spec)

            self.nodes[parent_key].children.add(child_key)
            self.nodes[child_key].parents.add(parent_key)

    def prune(self, node_key: str):
        """Remove a node from the graph, and reconnect its parents and children"""
        node = self.nodes[node_key]
        for parent in node.parents:
            self.nodes[parent].children.remove(node_key)
            self.nodes[parent].children |= node.children
        for child in node.children:
            self.nodes[child].parents.remove(node_key)
            self.nodes[child].parents |= node.parents
        del self.nodes[node_key]

    def traverse_nodes(
        self, direction: str = "children"
    ) -> Generator[Tuple[int, PipelineNode], None, None]:
        """Yields (depth, node) from the pipeline graph. Traversal is topologically
        ordered from the roots if ``direction`` is ``children``, or from the leaves
        if ``direction`` is ``parents``. The yielded depth is the length of the
        longest path from the starting point to the yielded node."""
        if direction == "children":
            get_in_edges = lambda node: node.parents
            get_out_edges = lambda node: node.children
        else:
            get_in_edges = lambda node: node.children
            get_out_edges = lambda node: node.parents

        sort_key = lambda k: self.nodes[k].spec.name

        out_edges = {k: sorted(get_out_edges(n), key=sort_key) for k, n in self.nodes.items()}
        num_in_edges = {k: len(get_in_edges(n)) for k, n in self.nodes.items()}

        # Populate a queue with all the nodes that have no incoming edges
        nodes = deque(
            sorted(
                [(0, key) for key in self.nodes.keys() if num_in_edges[key] == 0],
                key=lambda item: item[1],
            )
        )

        while nodes:
            # Remove the next node, n, from the queue and yield it
            depth, n_key = nodes.pop()
            yield (depth, self.nodes[n_key])

            # Remove an in-edge from every node, m, pointed to by an
            # out-edge from n. If any of those nodes are left with
            # 0 remaining in-edges, add them to the queue.
            for m in out_edges[n_key]:
                num_in_edges[m] -= 1
                if num_in_edges[m] == 0:
                    nodes.appendleft((depth + 1, m))

    def get_dependencies(self, node: PipelineNode) -> List[PipelineNode]:
        """Returns a list of nodes corresponding to the direct dependencies
        of the given node."""
        return [self.nodes[k] for k in node.children]

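The traversal above is a Kahn-style topological walk that also tracks each node's depth, i.e. the longest path from a starting node. A minimal, self-contained sketch of the same idea over a plain adjacency dict (illustrative only; names like topo_with_depth are not part of Spack):

from collections import deque

def topo_with_depth(out_edges):
    """Yield (depth, node) in topological order; depth is the longest path from any root."""
    num_in = {n: 0 for n in out_edges}
    for children in out_edges.values():
        for c in children:
            num_in[c] += 1
    queue = deque((0, n) for n in out_edges if num_in[n] == 0)
    while queue:
        depth, n = queue.popleft()
        yield depth, n
        for m in out_edges[n]:
            num_in[m] -= 1
            if num_in[m] == 0:
                queue.append((depth + 1, m))

# "app" depends on "lib", which depends on "zlib".
print(list(topo_with_depth({"app": ["lib"], "lib": ["zlib"], "zlib": []})))
# [(0, 'app'), (1, 'lib'), (2, 'zlib')]
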
class SpackCIConfig:
    """Spack CI object used to generate intermediate representation
    used by the CI generator(s).
    """

    def __init__(self, ci_config):
        """Given the information from the ci section of the config
        and the staged jobs, set up meta data needed for generating Spack
        CI IR.
        """

        self.ci_config = ci_config
        self.named_jobs = ["any", "build", "copy", "cleanup", "noop", "reindex", "signing"]

        self.ir = {
            "jobs": {},
            "rebuild-index": self.ci_config.get("rebuild-index", True),
            "broken-specs-url": self.ci_config.get("broken-specs-url", None),
            "broken-tests-packages": self.ci_config.get("broken-tests-packages", []),
            "target": self.ci_config.get("target", "gitlab"),
        }
        jobs = self.ir["jobs"]

        for name in self.named_jobs:
            # Skip the special named jobs
            if name not in ["any", "build"]:
                jobs[name] = self.__init_job("")

    def __init_job(self, release_spec):
        """Initialize job object"""
        job_object = {"spec": release_spec, "attributes": {}}
        if release_spec:
            job_vars = job_object["attributes"].setdefault("variables", {})
            job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec.dag_hash()
            job_vars["SPACK_JOB_SPEC_PKG_NAME"] = release_spec.name
            job_vars["SPACK_JOB_SPEC_PKG_VERSION"] = release_spec.format("{version}")
            job_vars["SPACK_JOB_SPEC_COMPILER_NAME"] = release_spec.format("{compiler.name}")
            job_vars["SPACK_JOB_SPEC_COMPILER_VERSION"] = release_spec.format("{compiler.version}")
            job_vars["SPACK_JOB_SPEC_ARCH"] = release_spec.format("{architecture}")
            job_vars["SPACK_JOB_SPEC_VARIANTS"] = release_spec.format("{variants}")

        return job_object

    def __is_named(self, section):
        """Check if a pipeline-gen configuration section is for a named job,
        and if so return the name, otherwise return None.
        """
        for _name in self.named_jobs:
            keys = [f"{_name}-job", f"{_name}-job-remove"]
            if any([key for key in keys if key in section]):
                return _name

        return None

    @staticmethod
    def __job_name(name, suffix=""):
        """Compute the name of a named job with appropriate suffix.
        Valid suffixes are either '-remove' or empty string or None
        """
        assert isinstance(name, str)

        jname = name
        if suffix:
            jname = f"{name}-job{suffix}"
        else:
            jname = f"{name}-job"

        return jname

    def __apply_submapping(self, dest, spec, section):
        """Apply submapping section to the IR dict"""
        matched = False
        only_first = section.get("match_behavior", "first") == "first"

        for match_attrs in reversed(section["submapping"]):
            attrs = cfg.InternalConfigScope._process_dict_keyname_overrides(match_attrs)
            for match_string in match_attrs["match"]:
                if _spec_matches(spec, match_string):
                    matched = True
                    if "build-job-remove" in match_attrs:
                        spack.config.remove_yaml(dest, attrs["build-job-remove"])
                    if "build-job" in match_attrs:
                        spack.schema.merge_yaml(dest, attrs["build-job"])
                    break
            if matched and only_first:
                break

        return dest

    # Create jobs for all the pipeline specs
    def init_pipeline_jobs(self, pipeline: PipelineDag):
        for _, node in pipeline.traverse_nodes():
            dag_hash = node.spec.dag_hash()
            self.ir["jobs"][dag_hash] = self.__init_job(node.spec)

    # Generate IR from the configs
    def generate_ir(self):
        """Generate the IR from the Spack CI configurations."""

        jobs = self.ir["jobs"]

        # Implicit job defaults
        defaults = [
            {
                "build-job": {
                    "script": [
                        "cd {env_dir}",
                        "spack env activate --without-view .",
                        "spack ci rebuild",
                    ]
                }
            },
            {"noop-job": {"script": ['echo "All specs already up to date, nothing to rebuild."']}},
        ]

        # Job overrides
        overrides = [
            # Reindex script
            {
                "reindex-job": {
                    "script:": ["spack buildcache update-index --keys {index_target_mirror}"]
                }
            },
            # Cleanup script
            {
                "cleanup-job": {
                    "script:": ["spack -d mirror destroy {mirror_prefix}/$CI_PIPELINE_ID"]
                }
            },
            # Add signing job tags
            {"signing-job": {"tags": ["aws", "protected", "notary"]}},
            # Remove reserved tags
            {"any-job-remove": {"tags": SPACK_RESERVED_TAGS}},
        ]

        pipeline_gen = overrides + self.ci_config.get("pipeline-gen", []) + defaults

        for section in reversed(pipeline_gen):
            name = self.__is_named(section)
            has_submapping = "submapping" in section
            has_dynmapping = "dynamic-mapping" in section
            section = cfg.InternalConfigScope._process_dict_keyname_overrides(section)

            if name:
                remove_job_name = self.__job_name(name, suffix="-remove")
                merge_job_name = self.__job_name(name)
                do_remove = remove_job_name in section
                do_merge = merge_job_name in section

                def _apply_section(dest, src):
                    if do_remove:
                        dest = spack.config.remove_yaml(dest, src[remove_job_name])
                    if do_merge:
                        dest = copy.copy(spack.schema.merge_yaml(dest, src[merge_job_name]))

                if name == "build":
                    # Apply attributes to all build jobs
                    for _, job in jobs.items():
                        if job["spec"]:
                            _apply_section(job["attributes"], section)
                elif name == "any":
                    # Apply section attributes to all jobs
                    for _, job in jobs.items():
                        _apply_section(job["attributes"], section)
                else:
                    # Create a signing job if there is script and the job hasn't
                    # been initialized yet
                    if name == "signing" and name not in jobs:
                        if "signing-job" in section:
                            if "script" not in section["signing-job"]:
                                continue
                            else:
                                jobs[name] = self.__init_job("")
                    # Apply attributes to named job
                    _apply_section(jobs[name]["attributes"], section)

            elif has_submapping:
                # Apply section jobs with specs to match
                for _, job in jobs.items():
                    if job["spec"]:
                        job["attributes"] = self.__apply_submapping(
                            job["attributes"], job["spec"], section
                        )
            elif has_dynmapping:
                mapping = section["dynamic-mapping"]

                dynmap_name = mapping.get("name")

                # Check if this section should be skipped
                dynmap_skip = os.environ.get("SPACK_CI_SKIP_DYNAMIC_MAPPING")
                if dynmap_name and dynmap_skip:
                    if re.match(dynmap_skip, dynmap_name):
                        continue

                # Get the endpoint
                endpoint = mapping["endpoint"]
                endpoint_url = urlparse(endpoint)

                # Configure the request header
                header = {"User-Agent": web_util.SPACK_USER_AGENT}
                header.update(mapping.get("header", {}))

                # Expand header environment variables
                # ie. if tokens are passed
                for value in header.values():
                    value = os.path.expandvars(value)

                verify_ssl = mapping.get("verify_ssl", spack.config.get("config:verify_ssl", True))
                timeout = mapping.get("timeout", spack.config.get("config:connect_timeout", 1))

                required = mapping.get("require", [])
                allowed = mapping.get("allow", [])
                ignored = mapping.get("ignore", [])

                # required keys are implicitly allowed
                allowed = sorted(set(allowed + required))
                ignored = sorted(set(ignored))
                required = sorted(set(required))

                # Make sure required things are not also ignored
                assert not any([ikey in required for ikey in ignored])

                def job_query(job):
                    job_vars = job["attributes"]["variables"]
                    query = (
                        "{SPACK_JOB_SPEC_PKG_NAME}@{SPACK_JOB_SPEC_PKG_VERSION}"
                        # The preceding spaces are required (ref. https://github.com/spack/spack-gantry/blob/develop/docs/api.md#allocation)
                        " {SPACK_JOB_SPEC_VARIANTS}"
                        " arch={SPACK_JOB_SPEC_ARCH}"
                        "%{SPACK_JOB_SPEC_COMPILER_NAME}@{SPACK_JOB_SPEC_COMPILER_VERSION}"
                    ).format_map(job_vars)
                    return f"spec={quote(query)}"

                for job in jobs.values():
                    if not job["spec"]:
                        continue

                    # Create request for this job
                    query = job_query(job)
                    request = Request(
                        endpoint_url._replace(query=query).geturl(), headers=header, method="GET"
                    )
                    try:
                        response = _dyn_mapping_urlopener(
                            request, verify_ssl=verify_ssl, timeout=timeout
                        )
                    except Exception as e:
                        # For now just ignore any errors from dynamic mapping and continue
                        # This is still experimental, and failures should not stop CI
                        # from running normally
                        tty.warn(f"Failed to fetch dynamic mapping for query:\n\t{query}")
                        tty.warn(f"{e}")
                        continue

                    config = json.load(codecs.getreader("utf-8")(response))

                    # Strip ignore keys
                    if ignored:
                        for key in ignored:
                            if key in config:
                                config.pop(key)

                    # Only keep allowed keys
                    clean_config = {}
                    if allowed:
                        for key in allowed:
                            if key in config:
                                clean_config[key] = config[key]
                    else:
                        clean_config = config

                    # Verify all of the required keys are present
                    if required:
                        missing_keys = []
                        for key in required:
                            if key not in clean_config.keys():
                                missing_keys.append(key)

                        if missing_keys:
                            tty.warn(f"Response missing required keys: {missing_keys}")

                    if clean_config:
                        job["attributes"] = spack.schema.merge_yaml(
                            job.get("attributes", {}), clean_config
                        )

        for _, job in jobs.items():
            if job["spec"]:
                job["spec"] = job["spec"].name

        return self.ir

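Because pipeline_gen is assembled as overrides + user pipeline-gen config + defaults and then walked in reversed() order, defaults are applied first and overrides last, so overrides take precedence on conflicting keys. A small illustration with plain dict updates standing in for merge_yaml (an assumption for brevity, since merge_yaml performs a deeper merge):

defaults = [{"build-job": {"tags": ["default"], "image": "base"}}]
user_config = [{"build-job": {"tags": ["x86_64_v3"]}}]
overrides = [{"build-job": {"image": "pinned"}}]

attributes = {}
for section in reversed(overrides + user_config + defaults):
    attributes.update(section["build-job"])  # stand-in for merge_yaml

print(attributes)  # {'tags': ['x86_64_v3'], 'image': 'pinned'}
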
class SpackCIError(spack.error.SpackError):
    def __init__(self, msg):
        super().__init__(msg)

lib/spack/spack/ci/generator_registry.py (new file)
@@ -0,0 +1,36 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
# Holds all known formatters
"""Generators that support writing out pipelines for various CI platforms,
using a common pipeline graph definition.
"""
import spack.error

_generators = {}


def generator(name):
    """Decorator to register a pipeline generator method.
    A generator method should take PipelineDag, SpackCIConfig, and
    PipelineOptions arguments, and should produce a pipeline file.
    """

    def _decorator(generate_method):
        _generators[name] = generate_method
        return generate_method

    return _decorator


def get_generator(name):
    try:
        return _generators[name]
    except KeyError:
        raise UnknownGeneratorException(name)


class UnknownGeneratorException(spack.error.SpackError):
    def __init__(self, generator_name):
        super().__init__(f"No registered generator for {generator_name}")
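A hedged usage sketch of the registry above: any function decorated with @generator("<name>") can later be looked up with get_generator. The generator name "printer" and its body are hypothetical, and the snippet assumes it runs with Spack's lib/spack directory importable:

from spack.ci.generator_registry import generator, get_generator

@generator("printer")  # hypothetical generator name
def generate_printer_yaml(pipeline, spack_ci, options):
    # Toy generator: just print job names instead of writing a real CI file.
    for _, node in pipeline.traverse_nodes(direction="parents"):
        print(node.spec.name)

assert get_generator("printer") is generate_printer_yaml
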
lib/spack/spack/ci/gitlab.py (new file)
@@ -0,0 +1,416 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import copy
import os
import shutil
from typing import List, Optional

import ruamel.yaml

import llnl.util.tty as tty

import spack
import spack.binary_distribution as bindist
import spack.config as cfg
import spack.mirrors.mirror
import spack.schema
import spack.spec
import spack.util.spack_yaml as syaml

from .common import (
    SPACK_RESERVED_TAGS,
    PipelineDag,
    PipelineOptions,
    PipelineType,
    SpackCIConfig,
    SpackCIError,
    ensure_expected_target_path,
    unpack_script,
    update_env_scopes,
    write_pipeline_manifest,
)
from .generator_registry import generator

# See https://docs.gitlab.com/ee/ci/yaml/#retry for descriptions of conditions
JOB_RETRY_CONDITIONS = [
    # "always",
    "unknown_failure",
    "script_failure",
    "api_failure",
    "stuck_or_timeout_failure",
    "runner_system_failure",
    "runner_unsupported",
    "stale_schedule",
    # "job_execution_timeout",
    "archived_failure",
    "unmet_prerequisites",
    "scheduler_failure",
    "data_integrity_failure",
]
JOB_NAME_FORMAT = "{name}{@version} {/hash}"


def _remove_reserved_tags(tags):
    """Convenience function to strip reserved tags from jobs"""
    return [tag for tag in tags if tag not in SPACK_RESERVED_TAGS]


def get_job_name(spec: spack.spec.Spec, build_group: Optional[str] = None) -> str:
    """Given a spec and possibly a build group, return the job name. If the
    resulting name is longer than 255 characters, it will be truncated.

    Arguments:
        spec: Spec job will build
        build_group: Name of build group this job belongs to (a CDash notion)

    Returns: The job name
    """
    job_name = spec.format(JOB_NAME_FORMAT)

    if build_group:
        job_name = f"{job_name} {build_group}"

    return job_name[:255]


def maybe_generate_manifest(pipeline: PipelineDag, options: PipelineOptions, manifest_path):
    # TODO: Consider including only hashes of rebuilt specs in the manifest,
    # instead of full source and destination urls. Also, consider renaming
    # the variable that controls whether or not to write the manifest from
    # "SPACK_COPY_BUILDCACHE" to "SPACK_WRITE_PIPELINE_MANIFEST" or similar.
    spack_buildcache_copy = os.environ.get("SPACK_COPY_BUILDCACHE", None)
    if spack_buildcache_copy:
        buildcache_copy_src_prefix = options.buildcache_destination.fetch_url
        buildcache_copy_dest_prefix = spack_buildcache_copy

        if options.pipeline_type == PipelineType.COPY_ONLY:
            manifest_specs = [s for s in options.env.all_specs() if not s.external]
        else:
            manifest_specs = [n.spec for _, n in pipeline.traverse_nodes(direction="children")]

        write_pipeline_manifest(
            manifest_specs, buildcache_copy_src_prefix, buildcache_copy_dest_prefix, manifest_path
        )

@generator("gitlab")
def generate_gitlab_yaml(pipeline: PipelineDag, spack_ci: SpackCIConfig, options: PipelineOptions):
    """Given a pipeline graph, job attributes, and pipeline options,
    write a pipeline that can be consumed by GitLab to the given output file.

    Arguments:
        pipeline: An already pruned graph of jobs representing all the specs to build
        spack_ci: An object containing the configured attributes of all jobs in the pipeline
        options: An object containing all the pipeline options gathered from yaml, env, etc...
    """
    ci_project_dir = os.environ.get("CI_PROJECT_DIR") or os.getcwd()
    generate_job_name = os.environ.get("CI_JOB_NAME", "job-does-not-exist")
    generate_pipeline_id = os.environ.get("CI_PIPELINE_ID", "pipeline-does-not-exist")
    artifacts_root = options.artifacts_root
    if artifacts_root.startswith(ci_project_dir):
        artifacts_root = os.path.relpath(artifacts_root, ci_project_dir)
    pipeline_artifacts_dir = os.path.join(ci_project_dir, artifacts_root)
    output_file = options.output_file

    if not output_file:
        output_file = os.path.abspath(".gitlab-ci.yml")
    else:
        output_file_path = os.path.abspath(output_file)
        gen_ci_dir = os.path.dirname(output_file_path)
        if not os.path.exists(gen_ci_dir):
            os.makedirs(gen_ci_dir)

    spack_ci_ir = spack_ci.generate_ir()

    concrete_env_dir = os.path.join(pipeline_artifacts_dir, "concrete_environment")

    # Now that we've added the mirrors we know about, they should be properly
    # reflected in the environment manifest file, so copy that into the
    # concrete environment directory, along with the spack.lock file.
    if not os.path.exists(concrete_env_dir):
        os.makedirs(concrete_env_dir)
    shutil.copyfile(options.env.manifest_path, os.path.join(concrete_env_dir, "spack.yaml"))
    shutil.copyfile(options.env.lock_path, os.path.join(concrete_env_dir, "spack.lock"))

    update_env_scopes(
        options.env,
        [
            os.path.relpath(s.path, concrete_env_dir)
            for s in cfg.scopes().values()
            if not s.writable
            and isinstance(s, (cfg.DirectoryConfigScope))
            and os.path.exists(s.path)
        ],
        os.path.join(concrete_env_dir, "spack.yaml"),
        # Here transforming windows paths is only required in the special case
        # of copy_only_pipelines, a unique scenario where the generate job and
        # child pipelines are run on different platforms. To make this compatible
        # w/ Windows, we cannot write Windows style path separators that will be
        # consumed by the Posix copy job runner.
        #
        # TODO (johnwparent): Refactor config + cli read/write to deal only in
        # posix style paths
        transform_windows_paths=(options.pipeline_type == PipelineType.COPY_ONLY),
    )

    job_log_dir = os.path.join(pipeline_artifacts_dir, "logs")
    job_repro_dir = os.path.join(pipeline_artifacts_dir, "reproduction")
    job_test_dir = os.path.join(pipeline_artifacts_dir, "tests")
    user_artifacts_dir = os.path.join(pipeline_artifacts_dir, "user_data")

    # We communicate relative paths to the downstream jobs to avoid issues in
    # situations where the CI_PROJECT_DIR varies between the pipeline
    # generation job and the rebuild jobs. This can happen when gitlab
    # checks out the project into a runner-specific directory, for example,
    # and different runners are picked for generate and rebuild jobs.

    rel_concrete_env_dir = os.path.relpath(concrete_env_dir, ci_project_dir)
    rel_job_log_dir = os.path.relpath(job_log_dir, ci_project_dir)
    rel_job_repro_dir = os.path.relpath(job_repro_dir, ci_project_dir)
    rel_job_test_dir = os.path.relpath(job_test_dir, ci_project_dir)
    rel_user_artifacts_dir = os.path.relpath(user_artifacts_dir, ci_project_dir)

    def main_script_replacements(cmd):
        return cmd.replace("{env_dir}", rel_concrete_env_dir)

    output_object = {}
    job_id = 0
    stage_id = 0
    stages: List[List] = []
    stage_names = []

    max_length_needs = 0
    max_needs_job = ""

    if not options.pipeline_type == PipelineType.COPY_ONLY:
        for level, node in pipeline.traverse_nodes(direction="parents"):
            stage_id = level
            if len(stages) == stage_id:
                stages.append([])
            stages[stage_id].append(node.spec)
            stage_name = f"stage-{level}"

            if stage_name not in stage_names:
                stage_names.append(stage_name)

            release_spec = node.spec
            release_spec_dag_hash = release_spec.dag_hash()

            job_object = spack_ci_ir["jobs"][release_spec_dag_hash]["attributes"]

            if not job_object:
                tty.warn(f"No match found for {release_spec}, skipping it")
                continue

            if options.pipeline_type is not None:
                # For spack pipelines "public" and "protected" are reserved tags
                job_object["tags"] = _remove_reserved_tags(job_object.get("tags", []))
                if options.pipeline_type == PipelineType.PROTECTED_BRANCH:
                    job_object["tags"].extend(["protected"])
                elif options.pipeline_type == PipelineType.PULL_REQUEST:
                    job_object["tags"].extend(["public"])

            if "script" not in job_object:
                raise AttributeError

            job_object["script"] = unpack_script(job_object["script"], op=main_script_replacements)

            if "before_script" in job_object:
                job_object["before_script"] = unpack_script(job_object["before_script"])

            if "after_script" in job_object:
                job_object["after_script"] = unpack_script(job_object["after_script"])

            build_group = options.cdash_handler.build_group if options.cdash_handler else None
            job_name = get_job_name(release_spec, build_group)

            dep_nodes = pipeline.get_dependencies(node)
            job_object["needs"] = [
                {"job": get_job_name(dep_node.spec, build_group), "artifacts": False}
                for dep_node in dep_nodes
            ]

            job_object["needs"].append(
                {"job": generate_job_name, "pipeline": f"{generate_pipeline_id}"}
            )

            job_vars = job_object["variables"]

            # Let downstream jobs know whether the spec needed rebuilding, regardless
            # whether DAG pruning was enabled or not.
            already_built = bindist.get_mirrors_for_spec(spec=release_spec, index_only=True)
            job_vars["SPACK_SPEC_NEEDS_REBUILD"] = "False" if already_built else "True"

            if options.cdash_handler:
                build_name = options.cdash_handler.build_name(release_spec)
                job_vars["SPACK_CDASH_BUILD_NAME"] = build_name
                build_stamp = options.cdash_handler.build_stamp
                job_vars["SPACK_CDASH_BUILD_STAMP"] = build_stamp

            job_object["artifacts"] = spack.schema.merge_yaml(
                job_object.get("artifacts", {}),
                {
                    "when": "always",
                    "paths": [
                        rel_job_log_dir,
                        rel_job_repro_dir,
                        rel_job_test_dir,
                        rel_user_artifacts_dir,
                    ],
                },
            )

            job_object["stage"] = stage_name
            job_object["retry"] = {"max": 2, "when": JOB_RETRY_CONDITIONS}
            job_object["interruptible"] = True

            length_needs = len(job_object["needs"])
            if length_needs > max_length_needs:
                max_length_needs = length_needs
                max_needs_job = job_name

            output_object[job_name] = job_object
            job_id += 1

    tty.debug(f"{job_id} build jobs generated in {stage_id} stages")

    if job_id > 0:
        tty.debug(f"The max_needs_job is {max_needs_job}, with {max_length_needs} needs")

    service_job_retries = {
        "max": 2,
        "when": ["runner_system_failure", "stuck_or_timeout_failure", "script_failure"],
    }

    # In some cases, pipeline generation should write a manifest. Currently
    # the only purpose is to specify a list of sources and destinations for
    # everything that should be copied.
    distinguish_stack = options.stack_name if options.stack_name else "rebuilt"
    manifest_path = os.path.join(
        pipeline_artifacts_dir, "specs_to_copy", f"copy_{distinguish_stack}_specs.json"
    )
    maybe_generate_manifest(pipeline, options, manifest_path)

    if options.pipeline_type == PipelineType.COPY_ONLY:
        stage_names.append("copy")
        sync_job = copy.deepcopy(spack_ci_ir["jobs"]["copy"]["attributes"])
        sync_job["stage"] = "copy"
        sync_job["needs"] = [{"job": generate_job_name, "pipeline": f"{generate_pipeline_id}"}]

        if "variables" not in sync_job:
            sync_job["variables"] = {}

        sync_job["variables"][
            "SPACK_COPY_ONLY_DESTINATION"
        ] = options.buildcache_destination.fetch_url

        pipeline_mirrors = spack.mirrors.mirror.MirrorCollection(binary=True)
        if "buildcache-source" not in pipeline_mirrors:
            raise SpackCIError("Copy-only pipelines require a mirror named 'buildcache-source'")

        buildcache_source = pipeline_mirrors["buildcache-source"].fetch_url
        sync_job["variables"]["SPACK_BUILDCACHE_SOURCE"] = buildcache_source
        sync_job["dependencies"] = []

        output_object["copy"] = sync_job
        job_id += 1

    if job_id > 0:
        if (
            "script" in spack_ci_ir["jobs"]["signing"]["attributes"]
            and options.pipeline_type == PipelineType.PROTECTED_BRANCH
        ):
            # External signing: generate a job to check and sign binary pkgs
            stage_names.append("stage-sign-pkgs")
            signing_job = spack_ci_ir["jobs"]["signing"]["attributes"]

            signing_job["script"] = unpack_script(signing_job["script"])

            signing_job["stage"] = "stage-sign-pkgs"
            signing_job["when"] = "always"
            signing_job["retry"] = {"max": 2, "when": ["always"]}
            signing_job["interruptible"] = True
            if "variables" not in signing_job:
                signing_job["variables"] = {}
            signing_job["variables"][
                "SPACK_BUILDCACHE_DESTINATION"
            ] = options.buildcache_destination.push_url
            signing_job["dependencies"] = []

            output_object["sign-pkgs"] = signing_job

        if options.rebuild_index:
            # Add a final job to regenerate the index
            stage_names.append("stage-rebuild-index")
            final_job = spack_ci_ir["jobs"]["reindex"]["attributes"]

            final_job["stage"] = "stage-rebuild-index"
            target_mirror = options.buildcache_destination.push_url
            final_job["script"] = unpack_script(
                final_job["script"],
                op=lambda cmd: cmd.replace("{index_target_mirror}", target_mirror),
            )

            final_job["when"] = "always"
            final_job["retry"] = service_job_retries
            final_job["interruptible"] = True
            final_job["dependencies"] = []

            output_object["rebuild-index"] = final_job

        output_object["stages"] = stage_names

        # Capture the version of Spack used to generate the pipeline, that can be
        # passed to `git checkout` for version consistency. If we aren't in a Git
        # repository, presume we are a Spack release and use the Git tag instead.
        spack_version = spack.get_version()
        version_to_clone = spack.get_spack_commit() or f"v{spack.spack_version}"

        rebuild_everything = not options.prune_up_to_date and not options.prune_untouched

        output_object["variables"] = {
            "SPACK_ARTIFACTS_ROOT": artifacts_root,
            "SPACK_CONCRETE_ENV_DIR": rel_concrete_env_dir,
            "SPACK_VERSION": spack_version,
            "SPACK_CHECKOUT_VERSION": version_to_clone,
            "SPACK_JOB_LOG_DIR": rel_job_log_dir,
            "SPACK_JOB_REPRO_DIR": rel_job_repro_dir,
            "SPACK_JOB_TEST_DIR": rel_job_test_dir,
            "SPACK_PIPELINE_TYPE": options.pipeline_type.name if options.pipeline_type else "None",
            "SPACK_CI_STACK_NAME": os.environ.get("SPACK_CI_STACK_NAME", "None"),
            "SPACK_REBUILD_CHECK_UP_TO_DATE": str(options.prune_up_to_date),
            "SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
            "SPACK_REQUIRE_SIGNING": str(options.require_signing),
        }

        if options.stack_name:
            output_object["variables"]["SPACK_CI_STACK_NAME"] = options.stack_name

        output_vars = output_object["variables"]
        for item, val in output_vars.items():
            output_vars[item] = ensure_expected_target_path(val)

    else:
        # No jobs were generated
        noop_job = spack_ci_ir["jobs"]["noop"]["attributes"]
        # If this job fails ignore the status and carry on
        noop_job["retry"] = 0
        noop_job["allow_failure"] = True

        tty.debug("No specs to rebuild, generating no-op job")
        output_object = {"no-specs-to-rebuild": noop_job}

    # Ensure the child pipeline always runs
    output_object["workflow"] = {"rules": [{"when": "always"}]}

    sorted_output = {}
    for output_key, output_value in sorted(output_object.items()):
        sorted_output[output_key] = output_value

    # Minimize yaml output size through use of anchors
    syaml.anchorify(sorted_output)

    with open(output_file, "w", encoding="utf-8") as f:
        ruamel.yaml.YAML().dump(sorted_output, f)
@@ -24,10 +24,10 @@
|
|||||||
import spack.environment as ev
|
import spack.environment as ev
|
||||||
import spack.error
|
import spack.error
|
||||||
import spack.extensions
|
import spack.extensions
|
||||||
import spack.parser
|
|
||||||
import spack.paths
|
import spack.paths
|
||||||
import spack.repo
|
import spack.repo
|
||||||
import spack.spec
|
import spack.spec
|
||||||
|
import spack.spec_parser
|
||||||
import spack.store
|
import spack.store
|
||||||
import spack.traverse as traverse
|
import spack.traverse as traverse
|
||||||
import spack.user_environment as uenv
|
import spack.user_environment as uenv
|
||||||
@@ -163,12 +163,12 @@ def quote_kvp(string: str) -> str:
|
|||||||
or ``name==``, and we assume the rest of the argument is the value. This covers the
|
or ``name==``, and we assume the rest of the argument is the value. This covers the
|
||||||
common cases of passing flags, e.g., ``cflags="-O2 -g"`` on the command line.
|
common cases of passing flags, e.g., ``cflags="-O2 -g"`` on the command line.
|
||||||
"""
|
"""
|
||||||
match = spack.parser.SPLIT_KVP.match(string)
|
match = spack.spec_parser.SPLIT_KVP.match(string)
|
||||||
if not match:
|
if not match:
|
||||||
return string
|
return string
|
||||||
|
|
||||||
key, delim, value = match.groups()
|
key, delim, value = match.groups()
|
||||||
return f"{key}{delim}{spack.parser.quote_if_needed(value)}"
|
return f"{key}{delim}{spack.spec_parser.quote_if_needed(value)}"
|
||||||
|
|
||||||
|
|
||||||
def parse_specs(
|
def parse_specs(
|
||||||
@@ -180,7 +180,7 @@ def parse_specs(
|
|||||||
args = [args] if isinstance(args, str) else args
|
args = [args] if isinstance(args, str) else args
|
||||||
arg_string = " ".join([quote_kvp(arg) for arg in args])
|
arg_string = " ".join([quote_kvp(arg) for arg in args])
|
||||||
|
|
||||||
specs = spack.parser.parse(arg_string)
|
specs = spack.spec_parser.parse(arg_string)
|
||||||
if not concretize:
|
if not concretize:
|
||||||
return specs
|
return specs
|
||||||
|
|
||||||
@@ -199,7 +199,7 @@ def _concretize_spec_pairs(to_concretize, tests=False):
|
|||||||
# Special case for concretizing a single spec
|
# Special case for concretizing a single spec
|
||||||
if len(to_concretize) == 1:
|
if len(to_concretize) == 1:
|
||||||
abstract, concrete = to_concretize[0]
|
abstract, concrete = to_concretize[0]
|
||||||
return [concrete or spack.concretize.concretized(abstract)]
|
return [concrete or abstract.concretized()]
|
||||||
|
|
||||||
# Special case if every spec is either concrete or has an abstract hash
|
# Special case if every spec is either concrete or has an abstract hash
|
||||||
if all(
|
if all(
|
||||||
@@ -251,9 +251,9 @@ def matching_spec_from_env(spec):
|
|||||||
"""
|
"""
|
||||||
env = ev.active_environment()
|
env = ev.active_environment()
|
||||||
if env:
|
if env:
|
||||||
return env.matching_spec(spec) or spack.concretize.concretized(spec)
|
return env.matching_spec(spec) or spec.concretized()
|
||||||
else:
|
else:
|
||||||
return spack.concretize.concretized(spec)
|
return spec.concretized()
|
||||||
|
|
||||||
|
|
||||||
def matching_specs_from_env(specs):
|
def matching_specs_from_env(specs):
|
||||||
|
@@ -15,7 +15,6 @@
|
|||||||
import spack.bootstrap
|
import spack.bootstrap
|
||||||
import spack.bootstrap.config
|
import spack.bootstrap.config
|
||||||
import spack.bootstrap.core
|
import spack.bootstrap.core
|
||||||
import spack.concretize
|
|
||||||
import spack.config
|
import spack.config
|
||||||
import spack.mirrors.utils
|
import spack.mirrors.utils
|
||||||
import spack.spec
|
import spack.spec
|
||||||
@@ -399,7 +398,7 @@ def _mirror(args):
|
|||||||
llnl.util.tty.msg(msg.format(spec_str, mirror_dir))
|
llnl.util.tty.msg(msg.format(spec_str, mirror_dir))
|
||||||
# Suppress tty from the call below for terser messages
|
# Suppress tty from the call below for terser messages
|
||||||
llnl.util.tty.set_msg_enabled(False)
|
llnl.util.tty.set_msg_enabled(False)
|
||||||
spec = spack.concretize.concretized(spack.spec.Spec(spec_str))
|
spec = spack.spec.Spec(spec_str).concretized()
|
||||||
for node in spec.traverse():
|
for node in spec.traverse():
|
||||||
spack.mirrors.utils.create(mirror_dir, [node])
|
spack.mirrors.utils.create(mirror_dir, [node])
|
||||||
llnl.util.tty.set_msg_enabled(True)
|
llnl.util.tty.set_msg_enabled(True)
|
||||||
@@ -420,7 +419,7 @@ def write_metadata(subdir, metadata):
|
|||||||
metadata_rel_dir = os.path.join("metadata", subdir)
|
metadata_rel_dir = os.path.join("metadata", subdir)
|
||||||
metadata_yaml = os.path.join(args.root_dir, metadata_rel_dir, "metadata.yaml")
|
metadata_yaml = os.path.join(args.root_dir, metadata_rel_dir, "metadata.yaml")
|
||||||
llnl.util.filesystem.mkdirp(os.path.dirname(metadata_yaml))
|
llnl.util.filesystem.mkdirp(os.path.dirname(metadata_yaml))
|
||||||
with open(metadata_yaml, mode="w") as f:
|
with open(metadata_yaml, mode="w", encoding="utf-8") as f:
|
||||||
spack.util.spack_yaml.dump(metadata, stream=f)
|
spack.util.spack_yaml.dump(metadata, stream=f)
|
||||||
return os.path.dirname(metadata_yaml), metadata_rel_dir
|
return os.path.dirname(metadata_yaml), metadata_rel_dir
|
||||||
|
|
||||||
|
@@ -17,7 +17,6 @@
|
|||||||
|
|
||||||
import spack.binary_distribution as bindist
|
import spack.binary_distribution as bindist
|
||||||
import spack.cmd
|
import spack.cmd
|
||||||
import spack.concretize
|
|
||||||
import spack.config
|
import spack.config
|
||||||
import spack.deptypes as dt
|
import spack.deptypes as dt
|
||||||
import spack.environment as ev
|
import spack.environment as ev
|
||||||
@@ -556,7 +555,8 @@ def check_fn(args: argparse.Namespace):
|
|||||||
tty.msg("No specs provided, exiting.")
|
tty.msg("No specs provided, exiting.")
|
||||||
return
|
return
|
||||||
|
|
||||||
specs = [spack.concretize.concretized(s) for s in specs]
|
for spec in specs:
|
||||||
|
spec.concretize()
|
||||||
|
|
||||||
# Next see if there are any configured binary mirrors
|
# Next see if there are any configured binary mirrors
|
||||||
configured_mirrors = spack.config.get("mirrors", scope=args.scope)
|
configured_mirrors = spack.config.get("mirrors", scope=args.scope)
|
||||||
@@ -624,7 +624,7 @@ def save_specfile_fn(args):
|
|||||||
root = specs[0]
|
root = specs[0]
|
||||||
|
|
||||||
if not root.concrete:
|
if not root.concrete:
|
||||||
root = spack.concretize.concretized(root)
|
root.concretize()
|
||||||
|
|
||||||
save_dependency_specfiles(
|
save_dependency_specfiles(
|
||||||
root, args.specfile_dir, dependencies=spack.cmd.parse_specs(args.specs)
|
root, args.specfile_dir, dependencies=spack.cmd.parse_specs(args.specs)
|
||||||
@@ -731,7 +731,7 @@ def manifest_copy(manifest_file_list, dest_mirror=None):
|
|||||||
deduped_manifest = {}
|
deduped_manifest = {}
|
||||||
|
|
||||||
for manifest_path in manifest_file_list:
|
for manifest_path in manifest_file_list:
|
||||||
with open(manifest_path) as fd:
|
with open(manifest_path, encoding="utf-8") as fd:
|
||||||
manifest = json.loads(fd.read())
|
manifest = json.loads(fd.read())
|
||||||
for spec_hash, copy_list in manifest.items():
|
for spec_hash, copy_list in manifest.items():
|
||||||
# Last duplicate hash wins
|
# Last duplicate hash wins
|
||||||
|
@@ -253,7 +253,7 @@ def add_versions_to_package(pkg: PackageBase, version_lines: str, is_batch: bool
|
|||||||
if match:
|
if match:
|
||||||
new_versions.append((Version(match.group(1)), ver_line))
|
new_versions.append((Version(match.group(1)), ver_line))
|
||||||
|
|
||||||
with open(filename, "r+") as f:
|
with open(filename, "r+", encoding="utf-8") as f:
|
||||||
contents = f.read()
|
contents = f.read()
|
||||||
split_contents = version_statement_re.split(contents)
|
split_contents = version_statement_re.split(contents)
|
||||||
|
|
||||||
|
@@ -6,7 +6,6 @@
|
|||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
import shutil
|
import shutil
|
||||||
import warnings
|
|
||||||
from urllib.parse import urlparse, urlunparse
|
from urllib.parse import urlparse, urlunparse
|
||||||
|
|
||||||
import llnl.util.filesystem as fs
|
import llnl.util.filesystem as fs
|
||||||
@@ -17,6 +16,7 @@
|
|||||||
import spack.ci as spack_ci
|
import spack.ci as spack_ci
|
||||||
import spack.cmd
|
import spack.cmd
|
||||||
import spack.cmd.buildcache as buildcache
|
import spack.cmd.buildcache as buildcache
|
||||||
|
import spack.cmd.common.arguments
|
||||||
import spack.config as cfg
|
import spack.config as cfg
|
||||||
import spack.environment as ev
|
import spack.environment as ev
|
||||||
import spack.hash_types as ht
|
import spack.hash_types as ht
|
||||||
@@ -62,22 +62,8 @@ def setup_parser(subparser):
|
|||||||
"path to the file where generated jobs file should be written. "
|
"path to the file where generated jobs file should be written. "
|
||||||
"default is .gitlab-ci.yml in the root of the repository",
|
"default is .gitlab-ci.yml in the root of the repository",
|
||||||
)
|
)
|
||||||
generate.add_argument(
|
prune_dag_group = generate.add_mutually_exclusive_group()
|
||||||
"--optimize",
|
prune_dag_group.add_argument(
|
||||||
action="store_true",
|
|
||||||
default=False,
|
|
||||||
help="(DEPRECATED) optimize the gitlab yaml file for size\n\n"
|
|
||||||
"run the generated document through a series of optimization passes "
|
|
||||||
"designed to reduce the size of the generated file",
|
|
||||||
)
|
|
||||||
generate.add_argument(
|
|
||||||
"--dependencies",
|
|
||||||
action="store_true",
|
|
||||||
default=False,
|
|
||||||
help="(DEPRECATED) disable DAG scheduling (use 'plain' dependencies)",
|
|
||||||
)
|
|
||||||
prune_group = generate.add_mutually_exclusive_group()
|
|
||||||
prune_group.add_argument(
|
|
||||||
"--prune-dag",
|
"--prune-dag",
|
||||||
action="store_true",
|
action="store_true",
|
||||||
dest="prune_dag",
|
dest="prune_dag",
|
||||||
@@ -85,7 +71,7 @@ def setup_parser(subparser):
|
|||||||
help="skip up-to-date specs\n\n"
|
help="skip up-to-date specs\n\n"
|
||||||
"do not generate jobs for specs that are up-to-date on the mirror",
|
"do not generate jobs for specs that are up-to-date on the mirror",
|
||||||
)
|
)
|
||||||
prune_group.add_argument(
|
prune_dag_group.add_argument(
|
||||||
"--no-prune-dag",
|
"--no-prune-dag",
|
||||||
action="store_false",
|
action="store_false",
|
||||||
dest="prune_dag",
|
dest="prune_dag",
|
||||||
@@ -93,6 +79,23 @@ def setup_parser(subparser):
|
|||||||
help="process up-to-date specs\n\n"
|
help="process up-to-date specs\n\n"
|
||||||
"generate jobs for specs even when they are up-to-date on the mirror",
|
"generate jobs for specs even when they are up-to-date on the mirror",
|
||||||
)
|
)
|
||||||
|
prune_ext_group = generate.add_mutually_exclusive_group()
|
||||||
|
prune_ext_group.add_argument(
|
||||||
|
"--prune-externals",
|
||||||
|
action="store_true",
|
||||||
|
dest="prune_externals",
|
||||||
|
default=True,
|
||||||
|
help="skip external specs\n\n"
|
||||||
|
"do not generate jobs for specs that are marked as external",
|
||||||
|
)
|
||||||
|
prune_ext_group.add_argument(
|
||||||
|
"--no-prune-externals",
|
||||||
|
action="store_false",
|
||||||
|
dest="prune_externals",
|
||||||
|
default=True,
|
||||||
|
help="process external specs\n\n"
|
||||||
|
"generate jobs for specs even when they are marked as external",
|
||||||
|
)
|
||||||
generate.add_argument(
|
generate.add_argument(
|
||||||
"--check-index-only",
|
"--check-index-only",
|
||||||
action="store_true",
|
action="store_true",
|
||||||
@@ -108,14 +111,18 @@ def setup_parser(subparser):
|
|||||||
)
|
)
|
||||||
generate.add_argument(
|
generate.add_argument(
|
||||||
"--artifacts-root",
|
"--artifacts-root",
|
||||||
default=None,
|
default="jobs_scratch_dir",
|
||||||
help="path to the root of the artifacts directory\n\n"
|
help="path to the root of the artifacts directory\n\n"
|
||||||
"if provided, concrete environment files (spack.yaml, spack.lock) will be generated under "
|
"The spack ci module assumes it will normally be run from within your project "
|
||||||
"this directory. their location will be passed to generated child jobs through the "
|
"directory, wherever that is checked out to run your ci. The artifacts root directory "
|
||||||
"SPACK_CONCRETE_ENVIRONMENT_PATH variable",
|
"should specify a name that can safely be used for artifacts within your project "
|
||||||
|
"directory.",
|
||||||
)
|
)
|
||||||
generate.set_defaults(func=ci_generate)
|
generate.set_defaults(func=ci_generate)
|
||||||
|
|
||||||
|
spack.cmd.common.arguments.add_concretizer_args(generate)
|
||||||
|
spack.cmd.common.arguments.add_common_arguments(generate, ["jobs"])
|
||||||
|
|
||||||
# Rebuild the buildcache index associated with the mirror in the
|
# Rebuild the buildcache index associated with the mirror in the
|
||||||
# active, gitlab-enabled environment.
|
# active, gitlab-enabled environment.
|
||||||
index = subparsers.add_parser(
|
index = subparsers.add_parser(
|
||||||
@@ -145,6 +152,7 @@ def setup_parser(subparser):
|
|||||||
help="stop stand-alone tests after the first failure",
|
help="stop stand-alone tests after the first failure",
|
||||||
)
|
)
|
||||||
rebuild.set_defaults(func=ci_rebuild)
|
rebuild.set_defaults(func=ci_rebuild)
|
||||||
|
spack.cmd.common.arguments.add_common_arguments(rebuild, ["jobs"])
|
||||||
|
|
||||||
# Facilitate reproduction of a failed CI build job
|
# Facilitate reproduction of a failed CI build job
|
||||||
reproduce = subparsers.add_parser(
|
reproduce = subparsers.add_parser(
|
||||||
@@ -187,42 +195,8 @@ def ci_generate(args):
|
|||||||
before invoking this command. the value must be the CDash authorization token needed to create
|
before invoking this command. the value must be the CDash authorization token needed to create
|
||||||
a build group and register all generated jobs under it
|
a build group and register all generated jobs under it
|
||||||
"""
|
"""
|
||||||
if args.optimize:
|
|
||||||
warnings.warn(
|
|
||||||
"The --optimize option has been deprecated, and currently has no effect. "
|
|
||||||
"It will be removed in Spack v0.24."
|
|
||||||
)
|
|
||||||
|
|
||||||
if args.dependencies:
|
|
||||||
warnings.warn(
|
|
||||||
"The --dependencies option has been deprecated, and currently has no effect. "
|
|
||||||
"It will be removed in Spack v0.24."
|
|
||||||
)
|
|
||||||
|
|
||||||
env = spack.cmd.require_active_env(cmd_name="ci generate")
|
env = spack.cmd.require_active_env(cmd_name="ci generate")
|
||||||
|
spack_ci.generate_pipeline(env, args)
|
||||||
output_file = args.output_file
|
|
||||||
prune_dag = args.prune_dag
|
|
||||||
index_only = args.index_only
|
|
||||||
artifacts_root = args.artifacts_root
|
|
||||||
|
|
||||||
if not output_file:
|
|
||||||
output_file = os.path.abspath(".gitlab-ci.yml")
|
|
||||||
else:
|
|
||||||
output_file_path = os.path.abspath(output_file)
|
|
||||||
gen_ci_dir = os.path.dirname(output_file_path)
|
|
||||||
if not os.path.exists(gen_ci_dir):
|
|
||||||
os.makedirs(gen_ci_dir)
|
|
||||||
|
|
||||||
# Generate the jobs
|
|
||||||
spack_ci.generate_gitlab_ci_yaml(
|
|
||||||
env,
|
|
||||||
True,
|
|
||||||
output_file,
|
|
||||||
prune_dag=prune_dag,
|
|
||||||
check_index_only=index_only,
|
|
||||||
artifacts_root=artifacts_root,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def ci_reindex(args):
|
def ci_reindex(args):
|
||||||
@@ -387,7 +361,7 @@ def ci_rebuild(args):
|
|||||||
# Write this job's spec json into the reproduction directory, and it will
|
# Write this job's spec json into the reproduction directory, and it will
|
||||||
# also be used in the generated "spack install" command to install the spec
|
# also be used in the generated "spack install" command to install the spec
|
||||||
tty.debug("job concrete spec path: {0}".format(job_spec_json_path))
|
tty.debug("job concrete spec path: {0}".format(job_spec_json_path))
|
||||||
with open(job_spec_json_path, "w") as fd:
|
with open(job_spec_json_path, "w", encoding="utf-8") as fd:
|
||||||
fd.write(job_spec.to_json(hash=ht.dag_hash))
|
fd.write(job_spec.to_json(hash=ht.dag_hash))
|
||||||
|
|
||||||
# Write some other details to aid in reproduction into an artifact
|
# Write some other details to aid in reproduction into an artifact
|
||||||
@@ -397,7 +371,7 @@ def ci_rebuild(args):
|
|||||||
"job_spec_json": job_spec_json_file,
|
"job_spec_json": job_spec_json_file,
|
||||||
"ci_project_dir": ci_project_dir,
|
"ci_project_dir": ci_project_dir,
|
||||||
}
|
}
|
||||||
with open(repro_file, "w") as fd:
|
with open(repro_file, "w", encoding="utf-8") as fd:
|
||||||
fd.write(json.dumps(repro_details))
|
fd.write(json.dumps(repro_details))
|
||||||
|
|
||||||
# Write information about spack into an artifact in the repro dir
|
# Write information about spack into an artifact in the repro dir
|
||||||
@@ -433,14 +407,19 @@ def ci_rebuild(args):
|
|||||||
if not config["verify_ssl"]:
|
if not config["verify_ssl"]:
|
||||||
spack_cmd.append("-k")
|
spack_cmd.append("-k")
|
||||||
|
|
||||||
install_args = [f'--use-buildcache={spack_ci.win_quote("package:never,dependencies:only")}']
|
install_args = [
|
||||||
|
f'--use-buildcache={spack_ci.common.win_quote("package:never,dependencies:only")}'
|
||||||
|
]
|
||||||
|
|
||||||
can_verify = spack_ci.can_verify_binaries()
|
can_verify = spack_ci.can_verify_binaries()
|
||||||
verify_binaries = can_verify and spack_is_pr_pipeline is False
|
verify_binaries = can_verify and spack_is_pr_pipeline is False
|
||||||
if not verify_binaries:
|
if not verify_binaries:
|
||||||
install_args.append("--no-check-signature")
|
install_args.append("--no-check-signature")
|
||||||
|
|
||||||
slash_hash = spack_ci.win_quote("/" + job_spec.dag_hash())
|
if args.jobs:
|
||||||
|
install_args.append(f"-j{args.jobs}")
|
||||||
|
|
||||||
|
slash_hash = spack_ci.common.win_quote("/" + job_spec.dag_hash())
|
||||||
|
|
||||||
# Arguments when installing the root from sources
|
# Arguments when installing the root from sources
|
||||||
deps_install_args = install_args + ["--only=dependencies"]
|
deps_install_args = install_args + ["--only=dependencies"]
|
||||||
@@ -605,7 +584,7 @@ def ci_rebuild(args):
|
|||||||
|
|
||||||
rebuild_timer.stop()
|
rebuild_timer.stop()
|
||||||
try:
|
try:
|
||||||
with open("install_timers.json", "w") as timelog:
|
with open("install_timers.json", "w", encoding="utf-8") as timelog:
|
||||||
extra_attributes = {"name": ".ci-rebuild"}
|
extra_attributes = {"name": ".ci-rebuild"}
|
||||||
rebuild_timer.write_json(timelog, extra_attributes=extra_attributes)
|
rebuild_timer.write_json(timelog, extra_attributes=extra_attributes)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
|
@@ -743,7 +743,7 @@ def rst(args: Namespace, out: IO) -> None:
|
|||||||
# extract cross-refs of the form `_cmd-spack-<cmd>:` from rst files
|
# extract cross-refs of the form `_cmd-spack-<cmd>:` from rst files
|
||||||
documented_commands: Set[str] = set()
|
documented_commands: Set[str] = set()
|
||||||
for filename in args.rst_files:
|
for filename in args.rst_files:
|
||||||
with open(filename) as f:
|
with open(filename, encoding="utf-8") as f:
|
||||||
for line in f:
|
for line in f:
|
||||||
match = re.match(r"\.\. _cmd-(spack-.*):", line)
|
match = re.match(r"\.\. _cmd-(spack-.*):", line)
|
||||||
if match:
|
if match:
|
||||||
@@ -815,7 +815,7 @@ def prepend_header(args: Namespace, out: IO) -> None:
|
|||||||
if not args.header:
|
if not args.header:
|
||||||
return
|
return
|
||||||
|
|
||||||
with open(args.header) as header:
|
with open(args.header, encoding="utf-8") as header:
|
||||||
out.write(header.read())
|
out.write(header.read())
|
||||||
|
|
||||||
|
|
||||||
@@ -836,7 +836,7 @@ def _commands(parser: ArgumentParser, args: Namespace) -> None:
|
|||||||
|
|
||||||
if args.update:
|
if args.update:
|
||||||
tty.msg(f"Updating file: {args.update}")
|
tty.msg(f"Updating file: {args.update}")
|
||||||
with open(args.update, "w") as f:
|
with open(args.update, "w", encoding="utf-8") as f:
|
||||||
prepend_header(args, f)
|
prepend_header(args, f)
|
||||||
formatter(args, f)
|
formatter(args, f)
|
||||||
|
|
||||||
|
@@ -169,7 +169,7 @@ def installed_specs(args):
|
|||||||
else:
|
else:
|
||||||
packages = []
|
packages = []
|
||||||
for file in args.specfiles:
|
for file in args.specfiles:
|
||||||
with open(file, "r") as f:
|
with open(file, "r", encoding="utf-8") as f:
|
||||||
s = spack.spec.Spec.from_yaml(f)
|
s = spack.spec.Spec.from_yaml(f)
|
||||||
packages.append(s.format())
|
packages.append(s.format())
|
||||||
return packages
|
return packages
|
||||||
|
@@ -14,6 +14,7 @@
  import spack.config
  import spack.environment as ev
  import spack.error
+ import spack.schema
  import spack.schema.env
  import spack.spec
  import spack.store
@@ -566,7 +567,7 @@ def config_prefer_upstream(args):

      # Simply write the config to the specified file.
      existing = spack.config.get("packages", scope=scope)
-     new = spack.config.merge_yaml(existing, pkgs)
+     new = spack.schema.merge_yaml(existing, pkgs)
      spack.config.set("packages", new, scope)
      config_file = spack.config.CONFIG.get_config_filename(scope, section)

@@ -110,7 +110,7 @@ def write(self, pkg_path):
      all_deps.append(self.dependencies)

      # Write out a template for the file
-     with open(pkg_path, "w") as pkg_file:
+     with open(pkg_path, "w", encoding="utf-8") as pkg_file:
          pkg_file.write(
              package_template.format(
                  name=self.name,
@@ -19,7 +19,6 @@
  from llnl.util.symlink import symlink

  import spack.cmd
- import spack.concretize
  import spack.environment as ev
  import spack.installer
  import spack.store
@@ -105,7 +104,7 @@ def deprecate(parser, args):
      )

      if args.install:
-         deprecator = spack.concretize.concretized(specs[1])
+         deprecator = specs[1].concretized()
      else:
          deprecator = spack.cmd.disambiguate_spec(specs[1], env, local=True)

@@ -11,7 +11,6 @@
  import spack.build_environment
  import spack.cmd
  import spack.cmd.common.arguments
- import spack.concretize
  import spack.config
  import spack.repo
  from spack.cmd.common import arguments
@@ -116,7 +115,7 @@ def dev_build(self, args):

      # Forces the build to run out of the source directory.
      spec.constrain("dev_path=%s" % source_path)
-     spec = spack.concretize.concretized(spec)
+     spec.concretize()

      if spec.installed:
          tty.error("Already installed in %s" % spec.prefix)
@@ -76,7 +76,7 @@ def locate_package(name: str, repo: spack.repo.Repo) -> str:
|
|||||||
path = repo.filename_for_package_name(name)
|
path = repo.filename_for_package_name(name)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
with open(path, "r"):
|
with open(path, "r", encoding="utf-8"):
|
||||||
return path
|
return path
|
||||||
except OSError as e:
|
except OSError as e:
|
||||||
if e.errno == errno.ENOENT:
|
if e.errno == errno.ENOENT:
|
||||||
@@ -93,7 +93,7 @@ def locate_file(name: str, path: str) -> str:
|
|||||||
|
|
||||||
# Try to open direct match.
|
# Try to open direct match.
|
||||||
try:
|
try:
|
||||||
with open(file_path, "r"):
|
with open(file_path, "r", encoding="utf-8"):
|
||||||
return file_path
|
return file_path
|
||||||
except OSError as e:
|
except OSError as e:
|
||||||
if e.errno != errno.ENOENT:
|
if e.errno != errno.ENOENT:
|
||||||
|
@@ -865,7 +865,7 @@ def env_loads(args):
|
|||||||
args.recurse_dependencies = False
|
args.recurse_dependencies = False
|
||||||
|
|
||||||
loads_file = fs.join_path(env.path, "loads")
|
loads_file = fs.join_path(env.path, "loads")
|
||||||
with open(loads_file, "w") as f:
|
with open(loads_file, "w", encoding="utf-8") as f:
|
||||||
specs = env._get_environment_specs(recurse_dependencies=recurse_dependencies)
|
specs = env._get_environment_specs(recurse_dependencies=recurse_dependencies)
|
||||||
|
|
||||||
spack.cmd.modules.loads(module_type, specs, args, f)
|
spack.cmd.modules.loads(module_type, specs, args, f)
|
||||||
@@ -1053,7 +1053,7 @@ def env_depfile(args):
|
|||||||
|
|
||||||
# Finally write to stdout/file.
|
# Finally write to stdout/file.
|
||||||
if args.output:
|
if args.output:
|
||||||
with open(args.output, "w") as f:
|
with open(args.output, "w", encoding="utf-8") as f:
|
||||||
f.write(makefile)
|
f.write(makefile)
|
||||||
else:
|
else:
|
||||||
sys.stdout.write(makefile)
|
sys.stdout.write(makefile)
|
||||||
|
@@ -14,7 +14,6 @@
  from llnl.util import lang, tty

  import spack.cmd
- import spack.concretize
  import spack.config
  import spack.environment as ev
  import spack.paths
@@ -292,7 +291,7 @@ def _dump_log_on_error(e: InstallError):
          tty.error("'spack install' created no log.")
      else:
          sys.stderr.write("Full build log:\n")
-         with open(e.pkg.log_path, errors="replace") as log:
+         with open(e.pkg.log_path, errors="replace", encoding="utf-8") as log:
              shutil.copyfileobj(log, sys.stderr)


@@ -446,13 +445,13 @@ def concrete_specs_from_file(args):
      """Return the list of concrete specs read from files."""
      result = []
      for file in args.specfiles:
-         with open(file, "r") as f:
+         with open(file, "r", encoding="utf-8") as f:
              if file.endswith("yaml") or file.endswith("yml"):
                  s = spack.spec.Spec.from_yaml(f)
              else:
                  s = spack.spec.Spec.from_json(f)

-         concretized = spack.concretize.concretized(s)
+         concretized = s.concretized()
          if concretized.dag_hash() != s.dag_hash():
              msg = 'skipped invalid file "{0}". '
              msg += "The file does not contain a concrete spec."
@@ -191,7 +191,7 @@ def verify(args):
|
|||||||
|
|
||||||
for relpath in _licensed_files(args):
|
for relpath in _licensed_files(args):
|
||||||
path = os.path.join(args.root, relpath)
|
path = os.path.join(args.root, relpath)
|
||||||
with open(path) as f:
|
with open(path, encoding="utf-8") as f:
|
||||||
lines = [line for line in f][:license_lines]
|
lines = [line for line in f][:license_lines]
|
||||||
|
|
||||||
error = _check_license(lines, path)
|
error = _check_license(lines, path)
|
||||||
|
@@ -340,7 +340,7 @@ def list(parser, args):
|
|||||||
return
|
return
|
||||||
|
|
||||||
tty.msg("Updating file: %s" % args.update)
|
tty.msg("Updating file: %s" % args.update)
|
||||||
with open(args.update, "w") as f:
|
with open(args.update, "w", encoding="utf-8") as f:
|
||||||
formatter(sorted_packages, f)
|
formatter(sorted_packages, f)
|
||||||
|
|
||||||
elif args.count:
|
elif args.count:
|
||||||
|
@@ -8,7 +8,6 @@
|
|||||||
|
|
||||||
from llnl.path import convert_to_posix_path
|
from llnl.path import convert_to_posix_path
|
||||||
|
|
||||||
import spack.concretize
|
|
||||||
import spack.paths
|
import spack.paths
|
||||||
import spack.util.executable
|
import spack.util.executable
|
||||||
from spack.spec import Spec
|
from spack.spec import Spec
|
||||||
@@ -32,7 +31,7 @@ def line_to_rtf(str):
|
|||||||
return str.replace("\n", "\\par")
|
return str.replace("\n", "\\par")
|
||||||
|
|
||||||
contents = ""
|
contents = ""
|
||||||
with open(file_path, "r+") as f:
|
with open(file_path, "r+", encoding="utf-8") as f:
|
||||||
for line in f.readlines():
|
for line in f.readlines():
|
||||||
contents += line_to_rtf(line)
|
contents += line_to_rtf(line)
|
||||||
return rtf_header.format(contents)
|
return rtf_header.format(contents)
|
||||||
@@ -67,7 +66,8 @@ def make_installer(parser, args):
|
|||||||
"""
|
"""
|
||||||
if sys.platform == "win32":
|
if sys.platform == "win32":
|
||||||
output_dir = args.output_dir
|
output_dir = args.output_dir
|
||||||
cmake_spec = spack.concretize.concretized(Spec("cmake"))
|
cmake_spec = Spec("cmake")
|
||||||
|
cmake_spec.concretize()
|
||||||
cmake_path = os.path.join(cmake_spec.prefix, "bin", "cmake.exe")
|
cmake_path = os.path.join(cmake_spec.prefix, "bin", "cmake.exe")
|
||||||
cpack_path = os.path.join(cmake_spec.prefix, "bin", "cpack.exe")
|
cpack_path = os.path.join(cmake_spec.prefix, "bin", "cpack.exe")
|
||||||
spack_source = args.spack_source
|
spack_source = args.spack_source
|
||||||
@@ -93,7 +93,7 @@ def make_installer(parser, args):
|
|||||||
rtf_spack_license = txt_to_rtf(spack_license)
|
rtf_spack_license = txt_to_rtf(spack_license)
|
||||||
spack_license = posixpath.join(source_dir, "LICENSE.rtf")
|
spack_license = posixpath.join(source_dir, "LICENSE.rtf")
|
||||||
|
|
||||||
with open(spack_license, "w") as rtf_license:
|
with open(spack_license, "w", encoding="utf-8") as rtf_license:
|
||||||
written = rtf_license.write(rtf_spack_license)
|
written = rtf_license.write(rtf_spack_license)
|
||||||
if written == 0:
|
if written == 0:
|
||||||
raise RuntimeError("Failed to generate properly formatted license file")
|
raise RuntimeError("Failed to generate properly formatted license file")
|
||||||
|
@@ -468,7 +468,7 @@ def specs_from_text_file(filename, concretize=False):
          concretize (bool): if True concretize the specs before returning
              the list.
      """
-     with open(filename, "r") as f:
+     with open(filename, "r", encoding="utf-8") as f:
          specs_in_file = f.readlines()
          specs_in_file = [s.strip() for s in specs_in_file]
      return spack.cmd.parse_specs(" ".join(specs_in_file), concretize=concretize)
@@ -493,7 +493,7 @@ def extend_with_additional_versions(specs, num_versions):
          mirror_specs = spack.mirrors.utils.get_all_versions(specs)
      else:
          mirror_specs = spack.mirrors.utils.get_matching_versions(specs, num_versions=num_versions)
-     mirror_specs = [spack.concretize.concretized(x) for x in mirror_specs]
+     mirror_specs = [x.concretized() for x in mirror_specs]
      return mirror_specs


@@ -150,7 +150,7 @@ def pkg_source(args):
|
|||||||
content = ph.canonical_source(spec)
|
content = ph.canonical_source(spec)
|
||||||
else:
|
else:
|
||||||
message = "Source for %s:" % filename
|
message = "Source for %s:" % filename
|
||||||
with open(filename) as f:
|
with open(filename, encoding="utf-8") as f:
|
||||||
content = f.read()
|
content = f.read()
|
||||||
|
|
||||||
if sys.stdout.isatty():
|
if sys.stdout.isatty():
|
||||||
|
@@ -94,7 +94,7 @@ def ipython_interpreter(args):
|
|||||||
if "PYTHONSTARTUP" in os.environ:
|
if "PYTHONSTARTUP" in os.environ:
|
||||||
startup_file = os.environ["PYTHONSTARTUP"]
|
startup_file = os.environ["PYTHONSTARTUP"]
|
||||||
if os.path.isfile(startup_file):
|
if os.path.isfile(startup_file):
|
||||||
with open(startup_file) as startup:
|
with open(startup_file, encoding="utf-8") as startup:
|
||||||
exec(startup.read())
|
exec(startup.read())
|
||||||
|
|
||||||
# IPython can also support running a script OR command, not both
|
# IPython can also support running a script OR command, not both
|
||||||
@@ -126,7 +126,7 @@ def python_interpreter(args):
|
|||||||
if "PYTHONSTARTUP" in os.environ:
|
if "PYTHONSTARTUP" in os.environ:
|
||||||
startup_file = os.environ["PYTHONSTARTUP"]
|
startup_file = os.environ["PYTHONSTARTUP"]
|
||||||
if os.path.isfile(startup_file):
|
if os.path.isfile(startup_file):
|
||||||
with open(startup_file) as startup:
|
with open(startup_file, encoding="utf-8") as startup:
|
||||||
console.runsource(startup.read(), startup_file, "exec")
|
console.runsource(startup.read(), startup_file, "exec")
|
||||||
if args.python_command:
|
if args.python_command:
|
||||||
propagate_exceptions_from(console)
|
propagate_exceptions_from(console)
|
||||||
|
@@ -19,11 +19,48 @@
  level = "long"


+ class StageFilter:
+     """
+     Encapsulation of reasons to skip staging
+     """
+
+     def __init__(self, exclusions, skip_installed):
+         """
+         :param exclusions: A list of specs to skip if satisfied.
+         :param skip_installed: A boolean indicating whether to skip already installed specs.
+         """
+         self.exclusions = exclusions
+         self.skip_installed = skip_installed
+
+     def __call__(self, spec):
+         """filter action, true means spec should be filtered"""
+         if spec.external:
+             return True
+
+         if self.skip_installed and spec.installed:
+             return True
+
+         if any(spec.satisfies(exclude) for exclude in self.exclusions):
+             return True
+
+         return False
+
+
  def setup_parser(subparser):
      arguments.add_common_arguments(subparser, ["no_checksum", "specs"])
      subparser.add_argument(
          "-p", "--path", dest="path", help="path to stage package, does not add to spack tree"
      )
+     subparser.add_argument(
+         "-e",
+         "--exclude",
+         action="append",
+         default=[],
+         help="exclude packages that satisfy the specified specs",
+     )
+     subparser.add_argument(
+         "-s", "--skip-installed", action="store_true", help="dont restage already installed specs"
+     )
      arguments.add_concretizer_args(subparser)


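Taken together, the additions above give `spack stage` two new flags, `--exclude` and `--skip-installed`, which feed the StageFilter predicate. A minimal usage sketch under those assumptions (spec names illustrative):

    import spack.cmd

    exclusions = spack.cmd.parse_specs(["py-numpy"], concretize=False)
    stage_filter = StageFilter(exclusions, skip_installed=True)

    for spec in specs:
        if stage_filter(spec):
            continue  # external, already installed, or matching --exclude
        _stage(spec.package)  # the command's existing staging helper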
@@ -31,11 +68,14 @@ def stage(parser, args):
      if args.no_checksum:
          spack.config.set("config:checksum", False, scope="command_line")

+     exclusion_specs = spack.cmd.parse_specs(args.exclude, concretize=False)
+     filter = StageFilter(exclusion_specs, args.skip_installed)
+
      if not args.specs:
          env = ev.active_environment()
          if not env:
              tty.die("`spack stage` requires a spec or an active environment")
-         return _stage_env(env)
+         return _stage_env(env, filter)

      specs = spack.cmd.parse_specs(args.specs, concretize=False)

@@ -49,6 +89,11 @@ def stage(parser, args):

      specs = spack.cmd.matching_specs_from_env(specs)
      for spec in specs:
+         spec = spack.cmd.matching_spec_from_env(spec)
+
+         if filter(spec):
+             continue
+
          pkg = spec.package

          if custom_path:
@@ -57,9 +102,13 @@ def stage(parser, args):
          _stage(pkg)


- def _stage_env(env: ev.Environment):
+ def _stage_env(env: ev.Environment, filter):
      tty.msg(f"Staging specs from environment {env.name}")
      for spec in spack.traverse.traverse_nodes(env.concrete_roots()):
+
+         if filter(spec):
+             continue
+
          _stage(spec.package)


@@ -415,8 +415,8 @@ def _run_import_check(
      pretty_path = file if root_relative else cwd_relative(file, root, working_dir)

      try:
-         with open(file, "r") as f:
-             contents = open(file, "r").read()
+         with open(file, "r", encoding="utf-8") as f:
+             contents = f.read()
          parsed = ast.parse(contents)
      except Exception:
          exit_code = 1
@@ -448,7 +448,7 @@ def _run_import_check(
      if not fix or not to_add and not to_remove:
          continue

-     with open(file, "r") as f:
+     with open(file, "r", encoding="utf-8") as f:
          lines = f.readlines()

      if to_add:
@@ -468,7 +468,7 @@ def _run_import_check(
      for statement in to_remove:
          new_contents = new_contents.replace(f"{statement}\n", "")

-     with open(file, "w") as f:
+     with open(file, "w", encoding="utf-8") as f:
          f.write(new_contents)

      return exit_code
|
@@ -346,7 +346,7 @@ def _report_suite_results(test_suite, args, constraints):
|
|||||||
tty.msg("{0} for test suite '{1}'{2}:".format(results_desc, test_suite.name, matching))
|
tty.msg("{0} for test suite '{1}'{2}:".format(results_desc, test_suite.name, matching))
|
||||||
|
|
||||||
results = {}
|
results = {}
|
||||||
with open(test_suite.results_file, "r") as f:
|
with open(test_suite.results_file, "r", encoding="utf-8") as f:
|
||||||
for line in f:
|
for line in f:
|
||||||
pkg_id, status = line.split()
|
pkg_id, status = line.split()
|
||||||
results[pkg_id] = status
|
results[pkg_id] = status
|
||||||
@@ -371,7 +371,7 @@ def _report_suite_results(test_suite, args, constraints):
|
|||||||
spec = test_specs[pkg_id]
|
spec = test_specs[pkg_id]
|
||||||
log_file = test_suite.log_file_for_spec(spec)
|
log_file = test_suite.log_file_for_spec(spec)
|
||||||
if os.path.isfile(log_file):
|
if os.path.isfile(log_file):
|
||||||
with open(log_file, "r") as f:
|
with open(log_file, "r", encoding="utf-8") as f:
|
||||||
msg += "\n{0}".format("".join(f.readlines()))
|
msg += "\n{0}".format("".join(f.readlines()))
|
||||||
tty.msg(msg)
|
tty.msg(msg)
|
||||||
|
|
||||||
|
@@ -192,7 +192,7 @@ def view(parser, args):
|
|||||||
|
|
||||||
if args.action in actions_link and args.projection_file:
|
if args.action in actions_link and args.projection_file:
|
||||||
# argparse confirms file exists
|
# argparse confirms file exists
|
||||||
with open(args.projection_file, "r") as f:
|
with open(args.projection_file, "r", encoding="utf-8") as f:
|
||||||
projections_data = s_yaml.load(f)
|
projections_data = s_yaml.load(f)
|
||||||
validate(projections_data, spack.schema.projections.schema)
|
validate(projections_data, spack.schema.projections.schema)
|
||||||
ordered_projections = projections_data["projections"]
|
ordered_projections = projections_data["projections"]
|
||||||
|
@@ -469,7 +469,7 @@ def _compile_dummy_c_source(self) -> Optional[str]:
|
|||||||
fout = os.path.join(tmpdir, "output")
|
fout = os.path.join(tmpdir, "output")
|
||||||
fin = os.path.join(tmpdir, f"main.{ext}")
|
fin = os.path.join(tmpdir, f"main.{ext}")
|
||||||
|
|
||||||
with open(fin, "w") as csource:
|
with open(fin, "w", encoding="utf-8") as csource:
|
||||||
csource.write(
|
csource.write(
|
||||||
"int main(int argc, char* argv[]) { (void)argc; (void)argv; return 0; }\n"
|
"int main(int argc, char* argv[]) { (void)argc; (void)argv; return 0; }\n"
|
||||||
)
|
)
|
||||||
|
@@ -51,10 +51,11 @@ def concretize_specs_together(
      tests: list of package names for which to consider tests dependencies. If True, all nodes
          will have test dependencies. If False, test dependencies will be disregarded.
      """
-     from spack.solver.asp import Solver
+     import spack.solver.asp

      allow_deprecated = spack.config.get("config:deprecated", False)
-     result = Solver().solve(abstract_specs, tests=tests, allow_deprecated=allow_deprecated)
+     solver = spack.solver.asp.Solver()
+     result = solver.solve(abstract_specs, tests=tests, allow_deprecated=allow_deprecated)
      return [s.copy() for s in result.specs]


@@ -89,7 +90,7 @@ def concretize_together_when_possible(
      tests: list of package names for which to consider tests dependencies. If True, all nodes
          will have test dependencies. If False, test dependencies will be disregarded.
      """
-     from spack.solver.asp import Solver
+     import spack.solver.asp

      to_concretize = [concrete if concrete else abstract for abstract, concrete in spec_list]
      old_concrete_to_abstract = {
@@ -97,8 +98,9 @@ def concretize_together_when_possible(
      }

      result_by_user_spec = {}
+     solver = spack.solver.asp.Solver()
      allow_deprecated = spack.config.get("config:deprecated", False)
-     for result in Solver().solve_in_rounds(
+     for result in solver.solve_in_rounds(
          to_concretize, tests=tests, allow_deprecated=allow_deprecated
      ):
          result_by_user_spec.update(result.specs_by_input)
@@ -122,7 +124,7 @@ def concretize_separately(
      tests: list of package names for which to consider tests dependencies. If True, all nodes
          will have test dependencies. If False, test dependencies will be disregarded.
      """
-     from spack.bootstrap import ensure_bootstrap_configuration, ensure_clingo_importable_or_raise
+     import spack.bootstrap

      to_concretize = [abstract for abstract, concrete in spec_list if not concrete]
      args = [
@@ -132,8 +134,8 @@ def concretize_separately(
      ]
      ret = [(i, abstract) for i, abstract in enumerate(to_concretize) if abstract.concrete]
      # Ensure we don't try to bootstrap clingo in parallel
-     with ensure_bootstrap_configuration():
-         ensure_clingo_importable_or_raise()
+     with spack.bootstrap.ensure_bootstrap_configuration():
+         spack.bootstrap.ensure_clingo_importable_or_raise()

      # Ensure all the indexes have been built or updated, since
      # otherwise the processes in the pool may timeout on waiting
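For reference, the solver entry point these hunks switch to is used as follows; a minimal sketch mirroring the call pattern in the diff (spec names illustrative):

    import spack.config
    import spack.solver.asp
    from spack.spec import Spec

    abstract_specs = [Spec("zlib"), Spec("libpng")]
    allow_deprecated = spack.config.get("config:deprecated", False)

    solver = spack.solver.asp.Solver()
    result = solver.solve(abstract_specs, allow_deprecated=allow_deprecated)
    concrete_specs = [s.copy() for s in result.specs]  # one concrete spec per input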
@@ -188,50 +190,10 @@ def _concretize_task(packed_arguments: Tuple[int, str, TestsType]) -> Tuple[int,
      index, spec_str, tests = packed_arguments
      with tty.SuppressOutput(msg_enabled=False):
          start = time.time()
-         spec = concretized(Spec(spec_str), tests=tests)
+         spec = Spec(spec_str).concretized(tests=tests)
          return index, spec, time.time() - start


- def concretized(spec: Spec, tests: Union[bool, Iterable[str]] = False) -> Spec:
-     """Return a concretized copy of the given spec.
-
-     Args:
-         tests: if False disregard 'test' dependencies, if a list of names activate them for
-             the packages in the list, if True activate 'test' dependencies for all packages.
-     """
-     from spack.solver.asp import Solver, SpecBuilder
-
-     spec.replace_hash()
-
-     for node in spec.traverse():
-         if not node.name:
-             raise spack.error.SpecError(
-                 f"Spec {node} has no name; cannot concretize an anonymous spec"
-             )
-
-     if spec._concrete:
-         return spec.copy()
-
-     allow_deprecated = spack.config.get("config:deprecated", False)
-     result = Solver().solve([spec], tests=tests, allow_deprecated=allow_deprecated)
-
-     # take the best answer
-     opt, i, answer = min(result.answers)
-     name = spec.name
-     # TODO: Consolidate this code with similar code in solve.py
-     if spec.virtual:
-         providers = [s.name for s in answer.values() if s.package.provides(name)]
-         name = providers[0]
-
-     node = SpecBuilder.make_node(pkg=name)
-     assert (
-         node in answer
-     ), f"cannot find {name} in the list of specs {','.join([n.pkg for n in answer.keys()])}"
-
-     concretized = answer[node]
-     return concretized
-
-
  class UnavailableCompilerVersionError(spack.error.SpackError):
      """Raised when there is no available compiler that satisfies a
      compiler spec."""
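The removal above, together with the command changes earlier in this compare, switches call sites between two ways of concretizing a spec; both appear verbatim in the diff (package name illustrative):

    import spack.concretize
    from spack.spec import Spec

    concrete_a = spack.concretize.concretized(Spec("zlib"))  # module-level helper (minus side)
    concrete_b = Spec("zlib").concretized()                  # Spec method (plus side)
    # either way the result is a concrete copy; the input spec stays abstract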
@@ -179,7 +179,7 @@ def _write_section(self, section: str) -> None:
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
filesystem.mkdirp(self.path)
|
filesystem.mkdirp(self.path)
|
||||||
with open(filename, "w") as f:
|
with open(filename, "w", encoding="utf-8") as f:
|
||||||
syaml.dump_config(data, stream=f, default_flow_style=False)
|
syaml.dump_config(data, stream=f, default_flow_style=False)
|
||||||
except (syaml.SpackYAMLError, OSError) as e:
|
except (syaml.SpackYAMLError, OSError) as e:
|
||||||
raise ConfigFileError(f"cannot write to '{filename}'") from e
|
raise ConfigFileError(f"cannot write to '{filename}'") from e
|
||||||
@@ -314,7 +314,7 @@ def _write_section(self, section: str) -> None:
|
|||||||
filesystem.mkdirp(parent)
|
filesystem.mkdirp(parent)
|
||||||
|
|
||||||
tmp = os.path.join(parent, f".{os.path.basename(self.path)}.tmp")
|
tmp = os.path.join(parent, f".{os.path.basename(self.path)}.tmp")
|
||||||
with open(tmp, "w") as f:
|
with open(tmp, "w", encoding="utf-8") as f:
|
||||||
syaml.dump_config(data_to_write, stream=f, default_flow_style=False)
|
syaml.dump_config(data_to_write, stream=f, default_flow_style=False)
|
||||||
filesystem.rename(tmp, self.path)
|
filesystem.rename(tmp, self.path)
|
||||||
|
|
||||||
@@ -619,7 +619,7 @@ def _get_config_memoized(self, section: str, scope: Optional[str]) -> YamlConfig
|
|||||||
if changed:
|
if changed:
|
||||||
self.format_updates[section].append(scope)
|
self.format_updates[section].append(scope)
|
||||||
|
|
||||||
merged_section = merge_yaml(merged_section, data)
|
merged_section = spack.schema.merge_yaml(merged_section, data)
|
||||||
|
|
||||||
# no config files -- empty config.
|
# no config files -- empty config.
|
||||||
if section not in merged_section:
|
if section not in merged_section:
|
||||||
@@ -680,7 +680,7 @@ def set(self, path: str, value: Any, scope: Optional[str] = None) -> None:
|
|||||||
while len(parts) > 1:
|
while len(parts) > 1:
|
||||||
key = parts.pop(0)
|
key = parts.pop(0)
|
||||||
|
|
||||||
if _override(key):
|
if spack.schema.override(key):
|
||||||
new = type(data[key])()
|
new = type(data[key])()
|
||||||
del data[key]
|
del data[key]
|
||||||
else:
|
else:
|
||||||
@@ -693,7 +693,7 @@ def set(self, path: str, value: Any, scope: Optional[str] = None) -> None:
|
|||||||
data[key] = new
|
data[key] = new
|
||||||
data = new
|
data = new
|
||||||
|
|
||||||
if _override(parts[0]):
|
if spack.schema.override(parts[0]):
|
||||||
data.pop(parts[0], None)
|
data.pop(parts[0], None)
|
||||||
|
|
||||||
# update new value
|
# update new value
|
||||||
@@ -790,30 +790,6 @@ def config_paths_from_entry_points() -> List[Tuple[str, str]]:
      return config_paths


- def _add_command_line_scopes(cfg: Configuration, command_line_scopes: List[str]) -> None:
-     """Add additional scopes from the --config-scope argument, either envs or dirs."""
-     import spack.environment.environment as env  # circular import
-
-     for i, path in enumerate(command_line_scopes):
-         name = f"cmd_scope_{i}"
-
-         if env.exists(path):  # managed environment
-             manifest = env.EnvironmentManifestFile(env.root(path))
-         elif env.is_env_dir(path):  # anonymous environment
-             manifest = env.EnvironmentManifestFile(path)
-         elif os.path.isdir(path):  # directory with config files
-             cfg.push_scope(DirectoryConfigScope(name, path, writable=False))
-             _add_platform_scope(cfg, name, path, writable=False)
-             continue
-         else:
-             raise spack.error.ConfigError(f"Invalid configuration scope: {path}")
-
-         for scope in manifest.env_config_scopes:
-             scope.name = f"{name}:{scope.name}"
-             scope.writable = False
-             cfg.push_scope(scope)
-
-
  def create() -> Configuration:
      """Singleton Configuration instance.

@@ -894,7 +870,7 @@ def add_from_file(filename: str, scope: Optional[str] = None) -> None:
|
|||||||
|
|
||||||
value = data[section]
|
value = data[section]
|
||||||
existing = get(section, scope=scope)
|
existing = get(section, scope=scope)
|
||||||
new = merge_yaml(existing, value)
|
new = spack.schema.merge_yaml(existing, value)
|
||||||
|
|
||||||
# We cannot call config.set directly (set is a type)
|
# We cannot call config.set directly (set is a type)
|
||||||
CONFIG.set(section, new, scope)
|
CONFIG.set(section, new, scope)
|
||||||
@@ -946,7 +922,7 @@ def add(fullpath: str, scope: Optional[str] = None) -> None:
|
|||||||
value: List[str] = [value] # type: ignore[no-redef]
|
value: List[str] = [value] # type: ignore[no-redef]
|
||||||
|
|
||||||
# merge value into existing
|
# merge value into existing
|
||||||
new = merge_yaml(existing, value)
|
new = spack.schema.merge_yaml(existing, value)
|
||||||
CONFIG.set(path, new, scope)
|
CONFIG.set(path, new, scope)
|
||||||
|
|
||||||
|
|
||||||
@@ -1093,7 +1069,7 @@ def read_config_file(
|
|||||||
# schema when it's not necessary) while allowing us to validate against a
|
# schema when it's not necessary) while allowing us to validate against a
|
||||||
# known schema when the top-level key could be incorrect.
|
# known schema when the top-level key could be incorrect.
|
||||||
try:
|
try:
|
||||||
with open(path) as f:
|
with open(path, encoding="utf-8") as f:
|
||||||
tty.debug(f"Reading config from file {path}")
|
tty.debug(f"Reading config from file {path}")
|
||||||
data = syaml.load_config(f)
|
data = syaml.load_config(f)
|
||||||
|
|
||||||
@@ -1120,44 +1096,6 @@
      raise ConfigFileError(str(e)) from e


- def _override(string: str) -> bool:
-     """Test if a spack YAML string is an override.
-
-     See ``spack_yaml`` for details. Keys in Spack YAML can end in `::`,
-     and if they do, their values completely replace lower-precedence
-     configs instead of merging into them.
-
-     """
-     return hasattr(string, "override") and string.override
-
-
- def _append(string: str) -> bool:
-     """Test if a spack YAML string is an override.
-
-     See ``spack_yaml`` for details. Keys in Spack YAML can end in `+:`,
-     and if they do, their values append lower-precedence
-     configs.
-
-     str, str : concatenate strings.
-     [obj], [obj] : append lists.
-
-     """
-     return getattr(string, "append", False)
-
-
- def _prepend(string: str) -> bool:
-     """Test if a spack YAML string is an override.
-
-     See ``spack_yaml`` for details. Keys in Spack YAML can end in `+:`,
-     and if they do, their values prepend lower-precedence
-     configs.
-
-     str, str : concatenate strings.
-     [obj], [obj] : prepend lists. (default behavior)
-     """
-     return getattr(string, "prepend", False)
-
-
  def _mark_internal(data, name):
      """Add a simple name mark to raw YAML/JSON data.

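The three helpers removed here only test for marker attributes that Spack's YAML layer attaches to keys written with a modifier (`key::` overrides, `key+:` prepends, `key-:` appends). A rough sketch of the check they encoded, with the marker attribute supplied by a stand-in class instead of the real loader:

    class MarkedKey(str):
        override = True  # stand-in for the attribute spack_yaml sets on "key::" spellings

    def is_override(key) -> bool:
        # same test as the removed _override()
        return hasattr(key, "override") and key.override

    is_override("packages")             # False: a plain key merges normally
    is_override(MarkedKey("packages"))  # True: a "packages::" key replaces lower-precedence config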
@@ -1260,7 +1198,7 @@ def they_are(t):
|
|||||||
unmerge = sk in dest
|
unmerge = sk in dest
|
||||||
old_dest_value = dest.pop(sk, None)
|
old_dest_value = dest.pop(sk, None)
|
||||||
|
|
||||||
if unmerge and not _override(sk):
|
if unmerge and not spack.schema.override(sk):
|
||||||
dest[sk] = remove_yaml(old_dest_value, sv)
|
dest[sk] = remove_yaml(old_dest_value, sv)
|
||||||
|
|
||||||
return dest
|
return dest
|
||||||
@@ -1270,81 +1208,6 @@ def they_are(t):
      return dest


- def merge_yaml(dest, source, prepend=False, append=False):
-     """Merges source into dest; entries in source take precedence over dest.
-
-     This routine may modify dest and should be assigned to dest, in
-     case dest was None to begin with, e.g.:
-
-         dest = merge_yaml(dest, source)
-
-     In the result, elements from lists from ``source`` will appear before
-     elements of lists from ``dest``. Likewise, when iterating over keys
-     or items in merged ``OrderedDict`` objects, keys from ``source`` will
-     appear before keys from ``dest``.
-
-     Config file authors can optionally end any attribute in a dict
-     with `::` instead of `:`, and the key will override that of the
-     parent instead of merging.
-
-     `+:` will extend the default prepend merge strategy to include string concatenation
-     `-:` will change the merge strategy to append, it also includes string concatentation
-     """
-
-     def they_are(t):
-         return isinstance(dest, t) and isinstance(source, t)
-
-     # If source is None, overwrite with source.
-     if source is None:
-         return None
-
-     # Source list is prepended (for precedence)
-     if they_are(list):
-         if append:
-             # Make sure to copy ruamel comments
-             dest[:] = [x for x in dest if x not in source] + source
-         else:
-             # Make sure to copy ruamel comments
-             dest[:] = source + [x for x in dest if x not in source]
-         return dest
-
-     # Source dict is merged into dest.
-     elif they_are(dict):
-         # save dest keys to reinsert later -- this ensures that source items
-         # come *before* dest in OrderdDicts
-         dest_keys = [dk for dk in dest.keys() if dk not in source]
-
-         for sk, sv in source.items():
-             # always remove the dest items. Python dicts do not overwrite
-             # keys on insert, so this ensures that source keys are copied
-             # into dest along with mark provenance (i.e., file/line info).
-             merge = sk in dest
-             old_dest_value = dest.pop(sk, None)
-
-             if merge and not _override(sk):
-                 dest[sk] = merge_yaml(old_dest_value, sv, _prepend(sk), _append(sk))
-             else:
-                 # if sk ended with ::, or if it's new, completely override
-                 dest[sk] = copy.deepcopy(sv)
-
-         # reinsert dest keys so they are last in the result
-         for dk in dest_keys:
-             dest[dk] = dest.pop(dk)
-
-         return dest
-
-     elif they_are(str):
-         # Concatenate strings in prepend mode
-         if prepend:
-             return source + dest
-         elif append:
-             return dest + source
-
-     # If we reach here source and dest are either different types or are
-     # not both lists or dicts: replace with source.
-     return copy.copy(source)
-
-
  class ConfigPath:
      quoted_string = "(?:\"[^\"]+\")|(?:'[^']+')"
      unquoted_string = "[^:'\"]+"
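Since merge_yaml leaves spack.config here (callers in this compare reach it as spack.schema.merge_yaml instead), its documented behaviour is easiest to see with a small example; values are illustrative and follow the list branch above, where source elements are prepended for precedence:

    dest = {"packages": {"all": {"compiler": ["clang"]}}}
    source = {"packages": {"all": {"compiler": ["gcc"]}}}

    merged = merge_yaml(dest, source)
    # {"packages": {"all": {"compiler": ["gcc", "clang"]}}} -- source entries win by coming first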
@@ -33,7 +33,7 @@ def validate(configuration_file):
|
|||||||
"""
|
"""
|
||||||
import jsonschema
|
import jsonschema
|
||||||
|
|
||||||
with open(configuration_file) as f:
|
with open(configuration_file, encoding="utf-8") as f:
|
||||||
config = syaml.load(f)
|
config = syaml.load(f)
|
||||||
|
|
||||||
# Ensure we have a "container" attribute with sensible defaults set
|
# Ensure we have a "container" attribute with sensible defaults set
|
||||||
|
@@ -27,7 +27,7 @@ def data():
|
|||||||
if not _data:
|
if not _data:
|
||||||
json_dir = os.path.abspath(os.path.dirname(__file__))
|
json_dir = os.path.abspath(os.path.dirname(__file__))
|
||||||
json_file = os.path.join(json_dir, "images.json")
|
json_file = os.path.join(json_dir, "images.json")
|
||||||
with open(json_file) as f:
|
with open(json_file, encoding="utf-8") as f:
|
||||||
_data = json.load(f)
|
_data = json.load(f)
|
||||||
return _data
|
return _data
|
||||||
|
|
||||||
|
@@ -211,7 +211,7 @@ def entries_to_specs(entries):
|
|||||||
def read(path, apply_updates):
|
def read(path, apply_updates):
|
||||||
decode_exception_type = json.decoder.JSONDecodeError
|
decode_exception_type = json.decoder.JSONDecodeError
|
||||||
try:
|
try:
|
||||||
with open(path, "r") as json_file:
|
with open(path, "r", encoding="utf-8") as json_file:
|
||||||
json_data = json.load(json_file)
|
json_data = json.load(json_file)
|
||||||
|
|
||||||
jsonschema.validate(json_data, manifest_schema)
|
jsonschema.validate(json_data, manifest_schema)
|
||||||
|
@@ -760,7 +760,7 @@ def _read_from_file(self, filename):
|
|||||||
Does not do any locking.
|
Does not do any locking.
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
with open(filename, "r") as f:
|
with open(filename, "r", encoding="utf-8") as f:
|
||||||
# In the future we may use a stream of JSON objects, hence `raw_decode` for compat.
|
# In the future we may use a stream of JSON objects, hence `raw_decode` for compat.
|
||||||
fdata, _ = JSONDecoder().raw_decode(f.read())
|
fdata, _ = JSONDecoder().raw_decode(f.read())
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
@@ -1031,12 +1031,12 @@ def _write(self, type, value, traceback):
|
|||||||
|
|
||||||
# Write a temporary database file them move it into place
|
# Write a temporary database file them move it into place
|
||||||
try:
|
try:
|
||||||
with open(temp_file, "w") as f:
|
with open(temp_file, "w", encoding="utf-8") as f:
|
||||||
self._write_to_file(f)
|
self._write_to_file(f)
|
||||||
fs.rename(temp_file, self._index_path)
|
fs.rename(temp_file, self._index_path)
|
||||||
|
|
||||||
if _use_uuid:
|
if _use_uuid:
|
||||||
with open(self._verifier_path, "w") as f:
|
with open(self._verifier_path, "w", encoding="utf-8") as f:
|
||||||
new_verifier = str(uuid.uuid4())
|
new_verifier = str(uuid.uuid4())
|
||||||
f.write(new_verifier)
|
f.write(new_verifier)
|
||||||
self.last_seen_verifier = new_verifier
|
self.last_seen_verifier = new_verifier
|
||||||
@@ -1053,7 +1053,7 @@ def _read(self):
|
|||||||
current_verifier = ""
|
current_verifier = ""
|
||||||
if _use_uuid:
|
if _use_uuid:
|
||||||
try:
|
try:
|
||||||
with open(self._verifier_path, "r") as f:
|
with open(self._verifier_path, "r", encoding="utf-8") as f:
|
||||||
current_verifier = f.read()
|
current_verifier = f.read()
|
||||||
except BaseException:
|
except BaseException:
|
||||||
pass
|
pass
|
||||||
|
@@ -6,6 +6,8 @@

  from typing import Iterable, List, Tuple, Union

+ from typing_extensions import Literal
+
  #: Type hint for the low-level dependency input (enum.Flag is too slow)
  DepFlag = int

@@ -13,7 +15,7 @@
  DepTypes = Union[str, List[str], Tuple[str, ...]]

  #: Individual dependency types
- DepType = str  # Python 3.8: Literal["build", "link", "run", "test"]
+ DepType = Literal["build", "link", "run", "test"]

  # Flag values. NOTE: these values are not arbitrary, since hash computation imposes
  # the order (link, run, build, test) when depending on the same package multiple times,
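Narrowing DepType from plain str to a Literal lets a type checker reject bad dependency-type strings; a minimal sketch (the helper function is illustrative, not part of the diff):

    from typing_extensions import Literal

    DepType = Literal["build", "link", "run", "test"]

    def is_build_edge(dep_type: DepType) -> bool:
        return dep_type == "build"

    is_build_edge("build")  # fine
    is_build_edge("bulid")  # rejected by mypy before it can fail at runtime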
@@ -27,6 +27,7 @@
|
|||||||
import spack.config
|
import spack.config
|
||||||
import spack.error
|
import spack.error
|
||||||
import spack.operating_systems.windows_os as winOs
|
import spack.operating_systems.windows_os as winOs
|
||||||
|
import spack.schema
|
||||||
import spack.spec
|
import spack.spec
|
||||||
import spack.util.environment
|
import spack.util.environment
|
||||||
import spack.util.spack_yaml
|
import spack.util.spack_yaml
|
||||||
@@ -226,7 +227,7 @@ def update_configuration(
|
|||||||
pkg_to_cfg[package_name] = pkg_config
|
pkg_to_cfg[package_name] = pkg_config
|
||||||
|
|
||||||
pkgs_cfg = spack.config.get("packages", scope=scope)
|
pkgs_cfg = spack.config.get("packages", scope=scope)
|
||||||
pkgs_cfg = spack.config.merge_yaml(pkgs_cfg, pkg_to_cfg)
|
pkgs_cfg = spack.schema.merge_yaml(pkgs_cfg, pkg_to_cfg)
|
||||||
spack.config.set("packages", pkgs_cfg, scope=scope)
|
spack.config.set("packages", pkgs_cfg, scope=scope)
|
||||||
|
|
||||||
return all_new_specs
|
return all_new_specs
|
||||||
@@ -246,7 +247,7 @@ def set_virtuals_nonbuildable(virtuals: Set[str], scope: Optional[str] = None) -
|
|||||||
# Update the provided scope
|
# Update the provided scope
|
||||||
spack.config.set(
|
spack.config.set(
|
||||||
"packages",
|
"packages",
|
||||||
spack.config.merge_yaml(spack.config.get("packages", scope=scope), new_config),
|
spack.schema.merge_yaml(spack.config.get("packages", scope=scope), new_config),
|
||||||
scope=scope,
|
scope=scope,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@@ -198,6 +198,6 @@ def _detection_tests_yaml(
|
|||||||
) -> Tuple[pathlib.Path, Dict[str, Any]]:
|
) -> Tuple[pathlib.Path, Dict[str, Any]]:
|
||||||
pkg_dir = pathlib.Path(repository.filename_for_package_name(pkg_name)).parent
|
pkg_dir = pathlib.Path(repository.filename_for_package_name(pkg_name)).parent
|
||||||
detection_tests_yaml = pkg_dir / "detection_test.yaml"
|
detection_tests_yaml = pkg_dir / "detection_test.yaml"
|
||||||
with open(str(detection_tests_yaml)) as f:
|
with open(str(detection_tests_yaml), encoding="utf-8") as f:
|
||||||
content = spack_yaml.load(f)
|
content = spack_yaml.load(f)
|
||||||
return detection_tests_yaml, content
|
return detection_tests_yaml, content
|
||||||
@@ -297,6 +297,13 @@ def _depends_on(
      deps_by_name = pkg.dependencies.setdefault(when_spec, {})
      dependency = deps_by_name.get(spec.name)

+     if spec.dependencies():
+         raise DirectiveError(
+             f"the '^' sigil cannot be used in 'depends_on' directives. Please reformulate "
+             f"the directive below as multiple directives:\n\n"
+             f'\tdepends_on("{spec}", when="{when_spec}")\n'
+         )
+
      if not dependency:
          dependency = Dependency(pkg, spec, depflag=depflag)
          deps_by_name[spec.name] = dependency
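For package authors who relied on the `^` sigil, one possible reformulation along the lines the error message asks for (names illustrative; these directives live in a package recipe class body):

    # depends_on("foo ^bar+baz")        # now rejected by the check above
    depends_on("foo")
    depends_on("bar+baz", when="^foo")  # constrain bar only when foo is in the DAG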
@@ -141,7 +141,7 @@ def relative_path_for_spec(self, spec):
|
|||||||
def write_spec(self, spec, path):
|
def write_spec(self, spec, path):
|
||||||
"""Write a spec out to a file."""
|
"""Write a spec out to a file."""
|
||||||
_check_concrete(spec)
|
_check_concrete(spec)
|
||||||
with open(path, "w") as f:
|
with open(path, "w", encoding="utf-8") as f:
|
||||||
# The hash of the projection is the DAG hash which contains
|
# The hash of the projection is the DAG hash which contains
|
||||||
# the full provenance, so it's availabe if we want it later
|
# the full provenance, so it's availabe if we want it later
|
||||||
spec.to_json(f, hash=ht.dag_hash)
|
spec.to_json(f, hash=ht.dag_hash)
|
||||||
@@ -153,13 +153,13 @@ def write_host_environment(self, spec):
|
|||||||
"""
|
"""
|
||||||
env_file = self.env_metadata_path(spec)
|
env_file = self.env_metadata_path(spec)
|
||||||
environ = spack.spec.get_host_environment_metadata()
|
environ = spack.spec.get_host_environment_metadata()
|
||||||
with open(env_file, "w") as fd:
|
with open(env_file, "w", encoding="utf-8") as fd:
|
||||||
sjson.dump(environ, fd)
|
sjson.dump(environ, fd)
|
||||||
|
|
||||||
def read_spec(self, path):
|
def read_spec(self, path):
|
||||||
"""Read the contents of a file and parse them as a spec"""
|
"""Read the contents of a file and parse them as a spec"""
|
||||||
try:
|
try:
|
||||||
with open(path) as f:
|
with open(path, encoding="utf-8") as f:
|
||||||
extension = os.path.splitext(path)[-1].lower()
|
extension = os.path.splitext(path)[-1].lower()
|
||||||
if extension == ".json":
|
if extension == ".json":
|
||||||
spec = spack.spec.Spec.from_json(f)
|
spec = spack.spec.Spec.from_json(f)
|
||||||
|
@@ -482,6 +482,7 @@
|
|||||||
display_specs,
|
display_specs,
|
||||||
environment_dir_from_name,
|
environment_dir_from_name,
|
||||||
environment_from_name_or_dir,
|
environment_from_name_or_dir,
|
||||||
|
environment_path_scopes,
|
||||||
exists,
|
exists,
|
||||||
initialize_environment_dir,
|
initialize_environment_dir,
|
||||||
installed_specs,
|
installed_specs,
|
||||||
@@ -518,6 +519,7 @@
|
|||||||
"display_specs",
|
"display_specs",
|
||||||
"environment_dir_from_name",
|
"environment_dir_from_name",
|
||||||
"environment_from_name_or_dir",
|
"environment_from_name_or_dir",
|
||||||
|
"environment_path_scopes",
|
||||||
"exists",
|
"exists",
|
||||||
"initialize_environment_dir",
|
"initialize_environment_dir",
|
||||||
"installed_specs",
|
"installed_specs",
|
||||||
|
@@ -27,7 +27,6 @@
|
|||||||
import spack.concretize
|
import spack.concretize
|
||||||
import spack.config
|
import spack.config
|
||||||
import spack.deptypes as dt
|
import spack.deptypes as dt
|
||||||
import spack.environment
|
|
||||||
import spack.error
|
import spack.error
|
||||||
import spack.filesystem_view as fsv
|
import spack.filesystem_view as fsv
|
||||||
import spack.hash_types as ht
|
import spack.hash_types as ht
|
||||||
@@ -163,7 +162,7 @@ def installed_specs():
|
|||||||
Returns the specs of packages installed in the active environment or None
|
Returns the specs of packages installed in the active environment or None
|
||||||
if no packages are installed.
|
if no packages are installed.
|
||||||
"""
|
"""
|
||||||
env = spack.environment.active_environment()
|
env = active_environment()
|
||||||
hashes = env.all_hashes() if env else None
|
hashes = env.all_hashes() if env else None
|
||||||
return spack.store.STORE.db.query(hashes=hashes)
|
return spack.store.STORE.db.query(hashes=hashes)
|
||||||
|
|
||||||
@@ -972,7 +971,7 @@ def _read(self):
|
|||||||
self._construct_state_from_manifest()
|
self._construct_state_from_manifest()
|
||||||
|
|
||||||
if os.path.exists(self.lock_path):
|
if os.path.exists(self.lock_path):
|
||||||
with open(self.lock_path) as f:
|
with open(self.lock_path, encoding="utf-8") as f:
|
||||||
read_lock_version = self._read_lockfile(f)["_meta"]["lockfile-version"]
|
read_lock_version = self._read_lockfile(f)["_meta"]["lockfile-version"]
|
||||||
|
|
||||||
if read_lock_version == 1:
|
if read_lock_version == 1:
|
||||||
@@ -1054,7 +1053,7 @@ def _process_concrete_includes(self):
|
|||||||
|
|
||||||
if self.included_concrete_envs:
|
if self.included_concrete_envs:
|
||||||
if os.path.exists(self.lock_path):
|
if os.path.exists(self.lock_path):
|
||||||
with open(self.lock_path) as f:
|
with open(self.lock_path, encoding="utf-8") as f:
|
||||||
data = self._read_lockfile(f)
|
data = self._read_lockfile(f)
|
||||||
|
|
||||||
if included_concrete_name in data:
|
if included_concrete_name in data:
|
||||||
@@ -2333,7 +2332,7 @@ def write(self, regenerate: bool = True) -> None:
|
|||||||
self.new_specs.clear()
|
self.new_specs.clear()
|
||||||
|
|
||||||
def update_lockfile(self) -> None:
|
def update_lockfile(self) -> None:
|
||||||
with fs.write_tmp_and_move(self.lock_path) as f:
|
with fs.write_tmp_and_move(self.lock_path, encoding="utf-8") as f:
|
||||||
sjson.dump(self._to_lockfile_dict(), stream=f)
|
sjson.dump(self._to_lockfile_dict(), stream=f)
|
||||||
|
|
||||||
def ensure_env_directory_exists(self, dot_env: bool = False) -> None:
|
def ensure_env_directory_exists(self, dot_env: bool = False) -> None:
|
||||||
@@ -2508,7 +2507,7 @@ def update_yaml(manifest, backup_file):
|
|||||||
AssertionError: in case anything goes wrong during the update
|
AssertionError: in case anything goes wrong during the update
|
||||||
"""
|
"""
|
||||||
# Check if the environment needs update
|
# Check if the environment needs update
|
||||||
with open(manifest) as f:
|
with open(manifest, encoding="utf-8") as f:
|
||||||
data = syaml.load(f)
|
data = syaml.load(f)
|
||||||
|
|
||||||
top_level_key = _top_level_key(data)
|
top_level_key = _top_level_key(data)
|
||||||
@@ -2526,7 +2525,7 @@ def update_yaml(manifest, backup_file):
|
|||||||
assert not os.path.exists(backup_file), msg.format(backup_file)
|
assert not os.path.exists(backup_file), msg.format(backup_file)
|
||||||
|
|
||||||
shutil.copy(manifest, backup_file)
|
shutil.copy(manifest, backup_file)
|
||||||
with open(manifest, "w") as f:
|
with open(manifest, "w", encoding="utf-8") as f:
|
||||||
syaml.dump_config(data, f)
|
syaml.dump_config(data, f)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@@ -2554,7 +2553,7 @@ def is_latest_format(manifest):
|
|||||||
manifest (str): manifest file to be analyzed
|
manifest (str): manifest file to be analyzed
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
with open(manifest) as f:
|
with open(manifest, encoding="utf-8") as f:
|
||||||
data = syaml.load(f)
|
data = syaml.load(f)
|
||||||
except (OSError, IOError):
|
except (OSError, IOError):
|
||||||
return True
|
return True
|
||||||
@@ -2656,7 +2655,7 @@ def from_lockfile(manifest_dir: Union[pathlib.Path, str]) -> "EnvironmentManifes
|
|||||||
# TBD: Should this be the abspath?
|
# TBD: Should this be the abspath?
|
||||||
manifest_dir = pathlib.Path(manifest_dir)
|
manifest_dir = pathlib.Path(manifest_dir)
|
||||||
lockfile = manifest_dir / lockfile_name
|
lockfile = manifest_dir / lockfile_name
|
||||||
with lockfile.open("r") as f:
|
with lockfile.open("r", encoding="utf-8") as f:
|
||||||
data = sjson.load(f)
|
data = sjson.load(f)
|
||||||
user_specs = data["roots"]
|
user_specs = data["roots"]
|
||||||
|
|
||||||
@@ -2683,7 +2682,7 @@ def __init__(self, manifest_dir: Union[pathlib.Path, str], name: Optional[str] =
|
|||||||
msg = f"cannot find '{manifest_name}' in {self.manifest_dir}"
|
msg = f"cannot find '{manifest_name}' in {self.manifest_dir}"
|
||||||
raise SpackEnvironmentError(msg)
|
raise SpackEnvironmentError(msg)
|
||||||
|
|
||||||
with self.manifest_file.open() as f:
|
with self.manifest_file.open(encoding="utf-8") as f:
|
||||||
self.yaml_content = _read_yaml(f)
|
self.yaml_content = _read_yaml(f)
|
||||||
|
|
||||||
self.changed = False
|
self.changed = False
|
||||||
@@ -3059,6 +3058,29 @@ def use_config(self):
          self.deactivate_config_scope()


+ def environment_path_scopes(name: str, path: str) -> Optional[List[spack.config.ConfigScope]]:
+     """Retrieve the suitably named environment path scopes
+
+     Arguments:
+         name: configuration scope name
+         path: path to configuration file(s)
+
+     Returns: list of environment scopes, if any, or None
+     """
+     if exists(path):  # managed environment
+         manifest = EnvironmentManifestFile(root(path))
+     elif is_env_dir(path):  # anonymous environment
+         manifest = EnvironmentManifestFile(path)
+     else:
+         return None
+
+     for scope in manifest.env_config_scopes:
+         scope.name = f"{name}:{scope.name}"
+         scope.writable = False
+
+     return manifest.env_config_scopes


  class SpackEnvironmentError(spack.error.SpackError):
      """Superclass for all errors to do with Spack environments."""

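This helper absorbs the environment handling that this compare removes from spack.config._add_command_line_scopes; a rough sketch of the calling side under that assumption (cfg and command_line_scopes come from the config machinery):

    import spack.environment as ev

    for i, path in enumerate(command_line_scopes):
        name = f"cmd_scope_{i}"
        env_scopes = ev.environment_path_scopes(name, path)
        if env_scopes is not None:
            for scope in env_scopes:
                cfg.push_scope(scope)  # read-only scopes renamed "cmd_scope_<i>:<original>"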
@@ -12,6 +12,8 @@
  import sys
  from typing import Callable, Dict, Optional

+ from typing_extensions import Literal
+
  from llnl.string import comma_or
  from llnl.util import tty
  from llnl.util.filesystem import (
@@ -109,6 +111,9 @@ def view_copy(
          tty.debug(f"Can't change the permissions for {dst}")


+ #: Type alias for link types
+ LinkType = Literal["hardlink", "hard", "copy", "relocate", "add", "symlink", "soft"]
+
  #: supported string values for `link_type` in an env, mapped to canonical values
  _LINK_TYPES = {
      "hardlink": "hardlink",
@@ -123,7 +128,7 @@ def view_copy(
|
|||||||
_VALID_LINK_TYPES = sorted(set(_LINK_TYPES.values()))
|
_VALID_LINK_TYPES = sorted(set(_LINK_TYPES.values()))
|
||||||
|
|
||||||
|
|
||||||
def canonicalize_link_type(link_type: str) -> str:
|
def canonicalize_link_type(link_type: LinkType) -> str:
|
||||||
"""Return canonical"""
|
"""Return canonical"""
|
||||||
canonical = _LINK_TYPES.get(link_type)
|
canonical = _LINK_TYPES.get(link_type)
|
||||||
if not canonical:
|
if not canonical:
|
||||||
@@ -133,7 +138,7 @@ def canonicalize_link_type(link_type: str) -> str:
|
|||||||
return canonical
|
return canonical
|
||||||
|
|
||||||
|
|
||||||
def function_for_link_type(link_type: str) -> LinkCallbackType:
|
def function_for_link_type(link_type: LinkType) -> LinkCallbackType:
|
||||||
link_type = canonicalize_link_type(link_type)
|
link_type = canonicalize_link_type(link_type)
|
||||||
if link_type == "hardlink":
|
if link_type == "hardlink":
|
||||||
return view_hardlink
|
return view_hardlink
|
||||||
@@ -142,7 +147,7 @@ def function_for_link_type(link_type: str) -> LinkCallbackType:
|
|||||||
elif link_type == "copy":
|
elif link_type == "copy":
|
||||||
return view_copy
|
return view_copy
|
||||||
|
|
||||||
assert False, "invalid link type" # need mypy Literal values
|
assert False, "invalid link type"
|
||||||
|
|
||||||
|
|
||||||
class FilesystemView:
|
class FilesystemView:
|
||||||
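The `LinkType` alias narrows `link_type` arguments from plain `str` to the seven accepted spellings, which is why the `# need mypy Literal values` workaround above can be dropped. A minimal, self-contained sketch of the pattern (the mapping below is an illustrative subset, not the module's full `_LINK_TYPES` table):

from typing_extensions import Literal

LinkType = Literal["hardlink", "hard", "copy", "relocate", "add", "symlink", "soft"]

# Illustrative subset of a canonicalization table.
_CANONICAL = {"hardlink": "hardlink", "hard": "hardlink", "symlink": "symlink", "soft": "symlink"}


def canonicalize(link_type: LinkType) -> str:
    canonical = _CANONICAL.get(link_type)
    if canonical is None:
        raise ValueError(f"invalid link type: {link_type}")
    return canonical


canonicalize("hard")   # "hardlink"
# canonicalize("junk") # rejected by mypy: not a member of LinkType
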
@@ -166,7 +171,7 @@ def __init__(
         projections: Optional[Dict] = None,
         ignore_conflicts: bool = False,
         verbose: bool = False,
-        link_type: str = "symlink",
+        link_type: LinkType = "symlink",
     ):
         """
         Initialize a filesystem view under the given `root` directory with
@@ -292,7 +297,7 @@ def __init__(
         projections: Optional[Dict] = None,
         ignore_conflicts: bool = False,
         verbose: bool = False,
-        link_type: str = "symlink",
+        link_type: LinkType = "symlink",
     ):
         super().__init__(
             root,
@@ -326,12 +331,12 @@ def __init__(
     def write_projections(self):
         if self.projections:
             mkdirp(os.path.dirname(self.projections_path))
-            with open(self.projections_path, "w") as f:
+            with open(self.projections_path, "w", encoding="utf-8") as f:
                 f.write(s_yaml.dump_config({"projections": self.projections}))
 
     def read_projections(self):
         if os.path.exists(self.projections_path):
-            with open(self.projections_path, "r") as f:
+            with open(self.projections_path, "r", encoding="utf-8") as f:
                 projections_data = s_yaml.load(f)
                 spack.config.validate(projections_data, spack.schema.projections.schema)
                 return projections_data["projections"]
@@ -429,7 +434,7 @@ def needs_file(spec, file):
                 self.get_path_meta_folder(spec), spack.store.STORE.layout.manifest_file_name
             )
             try:
-                with open(manifest_file, "r") as f:
+                with open(manifest_file, "r", encoding="utf-8") as f:
                     manifest = s_json.load(f)
             except (OSError, IOError):
                 # if we can't load it, assume it doesn't know about the file.
@@ -833,7 +838,7 @@ def get_projection_for_spec(self, spec):
 #####################
 def get_spec_from_file(filename):
     try:
-        with open(filename, "r") as f:
+        with open(filename, "r", encoding="utf-8") as f:
             return spack.spec.Spec.from_yaml(f)
     except IOError:
         return None
@@ -35,6 +35,7 @@ class _HookRunner:
         "spack.hooks.drop_redundant_rpaths",
         "spack.hooks.absolutify_elf_sonames",
         "spack.hooks.permissions_setters",
+        "spack.hooks.resolve_shared_libraries",
         # after all mutations to the install prefix, write metadata
         "spack.hooks.write_install_manifest",
         # after all metadata is written
@@ -142,7 +142,7 @@ def write_license_file(pkg, license_path):
         os.makedirs(os.path.dirname(license_path))
 
     # Output
-    with open(license_path, "w") as f:
+    with open(license_path, "w", encoding="utf-8") as f:
         for line in txt.splitlines():
             f.write("{0}{1}\n".format(pkg.license_comment, line))
         f.close()
 lib/spack/spack/hooks/resolve_shared_libraries.py (new file, 240 lines)
@@ -0,0 +1,240 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import fnmatch
+import io
+import os
+import re
+from typing import Dict, List, Union
+
+import llnl.util.tty as tty
+from llnl.util.filesystem import BaseDirectoryVisitor, visit_directory_tree
+from llnl.util.lang import stable_partition
+
+import spack.config
+import spack.error
+import spack.util.elf as elf
+
+#: Patterns for names of libraries that are allowed to be unresolved when *just* looking at RPATHs
+#: added by Spack. These are libraries outside of Spack's control, and assumed to be located in
+#: default search paths of the dynamic linker.
+ALLOW_UNRESOLVED = [
+    # kernel
+    "linux-vdso.so.*",
+    "libselinux.so.*",
+    # musl libc
+    "ld-musl-*.so.*",
+    # glibc
+    "ld-linux*.so.*",
+    "ld64.so.*",
+    "libanl.so.*",
+    "libc.so.*",
+    "libdl.so.*",
+    "libm.so.*",
+    "libmemusage.so.*",
+    "libmvec.so.*",
+    "libnsl.so.*",
+    "libnss_compat.so.*",
+    "libnss_db.so.*",
+    "libnss_dns.so.*",
+    "libnss_files.so.*",
+    "libnss_hesiod.so.*",
+    "libpcprofile.so.*",
+    "libpthread.so.*",
+    "libresolv.so.*",
+    "librt.so.*",
+    "libSegFault.so.*",
+    "libthread_db.so.*",
+    "libutil.so.*",
+    # gcc -- this is required even with gcc-runtime, because e.g. libstdc++ depends on libgcc_s,
+    # but the binaries we copy from the compiler don't have an $ORIGIN rpath.
+    "libasan.so.*",
+    "libatomic.so.*",
+    "libcc1.so.*",
+    "libgcc_s.so.*",
+    "libgfortran.so.*",
+    "libgomp.so.*",
+    "libitm.so.*",
+    "liblsan.so.*",
+    "libquadmath.so.*",
+    "libssp.so.*",
+    "libstdc++.so.*",
+    "libtsan.so.*",
+    "libubsan.so.*",
+    # systemd
+    "libudev.so.*",
+    # cuda driver
+    "libcuda.so.*",
+]
+
+
+def is_compatible(parent: elf.ElfFile, child: elf.ElfFile) -> bool:
+    return (
+        child.elf_hdr.e_type == elf.ELF_CONSTANTS.ET_DYN
+        and parent.is_little_endian == child.is_little_endian
+        and parent.is_64_bit == child.is_64_bit
+        and parent.elf_hdr.e_machine == child.elf_hdr.e_machine
+    )
+
+
+def candidate_matches(current_elf: elf.ElfFile, candidate_path: bytes) -> bool:
+    try:
+        with open(candidate_path, "rb") as g:
+            return is_compatible(current_elf, elf.parse_elf(g))
+    except (OSError, elf.ElfParsingError):
+        return False
+
+
+class Problem:
+    def __init__(
+        self, resolved: Dict[bytes, bytes], unresolved: List[bytes], relative_rpaths: List[bytes]
+    ) -> None:
+        self.resolved = resolved
+        self.unresolved = unresolved
+        self.relative_rpaths = relative_rpaths
+
+
+class ResolveSharedElfLibDepsVisitor(BaseDirectoryVisitor):
+    def __init__(self, allow_unresolved_patterns: List[str]) -> None:
+        self.problems: Dict[str, Problem] = {}
+        self._allow_unresolved_regex = re.compile(
+            "|".join(fnmatch.translate(x) for x in allow_unresolved_patterns)
+        )
+
+    def allow_unresolved(self, needed: bytes) -> bool:
+        try:
+            name = needed.decode("utf-8")
+        except UnicodeDecodeError:
+            return False
+        return bool(self._allow_unresolved_regex.match(name))
+
+    def visit_file(self, root: str, rel_path: str, depth: int) -> None:
+        # We work with byte strings for paths.
+        path = os.path.join(root, rel_path).encode("utf-8")
+
+        # For $ORIGIN interpolation: should not have trailing dir seperator.
+        origin = os.path.dirname(path)
+
+        # Retrieve the needed libs + rpaths.
+        try:
+            with open(path, "rb") as f:
+                parsed_elf = elf.parse_elf(f, interpreter=False, dynamic_section=True)
+        except (OSError, elf.ElfParsingError):
+            # Not dealing with an invalid ELF file.
+            return
+
+        # If there's no needed libs all is good
+        if not parsed_elf.has_needed:
+            return
+
+        # Get the needed libs and rpaths (notice: byte strings)
+        # Don't force an encoding cause paths are just a bag of bytes.
+        needed_libs = parsed_elf.dt_needed_strs
+
+        rpaths = parsed_elf.dt_rpath_str.split(b":") if parsed_elf.has_rpath else []
+
+        # We only interpolate $ORIGIN, not $LIB and $PLATFORM, they're not really
+        # supported in general. Also remove empty paths.
+        rpaths = [x.replace(b"$ORIGIN", origin) for x in rpaths if x]
+
+        # Do not allow relative rpaths (they are relative to the current working directory)
+        rpaths, relative_rpaths = stable_partition(rpaths, os.path.isabs)
+
+        # If there's a / in the needed lib, it's opened directly, otherwise it needs
+        # a search.
+        direct_libs, search_libs = stable_partition(needed_libs, lambda x: b"/" in x)
+
+        # Do not allow relative paths in direct libs (they are relative to the current working
+        # directory)
+        direct_libs, unresolved = stable_partition(direct_libs, os.path.isabs)
+
+        resolved: Dict[bytes, bytes] = {}
+
+        for lib in search_libs:
+            if self.allow_unresolved(lib):
+                continue
+            for rpath in rpaths:
+                candidate = os.path.join(rpath, lib)
+                if candidate_matches(parsed_elf, candidate):
+                    resolved[lib] = candidate
+                    break
+            else:
+                unresolved.append(lib)
+
+        # Check if directly opened libs are compatible
+        for lib in direct_libs:
+            if candidate_matches(parsed_elf, lib):
+                resolved[lib] = lib
+            else:
+                unresolved.append(lib)
+
+        if unresolved or relative_rpaths:
+            self.problems[rel_path] = Problem(resolved, unresolved, relative_rpaths)
+
+    def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None:
+        pass
+
+    def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
+        # There can be binaries in .spack/test which shouldn't be checked.
+        if rel_path == ".spack":
+            return False
+        return True
+
+    def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
+        return False
+
+
+class CannotLocateSharedLibraries(spack.error.SpackError):
+    pass
+
+
+def maybe_decode(byte_str: bytes) -> Union[str, bytes]:
+    try:
+        return byte_str.decode("utf-8")
+    except UnicodeDecodeError:
+        return byte_str
+
+
+def post_install(spec, explicit):
+    """Check whether shared libraries can be resolved in RPATHs."""
+    policy = spack.config.get("config:shared_linking:missing_library_policy", "ignore")
+
+    # Currently only supported for ELF files.
+    if policy == "ignore" or spec.external or spec.platform not in ("linux", "freebsd"):
+        return
+
+    visitor = ResolveSharedElfLibDepsVisitor(
+        [*ALLOW_UNRESOLVED, *spec.package.unresolved_libraries]
+    )
+    visit_directory_tree(spec.prefix, visitor)
+
+    # All good?
+    if not visitor.problems:
+        return
+
+    # For now just list the issues (print it in ldd style, except we don't recurse)
+    output = io.StringIO()
+    output.write("not all executables and libraries can resolve their dependencies:\n")
+    for path, problem in visitor.problems.items():
+        output.write(path)
+        output.write("\n")
+        for needed, full_path in problem.resolved.items():
+            output.write("        ")
+            if needed == full_path:
+                output.write(maybe_decode(needed))
+            else:
+                output.write(f"{maybe_decode(needed)} => {maybe_decode(full_path)}")
+            output.write("\n")
+        for not_found in problem.unresolved:
+            output.write(f"        {maybe_decode(not_found)} => not found\n")
+        for relative_rpath in problem.relative_rpaths:
+            output.write(f"        {maybe_decode(relative_rpath)} => relative rpath\n")
+
+    message = output.getvalue().strip()
+
+    if policy == "error":
+        raise CannotLocateSharedLibraries(message)
+
+    tty.warn(message)
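The hook runs after every install and is gated by `config:shared_linking:missing_library_policy`. A hedged sketch of driving the same visitor by hand over an installed prefix (the prefix path is a placeholder, not part of the diff):

# Standalone sketch mirroring what post_install() does, minus the policy handling.
from llnl.util.filesystem import visit_directory_tree

from spack.hooks.resolve_shared_libraries import ALLOW_UNRESOLVED, ResolveSharedElfLibDepsVisitor

prefix = "/path/to/an/install/prefix"  # placeholder

visitor = ResolveSharedElfLibDepsVisitor(ALLOW_UNRESOLVED)
visit_directory_tree(prefix, visitor)

for rel_path, problem in visitor.problems.items():
    missing = [lib.decode("utf-8", errors="replace") for lib in problem.unresolved]
    print(f"{rel_path}: unresolved {missing}")
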
@@ -81,7 +81,7 @@ def get_escaped_text_output(filename: str) -> List[str]:
     Returns:
         escaped text lines read from the file
     """
-    with open(filename) as f:
+    with open(filename, encoding="utf-8") as f:
         # Ensure special characters are escaped as needed
         expected = f.read()
 
@@ -458,7 +458,7 @@ def write_tested_status(self):
         elif self.counts[TestStatus.PASSED] > 0:
             status = TestStatus.PASSED
 
-        with open(self.tested_file, "w") as f:
+        with open(self.tested_file, "w", encoding="utf-8") as f:
             f.write(f"{status.value}\n")
 
 
@@ -502,7 +502,7 @@ def test_part(pkg: Pb, test_name: str, purpose: str, work_dir: str = ".", verbos
         for i, entry in enumerate(stack):
             filename, lineno, function, text = entry
             if spack.repo.is_package_file(filename):
-                with open(filename) as f:
+                with open(filename, encoding="utf-8") as f:
                     lines = f.readlines()
                 new_lineno = lineno - 2
                 text = lines[new_lineno]
@@ -822,7 +822,7 @@ def get_test_suite(name: str) -> Optional["TestSuite"]:
 
 def write_test_suite_file(suite):
     """Write the test suite to its (JSON) lock file."""
-    with open(suite.stage.join(test_suite_filename), "w") as f:
+    with open(suite.stage.join(test_suite_filename), "w", encoding="utf-8") as f:
         sjson.dump(suite.to_dict(), stream=f)
 
 
@@ -977,7 +977,7 @@ def test_status(self, spec: spack.spec.Spec, externals: bool) -> Optional[TestSt
             status = TestStatus.NO_TESTS
             return status
 
-        with open(tests_status_file, "r") as f:
+        with open(tests_status_file, "r", encoding="utf-8") as f:
            value = (f.read()).strip("\n")
            return TestStatus(int(value)) if value else TestStatus.NO_TESTS
 
@@ -1179,7 +1179,7 @@ def from_file(filename):
            BaseException: sjson.SpackJSONError if problem parsing the file
        """
        try:
-            with open(filename) as f:
+            with open(filename, encoding="utf-8") as f:
                data = sjson.load(f)
                test_suite = TestSuite.from_dict(data)
                content_hash = os.path.basename(os.path.dirname(filename))
@@ -1196,7 +1196,7 @@ def _add_msg_to_file(filename, msg):
         filename (str): path to the file
         msg (str): message to be appended to the file
     """
-    with open(filename, "a+") as f:
+    with open(filename, "a+", encoding="utf-8") as f:
         f.write(f"{msg}\n")
 
 
@@ -105,7 +105,7 @@ def __str__(self):
 def _write_timer_json(pkg, timer, cache):
     extra_attributes = {"name": pkg.name, "cache": cache, "hash": pkg.spec.dag_hash()}
     try:
-        with open(pkg.times_log_path, "w") as timelog:
+        with open(pkg.times_log_path, "w", encoding="utf-8") as timelog:
             timer.write_json(timelog, extra_attributes=extra_attributes)
     except Exception as e:
         tty.debug(str(e))
@@ -692,7 +692,7 @@ def log(pkg: "spack.package_base.PackageBase") -> None:
     if errors.getvalue():
         error_file = os.path.join(target_dir, "errors.txt")
         fs.mkdirp(target_dir)
-        with open(error_file, "w") as err:
+        with open(error_file, "w", encoding="utf-8") as err:
             err.write(errors.getvalue())
         tty.warn(f"Errors occurred when archiving files.\n\tSee: {error_file}")
 
@@ -2405,7 +2405,7 @@ def _real_install(self) -> None:
 
            # Save just the changes to the environment. This file can be
            # safely installed, since it does not contain secret variables.
-            with open(pkg.env_mods_path, "w") as env_mods_file:
+            with open(pkg.env_mods_path, "w", encoding="utf-8") as env_mods_file:
                mods = self.env_mods.shell_modifications(explicit=True, env=self.unmodified_env)
                env_mods_file.write(mods)
 
@@ -2414,7 +2414,7 @@ def _real_install(self) -> None:
                    configure_args = getattr(pkg, attr)()
                    configure_args = " ".join(configure_args)
 
-                    with open(pkg.configure_args_path, "w") as args_file:
+                    with open(pkg.configure_args_path, "w", encoding="utf-8") as args_file:
                        args_file.write(configure_args)
 
                    break
@@ -48,7 +48,6 @@
 import spack.util.debug
 import spack.util.environment
 import spack.util.lock
-from spack.error import SpackError
 
 #: names of profile statistics
 stat_names = pstats.Stats.sort_arg_dict_default
@@ -858,6 +857,33 @@ def resolve_alias(cmd_name: str, cmd: List[str]) -> Tuple[str, List[str]]:
     return cmd_name, cmd
 
 
+def add_command_line_scopes(
+    cfg: spack.config.Configuration, command_line_scopes: List[str]
+) -> None:
+    """Add additional scopes from the --config-scope argument, either envs or dirs.
+
+    Args:
+        cfg: configuration instance
+        command_line_scopes: list of configuration scope paths
+
+    Raises:
+        spack.error.ConfigError: if the path is an invalid configuration scope
+    """
+    for i, path in enumerate(command_line_scopes):
+        name = f"cmd_scope_{i}"
+        scopes = ev.environment_path_scopes(name, path)
+        if scopes is None:
+            if os.path.isdir(path):  # directory with config files
+                cfg.push_scope(spack.config.DirectoryConfigScope(name, path, writable=False))
+                spack.config._add_platform_scope(cfg, name, path, writable=False)
+                continue
+            else:
+                raise spack.error.ConfigError(f"Invalid configuration scope: {path}")
+
+        for scope in scopes:
+            cfg.push_scope(scope)
+
+
 def _main(argv=None):
     """Logic for the main entry point for the Spack command.
 
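Together with `environment_path_scopes` from the environment module earlier in this diff, a `--config-scope` argument may now point at either a configuration directory or an environment. A hedged sketch of the call, assuming the function lives in `spack.main` as the hunk suggests (the paths are placeholders):

import spack.config
from spack.main import add_command_line_scopes

# Equivalent to `spack --config-scope /path/to/config/dir --config-scope /path/to/an/env ...`:
# directories become read-only DirectoryConfigScopes, while environments contribute their own
# config scopes renamed "cmd_scope_<i>:<original scope name>".
add_command_line_scopes(spack.config.CONFIG, ["/path/to/config/dir", "/path/to/an/env"])
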
@@ -926,7 +952,7 @@ def _main(argv=None):
 
     # Push scopes from the command line last
     if args.config_scopes:
-        spack.config._add_command_line_scopes(spack.config.CONFIG, args.config_scopes)
+        add_command_line_scopes(spack.config.CONFIG, args.config_scopes)
     spack.config.CONFIG.push_scope(spack.config.InternalConfigScope("command_line"))
     setup_main_options(args)
 
@@ -1012,7 +1038,7 @@ def main(argv=None):
     try:
         return _main(argv)
 
-    except SpackError as e:
+    except spack.error.SpackError as e:
         tty.debug(e)
         e.die()  # gracefully die on any SpackErrors
 
@@ -48,6 +48,7 @@
 import spack.error
 import spack.paths
 import spack.projections as proj
+import spack.schema
 import spack.schema.environment
 import spack.spec
 import spack.store
@@ -216,7 +217,7 @@ def root_path(name, module_set_name):
     roots = spack.config.get(f"modules:{module_set_name}:roots", {})
 
     # Merge config values into the defaults so we prefer configured values
-    roots = spack.config.merge_yaml(defaults, roots)
+    roots = spack.schema.merge_yaml(defaults, roots)
 
     path = roots.get(name, os.path.join(spack.paths.share_path, name))
     return spack.util.path.canonicalize_path(path)
@@ -227,7 +228,7 @@ def generate_module_index(root, modules, overwrite=False):
     if overwrite or not os.path.exists(index_path):
         entries = syaml.syaml_dict()
     else:
-        with open(index_path) as index_file:
+        with open(index_path, encoding="utf-8") as index_file:
            yaml_content = syaml.load(index_file)
            entries = yaml_content["module_index"]
 
@@ -236,7 +237,7 @@ def generate_module_index(root, modules, overwrite=False):
        entries[m.spec.dag_hash()] = entry
     index = {"module_index": entries}
     llnl.util.filesystem.mkdirp(root)
-    with open(index_path, "w") as index_file:
+    with open(index_path, "w", encoding="utf-8") as index_file:
         syaml.dump(index, default_flow_style=False, stream=index_file)
 
 
@@ -256,7 +257,7 @@ def read_module_index(root):
     index_path = os.path.join(root, "module-index.yaml")
     if not os.path.exists(index_path):
         return {}
-    with open(index_path) as index_file:
+    with open(index_path, encoding="utf-8") as index_file:
         return _read_module_index(index_file)
 
 
@@ -605,7 +606,7 @@ def configure_options(self):
             return msg
 
         if os.path.exists(pkg.install_configure_args_path):
-            with open(pkg.install_configure_args_path) as args_file:
+            with open(pkg.install_configure_args_path, encoding="utf-8") as args_file:
                 return spack.util.path.padding_filter(args_file.read())
 
         # Returning a false-like value makes the default templates skip
@@ -624,10 +625,10 @@ def environment_modifications(self):
         """List of environment modifications to be processed."""
         # Modifications guessed by inspecting the spec prefix
         prefix_inspections = syaml.syaml_dict()
-        spack.config.merge_yaml(
+        spack.schema.merge_yaml(
             prefix_inspections, spack.config.get("modules:prefix_inspections", {})
         )
-        spack.config.merge_yaml(
+        spack.schema.merge_yaml(
             prefix_inspections,
             spack.config.get(f"modules:{self.conf.name}:prefix_inspections", {}),
         )
@@ -900,7 +901,7 @@ def write(self, overwrite=False):
         # Render the template
         text = template.render(context)
         # Write it to file
-        with open(self.layout.filename, "w") as f:
+        with open(self.layout.filename, "w", encoding="utf-8") as f:
             f.write(text)
 
         # Set the file permissions of the module to match that of the package
@@ -939,7 +940,7 @@ def update_module_hiddenness(self, remove=False):
 
         if modulerc_exists:
             # retrieve modulerc content
-            with open(modulerc_path) as f:
+            with open(modulerc_path, encoding="utf-8") as f:
                 content = f.readlines()
                 content = "".join(content).split("\n")
                 # remove last empty item if any
@@ -974,7 +975,7 @@ def update_module_hiddenness(self, remove=False):
         elif content != self.modulerc_header:
             # ensure file ends with a newline character
             content.append("")
-            with open(modulerc_path, "w") as f:
+            with open(modulerc_path, "w", encoding="utf-8") as f:
                 f.write("\n".join(content))
 
     def remove(self):
@@ -7,8 +7,6 @@
 import urllib.parse
 from typing import Optional, Union
 
-import spack.spec
-
 # notice: Docker is more strict (no uppercase allowed). We parse image names *with* uppercase
 # and normalize, so: example.com/Organization/Name -> example.com/organization/name. Tags are
 # case sensitive though.
@@ -195,7 +193,7 @@ def __eq__(self, __value: object) -> bool:
         )
 
 
-def _ensure_valid_tag(tag: str) -> str:
+def ensure_valid_tag(tag: str) -> str:
     """Ensure a tag is valid for an OCI registry."""
     sanitized = re.sub(r"[^\w.-]", "_", tag)
     if len(sanitized) > 128:
@@ -203,20 +201,6 @@ def _ensure_valid_tag(tag: str) -> str:
     return sanitized
 
 
-def default_tag(spec: "spack.spec.Spec") -> str:
-    """Return a valid, default image tag for a spec."""
-    return _ensure_valid_tag(f"{spec.name}-{spec.version}-{spec.dag_hash()}.spack")
-
-
-#: Default OCI index tag
-default_index_tag = "index.spack"
-
-
-def tag_is_spec(tag: str) -> bool:
-    """Check if a tag is likely a Spec"""
-    return tag.endswith(".spack") and tag != default_index_tag
-
-
 def default_config(architecture: str, os: str):
     return {
         "architecture": architecture,
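Dropping the leading underscore makes the tag sanitizer part of the module's public surface, while the spec-specific tag helpers move out of the image module. A small hedged example of the sanitizer's effect, assuming the module is importable as `spack.oci.image`:

from spack.oci.image import ensure_valid_tag

# Characters outside [A-Za-z0-9_.-] are replaced with underscores.
print(ensure_valid_tag("gcc@13.2.0 arch=linux"))  # -> "gcc_13.2.0_arch_linux"
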
@@ -21,7 +21,7 @@
 
 import spack.config
 import spack.mirrors.mirror
-import spack.parser
+import spack.tokenize
 import spack.util.web
 
 from .image import ImageReference
@@ -57,7 +57,7 @@ def dispatch_open(fullurl, data=None, timeout=None):
 quoted_string = rf'"(?:({qdtext}*)|{quoted_pair})*"'
 
 
-class TokenType(spack.parser.TokenBase):
+class WwwAuthenticateTokens(spack.tokenize.TokenBase):
     AUTH_PARAM = rf"({token}){BWS}={BWS}({token}|{quoted_string})"
     # TOKEN68 = r"([A-Za-z0-9\-._~+/]+=*)"  # todo... support this?
     TOKEN = rf"{tchar}+"
@@ -68,9 +68,7 @@ class TokenType(spack.parser.TokenBase):
     ANY = r"."
 
 
-TOKEN_REGEXES = [rf"(?P<{token}>{token.regex})" for token in TokenType]
-
-ALL_TOKENS = re.compile("|".join(TOKEN_REGEXES))
+WWW_AUTHENTICATE_TOKENIZER = spack.tokenize.Tokenizer(WwwAuthenticateTokens)
 
 
 class State(Enum):
@@ -81,18 +79,6 @@ class State(Enum):
     AUTH_PARAM_OR_SCHEME = auto()
 
 
-def tokenize(input: str):
-    scanner = ALL_TOKENS.scanner(input)  # type: ignore[attr-defined]
-
-    for match in iter(scanner.match, None):  # type: ignore[var-annotated]
-        yield spack.parser.Token(
-            TokenType.__members__[match.lastgroup],  # type: ignore[attr-defined]
-            match.group(),  # type: ignore[attr-defined]
-            match.start(),  # type: ignore[attr-defined]
-            match.end(),  # type: ignore[attr-defined]
-        )
-
-
 class Challenge:
     __slots__ = ["scheme", "params"]
 
@@ -128,7 +114,7 @@ def parse_www_authenticate(input: str):
     unquote = lambda s: _unquote(r"\1", s[1:-1])
 
     mode: State = State.CHALLENGE
-    tokens = tokenize(input)
+    tokens = WWW_AUTHENTICATE_TOKENIZER.tokenize(input)
 
     current_challenge = Challenge()
 
@@ -141,36 +127,36 @@ def extract_auth_param(input: str) -> Tuple[str, str]:
         return key, value
 
     while True:
-        token: spack.parser.Token = next(tokens)
+        token: spack.tokenize.Token = next(tokens)
 
         if mode == State.CHALLENGE:
-            if token.kind == TokenType.EOF:
+            if token.kind == WwwAuthenticateTokens.EOF:
                 raise ValueError(token)
-            elif token.kind == TokenType.TOKEN:
+            elif token.kind == WwwAuthenticateTokens.TOKEN:
                 current_challenge.scheme = token.value
                 mode = State.AUTH_PARAM_LIST_START
             else:
                 raise ValueError(token)
 
         elif mode == State.AUTH_PARAM_LIST_START:
-            if token.kind == TokenType.EOF:
+            if token.kind == WwwAuthenticateTokens.EOF:
                 challenges.append(current_challenge)
                 break
-            elif token.kind == TokenType.COMMA:
+            elif token.kind == WwwAuthenticateTokens.COMMA:
                 # Challenge without param list, followed by another challenge.
                 challenges.append(current_challenge)
                 current_challenge = Challenge()
                 mode = State.CHALLENGE
-            elif token.kind == TokenType.SPACE:
+            elif token.kind == WwwAuthenticateTokens.SPACE:
                 # A space means it must be followed by param list
                 mode = State.AUTH_PARAM
             else:
                 raise ValueError(token)
 
         elif mode == State.AUTH_PARAM:
-            if token.kind == TokenType.EOF:
+            if token.kind == WwwAuthenticateTokens.EOF:
                 raise ValueError(token)
-            elif token.kind == TokenType.AUTH_PARAM:
+            elif token.kind == WwwAuthenticateTokens.AUTH_PARAM:
                 key, value = extract_auth_param(token.value)
                 current_challenge.params.append((key, value))
                 mode = State.NEXT_IN_LIST
@@ -178,22 +164,22 @@ def extract_auth_param(input: str) -> Tuple[str, str]:
                 raise ValueError(token)
 
         elif mode == State.NEXT_IN_LIST:
-            if token.kind == TokenType.EOF:
+            if token.kind == WwwAuthenticateTokens.EOF:
                 challenges.append(current_challenge)
                 break
-            elif token.kind == TokenType.COMMA:
+            elif token.kind == WwwAuthenticateTokens.COMMA:
                 mode = State.AUTH_PARAM_OR_SCHEME
             else:
                 raise ValueError(token)
 
         elif mode == State.AUTH_PARAM_OR_SCHEME:
-            if token.kind == TokenType.EOF:
+            if token.kind == WwwAuthenticateTokens.EOF:
                 raise ValueError(token)
-            elif token.kind == TokenType.TOKEN:
+            elif token.kind == WwwAuthenticateTokens.TOKEN:
                 challenges.append(current_challenge)
                 current_challenge = Challenge(token.value)
                 mode = State.AUTH_PARAM_LIST_START
-            elif token.kind == TokenType.AUTH_PARAM:
+            elif token.kind == WwwAuthenticateTokens.AUTH_PARAM:
                 key, value = extract_auth_param(token.value)
                 current_challenge.params.append((key, value))
                 mode = State.NEXT_IN_LIST
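The hand-rolled scanner is replaced by the shared `spack.tokenize.Tokenizer`, with the token kinds declared on `WwwAuthenticateTokens`. A hedged usage sketch of the parser these tokens feed, assuming it is importable from `spack.oci.opener` (the header value is made up):

from spack.oci.opener import parse_www_authenticate

header = 'Bearer realm="https://auth.example.com/token",service="registry.example.com"'
for challenge in parse_www_authenticate(header):
    print(challenge.scheme, dict(challenge.params))
# Expected shape: Bearer {'realm': 'https://auth.example.com/token', 'service': 'registry.example.com'}
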
@@ -24,9 +24,10 @@
 import time
 import traceback
 import typing
-import warnings
 from typing import Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Type, TypeVar, Union
 
+from typing_extensions import Literal
+
 import llnl.util.filesystem as fsys
 import llnl.util.tty as tty
 from llnl.util.lang import classproperty, memoized
@@ -59,6 +60,7 @@
 from spack.solver.version_order import concretization_version_order
 from spack.stage import DevelopStage, ResourceStage, Stage, StageComposite, compute_stage_name
 from spack.util.package_hash import package_hash
+from spack.util.typing import SupportsRichComparison
 from spack.version import GitVersion, StandardVersion
 
 FLAG_HANDLER_RETURN_TYPE = Tuple[
@@ -86,32 +88,6 @@
 spack_times_log = "install_times.json"
 
 
-def deprecated_version(pkg: "PackageBase", version: Union[str, StandardVersion]) -> bool:
-    """Return True iff the version is deprecated.
-
-    Arguments:
-        pkg: The package whose version is to be checked.
-        version: The version being checked
-    """
-    if not isinstance(version, StandardVersion):
-        version = StandardVersion.from_string(version)
-
-    details = pkg.versions.get(version)
-    return details is not None and details.get("deprecated", False)
-
-
-def preferred_version(pkg: "PackageBase"):
-    """
-    Returns a sorted list of the preferred versions of the package.
-
-    Arguments:
-        pkg: The package whose versions are to be assessed.
-    """
-
-    version, _ = max(pkg.versions.items(), key=concretization_version_order)
-    return version
-
-
 class WindowsRPath:
     """Collection of functionality surrounding Windows RPATH specific features
 
@@ -416,59 +392,77 @@ def remove_files_from_view(self, view, merge_map):
 
 Pb = TypeVar("Pb", bound="PackageBase")
 
-WhenDict = Dict[spack.spec.Spec, Dict[str, Any]]
-NameValuesDict = Dict[str, List[Any]]
-NameWhenDict = Dict[str, Dict[spack.spec.Spec, List[Any]]]
+# Some typedefs for dealing with when-indexed dictionaries
+#
+# Many of the dictionaries on PackageBase are of the form:
+#     { Spec: { K: V } }
+#
+# K might be a variant name, a version, etc. V is a definition of some Spack object.
+# The methods below transform these types of dictionaries.
+K = TypeVar("K", bound=SupportsRichComparison)
+V = TypeVar("V")
 
 
-def _by_name(
-    when_indexed_dictionary: WhenDict, when: bool = False
-) -> Union[NameValuesDict, NameWhenDict]:
-    """Convert a dict of dicts keyed by when/name into a dict of lists keyed by name.
+def _by_subkey(
+    when_indexed_dictionary: Dict[spack.spec.Spec, Dict[K, V]], when: bool = False
+) -> Dict[K, Union[List[V], Dict[spack.spec.Spec, List[V]]]]:
+    """Convert a dict of dicts keyed by when/subkey into a dict of lists keyed by subkey.
 
     Optional Arguments:
         when: if ``True``, don't discared the ``when`` specs; return a 2-level dictionary
-            keyed by name and when spec.
+            keyed by subkey and when spec.
     """
     # very hard to define this type to be conditional on `when`
-    all_by_name: Dict[str, Any] = {}
+    all_by_subkey: Dict[K, Any] = {}
 
-    for when_spec, by_name in when_indexed_dictionary.items():
-        for name, value in by_name.items():
+    for when_spec, by_key in when_indexed_dictionary.items():
+        for key, value in by_key.items():
             if when:
-                when_dict = all_by_name.setdefault(name, {})
+                when_dict = all_by_subkey.setdefault(key, {})
                 when_dict.setdefault(when_spec, []).append(value)
             else:
-                all_by_name.setdefault(name, []).append(value)
+                all_by_subkey.setdefault(key, []).append(value)
 
     # this needs to preserve the insertion order of whens
-    return dict(sorted(all_by_name.items()))
+    return dict(sorted(all_by_subkey.items()))
 
 
-def _names(when_indexed_dictionary: WhenDict) -> List[str]:
+def _subkeys(when_indexed_dictionary: Dict[spack.spec.Spec, Dict[K, V]]) -> List[K]:
     """Get sorted names from dicts keyed by when/name."""
-    all_names = set()
-    for when, by_name in when_indexed_dictionary.items():
-        for name in by_name:
-            all_names.add(name)
+    all_keys = set()
+    for when, by_key in when_indexed_dictionary.items():
+        for key in by_key:
+            all_keys.add(key)
 
-    return sorted(all_names)
+    return sorted(all_keys)
 
 
-WhenVariantList = List[Tuple[spack.spec.Spec, spack.variant.Variant]]
+def _has_subkey(when_indexed_dictionary: Dict[spack.spec.Spec, Dict[K, V]], key: K) -> bool:
+    return any(key in dictionary for dictionary in when_indexed_dictionary.values())
 
 
-def _remove_overridden_vdefs(variant_defs: WhenVariantList) -> None:
-    """Remove variant defs from the list if their when specs are satisfied by later ones.
+def _num_definitions(when_indexed_dictionary: Dict[spack.spec.Spec, Dict[K, V]]) -> int:
+    return sum(len(dictionary) for dictionary in when_indexed_dictionary.values())
 
-    Any such variant definitions are *always* overridden by their successor, as it will
-    match everything the predecessor matches, and the solver will prefer it because of
-    its higher precedence.
 
-    We can just remove these defs from variant definitions and avoid putting them in the
-    solver. This is also useful for, e.g., `spack info`, where we don't want to show a
-    variant from a superclass if it is always overridden by a variant defined in a
-    subclass.
+def _precedence(obj) -> int:
+    """Get either a 'precedence' attribute or item from an object."""
+    precedence = getattr(obj, "precedence", None)
+    if precedence is None:
+        raise KeyError(f"Couldn't get precedence from {type(obj)}")
+    return precedence
+
+
+def _remove_overridden_defs(defs: List[Tuple[spack.spec.Spec, Any]]) -> None:
+    """Remove definitions from the list if their when specs are satisfied by later ones.
+
+    Any such definitions are *always* overridden by their successor, as they will
+    match everything the predecessor matches, and the solver will prefer them because of
+    their higher precedence.
+
+    We can just remove these defs and avoid putting them in the solver. This is also
+    useful for, e.g., `spack info`, where we don't want to show a variant from a
+    superclass if it is always overridden by a variant defined in a subclass.
 
     Example::
 
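The refactor generalizes the old name-keyed helpers to any sortable subkey (variant names, versions, and so on) of a when-indexed dictionary. A self-contained sketch of the data shape and of what `_by_subkey` computes, with plain strings standing in for `Spec` conditions and variant objects:

from typing import Any, Dict

# { when-condition: { subkey: definition } }
when_indexed: Dict[str, Dict[str, Any]] = {
    "<any>": {"shared": "shared default=True"},
    "+cuda": {"cuda_arch": "cuda_arch", "shared": "shared default=False"},
}


def by_subkey(d: Dict[str, Dict[str, Any]], when: bool = False) -> Dict[str, Any]:
    out: Dict[str, Any] = {}
    for when_spec, by_key in d.items():
        for key, value in by_key.items():
            if when:
                out.setdefault(key, {}).setdefault(when_spec, []).append(value)
            else:
                out.setdefault(key, []).append(value)
    return dict(sorted(out.items()))


print(by_subkey(when_indexed))
# {'cuda_arch': ['cuda_arch'], 'shared': ['shared default=True', 'shared default=False']}
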
@@ -486,14 +480,33 @@ class Hipblas:
 
     """
     i = 0
-    while i < len(variant_defs):
-        when, vdef = variant_defs[i]
-        if any(when.satisfies(successor) for successor, _ in variant_defs[i + 1 :]):
-            del variant_defs[i]
+    while i < len(defs):
+        when, _ = defs[i]
+        if any(when.satisfies(successor) for successor, _ in defs[i + 1 :]):
+            del defs[i]
         else:
             i += 1
 
 
+def _definitions(
+    when_indexed_dictionary: Dict[spack.spec.Spec, Dict[K, V]], key: K
+) -> List[Tuple[spack.spec.Spec, V]]:
+    """Iterator over (when_spec, Value) for all values with a particular Key."""
+    # construct a list of defs sorted by precedence
+    defs: List[Tuple[spack.spec.Spec, V]] = []
+    for when, values_by_key in when_indexed_dictionary.items():
+        value_def = values_by_key.get(key)
+        if value_def:
+            defs.append((when, value_def))
+
+    # With multiple definitions, ensure precedence order and simplify overrides
+    if len(defs) > 1:
+        defs.sort(key=lambda v: _precedence(v[1]))
+        _remove_overridden_defs(defs)
+
+    return defs
+
+
 #: Store whether a given Spec source/binary should not be redistributed.
 class DisableRedistribute:
     def __init__(self, source, binary):
@@ -634,6 +647,14 @@ class PackageBase(WindowsRPath, PackageViewMixin, metaclass=PackageMeta):
     #: stubs directory are not bound by path."""
     non_bindable_shared_objects: List[str] = []
 
+    #: List of fnmatch patterns of library file names (specifically DT_NEEDED entries) that are not
+    #: expected to be locatable in RPATHs. Generally this is a problem, and Spack install with
+    #: config:shared_linking:strict will cause install failures if such libraries are found.
+    #: However, in certain cases it can be hard if not impossible to avoid accidental linking
+    #: against system libraries; until that is resolved, this attribute can be used to suppress
+    #: errors.
+    unresolved_libraries: List[str] = []
+
     #: List of prefix-relative file paths (or a single path). If these do
     #: not exist after install, or if they exist but are not files,
     #: sanity checks fail.
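Package authors can silence the new hook per library pattern when a dependency is legitimately provided outside any Spack-controlled RPATH. A hypothetical recipe illustrating the attribute (the package name, URL, and checksum are placeholders):

from spack.package import *


class ExampleGpuTool(Package):
    """Hypothetical package that loads the CUDA driver at run time."""

    homepage = "https://example.com/gpu-tool"
    url = "https://example.com/gpu-tool-1.0.tar.gz"

    version("1.0", sha256="0000000000000000000000000000000000000000000000000000000000000000")

    # libcuda.so comes from the system driver, never from a Spack RPATH, so the
    # resolve_shared_libraries hook should not flag it as unresolved.
    unresolved_libraries = ["libcuda.so.*"]

    def install(self, spec, prefix):
        pass  # placeholder
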
@@ -749,44 +770,32 @@ def __init__(self, spec):
 
     @classmethod
     def dependency_names(cls):
-        return _names(cls.dependencies)
+        return _subkeys(cls.dependencies)
 
     @classmethod
     def dependencies_by_name(cls, when: bool = False):
-        return _by_name(cls.dependencies, when=when)
+        return _by_subkey(cls.dependencies, when=when)
 
     # Accessors for variants
-    # External code workingw with Variants should go through the methods below
+    # External code working with Variants should go through the methods below
 
     @classmethod
     def variant_names(cls) -> List[str]:
-        return _names(cls.variants)
+        return _subkeys(cls.variants)
 
     @classmethod
     def has_variant(cls, name) -> bool:
-        return any(name in dictionary for dictionary in cls.variants.values())
+        return _has_subkey(cls.variants, name)
 
     @classmethod
     def num_variant_definitions(cls) -> int:
         """Total number of variant definitions in this class so far."""
-        return sum(len(variants_by_name) for variants_by_name in cls.variants.values())
+        return _num_definitions(cls.variants)
 
     @classmethod
-    def variant_definitions(cls, name: str) -> WhenVariantList:
+    def variant_definitions(cls, name: str) -> List[Tuple[spack.spec.Spec, spack.variant.Variant]]:
         """Iterator over (when_spec, Variant) for all variant definitions for a particular name."""
-        # construct a list of defs sorted by precedence
-        defs: WhenVariantList = []
-        for when, variants_by_name in cls.variants.items():
-            variant_def = variants_by_name.get(name)
-            if variant_def:
-                defs.append((when, variant_def))
-
-        # With multiple definitions, ensure precedence order and simplify overrides
-        if len(defs) > 1:
-            defs.sort(key=lambda v: v[1].precedence)
-            _remove_overridden_vdefs(defs)
-
-        return defs
+        return _definitions(cls.variants, name)
 
     @classmethod
     def variant_items(cls) -> Iterable[Tuple[spack.spec.Spec, Dict[str, spack.variant.Variant]]]:
@@ -1002,10 +1011,8 @@ def redistribute_binary(self):
             return False
         return True
 
-    # NOTE: return type should be Optional[Literal['all', 'specific', 'none']] in
-    # Python 3.8+, but we still support 3.6.
     @property
-    def keep_werror(self) -> Optional[str]:
+    def keep_werror(self) -> Optional[Literal["all", "specific", "none"]]:
         """Keep ``-Werror`` flags, matches ``config:flags:keep_werror`` to override config.
 
         Valid return values are:
@@ -1360,24 +1367,6 @@ def tester(self):
         self._tester = spack.install_test.PackageTest(self)
         return self._tester
 
-    @property
-    def installed(self):
-        msg = (
-            'the "PackageBase.installed" property is deprecated and will be '
-            'removed in Spack v0.19, use "Spec.installed" instead'
-        )
-        warnings.warn(msg)
-        return self.spec.installed
-
-    @property
-    def installed_upstream(self):
-        msg = (
-            'the "PackageBase.installed_upstream" property is deprecated and will '
-            'be removed in Spack v0.19, use "Spec.installed_upstream" instead'
-        )
-        warnings.warn(msg)
-        return self.spec.installed_upstream
-
     @property
     def fetcher(self):
         if not self.spec.versions.concrete:
@@ -1755,7 +1744,7 @@ def all_patches(cls):
 
         return patches
 
-    def content_hash(self, content=None):
+    def content_hash(self, content: Optional[bytes] = None) -> str:
         """Create a hash based on the artifacts and patches used to build this package.
 
         This includes:
@@ -2380,6 +2369,32 @@ def possible_dependencies(
 return visited


+def deprecated_version(pkg: PackageBase, version: Union[str, StandardVersion]) -> bool:
+"""Return True iff the version is deprecated.
+
+Arguments:
+pkg: The package whose version is to be checked.
+version: The version being checked
+"""
+if not isinstance(version, StandardVersion):
+version = StandardVersion.from_string(version)
+
+details = pkg.versions.get(version)
+return details is not None and details.get("deprecated", False)
+
+
+def preferred_version(pkg: PackageBase):
+"""
+Returns a sorted list of the preferred versions of the package.
+
+Arguments:
+pkg: The package whose versions are to be assessed.
+"""
+version, _ = max(pkg.versions.items(), key=concretization_version_order)
+return version
+
+
 class PackageStillNeededError(InstallError):
 """Raised when package is still needed by another on uninstall."""
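The two helpers added in the hunk above operate on a package's `versions` metadata dictionary. A minimal usage sketch (the `pkg` object and the version string are illustrative; obtaining a package from Spack's repository is not shown):

    # Sketch only: assumes "pkg" is a Spack package class or instance.
    if deprecated_version(pkg, "1.0.0"):
        print("1.0.0 is marked deprecated in its package.py")
    print(preferred_version(pkg))  # single highest-priority version, per the new helper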
@@ -40,7 +40,7 @@ def compare_output(current_output, blessed_output):

 def compare_output_file(current_output, blessed_output_file):
 """Same as above, but when the blessed output is given as a file."""
-with open(blessed_output_file, "r") as f:
+with open(blessed_output_file, "r", encoding="utf-8") as f:
 blessed_output = f.read()

 compare_output(current_output, blessed_output)

@@ -1031,7 +1031,7 @@ def is_prefix(self, fullname: str) -> bool:
 def _read_config(self) -> Dict[str, str]:
 """Check for a YAML config file in this db's root directory."""
 try:
-with open(self.config_file) as reponame_file:
+with open(self.config_file, encoding="utf-8") as reponame_file:
 yaml_data = syaml.load(reponame_file)

 if (

@@ -1365,7 +1365,7 @@ def create_repo(root, namespace=None, subdir=packages_dir_name):
 packages_path = os.path.join(root, subdir)

 fs.mkdirp(packages_path)
-with open(config_path, "w") as config:
+with open(config_path, "w", encoding="utf-8") as config:
 config.write("repo:\n")
 config.write(f" namespace: '{namespace}'\n")
 if subdir != packages_dir_name:

@@ -1492,7 +1492,7 @@ def add_package(self, name, dependencies=None):
 text = template.render(context)
 package_py = self.recipe_filename(name)
 fs.mkdirp(os.path.dirname(package_py))
-with open(package_py, "w") as f:
+with open(package_py, "w", encoding="utf-8") as f:
 f.write(text)

 def remove(self, name):

@@ -191,9 +191,9 @@ def on_success(self, pkg, kwargs, package_record):
 def fetch_log(self, pkg):
 try:
 if os.path.exists(pkg.install_log_path):
-stream = gzip.open(pkg.install_log_path, "rt")
+stream = gzip.open(pkg.install_log_path, "rt", encoding="utf-8")
 else:
-stream = open(pkg.log_path)
+stream = open(pkg.log_path, encoding="utf-8")
 with stream as f:
 return f.read()
 except OSError:
@@ -2,7 +2,6 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
-import codecs
 import collections
 import hashlib
 import os.path

@@ -11,6 +10,7 @@
 import re
 import socket
 import time
+import warnings
 import xml.sax.saxutils
 from typing import Dict, Optional
 from urllib.parse import urlencode

@@ -124,11 +124,15 @@ def __init__(self, configuration: CDashConfiguration):
 self.multiple_packages = False

 def report_build_name(self, pkg_name):
-return (
+buildname = (
 "{0} - {1}".format(self.base_buildname, pkg_name)
 if self.multiple_packages
 else self.base_buildname
 )
+if len(buildname) > 190:
+warnings.warn("Build name exceeds CDash 190 character maximum and will be truncated.")
+buildname = buildname[:190]
+return buildname

 def build_report_for_package(self, report_dir, package, duration):
 if "stdout" not in package:
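The new guard caps CDash build names at the 190-character limit before reports are uploaded. The same logic in isolation (the warning text is copied from the hunk above; the helper name is invented for illustration):

    import warnings

    def cap_buildname(buildname, limit=190):
        # warn, then truncate to the CDash maximum
        if len(buildname) > limit:
            warnings.warn("Build name exceeds CDash 190 character maximum and will be truncated.")
            buildname = buildname[:limit]
        return buildname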
@@ -253,7 +257,7 @@ def clean_log_event(event):
 report_file_name = report_name
 phase_report = os.path.join(report_dir, report_file_name)

-with codecs.open(phase_report, "w", "utf-8") as f:
+with open(phase_report, "w", encoding="utf-8") as f:
 env = spack.tengine.make_environment()
 if phase != "update":
 # Update.xml stores site information differently

@@ -317,7 +321,7 @@ def report_test_data(self, report_dir, package, phases, report_data):
 report_file_name = "_".join([package["name"], package["id"], report_name])
 phase_report = os.path.join(report_dir, report_file_name)

-with codecs.open(phase_report, "w", "utf-8") as f:
+with open(phase_report, "w", encoding="utf-8") as f:
 env = spack.tengine.make_environment()
 if phase not in ["update", "testing"]:
 # Update.xml stores site information differently

@@ -399,7 +403,7 @@ def concretization_report(self, report_dir, msg):
 update_template = posixpath.join(self.template_dir, "Update.xml")
 t = env.get_template(update_template)
 output_filename = os.path.join(report_dir, "Update.xml")
-with open(output_filename, "w") as f:
+with open(output_filename, "w", encoding="utf-8") as f:
 f.write(t.render(report_data))
 # We don't have a current package when reporting on concretization
 # errors so refer to this report with the base buildname instead.

@@ -24,7 +24,7 @@ def build_report(self, filename, specs):
 filename = filename + ".xml"

 report_data = {"specs": specs}
-with open(filename, "w") as f:
+with open(filename, "w", encoding="utf-8") as f:
 env = spack.tengine.make_environment()
 t = env.get_template(self._jinja_template)
 f.write(t.render(report_data))
@@ -3,6 +3,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """This module contains jsonschema files for all of Spack's YAML formats."""
+import copy
 import typing
 import warnings

@@ -25,14 +26,14 @@ def _validate_spec(validator, is_spec, instance, schema):
 """Check if the attributes on instance are valid specs."""
 import jsonschema

-import spack.parser
+import spack.spec_parser

 if not validator.is_type(instance, "object"):
 return

 for spec_str in instance:
 try:
-spack.parser.parse(spec_str)
+spack.spec_parser.parse(spec_str)
 except SpecSyntaxError as e:
 yield jsonschema.ValidationError(str(e))
@@ -73,3 +74,116 @@ def _deprecated_properties(validator, deprecated, instance, schema):

 Validator = llnl.util.lang.Singleton(_make_validator)


+def _append(string: str) -> bool:
+"""Test if a spack YAML string is an append.
+
+See ``spack_yaml`` for details. Keys in Spack YAML can end in `+:`,
+and if they do, their values append lower-precedence
+configs.
+
+str, str : concatenate strings.
+[obj], [obj] : append lists.
+
+"""
+return getattr(string, "append", False)
+
+
+def _prepend(string: str) -> bool:
+"""Test if a spack YAML string is an prepend.
+
+See ``spack_yaml`` for details. Keys in Spack YAML can end in `+:`,
+and if they do, their values prepend lower-precedence
+configs.
+
+str, str : concatenate strings.
+[obj], [obj] : prepend lists. (default behavior)
+"""
+return getattr(string, "prepend", False)
+
+
+def override(string: str) -> bool:
+"""Test if a spack YAML string is an override.
+
+See ``spack_yaml`` for details. Keys in Spack YAML can end in `::`,
+and if they do, their values completely replace lower-precedence
+configs instead of merging into them.
+
+"""
+return hasattr(string, "override") and string.override
+
+
+def merge_yaml(dest, source, prepend=False, append=False):
+"""Merges source into dest; entries in source take precedence over dest.
+
+This routine may modify dest and should be assigned to dest, in
+case dest was None to begin with, e.g.:
+
+dest = merge_yaml(dest, source)
+
+In the result, elements from lists from ``source`` will appear before
+elements of lists from ``dest``. Likewise, when iterating over keys
+or items in merged ``OrderedDict`` objects, keys from ``source`` will
+appear before keys from ``dest``.
+
+Config file authors can optionally end any attribute in a dict
+with `::` instead of `:`, and the key will override that of the
+parent instead of merging.
+
+`+:` will extend the default prepend merge strategy to include string concatenation
+`-:` will change the merge strategy to append, it also includes string concatentation
+"""
+
+def they_are(t):
+return isinstance(dest, t) and isinstance(source, t)
+
+# If source is None, overwrite with source.
+if source is None:
+return None
+
+# Source list is prepended (for precedence)
+if they_are(list):
+if append:
+# Make sure to copy ruamel comments
+dest[:] = [x for x in dest if x not in source] + source
+else:
+# Make sure to copy ruamel comments
+dest[:] = source + [x for x in dest if x not in source]
+return dest
+
+# Source dict is merged into dest.
+elif they_are(dict):
+# save dest keys to reinsert later -- this ensures that source items
+# come *before* dest in OrderdDicts
+dest_keys = [dk for dk in dest.keys() if dk not in source]
+
+for sk, sv in source.items():
+# always remove the dest items. Python dicts do not overwrite
+# keys on insert, so this ensures that source keys are copied
+# into dest along with mark provenance (i.e., file/line info).
+merge = sk in dest
+old_dest_value = dest.pop(sk, None)
+
+if merge and not override(sk):
+dest[sk] = merge_yaml(old_dest_value, sv, _prepend(sk), _append(sk))
+else:
+# if sk ended with ::, or if it's new, completely override
+dest[sk] = copy.deepcopy(sv)
+
+# reinsert dest keys so they are last in the result
+for dk in dest_keys:
+dest[dk] = dest.pop(dk)
+
+return dest
+
+elif they_are(str):
+# Concatenate strings in prepend mode
+if prepend:
+return source + dest
+elif append:
+return dest + source
+
+# If we reach here source and dest are either different types or are
+# not both lists or dicts: replace with source.
+return copy.copy(source)
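A small illustration of the precedence rules documented in the merge_yaml docstring above, using plain dicts and lists (the spack_yaml `::` / `+:` / `-:` key markers are not exercised here):

    dest = {"cflags": ["-O2"], "compiler": "gcc"}
    source = {"cflags": ["-g"], "compiler": "clang"}
    result = merge_yaml(dest, source)
    # entries from source win, and its list items come first:
    #   result == {"cflags": ["-g", "-O2"], "compiler": "clang"}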
@@ -11,7 +11,7 @@

 from llnl.util.lang import union_dicts

-import spack.config
+import spack.schema
 import spack.schema.projections

 #: Properties for inclusion in other schemas

@@ -34,6 +34,7 @@
 "properties": {
 "type": {"type": "string", "enum": ["rpath", "runpath"]},
 "bind": {"type": "boolean"},
+"missing_library_policy": {"enum": ["error", "warn", "ignore"]},
 },
 },
 ]

@@ -157,7 +158,7 @@ def update(data):
 # whether install_tree was updated or not
 # we merge the yaml to ensure we don't invalidate other projections
 update_data = data.get("install_tree", {})
-update_data = spack.config.merge_yaml(update_data, projections_data)
+update_data = spack.schema.merge_yaml(update_data, projections_data)
 data["install_tree"] = update_data
 changed = True
@@ -68,7 +68,7 @@

 GitOrStandardVersion = Union[spack.version.GitVersion, spack.version.StandardVersion]

-TransformFunction = Callable[[spack.spec.Spec, List[AspFunction]], List[AspFunction]]
+TransformFunction = Callable[["spack.spec.Spec", List[AspFunction]], List[AspFunction]]

 #: Enable the addition of a runtime node
 WITH_RUNTIME = sys.platform != "win32"

@@ -128,8 +128,8 @@ def __str__(self):

 @contextmanager
 def named_spec(
-spec: Optional[spack.spec.Spec], name: Optional[str]
-) -> Iterator[Optional[spack.spec.Spec]]:
+spec: Optional["spack.spec.Spec"], name: Optional[str]
+) -> Iterator[Optional["spack.spec.Spec"]]:
 """Context manager to temporarily set the name of a spec"""
 if spec is None or name is None:
 yield spec

@@ -748,11 +748,11 @@ def on_model(model):
 class KnownCompiler(NamedTuple):
 """Data class to collect information on compilers"""

-spec: spack.spec.Spec
+spec: "spack.spec.Spec"
 os: str
 target: str
 available: bool
-compiler_obj: Optional[spack.compiler.Compiler]
+compiler_obj: Optional["spack.compiler.Compiler"]

 def _key(self):
 return self.spec, self.os, self.target
@@ -1387,7 +1387,7 @@ def effect_rules(self):

 def define_variant(
 self,
-pkg: Type[spack.package_base.PackageBase],
+pkg: "Type[spack.package_base.PackageBase]",
 name: str,
 when: spack.spec.Spec,
 variant_def: vt.Variant,

@@ -1491,7 +1491,7 @@ def define_auto_variant(self, name: str, multi: bool):
 )
 )

-def variant_rules(self, pkg: Type[spack.package_base.PackageBase]):
+def variant_rules(self, pkg: "Type[spack.package_base.PackageBase]"):
 for name in pkg.variant_names():
 self.gen.h3(f"Variant {name} in package {pkg.name}")
 for when, variant_def in pkg.variant_definitions(name):

@@ -1682,8 +1682,8 @@ def dependency_holds(input_spec, requirements):
 def _gen_match_variant_splice_constraints(
 self,
 pkg,
-cond_spec: spack.spec.Spec,
-splice_spec: spack.spec.Spec,
+cond_spec: "spack.spec.Spec",
+splice_spec: "spack.spec.Spec",
 hash_asp_var: "AspVar",
 splice_node,
 match_variants: List[str],

@@ -2978,7 +2978,7 @@ def _specs_from_requires(self, pkg_name, section):
 for s in spec_group[key]:
 yield _spec_with_default_name(s, pkg_name)

-def pkg_class(self, pkg_name: str) -> typing.Type[spack.package_base.PackageBase]:
+def pkg_class(self, pkg_name: str) -> typing.Type["spack.package_base.PackageBase"]:
 request = pkg_name
 if pkg_name in self.explicitly_required_namespaces:
 namespace = self.explicitly_required_namespaces[pkg_name]

@@ -3097,7 +3097,7 @@ def __init__(self, configuration) -> None:

 self.compilers.add(candidate)

-def with_input_specs(self, input_specs: List[spack.spec.Spec]) -> "CompilerParser":
+def with_input_specs(self, input_specs: List["spack.spec.Spec"]) -> "CompilerParser":
 """Accounts for input specs when building the list of possible compilers.

 Args:

@@ -3137,7 +3137,7 @@ def with_input_specs(self, input_specs: List[spack.spec.Spec]) -> "CompilerParse

 return self

-def add_compiler_from_concrete_spec(self, spec: spack.spec.Spec) -> None:
+def add_compiler_from_concrete_spec(self, spec: "spack.spec.Spec") -> None:
 """Account for compilers that are coming from concrete specs, through reuse.

 Args:
@@ -73,14 +73,16 @@
 import spack
 import spack.compiler
 import spack.compilers
+import spack.config
 import spack.deptypes as dt
 import spack.error
 import spack.hash_types as ht
-import spack.parser
 import spack.paths
 import spack.platforms
 import spack.provider_index
 import spack.repo
+import spack.solver
+import spack.spec_parser
 import spack.store
 import spack.traverse as traverse
 import spack.util.executable

@@ -610,7 +612,7 @@ def __init__(self, *args):
 # If there is one argument, it's either another CompilerSpec
 # to copy or a string to parse
 if isinstance(arg, str):
-spec = spack.parser.parse_one_or_raise(f"%{arg}")
+spec = spack.spec_parser.parse_one_or_raise(f"%{arg}")
 self.name = spec.compiler.name
 self.versions = spec.compiler.versions
@@ -948,11 +950,13 @@ def __str__(self):
 for flag_type, flags in sorted_items:
 normal = [f for f in flags if not f.propagate]
 if normal:
-result += f" {flag_type}={spack.parser.quote_if_needed(' '.join(normal))}"
+value = spack.spec_parser.quote_if_needed(" ".join(normal))
+result += f" {flag_type}={value}"

 propagated = [f for f in flags if f.propagate]
 if propagated:
-result += f" {flag_type}=={spack.parser.quote_if_needed(' '.join(propagated))}"
+value = spack.spec_parser.quote_if_needed(" ".join(propagated))
+result += f" {flag_type}=={value}"

 # TODO: somehow add this space only if something follows in Spec.format()
 if sorted_items:
@@ -1429,10 +1433,6 @@ def tree(

 @lang.lazy_lexicographic_ordering(set_hash=False)
 class Spec:
-#: Cache for spec's prefix, computed lazily in the corresponding property
-_prefix = None
-abstract_hash = None

 @staticmethod
 def default_arch():
 """Return an anonymous spec for the default architecture"""

@@ -1440,27 +1440,17 @@ def default_arch():
 s.architecture = ArchSpec.default_arch()
 return s

-def __init__(
-self,
-spec_like=None,
-normal=False,
-concrete=False,
-external_path=None,
-external_modules=None,
-):
+def __init__(self, spec_like=None, *, external_path=None, external_modules=None):
 """Create a new Spec.

 Arguments:
-spec_like (optional string): if not provided, we initialize
-an anonymous Spec that matches any Spec object; if
-provided we parse this as a Spec string.
+spec_like: if not provided, we initialize an anonymous Spec that matches any Spec;
+if provided we parse this as a Spec string, or we copy the provided Spec.

 Keyword arguments:
-# assign special fields from constructor
-self._normal = normal
-self._concrete = concrete
-self.external_path = external_path
-self.external_module = external_module
+external_path: prefix, if this is a spec for an external package
+external_modules: list of external modules, if this is an external package
+using modules.
 """
 # Copy if spec_like is a Spec.
 if isinstance(spec_like, Spec):

@@ -1477,26 +1467,26 @@ def __init__(
 self._dependents = _EdgeMap(store_by_child=False)
 self._dependencies = _EdgeMap(store_by_child=True)
 self.namespace = None
+self.abstract_hash = None

 # initial values for all spec hash types
 for h in ht.hashes:
 setattr(self, h.attr, None)

+# cache for spec's prefix, computed lazily by prefix property
+self._prefix = None
+
 # Python __hash__ is handled separately from the cached spec hashes
 self._dunder_hash = None

 # cache of package for this spec
 self._package = None

-# Most of these are internal implementation details that can be
-# set by internal Spack calls in the constructor.
-#
-# For example, Specs are by default not assumed to be normal, but
-# in some cases we've read them from a file want to assume
-# normal. This allows us to manipulate specs that Spack doesn't
-# have package.py files for.
-self._normal = normal
-self._concrete = concrete
+# whether the spec is concrete or not; set at the end of concretization
+self._concrete = False
+
+# External detection details that can be set by internal Spack calls
+# in the constructor.
 self._external_path = external_path
 self.external_modules = Spec._format_module_list(external_modules)

@@ -1511,7 +1501,7 @@ def __init__(
 self._build_spec = None

 if isinstance(spec_like, str):
-spack.parser.parse_one_or_raise(spec_like, self)
+spack.spec_parser.parse_one_or_raise(spec_like, self)

 elif spec_like is not None:
 raise TypeError("Can't make spec out of %s" % type(spec_like))
@@ -2406,7 +2396,7 @@ def to_json(self, stream=None, hash=ht.dag_hash):
 @staticmethod
 def from_specfile(path):
 """Construct a spec from a JSON or YAML spec file path"""
-with open(path, "r") as fd:
+with open(path, "r", encoding="utf-8") as fd:
 file_content = fd.read()
 if path.endswith(".json"):
 return Spec.from_json(file_content)
@@ -2828,11 +2818,50 @@ def ensure_no_deprecated(root):
 msg += " For each package listed, choose another spec\n"
 raise SpecDeprecatedError(msg)

+def concretize(self, tests: Union[bool, Iterable[str]] = False) -> None:
+"""Concretize the current spec.
+
+Args:
+tests: if False disregard 'test' dependencies, if a list of names activate them for
+the packages in the list, if True activate 'test' dependencies for all packages.
+"""
+import spack.solver.asp
+
+self.replace_hash()
+
+for node in self.traverse():
+if not node.name:
+raise spack.error.SpecError(
+f"Spec {node} has no name; cannot concretize an anonymous spec"
+)
+
+if self._concrete:
+return
+
+allow_deprecated = spack.config.get("config:deprecated", False)
+solver = spack.solver.asp.Solver()
+result = solver.solve([self], tests=tests, allow_deprecated=allow_deprecated)
+
+# take the best answer
+opt, i, answer = min(result.answers)
+name = self.name
+# TODO: Consolidate this code with similar code in solve.py
+if self.virtual:
+providers = [spec.name for spec in answer.values() if spec.package.provides(name)]
+name = providers[0]
+
+node = spack.solver.asp.SpecBuilder.make_node(pkg=name)
+assert (
+node in answer
+), f"cannot find {name} in the list of specs {','.join([n.pkg for n in answer.keys()])}"
+
+concretized = answer[node]
+self._dup(concretized)
+
 def _mark_root_concrete(self, value=True):
 """Mark just this spec (not dependencies) concrete."""
 if (not value) and self.concrete and self.installed:
 return
-self._normal = value
 self._concrete = value
 self._validate_version()

@@ -2916,6 +2945,21 @@ def _finalize_concretization(self):
 for spec in self.traverse():
 spec._cached_hash(ht.dag_hash)

+def concretized(self, tests: Union[bool, Iterable[str]] = False) -> "spack.spec.Spec":
+"""This is a non-destructive version of concretize().
+
+First clones, then returns a concrete version of this package
+without modifying this package.
+
+Args:
+tests (bool or list): if False disregard 'test' dependencies,
+if a list of names activate them for the packages in the list,
+if True activate 'test' dependencies for all packages.
+"""
+clone = self.copy()
+clone.concretize(tests=tests)
+return clone
+
 def index(self, deptype="all"):
 """Return a dictionary that points to all the dependencies in this
 spec.
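With concretize() and concretized() living on Spec again, the typical call pattern is the non-destructive form (a sketch; the package name and constraint are illustrative):

    from spack.spec import Spec

    abstract = Spec("zlib@1.3")          # abstract spec parsed from a string
    concrete = abstract.concretized()    # copy, then solve; 'abstract' is left untouched
    assert concrete.concrete and not abstract.concrete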
@@ -3479,7 +3523,6 @@ def _dup(self, other, deps: Union[bool, dt.DepTypes, dt.DepFlag] = True, clearde
 and self.architecture != other.architecture
 and self.compiler != other.compiler
 and self.variants != other.variants
-and self._normal != other._normal
 and self.concrete != other.concrete
 and self.external_path != other.external_path
 and self.external_modules != other.external_modules

@@ -3525,20 +3568,17 @@ def _dup(self, other, deps: Union[bool, dt.DepTypes, dt.DepFlag] = True, clearde
 depflag = dt.canonicalize(deps)
 self._dup_deps(other, depflag)

+self._prefix = other._prefix
 self._concrete = other._concrete

 self.abstract_hash = other.abstract_hash

 if self._concrete:
 self._dunder_hash = other._dunder_hash
-self._normal = other._normal
 for h in ht.hashes:
 setattr(self, h.attr, getattr(other, h.attr, None))
 else:
 self._dunder_hash = None
-# Note, we could use other._normal if we are copying all deps, but
-# always set it False here to avoid the complexity of checking
-self._normal = False
 for h in ht.hashes:
 setattr(self, h.attr, None)

@@ -5038,7 +5078,7 @@ def save_dependency_specfiles(root: Spec, output_directory: str, dependencies: L

 json_path = os.path.join(output_directory, f"{spec.name}.json")

-with open(json_path, "w") as fd:
+with open(json_path, "w", encoding="utf-8") as fd:
 fd.write(spec.to_json(hash=ht.dag_hash))
@@ -57,12 +57,11 @@
 specs to avoid ambiguity. Both are provided because ~ can cause shell
 expansion when it is the first character in an id typed on the command line.
 """
-import enum
 import json
 import pathlib
 import re
 import sys
-from typing import Iterator, List, Match, Optional
+from typing import Iterator, List, Optional

 from llnl.util.tty import color

@@ -70,9 +69,8 @@
 import spack.error
 import spack.spec
 import spack.version
-from spack.error import SpecSyntaxError
+from spack.tokenize import Token, TokenBase, Tokenizer

-IS_WINDOWS = sys.platform == "win32"
 #: Valid name for specs and variants. Here we are not using
 #: the previous "w[\w.-]*" since that would match most
 #: characters that can be part of a word in any language

@@ -87,22 +85,9 @@

 HASH = r"[a-zA-Z_0-9]+"

-#: A filename starts either with a "." or a "/" or a "{name}/,
-# or on Windows, a drive letter followed by a colon and "\"
-# or "." or {name}\
-WINDOWS_FILENAME = r"(?:\.|[a-zA-Z0-9-_]*\\|[a-zA-Z]:\\)(?:[a-zA-Z0-9-_\.\\]*)(?:\.json|\.yaml)"
-UNIX_FILENAME = r"(?:\.|\/|[a-zA-Z0-9-_]*\/)(?:[a-zA-Z0-9-_\.\/]*)(?:\.json|\.yaml)"
-if not IS_WINDOWS:
-FILENAME = UNIX_FILENAME
-else:
-FILENAME = WINDOWS_FILENAME

 #: These are legal values that *can* be parsed bare, without quotes on the command line.
 VALUE = r"(?:[a-zA-Z_0-9\-+\*.,:=\~\/\\]+)"

-#: Variant/flag values that match this can be left unquoted in Spack output
-NO_QUOTES_NEEDED = re.compile(r"^[a-zA-Z0-9,/_.-]+$")

 #: Quoted values can be *anything* in between quotes, including escaped quotes.
 QUOTED_VALUE = r"(?:'(?:[^']|(?<=\\)')*'|\"(?:[^\"]|(?<=\\)\")*\")"
@@ -113,60 +98,21 @@
 #: Regex with groups to use for splitting (optionally propagated) key-value pairs
 SPLIT_KVP = re.compile(rf"^({NAME})(==?)(.*)$")

+#: A filename starts either with a "." or a "/" or a "{name}/, or on Windows, a drive letter
+#: followed by a colon and "\" or "." or {name}\
+WINDOWS_FILENAME = r"(?:\.|[a-zA-Z0-9-_]*\\|[a-zA-Z]:\\)(?:[a-zA-Z0-9-_\.\\]*)(?:\.json|\.yaml)"
+UNIX_FILENAME = r"(?:\.|\/|[a-zA-Z0-9-_]*\/)(?:[a-zA-Z0-9-_\.\/]*)(?:\.json|\.yaml)"
+FILENAME = WINDOWS_FILENAME if sys.platform == "win32" else UNIX_FILENAME
+
 #: Regex to strip quotes. Group 2 will be the unquoted string.
 STRIP_QUOTES = re.compile(r"^(['\"])(.*)\1$")

-def strip_quotes_and_unescape(string: str) -> str:
-"""Remove surrounding single or double quotes from string, if present."""
-match = STRIP_QUOTES.match(string)
-if not match:
-return string
-
-# replace any escaped quotes with bare quotes
-quote, result = match.groups()
-return result.replace(rf"\{quote}", quote)
-
-
-def quote_if_needed(value: str) -> str:
-"""Add quotes around the value if it requires quotes.
-
-This will add quotes around the value unless it matches ``NO_QUOTES_NEEDED``.
-
-This adds:
-* single quotes by default
-* double quotes around any value that contains single quotes
-
-If double quotes are used, we json-escpae the string. That is, we escape ``\\``,
-``"``, and control codes.
-
-"""
-if NO_QUOTES_NEEDED.match(value):
-return value
-
-return json.dumps(value) if "'" in value else f"'{value}'"
-
-
-class TokenBase(enum.Enum):
-"""Base class for an enum type with a regex value"""
-
-def __new__(cls, *args, **kwargs):
-# See
-value = len(cls.__members__) + 1
-obj = object.__new__(cls)
-obj._value_ = value
-return obj
-
-def __init__(self, regex):
-self.regex = regex
-
-def __str__(self):
-return f"{self._name_}"
-
-
-class TokenType(TokenBase):
+#: Values that match this (e.g., variants, flags) can be left unquoted in Spack output
+NO_QUOTES_NEEDED = re.compile(r"^[a-zA-Z0-9,/_.-]+$")
+
+
+class SpecTokens(TokenBase):
 """Enumeration of the different token kinds in the spec grammar.

 Order of declaration is extremely important, since text containing specs is parsed with a
 single regex obtained by ``"|".join(...)`` of all the regex in the order of declaration.
 """
@@ -196,79 +142,24 @@ class TokenType(TokenBase):
 DAG_HASH = rf"(?:/(?:{HASH}))"
 # White spaces
 WS = r"(?:\s+)"
+# Unexpected character(s)

-class ErrorTokenType(TokenBase):
-"""Enum with regexes for error analysis"""
-
-# Unexpected character
 UNEXPECTED = r"(?:.[\s]*)"

-class Token:
-"""Represents tokens; generated from input by lexer and fed to parse()."""
-
-__slots__ = "kind", "value", "start", "end"
-
-def __init__(
-self, kind: TokenBase, value: str, start: Optional[int] = None, end: Optional[int] = None
-):
-self.kind = kind
-self.value = value
-self.start = start
-self.end = end
-
-def __repr__(self):
-return str(self)
-
-def __str__(self):
-return f"({self.kind}, {self.value})"
-
-def __eq__(self, other):
-return (self.kind == other.kind) and (self.value == other.value)
-
-
-#: List of all the regexes used to match spec parts, in order of precedence
-TOKEN_REGEXES = [rf"(?P<{token}>{token.regex})" for token in TokenType]
-#: List of all valid regexes followed by error analysis regexes
-ERROR_HANDLING_REGEXES = TOKEN_REGEXES + [
-rf"(?P<{token}>{token.regex})" for token in ErrorTokenType
-]
-#: Regex to scan a valid text
-ALL_TOKENS = re.compile("|".join(TOKEN_REGEXES))
-#: Regex to analyze an invalid text
-ANALYSIS_REGEX = re.compile("|".join(ERROR_HANDLING_REGEXES))
+#: Tokenizer that includes all the regexes in the SpecTokens enum
+SPEC_TOKENIZER = Tokenizer(SpecTokens)


 def tokenize(text: str) -> Iterator[Token]:
 """Return a token generator from the text passed as input.

 Raises:
-SpecTokenizationError: if we can't tokenize anymore, but didn't reach the
-end of the input text.
+SpecTokenizationError: when unexpected characters are found in the text
 """
-scanner = ALL_TOKENS.scanner(text)  # type: ignore[attr-defined]
-match: Optional[Match] = None
-for match in iter(scanner.match, None):
-# The following two assertions are to help mypy
-msg = (
-"unexpected value encountered during parsing. Please submit a bug report "
-"at https://github.com/spack/spack/issues/new/choose"
-)
-assert match is not None, msg
-assert match.lastgroup is not None, msg
-yield Token(
-TokenType.__members__[match.lastgroup], match.group(), match.start(), match.end()
-)
-
-if match is None and not text:
-# We just got an empty string
-return
-
-if match is None or match.end() != len(text):
-scanner = ANALYSIS_REGEX.scanner(text)  # type: ignore[attr-defined]
-matches = [m for m in iter(scanner.match, None)]  # type: ignore[var-annotated]
-raise SpecTokenizationError(matches, text)
+for token in SPEC_TOKENIZER.tokenize(text):
+if token.kind == SpecTokens.UNEXPECTED:
+raise SpecTokenizationError(list(SPEC_TOKENIZER.tokenize(text)), text)
+yield token


 class TokenContext:
@@ -286,7 +177,7 @@ def advance(self):
 """Advance one token"""
 self.current_token, self.next_token = self.next_token, next(self.token_stream, None)

-def accept(self, kind: TokenType):
+def accept(self, kind: SpecTokens):
 """If the next token is of the specified kind, advance the stream and return True.
 Otherwise return False.
 """

@@ -295,10 +186,25 @@ def accept(self, kind: TokenType):
 return True
 return False

-def expect(self, *kinds: TokenType):
+def expect(self, *kinds: SpecTokens):
 return self.next_token and self.next_token.kind in kinds


+class SpecTokenizationError(spack.error.SpecSyntaxError):
+"""Syntax error in a spec string"""
+
+def __init__(self, tokens: List[Token], text: str):
+message = f"unexpected characters in the spec string\n{text}\n"
+
+underline = ""
+for token in tokens:
+is_error = token.kind == SpecTokens.UNEXPECTED
+underline += ("^" if is_error else " ") * (token.end - token.start)
+
+message += color.colorize(f"@*r{{{underline}}}")
+super().__init__(message)
+
+
 class SpecParser:
 """Parse text into specs"""
@@ -306,13 +212,13 @@ class SpecParser:

 def __init__(self, literal_str: str):
 self.literal_str = literal_str
-self.ctx = TokenContext(filter(lambda x: x.kind != TokenType.WS, tokenize(literal_str)))
+self.ctx = TokenContext(filter(lambda x: x.kind != SpecTokens.WS, tokenize(literal_str)))

 def tokens(self) -> List[Token]:
 """Return the entire list of token from the initial text. White spaces are
 filtered out.
 """
-return list(filter(lambda x: x.kind != TokenType.WS, tokenize(self.literal_str)))
+return list(filter(lambda x: x.kind != SpecTokens.WS, tokenize(self.literal_str)))

 def next_spec(
 self, initial_spec: Optional["spack.spec.Spec"] = None

@@ -339,14 +245,14 @@ def add_dependency(dep, **edge_properties):
 initial_spec = initial_spec or spack.spec.Spec()
 root_spec = SpecNodeParser(self.ctx, self.literal_str).parse(initial_spec)
 while True:
-if self.ctx.accept(TokenType.START_EDGE_PROPERTIES):
+if self.ctx.accept(SpecTokens.START_EDGE_PROPERTIES):
 edge_properties = EdgeAttributeParser(self.ctx, self.literal_str).parse()
 edge_properties.setdefault("depflag", 0)
 edge_properties.setdefault("virtuals", ())
 dependency = self._parse_node(root_spec)
 add_dependency(dependency, **edge_properties)

-elif self.ctx.accept(TokenType.DEPENDENCY):
+elif self.ctx.accept(SpecTokens.DEPENDENCY):
 dependency = self._parse_node(root_spec)
 add_dependency(dependency, depflag=0, virtuals=())
@@ -394,7 +300,7 @@ def parse(
 Return
 The object passed as argument
 """
-if not self.ctx.next_token or self.ctx.expect(TokenType.DEPENDENCY):
+if not self.ctx.next_token or self.ctx.expect(SpecTokens.DEPENDENCY):
 return initial_spec

 if initial_spec is None:

@@ -402,17 +308,17 @@ def parse(

 # If we start with a package name we have a named spec, we cannot
 # accept another package name afterwards in a node
-if self.ctx.accept(TokenType.UNQUALIFIED_PACKAGE_NAME):
+if self.ctx.accept(SpecTokens.UNQUALIFIED_PACKAGE_NAME):
 initial_spec.name = self.ctx.current_token.value

-elif self.ctx.accept(TokenType.FULLY_QUALIFIED_PACKAGE_NAME):
+elif self.ctx.accept(SpecTokens.FULLY_QUALIFIED_PACKAGE_NAME):
 parts = self.ctx.current_token.value.split(".")
 name = parts[-1]
 namespace = ".".join(parts[:-1])
 initial_spec.name = name
 initial_spec.namespace = namespace

-elif self.ctx.accept(TokenType.FILENAME):
+elif self.ctx.accept(SpecTokens.FILENAME):
 return FileParser(self.ctx).parse(initial_spec)

 def raise_parsing_error(string: str, cause: Optional[Exception] = None):

@@ -427,7 +333,7 @@ def add_flag(name: str, value: str, propagate: bool):
 raise_parsing_error(str(e), e)

 while True:
-if self.ctx.accept(TokenType.COMPILER):
+if self.ctx.accept(SpecTokens.COMPILER):
 if self.has_compiler:
 raise_parsing_error("Spec cannot have multiple compilers")

@@ -435,7 +341,7 @@ def add_flag(name: str, value: str, propagate: bool):
 initial_spec.compiler = spack.spec.CompilerSpec(compiler_name.strip(), ":")
 self.has_compiler = True

-elif self.ctx.accept(TokenType.COMPILER_AND_VERSION):
+elif self.ctx.accept(SpecTokens.COMPILER_AND_VERSION):
 if self.has_compiler:
 raise_parsing_error("Spec cannot have multiple compilers")

@@ -446,9 +352,9 @@ def add_flag(name: str, value: str, propagate: bool):
 self.has_compiler = True

 elif (
-self.ctx.accept(TokenType.VERSION_HASH_PAIR)
-or self.ctx.accept(TokenType.GIT_VERSION)
-or self.ctx.accept(TokenType.VERSION)
+self.ctx.accept(SpecTokens.VERSION_HASH_PAIR)
+or self.ctx.accept(SpecTokens.GIT_VERSION)
+or self.ctx.accept(SpecTokens.VERSION)
 ):
 if self.has_version:
 raise_parsing_error("Spec cannot have multiple versions")
@@ -459,32 +365,32 @@ def add_flag(name: str, value: str, propagate: bool):
 initial_spec.attach_git_version_lookup()
 self.has_version = True

-elif self.ctx.accept(TokenType.BOOL_VARIANT):
+elif self.ctx.accept(SpecTokens.BOOL_VARIANT):
 variant_value = self.ctx.current_token.value[0] == "+"
 add_flag(self.ctx.current_token.value[1:].strip(), variant_value, propagate=False)

-elif self.ctx.accept(TokenType.PROPAGATED_BOOL_VARIANT):
+elif self.ctx.accept(SpecTokens.PROPAGATED_BOOL_VARIANT):
 variant_value = self.ctx.current_token.value[0:2] == "++"
 add_flag(self.ctx.current_token.value[2:].strip(), variant_value, propagate=True)

-elif self.ctx.accept(TokenType.KEY_VALUE_PAIR):
+elif self.ctx.accept(SpecTokens.KEY_VALUE_PAIR):
 match = SPLIT_KVP.match(self.ctx.current_token.value)
 assert match, "SPLIT_KVP and KEY_VALUE_PAIR do not agree."

 name, _, value = match.groups()
 add_flag(name, strip_quotes_and_unescape(value), propagate=False)

-elif self.ctx.accept(TokenType.PROPAGATED_KEY_VALUE_PAIR):
+elif self.ctx.accept(SpecTokens.PROPAGATED_KEY_VALUE_PAIR):
 match = SPLIT_KVP.match(self.ctx.current_token.value)
 assert match, "SPLIT_KVP and PROPAGATED_KEY_VALUE_PAIR do not agree."

 name, _, value = match.groups()
 add_flag(name, strip_quotes_and_unescape(value), propagate=True)

-elif self.ctx.expect(TokenType.DAG_HASH):
+elif self.ctx.expect(SpecTokens.DAG_HASH):
 if initial_spec.abstract_hash:
 break
-self.ctx.accept(TokenType.DAG_HASH)
+self.ctx.accept(SpecTokens.DAG_HASH)
 initial_spec.abstract_hash = self.ctx.current_token.value[1:]

 else:
@@ -534,7 +440,7 @@ def __init__(self, ctx, literal_str):
 def parse(self):
 attributes = {}
 while True:
-if self.ctx.accept(TokenType.KEY_VALUE_PAIR):
+if self.ctx.accept(SpecTokens.KEY_VALUE_PAIR):
 name, value = self.ctx.current_token.value.split("=", maxsplit=1)
 name = name.strip("'\" ")
 value = value.strip("'\" ").split(",")

@@ -546,7 +452,7 @@ def parse(self):
 )
 raise SpecParsingError(msg, self.ctx.current_token, self.literal_str)
 # TODO: Add code to accept bool variants here as soon as use variants are implemented
-elif self.ctx.accept(TokenType.END_EDGE_PROPERTIES):
+elif self.ctx.accept(SpecTokens.END_EDGE_PROPERTIES):
 break
 else:
 msg = "unexpected token in edge attributes"
@@ -601,25 +507,7 @@ def parse_one_or_raise(
 return result


-class SpecTokenizationError(SpecSyntaxError):
-    """Syntax error in a spec string"""
-
-    def __init__(self, matches, text):
-        message = "unexpected tokens in the spec string\n"
-        message += f"{text}"
-
-        underline = "\n"
-        for match in matches:
-            if match.lastgroup == str(ErrorTokenType.UNEXPECTED):
-                underline += f"{'^' * (match.end() - match.start())}"
-                continue
-            underline += f"{' ' * (match.end() - match.start())}"
-
-        message += color.colorize(f"@*r{{{underline}}}")
-        super().__init__(message)
-
-
-class SpecParsingError(SpecSyntaxError):
+class SpecParsingError(spack.error.SpecSyntaxError):
     """Error when parsing tokens"""

     def __init__(self, message, token, text):
@@ -627,3 +515,33 @@ def __init__(self, message, token, text):
         underline = f"\n{' '*token.start}{'^'*(token.end - token.start)}"
         message += color.colorize(f"@*r{{{underline}}}")
         super().__init__(message)


+def strip_quotes_and_unescape(string: str) -> str:
+    """Remove surrounding single or double quotes from string, if present."""
+    match = STRIP_QUOTES.match(string)
+    if not match:
+        return string
+
+    # replace any escaped quotes with bare quotes
+    quote, result = match.groups()
+    return result.replace(rf"\{quote}", quote)
+
+
+def quote_if_needed(value: str) -> str:
+    """Add quotes around the value if it requires quotes.
+
+    This will add quotes around the value unless it matches ``NO_QUOTES_NEEDED``.
+
+    This adds:
+
+    * single quotes by default
+    * double quotes around any value that contains single quotes
+
+    If double quotes are used, we json-escape the string. That is, we escape ``\\``,
+    ``"``, and control codes.
+
+    """
+    if NO_QUOTES_NEEDED.match(value):
+        return value
+
+    return json.dumps(value) if "'" in value else f"'{value}'"
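The two helpers added above rely on the module-level regexes STRIP_QUOTES and NO_QUOTES_NEEDED, which are not shown in this hunk. Below is a minimal self-contained sketch of how the pair is meant to round-trip flag values; the regexes here are plausible approximations for illustration, not Spack's actual definitions.

import json
import re

# Approximations of the module-level patterns referenced above.
STRIP_QUOTES = re.compile(r"^(['\"])(.*)\1$")           # value wrapped in matching quotes
NO_QUOTES_NEEDED = re.compile(r"^[a-zA-Z0-9,/_.-]+$")   # simple values need no quoting


def strip_quotes_and_unescape(string: str) -> str:
    match = STRIP_QUOTES.match(string)
    if not match:
        return string
    quote, result = match.groups()
    return result.replace(rf"\{quote}", quote)


def quote_if_needed(value: str) -> str:
    if NO_QUOTES_NEEDED.match(value):
        return value
    return json.dumps(value) if "'" in value else f"'{value}'"


# Round trip: quoting a value and then stripping the quotes gives the value back.
assert quote_if_needed("-O2") == "-O2"                 # no quotes needed
assert quote_if_needed("-O2 -g") == "'-O2 -g'"         # single quotes by default
assert strip_quotes_and_unescape("'-O2 -g'") == "-O2 -g"
assert quote_if_needed("it's") == '"it\'s"'            # double quotes when the value has a single quote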
@@ -992,7 +992,7 @@ def interactive_version_filter(
 editor(filepath, exec_fn=executable)

 # Read back in
-with open(filepath, "r") as f:
+with open(filepath, "r", encoding="utf-8") as f:
 orig_url_dict, url_dict = url_dict, {}
 for line in f:
 line = line.strip()
@@ -8,7 +8,6 @@

 import pytest

-import spack.concretize
 import spack.config
 import spack.deptypes as dt
 import spack.solver.asp
@@ -23,7 +22,7 @@ def __init__(self, specs: List[str]) -> None:
 self.concr_specs = []

 def __enter__(self):
-self.concr_specs = [spack.concretize.concretized(Spec(s)) for s in self.req_specs]
+self.concr_specs = [Spec(s).concretized() for s in self.req_specs]
 for s in self.concr_specs:
 PackageInstaller([s.package], fake=True, explicit=True).install()

@@ -64,13 +63,13 @@ def _has_build_dependency(spec: Spec, name: str):
 def test_simple_reuse(splicing_setup):
 with CacheManager(["splice-z@1.0.0+compat"]):
 spack.config.set("packages", _make_specs_non_buildable(["splice-z"]))
-assert spack.concretize.concretized(Spec("splice-z")).satisfies(Spec("splice-z"))
+assert Spec("splice-z").concretized().satisfies(Spec("splice-z"))


 def test_simple_dep_reuse(splicing_setup):
 with CacheManager(["splice-z@1.0.0+compat"]):
 spack.config.set("packages", _make_specs_non_buildable(["splice-z"]))
-assert spack.concretize.concretized(Spec("splice-h@1")).satisfies(Spec("splice-h@1"))
+assert Spec("splice-h@1").concretized().satisfies(Spec("splice-h@1"))


 def test_splice_installed_hash(splicing_setup):
@@ -83,9 +82,9 @@ def test_splice_installed_hash(splicing_setup):
 spack.config.set("packages", packages_config)
 goal_spec = Spec("splice-t@1 ^splice-h@1.0.2+compat ^splice-z@1.0.0")
 with pytest.raises(Exception):
-spack.concretize.concretized(goal_spec)
+goal_spec.concretized()
 _enable_splicing()
-assert spack.concretize.concretized(goal_spec).satisfies(goal_spec)
+assert goal_spec.concretized().satisfies(goal_spec)


 def test_splice_build_splice_node(splicing_setup):
@@ -93,9 +92,9 @@ def test_splice_build_splice_node(splicing_setup):
 spack.config.set("packages", _make_specs_non_buildable(["splice-t"]))
 goal_spec = Spec("splice-t@1 ^splice-h@1.0.2+compat ^splice-z@1.0.0+compat")
 with pytest.raises(Exception):
-spack.concretize.concretized(goal_spec)
+goal_spec.concretized()
 _enable_splicing()
-assert spack.concretize.concretized(goal_spec).satisfies(goal_spec)
+assert goal_spec.concretized().satisfies(goal_spec)


 def test_double_splice(splicing_setup):
@@ -109,9 +108,9 @@ def test_double_splice(splicing_setup):
 spack.config.set("packages", freeze_builds_config)
 goal_spec = Spec("splice-t@1 ^splice-h@1.0.2+compat ^splice-z@1.0.2+compat")
 with pytest.raises(Exception):
-spack.concretize.concretized(goal_spec)
+goal_spec.concretized()
 _enable_splicing()
-assert spack.concretize.concretized(goal_spec).satisfies(goal_spec)
+assert goal_spec.concretized().satisfies(goal_spec)


 # The next two tests are mirrors of one another
@@ -128,10 +127,10 @@ def test_virtual_multi_splices_in(splicing_setup):
 spack.config.set("packages", _make_specs_non_buildable(["depends-on-virtual-with-abi"]))
 for gs in goal_specs:
 with pytest.raises(Exception):
-spack.concretize.concretized(Spec(gs))
+Spec(gs).concretized()
 _enable_splicing()
 for gs in goal_specs:
-assert spack.concretize.concretized(Spec(gs)).satisfies(gs)
+assert Spec(gs).concretized().satisfies(gs)


 def test_virtual_multi_can_be_spliced(splicing_setup):
@@ -145,12 +144,12 @@ def test_virtual_multi_can_be_spliced(splicing_setup):
 ]
 with CacheManager(cache):
 spack.config.set("packages", _make_specs_non_buildable(["depends-on-virtual-with-abi"]))
-for gs in goal_specs:
-with pytest.raises(Exception):
-spack.concretize.concretized(Spec(gs))
+with pytest.raises(Exception):
+for gs in goal_specs:
+Spec(gs).concretized()
 _enable_splicing()
 for gs in goal_specs:
-assert spack.concretize.concretized(Spec(gs)).satisfies(gs)
+assert Spec(gs).concretized().satisfies(gs)


 def test_manyvariant_star_matching_variant_splice(splicing_setup):
@@ -168,10 +167,10 @@ def test_manyvariant_star_matching_variant_splice(splicing_setup):
 spack.config.set("packages", freeze_build_config)
 for goal in goal_specs:
 with pytest.raises(Exception):
-spack.concretize.concretized(goal)
+goal.concretized()
 _enable_splicing()
 for goal in goal_specs:
-assert spack.concretize.concretized(goal).satisfies(goal)
+assert goal.concretized().satisfies(goal)


 def test_manyvariant_limited_matching(splicing_setup):
@@ -190,10 +189,10 @@ def test_manyvariant_limited_matching(splicing_setup):
 spack.config.set("packages", freeze_build_config)
 for s in goal_specs:
 with pytest.raises(Exception):
-spack.concretize.concretized(s)
+s.concretized()
 _enable_splicing()
 for s in goal_specs:
-assert spack.concretize.concretized(s).satisfies(s)
+assert s.concretized().satisfies(s)


 def test_external_splice_same_name(splicing_setup):
@@ -212,7 +211,7 @@ def test_external_splice_same_name(splicing_setup):
 spack.config.set("packages", packages_yaml)
 _enable_splicing()
 for s in goal_specs:
-assert spack.concretize.concretized(s).satisfies(s)
+assert s.concretized().satisfies(s)


 def test_spliced_build_deps_only_in_build_spec(splicing_setup):
@@ -221,7 +220,7 @@ def test_spliced_build_deps_only_in_build_spec(splicing_setup):

 with CacheManager(cache):
 _enable_splicing()
-concr_goal = spack.concretize.concretized(goal_spec)
+concr_goal = goal_spec.concretized()
 build_spec = concr_goal._build_spec
 # Spec has been spliced
 assert build_spec is not None
@@ -239,7 +238,7 @@ def test_spliced_transitive_dependency(splicing_setup):
 with CacheManager(cache):
 spack.config.set("packages", _make_specs_non_buildable(["splice-depends-on-t"]))
 _enable_splicing()
-concr_goal = spack.concretize.concretized(goal_spec)
+concr_goal = goal_spec.concretized()
 # Spec has been spliced
 assert concr_goal._build_spec is not None
 assert concr_goal["splice-t"]._build_spec is not None
@@ -134,5 +134,5 @@ def test_concretize_target_ranges(root_target_range, dep_target_range, result, m
 f"pkg-a %gcc@10 foobar=bar target={root_target_range} ^pkg-b target={dep_target_range}"
 )
 with spack.concretize.disable_compiler_existence_check():
-spec = spack.concretize.concretized(spec)
+spec.concretize()
 assert spec.target == spec["pkg-b"].target == result
@@ -10,6 +10,7 @@
 import os
 import pathlib
 import platform
+import re
 import shutil
 import sys
 import tarfile
@@ -28,12 +29,12 @@
 import spack.binary_distribution as bindist
 import spack.caches
 import spack.compilers
-import spack.concretize
 import spack.config
 import spack.fetch_strategy
 import spack.hooks.sbang as sbang
 import spack.main
 import spack.mirrors.mirror
+import spack.oci.image
 import spack.paths
 import spack.spec
 import spack.stage
@@ -182,13 +183,13 @@ def dummy_prefix(tmpdir):
 absolute_app_link = p.join("bin", "absolute_app_link")
 data = p.join("share", "file")

-with open(app, "w") as f:
+with open(app, "w", encoding="utf-8") as f:
 f.write("hello world")

-with open(data, "w") as f:
+with open(data, "w", encoding="utf-8") as f:
 f.write("hello world")

-with open(p.join(".spack", "binary_distribution"), "w") as f:
+with open(p.join(".spack", "binary_distribution"), "w", encoding="utf-8") as f:
 f.write("{}")

 os.symlink("app", relative_app_link)
@@ -214,9 +215,8 @@ def test_default_rpaths_create_install_default_layout(temporary_mirror_dir):
 Test the creation and installation of buildcaches with default rpaths
 into the default directory layout scheme.
 """
-gspec = spack.concretize.concretized(Spec("garply"))
-cspec = spack.concretize.concretized(Spec("corge"))
-sy_spec = spack.concretize.concretized(Spec("symly"))
+gspec, cspec = Spec("garply").concretized(), Spec("corge").concretized()
+sy_spec = Spec("symly").concretized()

 # Install 'corge' without using a cache
 install_cmd("--no-cache", cspec.name)
@@ -263,9 +263,9 @@ def test_default_rpaths_install_nondefault_layout(temporary_mirror_dir):
 Test the creation and installation of buildcaches with default rpaths
 into the non-default directory layout scheme.
 """
-cspec = spack.concretize.concretized(Spec("corge"))
+cspec = Spec("corge").concretized()
 # This guy tests for symlink relocation
-sy_spec = spack.concretize.concretized(Spec("symly"))
+sy_spec = Spec("symly").concretized()

 # Install some packages with dependent packages
 # test install in non-default install path scheme
@@ -286,8 +286,7 @@ def test_relative_rpaths_install_default_layout(temporary_mirror_dir):
 Test the creation and installation of buildcaches with relative
 rpaths into the default directory layout scheme.
 """
-gspec = spack.concretize.concretized(Spec("garply"))
-cspec = spack.concretize.concretized(Spec("corge"))
+gspec, cspec = Spec("garply").concretized(), Spec("corge").concretized()

 # Install buildcache created with relativized rpaths
 buildcache_cmd("install", "-uf", cspec.name)
@@ -316,7 +315,7 @@ def test_relative_rpaths_install_nondefault(temporary_mirror_dir):
 Test the installation of buildcaches with relativized rpaths
 into the non-default directory layout scheme.
 """
-cspec = spack.concretize.concretized(Spec("corge"))
+cspec = Spec("corge").concretized()

 # Test install in non-default install path scheme and relative path
 buildcache_cmd("install", "-uf", cspec.name)
@@ -369,8 +368,7 @@ def test_built_spec_cache(temporary_mirror_dir):
 that cache from a buildcache index."""
 buildcache_cmd("list", "-a", "-l")

-gspec = spack.concretize.concretized(Spec("garply"))
-cspec = spack.concretize.concretized(Spec("corge"))
+gspec, cspec = Spec("garply").concretized(), Spec("corge").concretized()

 for s in [gspec, cspec]:
 results = bindist.get_mirrors_for_spec(s)
@@ -393,7 +391,7 @@ def test_spec_needs_rebuild(monkeypatch, tmpdir):
 mirror_dir = tmpdir.join("mirror_dir")
 mirror_url = url_util.path_to_file_url(mirror_dir.strpath)

-s = spack.concretize.concretized(Spec("libdwarf"))
+s = Spec("libdwarf").concretized()

 # Install a package
 install_cmd(s.name)
@@ -422,7 +420,7 @@ def test_generate_index_missing(monkeypatch, tmpdir, mutable_config):
 mirror_url = url_util.path_to_file_url(mirror_dir.strpath)
 spack.config.set("mirrors", {"test": mirror_url})

-s = spack.concretize.concretized(Spec("libdwarf"))
+s = Spec("libdwarf").concretized()

 # Install a package
 install_cmd("--no-cache", s.name)
@@ -512,7 +510,7 @@ def test_update_sbang(tmpdir, temporary_mirror):
 """
 spec_str = "old-sbang"
 # Concretize a package with some old-fashioned sbang lines.
-old_spec = spack.concretize.concretized(Spec(spec_str))
+old_spec = Spec(spec_str).concretized()
 old_spec_hash_str = "/{0}".format(old_spec.dag_hash())

 # Need a fake mirror with *function* scope.
@@ -533,7 +531,7 @@ def test_update_sbang(tmpdir, temporary_mirror):
 # Switch the store to the new install tree locations
 newtree_dir = tmpdir.join("newtree")
 with spack.store.use_store(str(newtree_dir)):
-new_spec = spack.concretize.concretized(Spec("old-sbang"))
+new_spec = Spec("old-sbang").concretized()
 assert new_spec.dag_hash() == old_spec.dag_hash()

 # Install package from buildcache
@@ -560,10 +558,16 @@ def test_update_sbang(tmpdir, temporary_mirror):
 )

 installed_script_style_1_path = new_spec.prefix.bin.join("sbang-style-1.sh")
-assert sbang_style_1_expected == open(str(installed_script_style_1_path)).read()
+assert (
+    sbang_style_1_expected
+    == open(str(installed_script_style_1_path), encoding="utf-8").read()
+)

 installed_script_style_2_path = new_spec.prefix.bin.join("sbang-style-2.sh")
-assert sbang_style_2_expected == open(str(installed_script_style_2_path)).read()
+assert (
+    sbang_style_2_expected
+    == open(str(installed_script_style_2_path), encoding="utf-8").read()
+)

 uninstall_cmd("-y", "/%s" % new_spec.dag_hash())

@@ -906,7 +910,7 @@ def test_tarball_doesnt_include_buildinfo_twice(tmp_path: Path):
 p.joinpath(".spack").mkdir(parents=True)

 # Create a binary_distribution file in the .spack folder
-with open(p / ".spack" / "binary_distribution", "w") as f:
+with open(p / ".spack" / "binary_distribution", "w", encoding="utf-8") as f:
 f.write(syaml.dump({"metadata", "old"}))

 # Now create a tarball, which should include a new binary_distribution file
@@ -940,7 +944,7 @@ def test_reproducible_tarball_is_reproducible(tmp_path: Path):
 tarball_1 = str(tmp_path / "prefix-1.tar.gz")
 tarball_2 = str(tmp_path / "prefix-2.tar.gz")

-with open(app, "w") as f:
+with open(app, "w", encoding="utf-8") as f:
 f.write("hello world")

 buildinfo = {"metadata": "yes please"}
@@ -985,12 +989,16 @@ def test_tarball_normalized_permissions(tmpdir):

 # Everyone can write & execute. This should turn into 0o755 when the tarball is
 # extracted (on a different system).
-with open(app, "w", opener=lambda path, flags: os.open(path, flags, 0o777)) as f:
+with open(
+    app, "w", opener=lambda path, flags: os.open(path, flags, 0o777), encoding="utf-8"
+) as f:
 f.write("hello world")

 # User doesn't have execute permissions, but group/world have; this should also
 # turn into 0o644 (user read/write, group&world only read).
-with open(data, "w", opener=lambda path, flags: os.open(path, flags, 0o477)) as f:
+with open(
+    data, "w", opener=lambda path, flags: os.open(path, flags, 0o477), encoding="utf-8"
+) as f:
 f.write("hello world")

 bindist._do_create_tarball(tarball, binaries_dir=p.strpath, buildinfo={})
@@ -1157,7 +1165,7 @@ def test_get_valid_spec_file(tmp_path, layout, expect_success):
 spec_dict["buildcache_layout_version"] = layout

 # Save to file
-with open(path, "w") as f:
+with open(path, "w", encoding="utf-8") as f:
 json.dump(spec_dict, f)

 try:
@@ -1206,7 +1214,7 @@ def test_download_tarball_with_unsupported_layout_fails(tmp_path, mutable_config
 tmp_path / bindist.build_cache_relative_path() / bindist.tarball_name(spec, ".spec.json")
 )
 path.parent.mkdir(parents=True)
-with open(path, "w") as f:
+with open(path, "w", encoding="utf-8") as f:
 json.dump(spec_dict, f)

 # Configure as a mirror.
@@ -1217,3 +1225,19 @@ def test_download_tarball_with_unsupported_layout_fails(tmp_path, mutable_config

 # And there should be a warning about an unsupported layout version.
 assert f"Layout version {layout_version} is too new" in capsys.readouterr().err
+
+
+@pytest.mark.parametrize(
+    "spec",
+    [
+        # Standard case
+        "short-name@=1.2.3",
+        # Unsupported characters in git version
+        f"git-version@{1:040x}=develop",
+        # Too long of a name
+        f"{'too-long':x<256}@=1.2.3",
+    ],
+)
+def test_default_tag(spec: str):
+    """Make sure that computed image tags are valid."""
+    assert re.fullmatch(spack.oci.image.tag, bindist._oci_default_tag(spack.spec.Spec(spec)))
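For context on the new test_default_tag above: spack.oci.image.tag is Spack's pattern for a valid OCI image tag, and bindist._oci_default_tag derives a tag from a spec, so the test asserts that every derived tag fits that grammar. The sketch below shows the constraint being checked using the reference pattern from the OCI distribution spec rather than Spack's own definition, so treat it as illustrative only.

import re

# Reference tag grammar from the OCI distribution spec (assumed here for
# illustration): one leading alphanumeric or underscore character, followed by
# at most 127 characters from [A-Za-z0-9._-].
OCI_TAG = re.compile(r"[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}")

def is_valid_oci_tag(tag: str) -> bool:
    return re.fullmatch(OCI_TAG, tag) is not None

assert is_valid_oci_tag("1.2.3")            # a plain version makes a fine tag
assert not is_valid_oci_tag("=develop")     # '=' must be mapped to something legal
assert not is_valid_oci_tag("x" * 200)      # over the 128-character limit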
@@ -9,7 +9,6 @@
 import pytest

 import spack.binary_distribution as bd
-import spack.concretize
 import spack.mirrors.mirror
 import spack.spec
 from spack.installer import PackageInstaller
@@ -18,7 +17,7 @@


 def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmp_path):
-spec = spack.concretize.concretized(spack.spec.Spec("trivial-install-test-package"))
+spec = spack.spec.Spec("trivial-install-test-package").concretized()
 PackageInstaller([spec.package], fake=True).install()

 specs = [spec]
Some files were not shown because too many files have changed in this diff.