Compare commits: develop-20...woptim/rad (1 commit)

| Author | SHA1 | Date |
|---|---|---|
|  | 048a46f817 |  |
.github/workflows/audit.yaml (2 changed lines)

@@ -66,7 +66,7 @@ jobs:
  ./share/spack/qa/validate_last_exit.ps1
  spack -d audit externals
  ./share/spack/qa/validate_last_exit.ps1
- - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
+ - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
  if: ${{ inputs.with_coverage == 'true' && runner.os != 'Windows' }}
  with:
  name: coverage-audits-${{ matrix.system.os }}
.github/workflows/bootstrap.yml (12 changed lines)

@@ -161,7 +161,11 @@ jobs:
  source share/spack/setup-env.sh
  spack -d gpg list
  tree $HOME/.spack/bootstrap/store/
+ - name: Bootstrap File
+ run: |
+ source share/spack/setup-env.sh
+ spack -d python share/spack/qa/bootstrap-file.py
+ tree $HOME/.spack/bootstrap/store/

  windows:
  runs-on: "windows-latest"

@@ -192,3 +196,9 @@ jobs:
  spack -d gpg list
  ./share/spack/qa/validate_last_exit.ps1
  tree $env:userprofile/.spack/bootstrap/store/
+ - name: Bootstrap File
+ run: |
+ ./share/spack/setup-env.ps1
+ spack -d python share/spack/qa/bootstrap-file.py
+ ./share/spack/qa/validate_last_exit.ps1
+ tree $env:userprofile/.spack/bootstrap/store/
.github/workflows/build-containers.yml (6 changed lines)

@@ -94,7 +94,7 @@ jobs:
  fi

  - name: Upload Dockerfile
- uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
+ uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
  with:
  name: dockerfiles_${{ matrix.dockerfile[0] }}
  path: dockerfiles

@@ -103,7 +103,7 @@ jobs:
  uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf

  - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@6524bf65af31da8d45b59e8c27de4bd072b392f5
+ uses: docker/setup-buildx-action@c47758b77c9736f4b2ef4073d4d51994fabfe349

  - name: Log in to GitHub Container Registry
  uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567

@@ -133,7 +133,7 @@ jobs:
  needs: deploy-images
  steps:
  - name: Merge Artifacts
- uses: actions/upload-artifact/merge@6f51ac03b9356f520e9adb1b1b7802705f340c2b
+ uses: actions/upload-artifact/merge@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
  with:
  name: dockerfiles
  pattern: dockerfiles_*
.github/workflows/coverage.yml (1 changed line)

@@ -32,4 +32,3 @@ jobs:
  uses: codecov/codecov-action@05f5a9cfad807516dbbef9929c4a42df3eb78766
  with:
  verbose: true
- fail_ci_if_error: false
@@ -3,5 +3,5 @@ clingo==5.7.1
  flake8==7.1.1
  isort==5.13.2
  mypy==1.8.0
- types-six==1.17.0.20241205
+ types-six==1.16.21.20241105
  vermin==1.6.0
.github/workflows/unit_tests.yaml (38 changed lines)

@@ -15,17 +15,17 @@ jobs:
  strategy:
  matrix:
  os: [ubuntu-latest]
- python-version: ['3.8', '3.9', '3.10', '3.11', '3.12']
+ python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12']
  on_develop:
  - ${{ github.ref == 'refs/heads/develop' }}
  include:
  - python-version: '3.6'
  os: ubuntu-20.04
  on_develop: ${{ github.ref == 'refs/heads/develop' }}
- - python-version: '3.7'
- os: ubuntu-22.04
- on_develop: ${{ github.ref == 'refs/heads/develop' }}
  exclude:
+ - python-version: '3.7'
+ os: ubuntu-latest
+ on_develop: false
  - python-version: '3.8'
  os: ubuntu-latest
  on_develop: false

@@ -52,13 +52,7 @@ jobs:
  # Needed for unit tests
  sudo apt-get -y install \
  coreutils cvs gfortran graphviz gnupg2 mercurial ninja-build \
- cmake bison libbison-dev subversion
- # On ubuntu 24.04, kcov was removed. It may come back in some future Ubuntu
- - name: Set up Homebrew
- id: set-up-homebrew
- uses: Homebrew/actions/setup-homebrew@40e9946c182a64b3db1bf51be0dcb915f7802aa9
- - name: Install kcov with brew
- run: "brew install kcov"
+ cmake bison libbison-dev kcov
  - name: Install Python packages
  run: |
  pip install --upgrade pip setuptools pytest pytest-xdist pytest-cov

@@ -86,7 +80,7 @@ jobs:
  UNIT_TEST_COVERAGE: ${{ matrix.python-version == '3.11' }}
  run: |
  share/spack/qa/run-unit-tests
- - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
+ - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
  with:
  name: coverage-${{ matrix.os }}-python${{ matrix.python-version }}
  path: coverage

@@ -105,13 +99,7 @@ jobs:
  run: |
  sudo apt-get -y update
  # Needed for shell tests
- sudo apt-get install -y coreutils csh zsh tcsh fish dash bash subversion
- # On ubuntu 24.04, kcov was removed. It may come back in some future Ubuntu
- - name: Set up Homebrew
- id: set-up-homebrew
- uses: Homebrew/actions/setup-homebrew@40e9946c182a64b3db1bf51be0dcb915f7802aa9
- - name: Install kcov with brew
- run: "brew install kcov"
+ sudo apt-get install -y coreutils kcov csh zsh tcsh fish dash bash
  - name: Install Python packages
  run: |
  pip install --upgrade pip setuptools pytest coverage[toml] pytest-xdist

@@ -125,7 +113,7 @@ jobs:
  COVERAGE: true
  run: |
  share/spack/qa/run-shell-tests
- - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
+ - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
  with:
  name: coverage-shell
  path: coverage

@@ -140,13 +128,13 @@ jobs:
  - name: Install dependencies
  run: |
  dnf install -y \
- bzip2 curl gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
+ bzip2 curl file gcc-c++ gcc gcc-gfortran git gnupg2 gzip \
  make patch tcl unzip which xz
  - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
  - name: Setup repo and non-root user
  run: |
  git --version
- git config --global --add safe.directory '*'
+ git config --global --add safe.directory /__w/spack/spack
  git fetch --unshallow
  . .github/workflows/bin/setup_git.sh
  useradd spack-test

@@ -187,7 +175,7 @@ jobs:
  spack bootstrap status
  spack solve zlib
  spack unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml lib/spack/spack/test/concretization/core.py
- - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
+ - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
  with:
  name: coverage-clingo-cffi
  path: coverage

@@ -225,7 +213,7 @@ jobs:
  $(which spack) solve zlib
  common_args=(--dist loadfile --tx '4*popen//python=./bin/spack-tmpconfig python -u ./bin/spack python' -x)
  $(which spack) unit-test --verbose --cov --cov-config=pyproject.toml --cov-report=xml:coverage.xml "${common_args[@]}"
- - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
+ - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
  with:
  name: coverage-${{ matrix.os }}-python${{ matrix.python-version }}
  path: coverage

@@ -256,7 +244,7 @@ jobs:
  run: |
  spack unit-test -x --verbose --cov --cov-config=pyproject.toml
  ./share/spack/qa/validate_last_exit.ps1
- - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b
+ - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
  with:
  name: coverage-windows
  path: coverage
.github/workflows/valid-style.yml (42 changed lines)

@@ -13,7 +13,8 @@ concurrency:


  jobs:
- # Validate that the code can be run on all the Python versions supported by Spack
+ # Validate that the code can be run on all the Python versions
+ # supported by Spack
  validate:
  runs-on: ubuntu-latest
  steps:

@@ -73,7 +74,7 @@ jobs:
  - name: Setup repo and non-root user
  run: |
  git --version
- git config --global --add safe.directory '*'
+ git config --global --add safe.directory /__w/spack/spack
  git fetch --unshallow
  . .github/workflows/bin/setup_git.sh
  useradd spack-test

@@ -86,7 +87,6 @@ jobs:
  spack -d bootstrap now --dev
  spack -d style -t black
  spack unit-test -V
- # Check we don't make the situation with circular imports worse
  import-check:
  runs-on: ubuntu-latest
  steps:

@@ -121,46 +121,28 @@ jobs:
  uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
  with:
  repository: haampie/circular-import-fighter
- ref: b5d6ce9be35f602cca7d5a6aa0259fca10639cca
+ ref: 9f60f51bc7134e0be73f27623f1b0357d1718427
  path: circular-import-fighter
  - name: Install dependencies
  working-directory: circular-import-fighter
  run: make -j dependencies
- - name: Problematic imports before
+ - name: Import cycles before
  working-directory: circular-import-fighter
- run: make SPACK_ROOT=../old SUFFIX=.old
+ run: make SPACK_ROOT=../old && cp solution solution.old
- - name: Problematic imports after
+ - name: Import cycles after
  working-directory: circular-import-fighter
- run: make SPACK_ROOT=../new SUFFIX=.new
+ run: make clean-graph && make SPACK_ROOT=../new && cp solution solution.new
  - name: Compare import cycles
  working-directory: circular-import-fighter
  run: |
- edges_before="$(head -n1 solution.old)"
+ edges_before="$(grep -oP 'edges to delete: \K\d+' solution.old)"
- edges_after="$(head -n1 solution.new)"
+ edges_after="$(grep -oP 'edges to delete: \K\d+' solution.new)"
  if [ "$edges_after" -gt "$edges_before" ]; then
  printf '\033[1;31mImport check failed: %s imports need to be deleted, ' "$edges_after"
  printf 'previously this was %s\033[0m\n' "$edges_before"
- printf 'Compare \033[1;97m"Problematic imports before"\033[0m and '
+ printf 'Compare \033[1;97m"Import cycles before"\033[0m and '
- printf '\033[1;97m"Problematic imports after"\033[0m.\n'
+ printf '\033[1;97m"Import cycles after"\033[0m to see problematic imports.\n'
  exit 1
  else
  printf '\033[1;32mImport check passed: %s <= %s\033[0m\n' "$edges_after" "$edges_before"
  fi

- # Further style checks from pylint
- pylint:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
- with:
- fetch-depth: 0
- - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b
- with:
- python-version: '3.13'
- cache: 'pip'
- - name: Install Python packages
- run: |
- pip install --upgrade pip setuptools pylint
- - name: Pylint (Spack Core)
- run: |
- pylint -j 4 --disable=all --enable=unspecified-encoding --ignore-paths=lib/spack/external lib
@@ -102,6 +102,6 @@ PackageName: sbang
  PackageHomePage: https://github.com/spack/sbang
  PackageLicenseDeclared: Apache-2.0 OR MIT

- PackageName: typing_extensions
- PackageHomePage: https://pypi.org/project/typing-extensions/
- PackageLicenseDeclared: Python-2.0
+ PackageName: six
+ PackageHomePage: https://pypi.python.org/pypi/six
+ PackageLicenseDeclared: MIT
@@ -194,12 +194,6 @@ config:
  # executables with many dependencies, in particular on slow filesystems.
  bind: false

- # Controls the handling of missing dynamic libraries after installation.
- # Options are ignore (default), warn, or error. If set to error, the
- # installation fails if installed binaries reference dynamic libraries that
- # are not found in their specified rpaths.
- missing_library_policy: ignore

  # Set to 'false' to allow installation on filesystems that doesn't allow setgid bit
  # manipulation by unprivileged user (e.g. AFS)
@@ -76,8 +76,6 @@ packages:
  buildable: false
  cray-mvapich2:
  buildable: false
- egl:
- buildable: false
  fujitsu-mpi:
  buildable: false
  hpcx-mpi:
@@ -265,30 +265,25 @@ infrastructure, or to cache Spack built binaries in Github Actions and
  GitLab CI.

  To get started, configure an OCI mirror using ``oci://`` as the scheme,
- and optionally specify variables that hold the username and password (or
- personal access token) for the registry:
+ and optionally specify a username and password (or personal access token):

  .. code-block:: console

- $ spack mirror add --oci-username-variable REGISTRY_USER \
- --oci-password-variable REGISTRY_TOKEN \
- my_registry oci://example.com/my_image
+ $ spack mirror add --oci-username username --oci-password password my_registry oci://example.com/my_image

  Spack follows the naming conventions of Docker, with Dockerhub as the default
  registry. To use Dockerhub, you can omit the registry domain:

  .. code-block:: console

- $ spack mirror add ... my_registry oci://username/my_image
+ $ spack mirror add --oci-username username --oci-password password my_registry oci://username/my_image

  From here, you can use the mirror as any other build cache:

  .. code-block:: console

- $ export REGISTRY_USER=...
- $ export REGISTRY_TOKEN=...
  $ spack buildcache push my_registry <specs...> # push to the registry
- $ spack install <specs...> # or install from the registry
+ $ spack install <specs...> # install from the registry

  A unique feature of buildcaches on top of OCI registries is that it's incredibly
  easy to generate get a runnable container image with the binaries installed. This
@@ -25,14 +25,6 @@ QMake does not appear to have a standardized way of specifying
  the installation directory, so you may have to set environment
  variables or edit ``*.pro`` files to get things working properly.

- QMake packages will depend on the virtual ``qmake`` package which
- is provided by multiple versions of Qt: ``qt`` provides Qt up to
- Qt5, and ``qt-base`` provides Qt from version Qt6 onwards. This
- split was motivated by the desire to split the single Qt package
- into its components to allow for more fine-grained installation.
- To depend on a specific version, refer to the documentation on
- :ref:`virtual-dependencies`.

  ^^^^^^
  Phases
  ^^^^^^
@@ -38,11 +38,9 @@ just have to configure and OCI registry and run ``spack buildcache push``.
  spack -e . install

  # Configure the registry
- spack -e . mirror add --oci-username-variable REGISTRY_USER \
- --oci-password-variable REGISTRY_TOKEN \
- container-registry oci://example.com/name/image
+ spack -e . mirror add --oci-username ... --oci-password ... container-registry oci://example.com/name/image

- # Push the image (do set REGISTRY_USER and REGISTRY_TOKEN)
+ # Push the image
  spack -e . buildcache push --update-index --base-image ubuntu:22.04 --tag my_env container-registry

  The resulting container image can then be run as follows:
@@ -178,8 +178,8 @@ Spec-related modules
  Contains :class:`~spack.spec.Spec`. Also implements most of the logic for concretization
  of specs.

- :mod:`spack.spec_parser`
+ :mod:`spack.parser`
- Contains :class:`~spack.spec_parser.SpecParser` and functions related to parsing specs.
+ Contains :class:`~spack.parser.SpecParser` and functions related to parsing specs.

  :mod:`spack.version`
  Implements a simple :class:`~spack.version.Version` class with simple
@@ -35,7 +35,7 @@ A build matrix showing which packages are working on which systems is shown belo
  .. code-block:: console

  apt update
- apt install bzip2 ca-certificates g++ gcc gfortran git gzip lsb-release patch python3 tar unzip xz-utils zstd
+ apt install bzip2 ca-certificates file g++ gcc gfortran git gzip lsb-release patch python3 tar unzip xz-utils zstd

  .. tab-item:: RHEL

@@ -148,22 +148,20 @@ The first time you concretize a spec, Spack will bootstrap automatically:
  --------------------------------
  zlib@1.2.13%gcc@9.4.0+optimize+pic+shared build_system=makefile arch=linux-ubuntu20.04-icelake

- The default bootstrap behavior is to use pre-built binaries. You can verify the
- active bootstrap repositories with:
-
- .. command-output:: spack bootstrap list
-
  If for security concerns you cannot bootstrap ``clingo`` from pre-built
  binaries, you have to disable fetching the binaries we generated with Github Actions.

  .. code-block:: console

- $ spack bootstrap disable github-actions-v0.6
+ $ spack bootstrap disable github-actions-v0.4
- ==> "github-actions-v0.6" is now disabled and will not be used for bootstrapping
+ ==> "github-actions-v0.4" is now disabled and will not be used for bootstrapping
- $ spack bootstrap disable github-actions-v0.5
+ $ spack bootstrap disable github-actions-v0.3
- ==> "github-actions-v0.5" is now disabled and will not be used for bootstrapping
+ ==> "github-actions-v0.3" is now disabled and will not be used for bootstrapping

+ You can verify that the new settings are effective with:
+
+ .. command-output:: spack bootstrap list

- You can verify that the new settings are effective with ``spack bootstrap list``.

  .. note::
@@ -5137,7 +5137,7 @@ other checks.
  - Not applicable
  * - :ref:`PythonPackage <pythonpackage>`
  - Not applicable
- - ``test_imports`` (module imports)
+ - ``test`` (module imports)
  * - :ref:`QMakePackage <qmakepackage>`
  - ``check`` (``make check``)
  - Not applicable

@@ -5146,7 +5146,7 @@ other checks.
  - Not applicable
  * - :ref:`SIPPackage <sippackage>`
  - Not applicable
- - ``test_imports`` (module imports)
+ - ``test`` (module imports)
  * - :ref:`WafPackage <wafpackage>`
  - ``build_test`` (must be overridden)
  - ``install_test`` (must be overridden)
@@ -1,12 +1,12 @@
  sphinx==8.1.3
- sphinxcontrib-programoutput==0.18
+ sphinxcontrib-programoutput==0.17
  sphinx_design==0.6.1
  sphinx-rtd-theme==3.0.2
  python-levenshtein==0.26.1
  docutils==0.21.2
  pygments==2.18.0
- urllib3==2.3.0
+ urllib3==2.2.3
- pytest==8.3.4
+ pytest==8.3.3
  isort==5.13.2
  black==24.10.0
  flake8==7.1.1
@@ -8,6 +8,7 @@ unzip, , , Compress/Decompress archives
  bzip2, , , Compress/Decompress archives
  xz, , , Compress/Decompress archives
  zstd, , Optional, Compress/Decompress archives
+ file, , , Create/Use Buildcaches
  lsb-release, , , Linux: identify operating system version
  gnupg2, , , Sign/Verify Buildcaches
  git, , , Manage Software Repositories
@@ -1,254 +0,0 @@
- [entire file removed: the standard Python Software Foundation license text (history of the software; PSF License Agreement Version 2; BeOpen.com, CNRI, and CWI license agreements), 254 lines]
lib/spack/external/_vendoring/typing_extensions.py (2908 changed lines)

File diff suppressed because it is too large.
@@ -1 +0,0 @@
- from typing_extensions import *
lib/spack/external/vendor.txt (1 changed line)

@@ -8,4 +8,3 @@ six==1.16.0
  macholib==1.16.2
  altgraph==0.17.3
  ruamel.yaml==0.17.21
- typing_extensions==4.1.1
@@ -66,7 +66,7 @@ def _is_url(path_or_url: str) -> bool:
  return result


- def _system_path_filter(_func=None, arg_slice: Optional[slice] = None):
+ def system_path_filter(_func=None, arg_slice: Optional[slice] = None):
  """Filters function arguments to account for platform path separators.
  Optional slicing range can be specified to select specific arguments

@@ -100,16 +100,6 @@ def path_filter_caller(*args, **kwargs):
  return holder_func


- def _noop_decorator(_func=None, arg_slice: Optional[slice] = None):
- return _func if _func else lambda x: x
-
-
- if sys.platform == "win32":
- system_path_filter = _system_path_filter
- else:
- system_path_filter = _noop_decorator
-
-
  def sanitize_win_longpath(path: str) -> str:
  """Strip Windows extended path prefix from strings
  Returns sanitized string.

@@ -301,32 +301,35 @@ def filter_file(
  ignore_absent: bool = False,
  start_at: Optional[str] = None,
  stop_at: Optional[str] = None,
- encoding: Optional[str] = "utf-8",
  ) -> None:
  r"""Like sed, but uses python regular expressions.

- Filters every line of each file through regex and replaces the file with a filtered version.
- Preserves mode of filtered files.
+ Filters every line of each file through regex and replaces the file
+ with a filtered version. Preserves mode of filtered files.

- As with re.sub, ``repl`` can be either a string or a callable. If it is a callable, it is
- passed the match object and should return a suitable replacement string. If it is a string, it
- can contain ``\1``, ``\2``, etc. to represent back-substitution as sed would allow.
+ As with re.sub, ``repl`` can be either a string or a callable.
+ If it is a callable, it is passed the match object and should
+ return a suitable replacement string. If it is a string, it
+ can contain ``\1``, ``\2``, etc. to represent back-substitution
+ as sed would allow.

  Args:
- regex: The regular expression to search for
- repl: The string to replace matches with
- *filenames: One or more files to search and replace
- string: Treat regex as a plain string. Default it False
- backup: Make backup file(s) suffixed with ``~``. Default is False
- ignore_absent: Ignore any files that don't exist. Default is False
- start_at: Marker used to start applying the replacements. If a text line matches this
- marker filtering is started at the next line. All contents before the marker and the
- marker itself are copied verbatim. Default is to start filtering from the first line of
- the file.
- stop_at: Marker used to stop scanning the file further. If a text line matches this marker
- filtering is stopped and the rest of the file is copied verbatim. Default is to filter
- until the end of the file.
- encoding: The encoding to use when reading and writing the files. Default is None, which
- uses the system's default encoding.
+ regex (str): The regular expression to search for
+ repl (str): The string to replace matches with
+ *filenames: One or more files to search and replace
+ string (bool): Treat regex as a plain string. Default it False
+ backup (bool): Make backup file(s) suffixed with ``~``. Default is False
+ ignore_absent (bool): Ignore any files that don't exist.
+ Default is False
+ start_at (str): Marker used to start applying the replacements. If a
+ text line matches this marker filtering is started at the next line.
+ All contents before the marker and the marker itself are copied
+ verbatim. Default is to start filtering from the first line of the
+ file.
+ stop_at (str): Marker used to stop scanning the file further. If a text
+ line matches this marker filtering is stopped and the rest of the
+ file is copied verbatim. Default is to filter until the end of the
+ file.
  """
  # Allow strings to use \1, \2, etc. for replacement, like sed
  if not callable(repl):

@@ -342,56 +345,72 @@ def groupid_to_group(x):

  if string:
  regex = re.escape(regex)
- regex_compiled = re.compile(regex)
- for path in path_to_os_path(*filenames):
- if ignore_absent and not os.path.exists(path):
- tty.debug(f'FILTER FILE: file "{path}" not found. Skipping to next file.')
+ for filename in path_to_os_path(*filenames):
+ msg = 'FILTER FILE: {0} [replacing "{1}"]'
+ tty.debug(msg.format(filename, regex))
+
+ backup_filename = filename + "~"
+ tmp_filename = filename + ".spack~"
+
+ if ignore_absent and not os.path.exists(filename):
+ msg = 'FILTER FILE: file "{0}" not found. Skipping to next file.'
+ tty.debug(msg.format(filename))
  continue
- else:
- tty.debug(f'FILTER FILE: {path} [replacing "{regex}"]')

- fd, temp_path = tempfile.mkstemp(
- prefix=f"{os.path.basename(path)}.", dir=os.path.dirname(path)
- )
- os.close(fd)
+ # Create backup file. Don't overwrite an existing backup
+ # file in case this file is being filtered multiple times.
+ if not os.path.exists(backup_filename):
+ shutil.copy(filename, backup_filename)

- shutil.copy(path, temp_path)
- errored = False
+ # Create a temporary file to read from. We cannot use backup_filename
+ # in case filter_file is invoked multiple times on the same file.
+ shutil.copy(filename, tmp_filename)

  try:
- # Open as a text file and filter until the end of the file is reached, or we found a
- # marker in the line if it was specified. To avoid translating line endings (\n to
- # \r\n and vice-versa) use newline="".
- with open(
- temp_path, mode="r", errors="surrogateescape", newline="", encoding=encoding
- ) as input_file, open(
- path, mode="w", errors="surrogateescape", newline="", encoding=encoding
- ) as output_file:
- if start_at is None and stop_at is None:  # common case, avoids branching in loop
- for line in input_file:
- output_file.write(re.sub(regex_compiled, repl, line))
- else:
- # state is -1 before start_at; 0 between; 1 after stop_at
- state = 0 if start_at is None else -1
- for line in input_file:
- if state == 0:
+ # Open as a text file and filter until the end of the file is
+ # reached, or we found a marker in the line if it was specified
+ #
+ # To avoid translating line endings (\n to \r\n and vice-versa)
+ # we force os.open to ignore translations and use the line endings
+ # the file comes with
+ with open(tmp_filename, mode="r", errors="surrogateescape", newline="") as input_file:
+ with open(filename, mode="w", errors="surrogateescape", newline="") as output_file:
+ do_filtering = start_at is None
+ # Using iter and readline is a workaround needed not to
+ # disable input_file.tell(), which will happen if we call
+ # input_file.next() implicitly via the for loop
+ for line in iter(input_file.readline, ""):
+ if stop_at is not None:
+ current_position = input_file.tell()
  if stop_at == line.strip():
- state = 1
- else:
- line = re.sub(regex_compiled, repl, line)
- elif state == -1 and start_at == line.strip():
- state = 0
  output_file.write(line)
+ break
+ if do_filtering:
+ filtered_line = re.sub(regex, repl, line)
+ output_file.write(filtered_line)
+ else:
+ do_filtering = start_at == line.strip()
+ output_file.write(line)
+ else:
+ current_position = None
+
+ # If we stopped filtering at some point, reopen the file in
+ # binary mode and copy verbatim the remaining part
+ if current_position and stop_at:
+ with open(tmp_filename, mode="rb") as input_binary_buffer:
+ input_binary_buffer.seek(current_position)
+ with open(filename, mode="ab") as output_binary_buffer:
+ output_binary_buffer.writelines(input_binary_buffer.readlines())

  except BaseException:
- # restore the original file
- os.rename(temp_path, path)
- errored = True
+ # clean up the original file on failure.
+ shutil.move(backup_filename, filename)
  raise

  finally:
- if not errored and not backup:
- os.unlink(temp_path)
+ os.remove(tmp_filename)
+ if not backup and os.path.exists(backup_filename):
+ os.remove(backup_filename)


  class FileFilter:

@@ -1096,12 +1115,12 @@ def hash_directory(directory, ignore=[]):

  @contextmanager
  @system_path_filter
- def write_tmp_and_move(filename: str, *, encoding: Optional[str] = None):
+ def write_tmp_and_move(filename):
  """Write to a temporary file, then move into place."""
  dirname = os.path.dirname(filename)
  basename = os.path.basename(filename)
  tmp = os.path.join(dirname, ".%s.tmp" % basename)
- with open(tmp, "w", encoding=encoding) as f:
+ with open(tmp, "w") as f:
  yield f
  shutil.move(tmp, filename)
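Both versions of ``filter_file`` in the hunks above implement the same marker semantics described in the docstring: everything up to and including the ``start_at`` marker is copied verbatim, everything from the ``stop_at`` marker onward is copied verbatim, and only the region in between is passed through ``re.sub``. The following is a minimal standalone sketch of that state machine (not Spack's code; it works on a list of lines and omits the file handling, backup, and error recovery shown in the diff):

```python
import re
from typing import Callable, List, Optional, Union


def filter_lines(
    lines: List[str],
    regex: str,
    repl: Union[str, Callable],
    start_at: Optional[str] = None,
    stop_at: Optional[str] = None,
) -> List[str]:
    """Apply re.sub(regex, repl, ...) only between the start_at and stop_at markers."""
    pattern = re.compile(regex)
    # state: -1 = before start_at, 0 = filtering, 1 = after stop_at
    state = 0 if start_at is None else -1
    out = []
    for line in lines:
        if state == 0:
            if stop_at is not None and stop_at == line.strip():
                state = 1  # copy the marker and everything after it verbatim
            else:
                line = pattern.sub(repl, line)
        elif state == -1 and start_at == line.strip():
            state = 0  # start filtering on the next line
        out.append(line)
    return out


# Example: rewrite a compiler path, but only inside the marked block.
text = ["# begin", "CC=/usr/bin/gcc", "# end", "CC=/usr/bin/gcc"]
print(filter_lines(text, r"/usr/bin/gcc", "/opt/gcc/bin/gcc", start_at="# begin", stop_at="# end"))
```

Only the second line is rewritten; the line after ``# end`` is left untouched, which mirrors the behaviour both implementations in the diff aim for.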
@@ -863,10 +863,8 @@ def elide_list(line_list: List[str], max_num: int = 10) -> List[str]:

  if sys.version_info >= (3, 9):
  PatternStr = re.Pattern[str]
- PatternBytes = re.Pattern[bytes]
  else:
  PatternStr = typing.Pattern[str]
- PatternBytes = typing.Pattern[bytes]


  def fnmatch_translate_multiple(named_patterns: Dict[str, str]) -> str:
@@ -96,8 +96,8 @@ def get_fh(self, path: str) -> IO:
|
|||||||
Arguments:
|
Arguments:
|
||||||
path: path to lock file we want a filehandle for
|
path: path to lock file we want a filehandle for
|
||||||
"""
|
"""
|
||||||
# Open writable files as rb+ so we can upgrade to write later
|
# Open writable files as 'r+' so we can upgrade to write later
|
||||||
os_mode, fh_mode = (os.O_RDWR | os.O_CREAT), "rb+"
|
os_mode, fh_mode = (os.O_RDWR | os.O_CREAT), "r+"
|
||||||
|
|
||||||
pid = os.getpid()
|
pid = os.getpid()
|
||||||
open_file = None # OpenFile object, if there is one
|
open_file = None # OpenFile object, if there is one
|
||||||
@@ -124,7 +124,7 @@ def get_fh(self, path: str) -> IO:
|
|||||||
# we know path exists but not if it's writable. If it's read-only,
|
# we know path exists but not if it's writable. If it's read-only,
|
||||||
# only open the file for reading (and fail if we're trying to get
|
# only open the file for reading (and fail if we're trying to get
|
||||||
# an exclusive (write) lock on it)
|
# an exclusive (write) lock on it)
|
||||||
os_mode, fh_mode = os.O_RDONLY, "rb"
|
os_mode, fh_mode = os.O_RDONLY, "r"
|
||||||
|
|
||||||
fd = os.open(path, os_mode)
|
fd = os.open(path, os_mode)
|
||||||
fh = os.fdopen(fd, fh_mode)
|
fh = os.fdopen(fd, fh_mode)
|
||||||
@@ -243,7 +243,7 @@ def __init__(
|
|||||||
helpful for distinguishing between different Spack locks.
|
helpful for distinguishing between different Spack locks.
|
||||||
"""
|
"""
|
||||||
self.path = path
|
self.path = path
|
||||||
self._file: Optional[IO[bytes]] = None
|
self._file: Optional[IO] = None
|
||||||
self._reads = 0
|
self._reads = 0
|
||||||
self._writes = 0
|
self._writes = 0
|
||||||
|
|
||||||
@@ -329,9 +329,9 @@ def _lock(self, op: int, timeout: Optional[float] = None) -> Tuple[float, int]:
         self._ensure_parent_directory()
         self._file = FILE_TRACKER.get_fh(self.path)

-        if LockType.to_module(op) == fcntl.LOCK_EX and self._file.mode == "rb":
+        if LockType.to_module(op) == fcntl.LOCK_EX and self._file.mode == "r":
             # Attempt to upgrade to write lock w/a read-only file.
-            # If the file were writable, we'd have opened it rb+
+            # If the file were writable, we'd have opened it 'r+'
             raise LockROFileError(self.path)

         self._log_debug(
@@ -426,7 +426,7 @@ def _read_log_debug_data(self) -> None:

         line = self._file.read()
         if line:
-            pid, host = line.decode("utf-8").strip().split(",")
+            pid, host = line.strip().split(",")
             _, _, pid = pid.rpartition("=")
             _, _, self.host = host.rpartition("=")
             self.pid = int(pid)
@@ -442,7 +442,7 @@ def _write_log_debug_data(self) -> None:

         # write pid, host to disk to sync over FS
         self._file.seek(0)
-        self._file.write(f"pid={self.pid},host={self.host}".encode("utf-8"))
+        self._file.write("pid=%s,host=%s" % (self.pid, self.host))
         self._file.truncate()
         self._file.flush()
         os.fsync(self._file.fileno())
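For context, a minimal POSIX-only sketch of the locking pattern these hunks touch: open the lock file once, take an advisory lock, and write the owner data with an explicit encoding. The helper name and layout are illustrative, not Spack's API.

import fcntl
import os

def write_lock_owner(path: str, host: str) -> None:
    fd = os.open(path, os.O_RDWR | os.O_CREAT)
    with os.fdopen(fd, "rb+") as fh:
        fcntl.lockf(fh, fcntl.LOCK_EX)  # exclusive advisory lock
        fh.seek(0)
        fh.write(f"pid={os.getpid()},host={host}".encode("utf-8"))
        fh.truncate()
        fh.flush()
        os.fsync(fh.fileno())  # push to disk so other hosts see it over a shared FS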
@@ -161,7 +161,7 @@ def _err_check(result, func, args):
     )
     # Use conout$ here to handle a redirectired stdout/get active console associated
     # with spack
-    with open(r"\\.\CONOUT$", "w", encoding="utf-8") as conout:
+    with open(r"\\.\CONOUT$", "w") as conout:
         # Link above would use kernel32.GetStdHandle(-11) however this would not handle
         # a redirected stdout appropriately, so we always refer to the current CONSOLE out
         # which is defined as conout$ on Windows.
@@ -762,7 +762,7 @@ def __enter__(self):
             self.reader = open(self.logfile, mode="rb+")

         # Dup stdout so we can still write to it after redirection
-        self.echo_writer = open(os.dup(sys.stdout.fileno()), "w", encoding=sys.stdout.encoding)
+        self.echo_writer = open(os.dup(sys.stdout.fileno()), "w")
         # Redirect stdout and stderr to write to logfile
         self.stderr.redirect_stream(self.writer.fileno())
         self.stdout.redirect_stream(self.writer.fileno())
@@ -879,13 +879,10 @@ def _writer_daemon(
     write_fd.close()

     # 1. Use line buffering (3rd param = 1) since Python 3 has a bug
-    # that prevents unbuffered text I/O. [needs citation]
-    # 2. Enforce a UTF-8 interpretation of build process output with errors replaced by '?'.
-    # The downside is that the log file will not contain the exact output of the build process.
+    # that prevents unbuffered text I/O.
+    # 2. Python 3.x before 3.7 does not open with UTF-8 encoding by default
     # 3. closefd=False because Connection has "ownership"
-    read_file = os.fdopen(
-        read_fd.fileno(), "r", 1, encoding="utf-8", errors="replace", closefd=False
-    )
+    read_file = os.fdopen(read_fd.fileno(), "r", 1, encoding="utf-8", closefd=False)

     if stdin_fd:
         stdin_file = os.fdopen(stdin_fd.fileno(), closefd=False)
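A minimal sketch of the text wrapping done by the os.fdopen call above, outside the daemon (the pipe and variable names are illustrative; the errors argument only appears on the develop side):

import os

read_fd, write_fd = os.pipe()
# Line-buffered text stream over the read end; closefd=False leaves descriptor
# ownership with the caller.
reader = os.fdopen(read_fd, "r", 1, encoding="utf-8", errors="replace", closefd=False)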
@@ -931,7 +928,11 @@ def _writer_daemon(
         try:
             while line_count < 100:
                 # Handle output from the calling process.
-                line = _retry(read_file.readline)()
+                try:
+                    line = _retry(read_file.readline)()
+                except UnicodeDecodeError:
+                    # installs like --test=root gpgme produce non-UTF8 logs
+                    line = "<line lost: output was not encoded as UTF-8>\n"

                 if not line:
                     return
@@ -945,13 +946,6 @@ def _writer_daemon(
                     output_line = clean_line
                     if filter_fn:
                         output_line = filter_fn(clean_line)
-                    enc = sys.stdout.encoding
-                    if enc != "utf-8":
-                        # On Python 3.6 and 3.7-3.14 with non-{utf-8,C} locale stdout
-                        # may not be able to handle utf-8 output. We do an inefficient
-                        # dance of re-encoding with errors replaced, so stdout.write
-                        # does not raise.
-                        output_line = output_line.encode(enc, "replace").decode(enc)
                     sys.stdout.write(output_line)

                 # Stripped output to log file.
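A standalone sketch of the re-encoding dance removed here (the helper name is illustrative): replace characters the terminal encoding cannot represent so sys.stdout.write does not raise.

import sys

def write_safely(text: str) -> None:
    enc = sys.stdout.encoding or "utf-8"
    if enc.lower() != "utf-8":
        # Drop or replace characters the target encoding cannot represent.
        text = text.encode(enc, errors="replace").decode(enc)
    sys.stdout.write(text)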
@@ -656,7 +656,7 @@ def _ensure_docstring_and_no_fixme(pkgs, error_cls):
     for pkg_name in pkgs:
         details = []
         filename = spack.repo.PATH.filename_for_package_name(pkg_name)
-        with open(filename, "r", encoding="utf-8") as package_file:
+        with open(filename, "r") as package_file:
             for i, line in enumerate(package_file):
                 pattern = next((r for r in fixme_regexes if r.search(line)), None)
                 if pattern:
@@ -693,19 +693,19 @@ def invalid_sha256_digest(fetcher):
                 return h, True
         return None, False

-    error_msg = f"Package '{pkg_name}' does not use sha256 checksum"
+    error_msg = "Package '{}' does not use sha256 checksum".format(pkg_name)
     details = []
     for v, args in pkg.versions.items():
         fetcher = spack.fetch_strategy.for_package_version(pkg, v)
         digest, is_bad = invalid_sha256_digest(fetcher)
         if is_bad:
-            details.append(f"{pkg_name}@{v} uses {digest}")
+            details.append("{}@{} uses {}".format(pkg_name, v, digest))

     for _, resources in pkg.resources.items():
         for resource in resources:
             digest, is_bad = invalid_sha256_digest(resource.fetcher)
             if is_bad:
-                details.append(f"Resource in '{pkg_name}' uses {digest}")
+                details.append("Resource in '{}' uses {}".format(pkg_name, digest))
     if details:
         errors.append(error_cls(error_msg, details))

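For context, this audit passes when every version and resource carries a sha256 digest; a conforming package declares versions like the following (the version number and digest are placeholders):

    version("1.2.3", sha256="e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855")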
@@ -809,7 +809,7 @@ def _uses_deprecated_globals(pkgs, error_cls):
             continue

         file = spack.repo.PATH.filename_for_package_name(pkg_name)
-        tree = ast.parse(open(file, "rb").read())
+        tree = ast.parse(open(file).read())
         visitor = DeprecatedMagicGlobals(("std_cmake_args", "std_meson_args", "std_pip_args"))
         visitor.visit(tree)
         if visitor.references_to_globals:
@@ -1009,6 +1009,20 @@ def _issues_in_depends_on_directive(pkgs, error_cls):

         for when, deps_by_name in pkg_cls.dependencies.items():
             for dep_name, dep in deps_by_name.items():
+                # Check if there are nested dependencies declared. We don't want directives like:
+                #
+                #     depends_on('foo+bar ^fee+baz')
+                #
+                # but we'd like to have two dependencies listed instead.
+                nested_dependencies = dep.spec.dependencies()
+                if nested_dependencies:
+                    summary = f"{pkg_name}: nested dependency declaration '{dep.spec}'"
+                    ndir = len(nested_dependencies) + 1
+                    details = [
+                        f"split depends_on('{dep.spec}', when='{when}') into {ndir} directives",
+                        f"in {filename}",
+                    ]
+                    errors.append(error_cls(summary=summary, details=details))

         def check_virtual_with_variants(spec, msg):
             if not spec.virtual or not spec.variants:
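An illustration of the rewrite this audit asks for, using the hypothetical specs from the comment above:

    # flagged: a nested dependency declared in a single directive
    depends_on("foo+bar ^fee+baz")

    # preferred: one directive per dependency edge
    depends_on("foo+bar")
    depends_on("fee+baz")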
@@ -24,12 +24,13 @@
 import urllib.request
 import warnings
 from contextlib import closing
-from typing import IO, Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Union
+from typing import Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Union

 import llnl.util.filesystem as fsys
 import llnl.util.lang
 import llnl.util.tty as tty
-from llnl.util.filesystem import mkdirp
+from llnl.util.filesystem import BaseDirectoryVisitor, mkdirp, visit_directory_tree
+from llnl.util.symlink import readlink

 import spack.caches
 import spack.config as config
@@ -39,7 +40,7 @@
 import spack.hash_types as ht
 import spack.hooks
 import spack.hooks.sbang
-import spack.mirrors.mirror
+import spack.mirror
 import spack.oci.image
 import spack.oci.oci
 import spack.oci.opener
@@ -53,6 +54,7 @@
 import spack.util.archive
 import spack.util.crypto
 import spack.util.file_cache as file_cache
+import spack.util.filesystem as ssys
 import spack.util.gpg
 import spack.util.parallel
 import spack.util.path
@@ -67,8 +69,10 @@
     Digest,
     ImageReference,
     default_config,
+    default_index_tag,
     default_manifest,
-    ensure_valid_tag,
+    default_tag,
+    tag_is_spec,
 )
 from spack.oci.oci import (
     copy_missing_layers_with_retry,
@@ -79,6 +83,7 @@
 )
 from spack.package_prefs import get_package_dir_permissions, get_package_group
 from spack.relocate_text import utf8_paths_to_single_binary_regex
+from spack.spec import Spec
 from spack.stage import Stage
 from spack.util.executable import which

@@ -364,7 +369,7 @@ def update(self, with_cooldown=False):
         on disk under ``_index_cache_root``)."""
         self._init_local_index_cache()
         configured_mirror_urls = [
-            m.fetch_url for m in spack.mirrors.mirror.MirrorCollection(binary=True).values()
+            m.fetch_url for m in spack.mirror.MirrorCollection(binary=True).values()
         ]
         items_to_remove = []
         spec_cache_clear_needed = False
@@ -581,15 +586,133 @@ def buildinfo_file_name(prefix):

 def read_buildinfo_file(prefix):
     """Read buildinfo file"""
-    with open(buildinfo_file_name(prefix), "r", encoding="utf-8") as f:
+    with open(buildinfo_file_name(prefix), "r") as f:
         return syaml.load(f)


-def file_matches(f: IO[bytes], regex: llnl.util.lang.PatternBytes) -> bool:
-    try:
-        return bool(regex.search(f.read()))
-    finally:
-        f.seek(0)
+class BuildManifestVisitor(BaseDirectoryVisitor):
+    """Visitor that collects a list of files and symlinks
+    that can be checked for need of relocation. It knows how
+    to dedupe hardlinks and deal with symlinks to files and
+    directories."""
+
+    def __init__(self):
+        # Save unique identifiers of hardlinks to avoid relocating them multiple times
+        self.visited = set()
+
+        # Lists of files we will check
+        self.files = []
+        self.symlinks = []
+
+    def seen_before(self, root, rel_path):
+        stat_result = os.lstat(os.path.join(root, rel_path))
+        if stat_result.st_nlink == 1:
+            return False
+        identifier = (stat_result.st_dev, stat_result.st_ino)
+        if identifier in self.visited:
+            return True
+        else:
+            self.visited.add(identifier)
+            return False
+
+    def visit_file(self, root, rel_path, depth):
+        if self.seen_before(root, rel_path):
+            return
+        self.files.append(rel_path)
+
+    def visit_symlinked_file(self, root, rel_path, depth):
+        # Note: symlinks *can* be hardlinked, but it is unclear if
+        # symlinks can be relinked in-place (preserving inode).
+        # Therefore, we do *not* de-dupe hardlinked symlinks.
+        self.symlinks.append(rel_path)
+
+    def before_visit_dir(self, root, rel_path, depth):
+        return os.path.basename(rel_path) not in (".spack", "man")
+
+    def before_visit_symlinked_dir(self, root, rel_path, depth):
+        # Treat symlinked directories simply as symlinks.
+        self.visit_symlinked_file(root, rel_path, depth)
+        # Never recurse into symlinked directories.
+        return False
+
+
+def file_matches(path, regex):
+    with open(path, "rb") as f:
+        contents = f.read()
+    return bool(regex.search(contents))
+
+
+def get_buildfile_manifest(spec):
+    """
+    Return a data structure with information about a build, including
+    text_to_relocate, binary_to_relocate, binary_to_relocate_fullpath
+    link_to_relocate, and other, which means it doesn't fit any of previous
+    checks (and should not be relocated). We exclude docs (man) and
+    metadata (.spack). This can be used to find a particular kind of file
+    in spack, or to generate the build metadata.
+    """
+    data = {
+        "text_to_relocate": [],
+        "binary_to_relocate": [],
+        "link_to_relocate": [],
+        "other": [],
+        "binary_to_relocate_fullpath": [],
+        "hardlinks_deduped": True,
+    }
+
+    # Guard against filesystem footguns of hardlinks and symlinks by using
+    # a visitor to retrieve a list of files and symlinks, so we don't have
+    # to worry about hardlinks of symlinked dirs and what not.
+    visitor = BuildManifestVisitor()
+    root = spec.prefix
+    visit_directory_tree(root, visitor)
+
+    # Collect a list of prefixes for this package and it's dependencies, Spack will
+    # look for them to decide if text file needs to be relocated or not
+    prefixes = [d.prefix for d in spec.traverse(root=True, deptype="all") if not d.external]
+    prefixes.append(spack.hooks.sbang.sbang_install_path())
+    prefixes.append(str(spack.store.STORE.layout.root))
+
+    # Create a giant regex that matches all prefixes
+    regex = utf8_paths_to_single_binary_regex(prefixes)
+
+    # Symlinks.
+
+    # Obvious bugs:
+    # 1. relative links are not relocated.
+    # 2. paths are used as strings.
+    for rel_path in visitor.symlinks:
+        abs_path = os.path.join(root, rel_path)
+        link = readlink(abs_path)
+        if os.path.isabs(link) and link.startswith(spack.store.STORE.layout.root):
+            data["link_to_relocate"].append(rel_path)
+
+    # Non-symlinks.
+    for rel_path in visitor.files:
+        abs_path = os.path.join(root, rel_path)
+        m_type, m_subtype = ssys.mime_type(abs_path)
+
+        if relocate.needs_binary_relocation(m_type, m_subtype):
+            # Why is this branch not part of needs_binary_relocation? :(
+            if (
+                (
+                    m_subtype in ("x-executable", "x-sharedlib", "x-pie-executable")
+                    and sys.platform != "darwin"
+                )
+                or (m_subtype in ("x-mach-binary") and sys.platform == "darwin")
+                or (not rel_path.endswith(".o"))
+            ):
+                data["binary_to_relocate"].append(rel_path)
+                data["binary_to_relocate_fullpath"].append(abs_path)
+                continue
+
+        elif relocate.needs_text_relocation(m_type, m_subtype) and file_matches(abs_path, regex):
+            data["text_to_relocate"].append(rel_path)
+            continue
+
+        data["other"].append(abs_path)
+
+    return data


 def deps_to_relocate(spec):
||||||
@@ -622,15 +745,17 @@ def deps_to_relocate(spec):
|
|||||||
|
|
||||||
def get_buildinfo_dict(spec):
|
def get_buildinfo_dict(spec):
|
||||||
"""Create metadata for a tarball"""
|
"""Create metadata for a tarball"""
|
||||||
|
manifest = get_buildfile_manifest(spec)
|
||||||
|
|
||||||
return {
|
return {
|
||||||
"sbang_install_path": spack.hooks.sbang.sbang_install_path(),
|
"sbang_install_path": spack.hooks.sbang.sbang_install_path(),
|
||||||
"buildpath": spack.store.STORE.layout.root,
|
"buildpath": spack.store.STORE.layout.root,
|
||||||
"spackprefix": spack.paths.prefix,
|
"spackprefix": spack.paths.prefix,
|
||||||
"relative_prefix": os.path.relpath(spec.prefix, spack.store.STORE.layout.root),
|
"relative_prefix": os.path.relpath(spec.prefix, spack.store.STORE.layout.root),
|
||||||
# "relocate_textfiles": [],
|
"relocate_textfiles": manifest["text_to_relocate"],
|
||||||
# "relocate_binaries": [],
|
"relocate_binaries": manifest["binary_to_relocate"],
|
||||||
# "relocate_links": [],
|
"relocate_links": manifest["link_to_relocate"],
|
||||||
"hardlinks_deduped": True,
|
"hardlinks_deduped": manifest["hardlinks_deduped"],
|
||||||
"hash_to_prefix": {d.dag_hash(): str(d.prefix) for d in deps_to_relocate(spec)},
|
"hash_to_prefix": {d.dag_hash(): str(d.prefix) for d in deps_to_relocate(spec)},
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -702,10 +827,10 @@ def _read_specs_and_push_index(
         contents = read_method(file)
         # Need full spec.json name or this gets confused with index.json.
         if file.endswith(".json.sig"):
-            specfile_json = spack.spec.Spec.extract_json_from_clearsig(contents)
-            fetched_spec = spack.spec.Spec.from_dict(specfile_json)
+            specfile_json = Spec.extract_json_from_clearsig(contents)
+            fetched_spec = Spec.from_dict(specfile_json)
         elif file.endswith(".json"):
-            fetched_spec = spack.spec.Spec.from_json(contents)
+            fetched_spec = Spec.from_json(contents)
         else:
             continue

||||||
@@ -715,17 +840,17 @@ def _read_specs_and_push_index(
|
|||||||
# Now generate the index, compute its hash, and push the two files to
|
# Now generate the index, compute its hash, and push the two files to
|
||||||
# the mirror.
|
# the mirror.
|
||||||
index_json_path = os.path.join(temp_dir, "index.json")
|
index_json_path = os.path.join(temp_dir, "index.json")
|
||||||
with open(index_json_path, "w", encoding="utf-8") as f:
|
with open(index_json_path, "w") as f:
|
||||||
db._write_to_file(f)
|
db._write_to_file(f)
|
||||||
|
|
||||||
# Read the index back in and compute its hash
|
# Read the index back in and compute its hash
|
||||||
with open(index_json_path, encoding="utf-8") as f:
|
with open(index_json_path) as f:
|
||||||
index_string = f.read()
|
index_string = f.read()
|
||||||
index_hash = compute_hash(index_string)
|
index_hash = compute_hash(index_string)
|
||||||
|
|
||||||
# Write the hash out to a local file
|
# Write the hash out to a local file
|
||||||
index_hash_path = os.path.join(temp_dir, "index.json.hash")
|
index_hash_path = os.path.join(temp_dir, "index.json.hash")
|
||||||
with open(index_hash_path, "w", encoding="utf-8") as f:
|
with open(index_hash_path, "w") as f:
|
||||||
f.write(index_hash)
|
f.write(index_hash)
|
||||||
|
|
||||||
# Push the index itself
|
# Push the index itself
|
||||||
@@ -759,7 +884,7 @@ def _specs_from_cache_aws_cli(cache_prefix):
|
|||||||
aws = which("aws")
|
aws = which("aws")
|
||||||
|
|
||||||
def file_read_method(file_path):
|
def file_read_method(file_path):
|
||||||
with open(file_path, encoding="utf-8") as fd:
|
with open(file_path) as fd:
|
||||||
return fd.read()
|
return fd.read()
|
||||||
|
|
||||||
tmpspecsdir = tempfile.mkdtemp()
|
tmpspecsdir = tempfile.mkdtemp()
|
||||||
@@ -904,7 +1029,7 @@ def generate_key_index(key_prefix: str, tmpdir: str) -> None:
|
|||||||
target = os.path.join(tmpdir, "index.json")
|
target = os.path.join(tmpdir, "index.json")
|
||||||
|
|
||||||
index = {"keys": dict((fingerprint, {}) for fingerprint in sorted(set(fingerprints)))}
|
index = {"keys": dict((fingerprint, {}) for fingerprint in sorted(set(fingerprints)))}
|
||||||
with open(target, "w", encoding="utf-8") as f:
|
with open(target, "w") as f:
|
||||||
sjson.dump(index, f)
|
sjson.dump(index, f)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -920,55 +1045,7 @@ def generate_key_index(key_prefix: str, tmpdir: str) -> None:
         ) from e


-class FileTypes:
-    BINARY = 0
-    TEXT = 1
-    UNKNOWN = 2
-
-
-NOT_ISO8859_1_TEXT = re.compile(b"[\x00\x7F-\x9F]")
-
-
-def file_type(f: IO[bytes]) -> int:
-    try:
-        # first check if this is an ELF or mach-o binary.
-        magic = f.read(8)
-        if len(magic) < 8:
-            return FileTypes.UNKNOWN
-        elif relocate.is_elf_magic(magic) or relocate.is_macho_magic(magic):
-            return FileTypes.BINARY
-
-        f.seek(0)
-
-        # Then try utf-8, which has a fast exponential decay in false positive rate with file size.
-        # Use chunked reads for fast early exit.
-        f_txt = io.TextIOWrapper(f, encoding="utf-8", errors="strict")
-        try:
-            while f_txt.read(1024):
-                pass
-            return FileTypes.TEXT
-        except UnicodeError:
-            f_txt.seek(0)
-            pass
-        finally:
-            f_txt.detach()
-        # Finally try iso-8859-1 heuristically. In Python, all possible 256 byte values are valid.
-        # We classify it as text if it does not contain any control characters / null bytes.
-        data = f.read(1024)
-        while data:
-            if NOT_ISO8859_1_TEXT.search(data):
-                break
-            data = f.read(1024)
-        else:
-            return FileTypes.TEXT
-        return FileTypes.UNKNOWN
-    finally:
-        f.seek(0)
-
-
-def tarfile_of_spec_prefix(
-    tar: tarfile.TarFile, prefix: str, prefixes_to_relocate: List[str]
-) -> dict:
+def tarfile_of_spec_prefix(tar: tarfile.TarFile, prefix: str) -> None:
     """Create a tarfile of an install prefix of a spec. Skips existing buildinfo file.

     Args:
|
||||||
@@ -984,33 +1061,6 @@ def tarfile_of_spec_prefix(
|
|||||||
except OSError:
|
except OSError:
|
||||||
skip = lambda entry: False
|
skip = lambda entry: False
|
||||||
|
|
||||||
binary_regex = utf8_paths_to_single_binary_regex(prefixes_to_relocate)
|
|
||||||
|
|
||||||
relocate_binaries = []
|
|
||||||
relocate_links = []
|
|
||||||
relocate_textfiles = []
|
|
||||||
|
|
||||||
# use callbacks to add files and symlinks, so we can register which files need relocation upon
|
|
||||||
# extraction.
|
|
||||||
def add_file(tar: tarfile.TarFile, info: tarfile.TarInfo, path: str):
|
|
||||||
with open(path, "rb") as f:
|
|
||||||
relpath = os.path.relpath(path, prefix)
|
|
||||||
# no need to relocate anything in the .spack directory
|
|
||||||
if relpath.split(os.sep, 1)[0] == ".spack":
|
|
||||||
tar.addfile(info, f)
|
|
||||||
return
|
|
||||||
f_type = file_type(f)
|
|
||||||
if f_type == FileTypes.BINARY:
|
|
||||||
relocate_binaries.append(os.path.relpath(path, prefix))
|
|
||||||
elif f_type == FileTypes.TEXT and file_matches(f, binary_regex):
|
|
||||||
relocate_textfiles.append(os.path.relpath(path, prefix))
|
|
||||||
tar.addfile(info, f)
|
|
||||||
|
|
||||||
def add_symlink(tar: tarfile.TarFile, info: tarfile.TarInfo, path: str):
|
|
||||||
if os.path.isabs(info.linkname) and binary_regex.match(info.linkname.encode("utf-8")):
|
|
||||||
relocate_links.append(os.path.relpath(path, prefix))
|
|
||||||
tar.addfile(info)
|
|
||||||
|
|
||||||
spack.util.archive.reproducible_tarfile_from_prefix(
|
spack.util.archive.reproducible_tarfile_from_prefix(
|
||||||
tar,
|
tar,
|
||||||
prefix,
|
prefix,
|
||||||
@@ -1018,51 +1068,29 @@ def add_symlink(tar: tarfile.TarFile, info: tarfile.TarInfo, path: str):
|
|||||||
# used in runtimes like AWS lambda.
|
# used in runtimes like AWS lambda.
|
||||||
include_parent_directories=True,
|
include_parent_directories=True,
|
||||||
skip=skip,
|
skip=skip,
|
||||||
add_file=add_file,
|
|
||||||
add_symlink=add_symlink,
|
|
||||||
)
|
|
||||||
|
|
||||||
return {
|
|
||||||
"relocate_binaries": relocate_binaries,
|
|
||||||
"relocate_links": relocate_links,
|
|
||||||
"relocate_textfiles": relocate_textfiles,
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def create_tarball(spec: spack.spec.Spec, tarfile_path: str) -> Tuple[str, str]:
|
|
||||||
"""Create a tarball of a spec and return the checksums of the compressed tarfile and the
|
|
||||||
uncompressed tarfile."""
|
|
||||||
return _do_create_tarball(
|
|
||||||
tarfile_path,
|
|
||||||
spec.prefix,
|
|
||||||
buildinfo=get_buildinfo_dict(spec),
|
|
||||||
prefixes_to_relocate=prefixes_to_relocate(spec),
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def _do_create_tarball(
|
def _do_create_tarball(tarfile_path: str, binaries_dir: str, buildinfo: dict):
|
||||||
tarfile_path: str, prefix: str, buildinfo: dict, prefixes_to_relocate: List[str]
|
|
||||||
) -> Tuple[str, str]:
|
|
||||||
with spack.util.archive.gzip_compressed_tarfile(tarfile_path) as (
|
with spack.util.archive.gzip_compressed_tarfile(tarfile_path) as (
|
||||||
tar,
|
tar,
|
||||||
tar_gz_checksum,
|
inner_checksum,
|
||||||
tar_checksum,
|
outer_checksum,
|
||||||
):
|
):
|
||||||
# Tarball the install prefix
|
# Tarball the install prefix
|
||||||
files_to_relocate = tarfile_of_spec_prefix(tar, prefix, prefixes_to_relocate)
|
tarfile_of_spec_prefix(tar, binaries_dir)
|
||||||
buildinfo.update(files_to_relocate)
|
|
||||||
|
|
||||||
# Serialize buildinfo for the tarball
|
# Serialize buildinfo for the tarball
|
||||||
bstring = syaml.dump(buildinfo, default_flow_style=True).encode("utf-8")
|
bstring = syaml.dump(buildinfo, default_flow_style=True).encode("utf-8")
|
||||||
tarinfo = tarfile.TarInfo(
|
tarinfo = tarfile.TarInfo(
|
||||||
name=spack.util.archive.default_path_to_name(buildinfo_file_name(prefix))
|
name=spack.util.archive.default_path_to_name(buildinfo_file_name(binaries_dir))
|
||||||
)
|
)
|
||||||
tarinfo.type = tarfile.REGTYPE
|
tarinfo.type = tarfile.REGTYPE
|
||||||
tarinfo.size = len(bstring)
|
tarinfo.size = len(bstring)
|
||||||
tarinfo.mode = 0o644
|
tarinfo.mode = 0o644
|
||||||
tar.addfile(tarinfo, io.BytesIO(bstring))
|
tar.addfile(tarinfo, io.BytesIO(bstring))
|
||||||
|
|
||||||
return tar_gz_checksum.hexdigest(), tar_checksum.hexdigest()
|
return inner_checksum.hexdigest(), outer_checksum.hexdigest()
|
||||||
|
|
||||||
|
|
||||||
class ExistsInBuildcache(NamedTuple):
|
class ExistsInBuildcache(NamedTuple):
|
||||||
@@ -1072,7 +1100,7 @@ class ExistsInBuildcache(NamedTuple):
|
|||||||
|
|
||||||
|
|
||||||
class BuildcacheFiles:
|
class BuildcacheFiles:
|
||||||
def __init__(self, spec: spack.spec.Spec, local: str, remote: str):
|
def __init__(self, spec: Spec, local: str, remote: str):
|
||||||
"""
|
"""
|
||||||
Args:
|
Args:
|
||||||
spec: The spec whose tarball and specfile are being managed.
|
spec: The spec whose tarball and specfile are being managed.
|
||||||
@@ -1102,7 +1130,7 @@ def local_tarball(self) -> str:
|
|||||||
return os.path.join(self.local, f"{self.spec.dag_hash()}.tar.gz")
|
return os.path.join(self.local, f"{self.spec.dag_hash()}.tar.gz")
|
||||||
|
|
||||||
|
|
||||||
def _exists_in_buildcache(spec: spack.spec.Spec, tmpdir: str, out_url: str) -> ExistsInBuildcache:
|
def _exists_in_buildcache(spec: Spec, tmpdir: str, out_url: str) -> ExistsInBuildcache:
|
||||||
"""returns a tuple of bools (signed, unsigned, tarball) indicating whether specfiles/tarballs
|
"""returns a tuple of bools (signed, unsigned, tarball) indicating whether specfiles/tarballs
|
||||||
exist in the buildcache"""
|
exist in the buildcache"""
|
||||||
files = BuildcacheFiles(spec, tmpdir, out_url)
|
files = BuildcacheFiles(spec, tmpdir, out_url)
|
||||||
@@ -1112,23 +1140,12 @@ def _exists_in_buildcache(spec: spack.spec.Spec, tmpdir: str, out_url: str) -> E
|
|||||||
return ExistsInBuildcache(signed, unsigned, tarball)
|
return ExistsInBuildcache(signed, unsigned, tarball)
|
||||||
|
|
||||||
|
|
||||||
def prefixes_to_relocate(spec):
|
|
||||||
prefixes = [s.prefix for s in deps_to_relocate(spec)]
|
|
||||||
prefixes.append(spack.hooks.sbang.sbang_install_path())
|
|
||||||
prefixes.append(str(spack.store.STORE.layout.root))
|
|
||||||
return prefixes
|
|
||||||
|
|
||||||
|
|
||||||
def _url_upload_tarball_and_specfile(
|
def _url_upload_tarball_and_specfile(
|
||||||
spec: spack.spec.Spec,
|
spec: Spec, tmpdir: str, out_url: str, exists: ExistsInBuildcache, signing_key: Optional[str]
|
||||||
tmpdir: str,
|
|
||||||
out_url: str,
|
|
||||||
exists: ExistsInBuildcache,
|
|
||||||
signing_key: Optional[str],
|
|
||||||
):
|
):
|
||||||
files = BuildcacheFiles(spec, tmpdir, out_url)
|
files = BuildcacheFiles(spec, tmpdir, out_url)
|
||||||
tarball = files.local_tarball()
|
tarball = files.local_tarball()
|
||||||
checksum, _ = create_tarball(spec, tarball)
|
checksum, _ = _do_create_tarball(tarball, spec.prefix, get_buildinfo_dict(spec))
|
||||||
spec_dict = spec.to_dict(hash=ht.dag_hash)
|
spec_dict = spec.to_dict(hash=ht.dag_hash)
|
||||||
spec_dict["buildcache_layout_version"] = CURRENT_BUILD_CACHE_LAYOUT_VERSION
|
spec_dict["buildcache_layout_version"] = CURRENT_BUILD_CACHE_LAYOUT_VERSION
|
||||||
spec_dict["binary_cache_checksum"] = {"hash_algorithm": "sha256", "hash": checksum}
|
spec_dict["binary_cache_checksum"] = {"hash_algorithm": "sha256", "hash": checksum}
|
||||||
@@ -1142,7 +1159,7 @@ def _url_upload_tarball_and_specfile(
|
|||||||
web_util.push_to_url(tarball, files.remote_tarball(), keep_original=False)
|
web_util.push_to_url(tarball, files.remote_tarball(), keep_original=False)
|
||||||
|
|
||||||
specfile = files.local_specfile()
|
specfile = files.local_specfile()
|
||||||
with open(specfile, "w", encoding="utf-8") as f:
|
with open(specfile, "w") as f:
|
||||||
# Note: when using gpg clear sign, we need to avoid long lines (19995 chars).
|
# Note: when using gpg clear sign, we need to avoid long lines (19995 chars).
|
||||||
# If lines are longer, they are truncated without error. Thanks GPG!
|
# If lines are longer, they are truncated without error. Thanks GPG!
|
||||||
# So, here we still add newlines, but no indent, so save on file size and
|
# So, here we still add newlines, but no indent, so save on file size and
|
||||||
@@ -1159,7 +1176,7 @@ def _url_upload_tarball_and_specfile(
|
|||||||
|
|
||||||
|
|
||||||
class Uploader:
|
class Uploader:
|
||||||
def __init__(self, mirror: spack.mirrors.mirror.Mirror, force: bool, update_index: bool):
|
def __init__(self, mirror: spack.mirror.Mirror, force: bool, update_index: bool):
|
||||||
self.mirror = mirror
|
self.mirror = mirror
|
||||||
self.force = force
|
self.force = force
|
||||||
self.update_index = update_index
|
self.update_index = update_index
|
||||||
@@ -1207,7 +1224,7 @@ def tag(self, tag: str, roots: List[spack.spec.Spec]):
|
|||||||
class OCIUploader(Uploader):
|
class OCIUploader(Uploader):
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
mirror: spack.mirrors.mirror.Mirror,
|
mirror: spack.mirror.Mirror,
|
||||||
force: bool,
|
force: bool,
|
||||||
update_index: bool,
|
update_index: bool,
|
||||||
base_image: Optional[str],
|
base_image: Optional[str],
|
||||||
@@ -1256,7 +1273,7 @@ def tag(self, tag: str, roots: List[spack.spec.Spec]):
|
|||||||
class URLUploader(Uploader):
|
class URLUploader(Uploader):
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
mirror: spack.mirrors.mirror.Mirror,
|
mirror: spack.mirror.Mirror,
|
||||||
force: bool,
|
force: bool,
|
||||||
update_index: bool,
|
update_index: bool,
|
||||||
signing_key: Optional[str],
|
signing_key: Optional[str],
|
||||||
@@ -1280,7 +1297,7 @@ def push(
|
|||||||
|
|
||||||
|
|
||||||
def make_uploader(
|
def make_uploader(
|
||||||
mirror: spack.mirrors.mirror.Mirror,
|
mirror: spack.mirror.Mirror,
|
||||||
force: bool = False,
|
force: bool = False,
|
||||||
update_index: bool = False,
|
update_index: bool = False,
|
||||||
signing_key: Optional[str] = None,
|
signing_key: Optional[str] = None,
|
||||||
@@ -1297,7 +1314,7 @@ def make_uploader(
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def _format_spec(spec: spack.spec.Spec) -> str:
|
def _format_spec(spec: Spec) -> str:
|
||||||
return spec.cformat("{name}{@version}{/hash:7}")
|
return spec.cformat("{name}{@version}{/hash:7}")
|
||||||
|
|
||||||
|
|
||||||
@@ -1320,7 +1337,7 @@ def _progress(self):
|
|||||||
return f"[{self.n:{digits}}/{self.total}] "
|
return f"[{self.n:{digits}}/{self.total}] "
|
||||||
return ""
|
return ""
|
||||||
|
|
||||||
def start(self, spec: spack.spec.Spec, running: bool) -> None:
|
def start(self, spec: Spec, running: bool) -> None:
|
||||||
self.n += 1
|
self.n += 1
|
||||||
self.running = running
|
self.running = running
|
||||||
self.pre = self._progress()
|
self.pre = self._progress()
|
||||||
@@ -1339,18 +1356,18 @@ def fail(self) -> None:
|
|||||||
|
|
||||||
|
|
||||||
def _url_push(
|
def _url_push(
|
||||||
specs: List[spack.spec.Spec],
|
specs: List[Spec],
|
||||||
out_url: str,
|
out_url: str,
|
||||||
signing_key: Optional[str],
|
signing_key: Optional[str],
|
||||||
force: bool,
|
force: bool,
|
||||||
update_index: bool,
|
update_index: bool,
|
||||||
tmpdir: str,
|
tmpdir: str,
|
||||||
executor: concurrent.futures.Executor,
|
executor: concurrent.futures.Executor,
|
||||||
) -> Tuple[List[spack.spec.Spec], List[Tuple[spack.spec.Spec, BaseException]]]:
|
) -> Tuple[List[Spec], List[Tuple[Spec, BaseException]]]:
|
||||||
"""Pushes to the provided build cache, and returns a list of skipped specs that were already
|
"""Pushes to the provided build cache, and returns a list of skipped specs that were already
|
||||||
present (when force=False), and a list of errors. Does not raise on error."""
|
present (when force=False), and a list of errors. Does not raise on error."""
|
||||||
skipped: List[spack.spec.Spec] = []
|
skipped: List[Spec] = []
|
||||||
errors: List[Tuple[spack.spec.Spec, BaseException]] = []
|
errors: List[Tuple[Spec, BaseException]] = []
|
||||||
|
|
||||||
exists_futures = [
|
exists_futures = [
|
||||||
executor.submit(_exists_in_buildcache, spec, tmpdir, out_url) for spec in specs
|
executor.submit(_exists_in_buildcache, spec, tmpdir, out_url) for spec in specs
|
||||||
@@ -1423,7 +1440,7 @@ def _url_push(
|
|||||||
return skipped, errors
|
return skipped, errors
|
||||||
|
|
||||||
|
|
||||||
def _oci_upload_success_msg(spec: spack.spec.Spec, digest: Digest, size: int, elapsed: float):
|
def _oci_upload_success_msg(spec: Spec, digest: Digest, size: int, elapsed: float):
|
||||||
elapsed = max(elapsed, 0.001) # guard against division by zero
|
elapsed = max(elapsed, 0.001) # guard against division by zero
|
||||||
return (
|
return (
|
||||||
f"Pushed {_format_spec(spec)}: {digest} ({elapsed:.2f}s, "
|
f"Pushed {_format_spec(spec)}: {digest} ({elapsed:.2f}s, "
|
||||||
@@ -1452,11 +1469,13 @@ def _oci_push_pkg_blob(
|
|||||||
filename = os.path.join(tmpdir, f"{spec.dag_hash()}.tar.gz")
|
filename = os.path.join(tmpdir, f"{spec.dag_hash()}.tar.gz")
|
||||||
|
|
||||||
# Create an oci.image.layer aka tarball of the package
|
# Create an oci.image.layer aka tarball of the package
|
||||||
tar_gz_checksum, tar_checksum = create_tarball(spec, filename)
|
compressed_tarfile_checksum, tarfile_checksum = _do_create_tarball(
|
||||||
|
filename, spec.prefix, get_buildinfo_dict(spec)
|
||||||
|
)
|
||||||
|
|
||||||
blob = spack.oci.oci.Blob(
|
blob = spack.oci.oci.Blob(
|
||||||
Digest.from_sha256(tar_gz_checksum),
|
Digest.from_sha256(compressed_tarfile_checksum),
|
||||||
Digest.from_sha256(tar_checksum),
|
Digest.from_sha256(tarfile_checksum),
|
||||||
os.path.getsize(filename),
|
os.path.getsize(filename),
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -1507,7 +1526,7 @@ def _oci_put_manifest(
|
|||||||
):
|
):
|
||||||
architecture = _oci_archspec_to_gooarch(specs[0])
|
architecture = _oci_archspec_to_gooarch(specs[0])
|
||||||
|
|
||||||
expected_blobs: List[spack.spec.Spec] = [
|
expected_blobs: List[Spec] = [
|
||||||
s
|
s
|
||||||
for s in traverse.traverse_nodes(specs, order="topo", deptype=("link", "run"), root=True)
|
for s in traverse.traverse_nodes(specs, order="topo", deptype=("link", "run"), root=True)
|
||||||
if not s.external
|
if not s.external
|
||||||
@@ -1551,7 +1570,7 @@ def _oci_put_manifest(
|
|||||||
|
|
||||||
config_file = os.path.join(tmpdir, f"{specs[0].dag_hash()}.config.json")
|
config_file = os.path.join(tmpdir, f"{specs[0].dag_hash()}.config.json")
|
||||||
|
|
||||||
with open(config_file, "w", encoding="utf-8") as f:
|
with open(config_file, "w") as f:
|
||||||
json.dump(config, f, separators=(",", ":"))
|
json.dump(config, f, separators=(",", ":"))
|
||||||
|
|
||||||
config_file_checksum = Digest.from_sha256(
|
config_file_checksum = Digest.from_sha256(
|
||||||
@@ -1621,33 +1640,19 @@ def _oci_update_base_images(
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def _oci_default_tag(spec: spack.spec.Spec) -> str:
|
|
||||||
"""Return a valid, default image tag for a spec."""
|
|
||||||
return ensure_valid_tag(f"{spec.name}-{spec.version}-{spec.dag_hash()}.spack")
|
|
||||||
|
|
||||||
|
|
||||||
#: Default OCI index tag
|
|
||||||
default_index_tag = "index.spack"
|
|
||||||
|
|
||||||
|
|
||||||
def tag_is_spec(tag: str) -> bool:
|
|
||||||
"""Check if a tag is likely a Spec"""
|
|
||||||
return tag.endswith(".spack") and tag != default_index_tag
|
|
||||||
|
|
||||||
|
|
||||||
def _oci_push(
|
def _oci_push(
|
||||||
*,
|
*,
|
||||||
target_image: ImageReference,
|
target_image: ImageReference,
|
||||||
base_image: Optional[ImageReference],
|
base_image: Optional[ImageReference],
|
||||||
installed_specs_with_deps: List[spack.spec.Spec],
|
installed_specs_with_deps: List[Spec],
|
||||||
tmpdir: str,
|
tmpdir: str,
|
||||||
executor: concurrent.futures.Executor,
|
executor: concurrent.futures.Executor,
|
||||||
force: bool = False,
|
force: bool = False,
|
||||||
) -> Tuple[
|
) -> Tuple[
|
||||||
List[spack.spec.Spec],
|
List[Spec],
|
||||||
Dict[str, Tuple[dict, dict]],
|
Dict[str, Tuple[dict, dict]],
|
||||||
Dict[str, spack.oci.oci.Blob],
|
Dict[str, spack.oci.oci.Blob],
|
||||||
List[Tuple[spack.spec.Spec, BaseException]],
|
List[Tuple[Spec, BaseException]],
|
||||||
]:
|
]:
|
||||||
# Spec dag hash -> blob
|
# Spec dag hash -> blob
|
||||||
checksums: Dict[str, spack.oci.oci.Blob] = {}
|
checksums: Dict[str, spack.oci.oci.Blob] = {}
|
||||||
@@ -1656,15 +1661,13 @@ def _oci_push(
|
|||||||
base_images: Dict[str, Tuple[dict, dict]] = {}
|
base_images: Dict[str, Tuple[dict, dict]] = {}
|
||||||
|
|
||||||
# Specs not uploaded because they already exist
|
# Specs not uploaded because they already exist
|
||||||
skipped: List[spack.spec.Spec] = []
|
skipped: List[Spec] = []
|
||||||
|
|
||||||
if not force:
|
if not force:
|
||||||
tty.info("Checking for existing specs in the buildcache")
|
tty.info("Checking for existing specs in the buildcache")
|
||||||
blobs_to_upload = []
|
blobs_to_upload = []
|
||||||
|
|
||||||
tags_to_check = (
|
tags_to_check = (target_image.with_tag(default_tag(s)) for s in installed_specs_with_deps)
|
||||||
target_image.with_tag(_oci_default_tag(s)) for s in installed_specs_with_deps
|
|
||||||
)
|
|
||||||
available_blobs = executor.map(_oci_get_blob_info, tags_to_check)
|
available_blobs = executor.map(_oci_get_blob_info, tags_to_check)
|
||||||
|
|
||||||
for spec, maybe_blob in zip(installed_specs_with_deps, available_blobs):
|
for spec, maybe_blob in zip(installed_specs_with_deps, available_blobs):
|
||||||
@@ -1692,8 +1695,8 @@ def _oci_push(
|
|||||||
executor.submit(_oci_push_pkg_blob, target_image, spec, tmpdir) for spec in blobs_to_upload
|
executor.submit(_oci_push_pkg_blob, target_image, spec, tmpdir) for spec in blobs_to_upload
|
||||||
]
|
]
|
||||||
|
|
||||||
manifests_to_upload: List[spack.spec.Spec] = []
|
manifests_to_upload: List[Spec] = []
|
||||||
errors: List[Tuple[spack.spec.Spec, BaseException]] = []
|
errors: List[Tuple[Spec, BaseException]] = []
|
||||||
|
|
||||||
# And update the spec to blob mapping for successful uploads
|
# And update the spec to blob mapping for successful uploads
|
||||||
for spec, blob_future in zip(blobs_to_upload, blob_futures):
|
for spec, blob_future in zip(blobs_to_upload, blob_futures):
|
||||||
@@ -1719,7 +1722,7 @@ def _oci_push(
|
|||||||
base_image_cache=base_images,
|
base_image_cache=base_images,
|
||||||
)
|
)
|
||||||
|
|
||||||
def extra_config(spec: spack.spec.Spec):
|
def extra_config(spec: Spec):
|
||||||
spec_dict = spec.to_dict(hash=ht.dag_hash)
|
spec_dict = spec.to_dict(hash=ht.dag_hash)
|
||||||
spec_dict["buildcache_layout_version"] = CURRENT_BUILD_CACHE_LAYOUT_VERSION
|
spec_dict["buildcache_layout_version"] = CURRENT_BUILD_CACHE_LAYOUT_VERSION
|
||||||
spec_dict["binary_cache_checksum"] = {
|
spec_dict["binary_cache_checksum"] = {
|
||||||
@@ -1735,7 +1738,7 @@ def extra_config(spec: spack.spec.Spec):
|
|||||||
_oci_put_manifest,
|
_oci_put_manifest,
|
||||||
base_images,
|
base_images,
|
||||||
checksums,
|
checksums,
|
||||||
target_image.with_tag(_oci_default_tag(spec)),
|
target_image.with_tag(default_tag(spec)),
|
||||||
tmpdir,
|
tmpdir,
|
||||||
extra_config(spec),
|
extra_config(spec),
|
||||||
{"org.opencontainers.image.description": spec.format()},
|
{"org.opencontainers.image.description": spec.format()},
|
||||||
@@ -1752,7 +1755,7 @@ def extra_config(spec: spack.spec.Spec):
|
|||||||
manifest_progress.start(spec, manifest_future.running())
|
manifest_progress.start(spec, manifest_future.running())
|
||||||
if error is None:
|
if error is None:
|
||||||
manifest_progress.ok(
|
manifest_progress.ok(
|
||||||
f"Tagged {_format_spec(spec)} as {target_image.with_tag(_oci_default_tag(spec))}"
|
f"Tagged {_format_spec(spec)} as {target_image.with_tag(default_tag(spec))}"
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
manifest_progress.fail()
|
manifest_progress.fail()
|
||||||
@@ -1787,13 +1790,13 @@ def _oci_update_index(
|
|||||||
db = BuildCacheDatabase(db_root_dir)
|
db = BuildCacheDatabase(db_root_dir)
|
||||||
|
|
||||||
for spec_dict in spec_dicts:
|
for spec_dict in spec_dicts:
|
||||||
spec = spack.spec.Spec.from_dict(spec_dict)
|
spec = Spec.from_dict(spec_dict)
|
||||||
db.add(spec)
|
db.add(spec)
|
||||||
db.mark(spec, "in_buildcache", True)
|
db.mark(spec, "in_buildcache", True)
|
||||||
|
|
||||||
# Create the index.json file
|
# Create the index.json file
|
||||||
index_json_path = os.path.join(tmpdir, "index.json")
|
index_json_path = os.path.join(tmpdir, "index.json")
|
||||||
with open(index_json_path, "w", encoding="utf-8") as f:
|
with open(index_json_path, "w") as f:
|
||||||
db._write_to_file(f)
|
db._write_to_file(f)
|
||||||
|
|
||||||
# Create an empty config.json file
|
# Create an empty config.json file
|
||||||
@@ -1902,7 +1905,7 @@ def _get_valid_spec_file(path: str, max_supported_layout: int) -> Tuple[Dict, in
|
|||||||
try:
|
try:
|
||||||
as_string = binary_content.decode("utf-8")
|
as_string = binary_content.decode("utf-8")
|
||||||
if path.endswith(".json.sig"):
|
if path.endswith(".json.sig"):
|
||||||
spec_dict = spack.spec.Spec.extract_json_from_clearsig(as_string)
|
spec_dict = Spec.extract_json_from_clearsig(as_string)
|
||||||
else:
|
else:
|
||||||
spec_dict = json.loads(as_string)
|
spec_dict = json.loads(as_string)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
@@ -1950,9 +1953,9 @@ def download_tarball(spec, unsigned: Optional[bool] = False, mirrors_for_spec=No
|
|||||||
"signature_verified": "true-if-binary-pkg-was-already-verified"
|
"signature_verified": "true-if-binary-pkg-was-already-verified"
|
||||||
}
|
}
|
||||||
"""
|
"""
|
||||||
configured_mirrors: Iterable[spack.mirrors.mirror.Mirror] = (
|
configured_mirrors: Iterable[spack.mirror.Mirror] = spack.mirror.MirrorCollection(
|
||||||
spack.mirrors.mirror.MirrorCollection(binary=True).values()
|
binary=True
|
||||||
)
|
).values()
|
||||||
if not configured_mirrors:
|
if not configured_mirrors:
|
||||||
tty.die("Please add a spack mirror to allow download of pre-compiled packages.")
|
tty.die("Please add a spack mirror to allow download of pre-compiled packages.")
|
||||||
|
|
||||||
@@ -1977,7 +1980,7 @@ def fetch_url_to_mirror(url):
|
|||||||
for mirror in configured_mirrors:
|
for mirror in configured_mirrors:
|
||||||
if mirror.fetch_url == url:
|
if mirror.fetch_url == url:
|
||||||
return mirror
|
return mirror
|
||||||
return spack.mirrors.mirror.Mirror(url)
|
return spack.mirror.Mirror(url)
|
||||||
|
|
||||||
mirrors = [fetch_url_to_mirror(url) for url in mirror_urls]
|
mirrors = [fetch_url_to_mirror(url) for url in mirror_urls]
|
||||||
|
|
||||||
@@ -1998,7 +2001,7 @@ def fetch_url_to_mirror(url):
|
|||||||
if fetch_url.startswith("oci://"):
|
if fetch_url.startswith("oci://"):
|
||||||
ref = spack.oci.image.ImageReference.from_string(
|
ref = spack.oci.image.ImageReference.from_string(
|
||||||
fetch_url[len("oci://") :]
|
fetch_url[len("oci://") :]
|
||||||
).with_tag(_oci_default_tag(spec))
|
).with_tag(spack.oci.image.default_tag(spec))
|
||||||
|
|
||||||
# Fetch the manifest
|
# Fetch the manifest
|
||||||
try:
|
try:
|
||||||
@@ -2242,8 +2245,7 @@ def relocate_package(spec):
|
|||||||
]
|
]
|
||||||
if analogs:
|
if analogs:
|
||||||
# Prefer same-name analogs and prefer higher versions
|
# Prefer same-name analogs and prefer higher versions
|
||||||
# This matches the preferences in spack.spec.Spec.splice, so we
|
# This matches the preferences in Spec.splice, so we will find same node
|
||||||
# will find same node
|
|
||||||
analog = max(analogs, key=lambda a: (a.name == s.name, a.version))
|
analog = max(analogs, key=lambda a: (a.name == s.name, a.version))
|
||||||
|
|
||||||
lookup_dag_hash = analog.dag_hash()
|
lookup_dag_hash = analog.dag_hash()
|
||||||
@@ -2415,14 +2417,6 @@ def _tar_strip_component(tar: tarfile.TarFile, prefix: str):
         yield m


-def extract_buildcache_tarball(tarfile_path: str, destination: str) -> None:
-    with closing(tarfile.open(tarfile_path, "r")) as tar:
-        # Remove common prefix from tarball entries and directly extract them to the install dir.
-        tar.extractall(
-            path=destination, members=_tar_strip_component(tar, prefix=_ensure_common_prefix(tar))
-        )
-
-
 def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
     """
     extract binary tarball for given package into install area
|
||||||
@@ -2492,7 +2486,12 @@ def extract_tarball(spec, download_result, force=False, timer=timer.NULL_TIMER):
             tarfile_path, size, contents, "sha256", expected, local_checksum
         )
     try:
-        extract_buildcache_tarball(tarfile_path, destination=spec.prefix)
+        with closing(tarfile.open(tarfile_path, "r")) as tar:
+            # Remove install prefix from tarfil to extract directly into spec.prefix
+            tar.extractall(
+                path=spec.prefix,
+                members=_tar_strip_component(tar, prefix=_ensure_common_prefix(tar)),
+            )
     except Exception:
         shutil.rmtree(spec.prefix, ignore_errors=True)
         _delete_staged_downloads(download_result)
|
||||||
@@ -2651,7 +2650,7 @@ def try_direct_fetch(spec, mirrors=None):
|
|||||||
specfile_is_signed = False
|
specfile_is_signed = False
|
||||||
found_specs = []
|
found_specs = []
|
||||||
|
|
||||||
binary_mirrors = spack.mirrors.mirror.MirrorCollection(mirrors=mirrors, binary=True).values()
|
binary_mirrors = spack.mirror.MirrorCollection(mirrors=mirrors, binary=True).values()
|
||||||
|
|
||||||
for mirror in binary_mirrors:
|
for mirror in binary_mirrors:
|
||||||
buildcache_fetch_url_json = url_util.join(
|
buildcache_fetch_url_json = url_util.join(
|
||||||
@@ -2682,10 +2681,10 @@ def try_direct_fetch(spec, mirrors=None):
|
|||||||
# are concrete (as they are built) so we need to mark this spec
|
# are concrete (as they are built) so we need to mark this spec
|
||||||
# concrete on read-in.
|
# concrete on read-in.
|
||||||
if specfile_is_signed:
|
if specfile_is_signed:
|
||||||
specfile_json = spack.spec.Spec.extract_json_from_clearsig(specfile_contents)
|
specfile_json = Spec.extract_json_from_clearsig(specfile_contents)
|
||||||
fetched_spec = spack.spec.Spec.from_dict(specfile_json)
|
fetched_spec = Spec.from_dict(specfile_json)
|
||||||
else:
|
else:
|
||||||
fetched_spec = spack.spec.Spec.from_json(specfile_contents)
|
fetched_spec = Spec.from_json(specfile_contents)
|
||||||
fetched_spec._mark_concrete()
|
fetched_spec._mark_concrete()
|
||||||
|
|
||||||
found_specs.append({"mirror_url": mirror.fetch_url, "spec": fetched_spec})
|
found_specs.append({"mirror_url": mirror.fetch_url, "spec": fetched_spec})
|
||||||
@@ -2712,7 +2711,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
|
|||||||
if spec is None:
|
if spec is None:
|
||||||
return []
|
return []
|
||||||
|
|
||||||
if not spack.mirrors.mirror.MirrorCollection(mirrors=mirrors_to_check, binary=True):
|
if not spack.mirror.MirrorCollection(mirrors=mirrors_to_check, binary=True):
|
||||||
tty.debug("No Spack mirrors are currently configured")
|
tty.debug("No Spack mirrors are currently configured")
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
@@ -2751,7 +2750,7 @@ def clear_spec_cache():
|
|||||||
|
|
||||||
def get_keys(install=False, trust=False, force=False, mirrors=None):
|
def get_keys(install=False, trust=False, force=False, mirrors=None):
|
||||||
"""Get pgp public keys available on mirror with suffix .pub"""
|
"""Get pgp public keys available on mirror with suffix .pub"""
|
||||||
mirror_collection = mirrors or spack.mirrors.mirror.MirrorCollection(binary=True)
|
mirror_collection = mirrors or spack.mirror.MirrorCollection(binary=True)
|
||||||
|
|
||||||
if not mirror_collection:
|
if not mirror_collection:
|
||||||
tty.die("Please add a spack mirror to allow " + "download of build caches.")
|
tty.die("Please add a spack mirror to allow " + "download of build caches.")
|
||||||
@@ -2806,7 +2805,7 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
|
|||||||
|
|
||||||
|
|
||||||
def _url_push_keys(
|
def _url_push_keys(
|
||||||
*mirrors: Union[spack.mirrors.mirror.Mirror, str],
|
*mirrors: Union[spack.mirror.Mirror, str],
|
||||||
keys: List[str],
|
keys: List[str],
|
||||||
tmpdir: str,
|
tmpdir: str,
|
||||||
update_index: bool = False,
|
update_index: bool = False,
|
||||||
@@ -2873,7 +2872,7 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None):
|
|||||||
|
|
||||||
"""
|
"""
|
||||||
rebuilds = {}
|
rebuilds = {}
|
||||||
for mirror in spack.mirrors.mirror.MirrorCollection(mirrors, binary=True).values():
|
for mirror in spack.mirror.MirrorCollection(mirrors, binary=True).values():
|
||||||
tty.debug("Checking for built specs at {0}".format(mirror.fetch_url))
|
tty.debug("Checking for built specs at {0}".format(mirror.fetch_url))
|
||||||
|
|
||||||
rebuild_list = []
|
rebuild_list = []
|
||||||
@@ -2890,7 +2889,7 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None):
|
|||||||
}
|
}
|
||||||
|
|
||||||
if output_file:
|
if output_file:
|
||||||
with open(output_file, "w", encoding="utf-8") as outf:
|
with open(output_file, "w") as outf:
|
||||||
outf.write(json.dumps(rebuilds))
|
outf.write(json.dumps(rebuilds))
|
||||||
|
|
||||||
return 1 if rebuilds else 0
|
return 1 if rebuilds else 0
|
||||||
@@ -2917,7 +2916,7 @@ def _download_buildcache_entry(mirror_root, descriptions):
|
|||||||
|
|
||||||
|
|
||||||
def download_buildcache_entry(file_descriptions, mirror_url=None):
|
def download_buildcache_entry(file_descriptions, mirror_url=None):
|
||||||
if not mirror_url and not spack.mirrors.mirror.MirrorCollection(binary=True):
|
if not mirror_url and not spack.mirror.MirrorCollection(binary=True):
|
||||||
tty.die(
|
tty.die(
|
||||||
"Please provide or add a spack mirror to allow " + "download of buildcache entries."
|
"Please provide or add a spack mirror to allow " + "download of buildcache entries."
|
||||||
)
|
)
|
||||||
@@ -2926,7 +2925,7 @@ def download_buildcache_entry(file_descriptions, mirror_url=None):
|
|||||||
mirror_root = os.path.join(mirror_url, BUILD_CACHE_RELATIVE_PATH)
|
mirror_root = os.path.join(mirror_url, BUILD_CACHE_RELATIVE_PATH)
|
||||||
return _download_buildcache_entry(mirror_root, file_descriptions)
|
return _download_buildcache_entry(mirror_root, file_descriptions)
|
||||||
|
|
||||||
for mirror in spack.mirrors.mirror.MirrorCollection(binary=True).values():
|
for mirror in spack.mirror.MirrorCollection(binary=True).values():
|
||||||
mirror_root = os.path.join(mirror.fetch_url, BUILD_CACHE_RELATIVE_PATH)
|
mirror_root = os.path.join(mirror.fetch_url, BUILD_CACHE_RELATIVE_PATH)
|
||||||
|
|
||||||
if _download_buildcache_entry(mirror_root, file_descriptions):
|
if _download_buildcache_entry(mirror_root, file_descriptions):
|
||||||
@@ -2984,7 +2983,7 @@ def __init__(self, all_architectures):
|
|||||||
|
|
||||||
self.possible_specs = specs
|
self.possible_specs = specs
|
||||||
|
|
||||||
def __call__(self, spec: spack.spec.Spec, **kwargs):
|
def __call__(self, spec: Spec, **kwargs):
|
||||||
"""
|
"""
|
||||||
Args:
|
Args:
|
||||||
spec: The spec being searched for
|
spec: The spec being searched for
|
||||||
@@ -3122,7 +3121,7 @@ def __init__(self, url: str, local_hash, urlopen=None) -> None:
|
|||||||
|
|
||||||
def conditional_fetch(self) -> FetchIndexResult:
|
def conditional_fetch(self) -> FetchIndexResult:
|
||||||
"""Download an index from an OCI registry type mirror."""
|
"""Download an index from an OCI registry type mirror."""
|
||||||
url_manifest = self.ref.with_tag(default_index_tag).manifest_url()
|
url_manifest = self.ref.with_tag(spack.oci.image.default_index_tag).manifest_url()
|
||||||
try:
|
try:
|
||||||
response = self.urlopen(
|
response = self.urlopen(
|
||||||
urllib.request.Request(
|
urllib.request.Request(
|
||||||
|
|||||||
@@ -9,6 +9,7 @@
     all_core_root_specs,
     ensure_clingo_importable_or_raise,
     ensure_core_dependencies,
+    ensure_file_in_path_or_raise,
     ensure_gpg_in_path_or_raise,
     ensure_patchelf_in_path_or_raise,
 )
@@ -19,6 +20,7 @@
     "is_bootstrapping",
     "ensure_bootstrap_configuration",
     "ensure_core_dependencies",
+    "ensure_file_in_path_or_raise",
     "ensure_gpg_in_path_or_raise",
     "ensure_clingo_importable_or_raise",
     "ensure_patchelf_in_path_or_raise",
@@ -37,7 +37,7 @@
 import spack.binary_distribution
 import spack.config
 import spack.detection
-import spack.mirrors.mirror
+import spack.mirror
 import spack.platforms
 import spack.spec
 import spack.store
@@ -91,7 +91,7 @@ def __init__(self, conf: ConfigDictionary) -> None:
         self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])
 
         # Promote (relative) paths to file urls
-        self.url = spack.mirrors.mirror.Mirror(conf["info"]["url"]).fetch_url
+        self.url = spack.mirror.Mirror(conf["info"]["url"]).fetch_url
 
     @property
     def mirror_scope(self) -> spack.config.InternalConfigScope:
@@ -481,6 +481,19 @@ def ensure_gpg_in_path_or_raise() -> None:
     )
 
 
+def file_root_spec() -> str:
+    """Return the root spec used to bootstrap file"""
+    root_spec_name = "win-file" if IS_WINDOWS else "file"
+    return _root_spec(root_spec_name)
+
+
+def ensure_file_in_path_or_raise() -> None:
+    """Ensure file is in the PATH or raise"""
+    return ensure_executables_in_path_or_raise(
+        executables=["file"], abstract_spec=file_root_spec()
+    )
+
+
 def patchelf_root_spec() -> str:
     """Return the root spec used to bootstrap patchelf"""
     # 0.13.1 is the last version not to require C++17.
@@ -564,13 +577,15 @@ def ensure_core_dependencies() -> None:
     """Ensure the presence of all the core dependencies."""
     if sys.platform.lower() == "linux":
         ensure_patchelf_in_path_or_raise()
+    elif sys.platform == "win32":
+        ensure_file_in_path_or_raise()
     ensure_gpg_in_path_or_raise()
     ensure_clingo_importable_or_raise()
 
 
 def all_core_root_specs() -> List[str]:
     """Return a list of all the core root specs that may be used to bootstrap Spack"""
-    return [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec()]
+    return [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec(), file_root_spec()]
 
 
 def bootstrapping_sources(scope: Optional[str] = None):
@@ -3,8 +3,8 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 """Query the status of bootstrapping on this machine"""
-import sys
-from typing import Dict, List, Optional, Sequence, Tuple, Union
+import platform
+from typing import List, Optional, Sequence, Tuple, Union
 
 import spack.util.executable
 
@@ -72,7 +72,7 @@ def _core_requirements() -> List[RequiredResponseType]:
         "bzip2": _missing("bzip2", "required to compress/decompress code archives"),
         "git": _missing("git", "required to fetch/manage git repositories"),
     }
-    if sys.platform == "linux":
+    if platform.system().lower() == "linux":
         _core_system_exes["xz"] = _missing("xz", "required to compress/decompress code archives")
 
     # Executables that are not bootstrapped yet
@@ -87,16 +87,17 @@ def _core_requirements() -> List[RequiredResponseType]:
 
 
 def _buildcache_requirements() -> List[RequiredResponseType]:
-    _buildcache_exes: Dict[ExecutablesType, str] = {
-        ("gpg2", "gpg"): _missing("gpg2", "required to sign/verify buildcaches", False)
+    _buildcache_exes = {
+        "file": _missing("file", "required to analyze files for buildcaches", system_only=False),
+        ("gpg2", "gpg"): _missing("gpg2", "required to sign/verify buildcaches", False),
     }
-    if sys.platform == "darwin":
+    if platform.system().lower() == "darwin":
         _buildcache_exes["otool"] = _missing("otool", "required to relocate binaries")
 
     # Executables that are not bootstrapped yet
     result = [_required_system_executable(exe, msg) for exe, msg in _buildcache_exes.items()]
 
-    if sys.platform == "linux":
+    if platform.system().lower() == "linux":
         result.append(
             _required_executable(
                 "patchelf",
@@ -1426,20 +1426,27 @@ def make_stack(tb, stack=None):
     # We found obj, the Package implementation we care about.
     # Point out the location in the install method where we failed.
     filename = inspect.getfile(frame.f_code)
-    lines = [f"{filename}:{frame.f_lineno}, in {frame.f_code.co_name}:"]
+    lineno = frame.f_lineno
+    if os.path.basename(filename) == "package.py":
+        # subtract 1 because we inject a magic import at the top of package files.
+        # TODO: get rid of the magic import.
+        lineno -= 1
+
+    lines = ["{0}:{1:d}, in {2}:".format(filename, lineno, frame.f_code.co_name)]
 
     # Build a message showing context in the install method.
     sourcelines, start = inspect.getsourcelines(frame)
 
     # Calculate lineno of the error relative to the start of the function.
-    fun_lineno = frame.f_lineno - start
+    fun_lineno = lineno - start
     start_ctx = max(0, fun_lineno - context)
     sourcelines = sourcelines[start_ctx : fun_lineno + context + 1]
 
     for i, line in enumerate(sourcelines):
         is_error = start_ctx + i == fun_lineno
+        mark = ">> " if is_error else "   "
         # Add start to get lineno relative to start of file, not function.
-        marked = f"  {'>> ' if is_error else '   '}{start + start_ctx + i:-6d}{line.rstrip()}"
+        marked = "  {0}{1:-6d}{2}".format(mark, start + start_ctx + i, line.rstrip())
         if is_error:
             marked = colorize("@R{%s}" % cescape(marked))
         lines.append(marked)
@@ -182,7 +182,10 @@ def patch_config_files(self) -> bool:
     @property
     def _removed_la_files_log(self) -> str:
         """File containing the list of removed libtool archives"""
-        return os.path.join(self.build_directory, "removed_la_files.txt")
+        build_dir = self.build_directory
+        if not os.path.isabs(self.build_directory):
+            build_dir = os.path.join(self.pkg.stage.path, build_dir)
+        return os.path.join(build_dir, "removed_la_files.txt")
 
     @property
     def archive_files(self) -> List[str]:
@@ -520,12 +523,7 @@ def configure_abs_path(self) -> str:
     @property
     def build_directory(self) -> str:
         """Override to provide another place to build the package"""
-        # Handle the case where the configure directory is set to a non-absolute path
-        # Non-absolute paths are always relative to the staging source path
-        build_dir = self.configure_directory
-        if not os.path.isabs(build_dir):
-            build_dir = os.path.join(self.pkg.stage.source_path, build_dir)
-        return build_dir
+        return self.configure_directory
 
     @spack.phase_callbacks.run_before("autoreconf")
     def delete_configure_to_force_update(self) -> None:
@@ -838,7 +836,7 @@ def remove_libtool_archives(self) -> None:
         libtool_files = fs.find(str(self.pkg.prefix), "*.la", recursive=True)
         with fs.safe_remove(*libtool_files):
             fs.mkdirp(os.path.dirname(self._removed_la_files_log))
-            with open(self._removed_la_files_log, mode="w", encoding="utf-8") as f:
+            with open(self._removed_la_files_log, mode="w") as f:
                 f.write("\n".join(libtool_files))
 
     def setup_build_environment(self, env):
@@ -324,7 +324,7 @@ def initconfig(self, pkg, spec, prefix):
             + self.initconfig_package_entries()
         )
 
-        with open(self.cache_name, "w", encoding="utf-8") as f:
+        with open(self.cache_name, "w") as f:
             for entry in cache_entries:
                 f.write("%s\n" % entry)
             f.write("\n")
@@ -1153,7 +1153,7 @@ def _determine_license_type(self):
             # The file will have been created upon self.license_required AND
             # self.license_files having been populated, so the "if" is usually
             # true by the time the present function runs; ../hooks/licensing.py
-            with open(f, encoding="utf-8") as fh:
+            with open(f) as fh:
                 if re.search(r"^[ \t]*[^" + self.license_comment + "\n]", fh.read(), re.MULTILINE):
                     license_type = {
                         "ACTIVATION_TYPE": "license_file",
@@ -1185,7 +1185,7 @@ def configure(self):
         # our configuration accordingly. We can do this because the tokens are
         # quite long and specific.
 
-        validator_code = open("pset/check.awk", "r", encoding="utf-8").read()
+        validator_code = open("pset/check.awk", "r").read()
         # Let's go a little further and distill the tokens (plus some noise).
         tokenlike_words = set(re.findall(r"[A-Z_]{4,}", validator_code))
 
@@ -1222,7 +1222,7 @@ def configure(self):
         config_draft.update(self._determine_license_type)
 
         # Write sorted *by token* so the file looks less like a hash dump.
-        f = open("silent.cfg", "w", encoding="utf-8")
+        f = open("silent.cfg", "w")
         for token, value in sorted(config_draft.items()):
             if token in tokenlike_words:
                 f.write("%s=%s\n" % (token, value))
@@ -1273,7 +1273,7 @@ def configure_rpath(self):
                 raise InstallError("Cannot find compiler command to configure rpath:\n\t" + f)
 
             compiler_cfg = os.path.abspath(f + ".cfg")
-            with open(compiler_cfg, "w", encoding="utf-8") as fh:
+            with open(compiler_cfg, "w") as fh:
                 fh.write("-Xlinker -rpath={0}\n".format(compilers_lib_dir))
 
     @spack.phase_callbacks.run_after("install")
@@ -1297,7 +1297,7 @@ def configure_auto_dispatch(self):
                 ad.append(x)
 
             compiler_cfg = os.path.abspath(f + ".cfg")
-            with open(compiler_cfg, "a", encoding="utf-8") as fh:
+            with open(compiler_cfg, "a") as fh:
                 fh.write("-ax{0}\n".format(",".join(ad)))
 
     @spack.phase_callbacks.run_after("install")
@@ -75,7 +75,7 @@ def generate_luarocks_config(self, pkg, spec, prefix):
                 table_entries.append(self._generate_tree_line(d.name, d.prefix))
 
         path = self._luarocks_config_path()
-        with open(path, "w", encoding="utf-8") as config:
+        with open(path, "w") as config:
             config.write(
                 """
                 deps_mode="all"
@@ -32,9 +32,6 @@ class IntelOneApiPackage(Package):
     # organization (e.g. University/Company).
     redistribute(source=False, binary=False)
 
-    # contains precompiled binaries without rpaths
-    unresolved_libraries = ["*"]
-
     for c in [
         "target=ppc64:",
         "target=ppc64le:",
2269  lib/spack/spack/ci.py  Normal file
File diff suppressed because it is too large
@@ -1,41 +0,0 @@
-# Spack CI generators
-
-This document describes how the ci module can be extended to provide novel
-ci generators. The module currently has only a single generator for gitlab.
-The unit-tests for the ci module define a small custom generator for testing
-purposes as well.
-
-The process of generating a pipeline involves creating a ci-enabled spack
-environment, activating it, and running `spack ci generate`, possibly with
-arguments describing things like where the output should be written.
-
-Internally pipeline generation is broken into two components: general and
-ci platform specific.
-
-## General pipeline functionality
-
-General pipeline functionality includes building a pipeline graph (really,
-a forest), pruning it in a variety of ways, and gathering attributes for all
-the generated spec build jobs from the spack configuration.
-
-All of the above functionality is defined in the `__init__.py` of the top-level
-ci module, and should be roughly the same for pipelines generated for any
-platform.
-
-## CI platform specific functionality
-
-Functionality specific to CI platforms (e.g. gitlab, gha, etc.) should be
-defined in a dedicated module. In order to define a generator for a new
-platform, there are only a few requirements:
-
-1. add a file under `ci` in which you define a generator method decorated with
-   the `@generator` attribute.
-
-1. import it from `lib/spack/spack/ci/__init__.py`, so that your new generator
-   is registered.
-
-1. the generator method must take as arguments PipelineDag, SpackCIConfig,
-   and PipelineOptions objects, in that order.
-
-1. the generator method must produce an output file containing the
-   generated pipeline.
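The four requirements above amount to very little code in practice. Below is a minimal, illustrative sketch of such a platform module; it is not part of this diff. The module name (`noop.py`), the generator name `"noop"`, and the JSON output format are assumptions, while the `@generator` decorator, the `(PipelineDag, SpackCIConfig, PipelineOptions)` signature, and the `traverse_nodes`/`output_file` attributes come from the ci module files shown in this changeset.

```python
# Hypothetical lib/spack/spack/ci/noop.py -- a sketch, not code from this diff.
import json

from .common import PipelineDag, PipelineOptions, SpackCIConfig
from .generator_registry import generator


@generator("noop")
def generate_noop_yaml(pipeline: PipelineDag, spack_ci: SpackCIConfig, options: PipelineOptions):
    """Write a trivial pipeline file that just lists the specs to be built."""
    jobs = [node.spec.name for _, node in pipeline.traverse_nodes(direction="children")]
    with open(options.output_file or "pipeline.json", "w", encoding="utf-8") as fd:
        json.dump({"jobs": jobs}, fd, indent=2)
```

Per requirement 2, the new module would also need to be imported from `lib/spack/spack/ci/__init__.py` so that the decorator runs and the generator is registered.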
File diff suppressed because it is too large
@@ -1,825 +0,0 @@
|
|||||||
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
|
|
||||||
# Spack Project Developers. See the top-level COPYRIGHT file for details.
|
|
||||||
#
|
|
||||||
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
|
|
||||||
import codecs
|
|
||||||
import copy
|
|
||||||
import json
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import ssl
|
|
||||||
import sys
|
|
||||||
import time
|
|
||||||
from collections import deque
|
|
||||||
from enum import Enum
|
|
||||||
from typing import Dict, Generator, List, Optional, Set, Tuple
|
|
||||||
from urllib.parse import quote, urlencode, urlparse
|
|
||||||
from urllib.request import HTTPHandler, HTTPSHandler, Request, build_opener
|
|
||||||
|
|
||||||
import llnl.util.filesystem as fs
|
|
||||||
import llnl.util.tty as tty
|
|
||||||
from llnl.util.lang import Singleton, memoized
|
|
||||||
|
|
||||||
import spack.binary_distribution as bindist
|
|
||||||
import spack.config as cfg
|
|
||||||
import spack.deptypes as dt
|
|
||||||
import spack.environment as ev
|
|
||||||
import spack.error
|
|
||||||
import spack.mirrors.mirror
|
|
||||||
import spack.schema
|
|
||||||
import spack.spec
|
|
||||||
import spack.util.spack_yaml as syaml
|
|
||||||
import spack.util.url as url_util
|
|
||||||
import spack.util.web as web_util
|
|
||||||
from spack import traverse
|
|
||||||
from spack.reporters import CDash, CDashConfiguration
|
|
||||||
from spack.reporters.cdash import SPACK_CDASH_TIMEOUT
|
|
||||||
from spack.reporters.cdash import build_stamp as cdash_build_stamp
|
|
||||||
|
|
||||||
|
|
||||||
def _urlopen():
|
|
||||||
error_handler = web_util.SpackHTTPDefaultErrorHandler()
|
|
||||||
|
|
||||||
# One opener with HTTPS ssl enabled
|
|
||||||
with_ssl = build_opener(
|
|
||||||
HTTPHandler(), HTTPSHandler(context=web_util.ssl_create_default_context()), error_handler
|
|
||||||
)
|
|
||||||
|
|
||||||
# One opener with HTTPS ssl disabled
|
|
||||||
without_ssl = build_opener(
|
|
||||||
HTTPHandler(), HTTPSHandler(context=ssl._create_unverified_context()), error_handler
|
|
||||||
)
|
|
||||||
|
|
||||||
# And dynamically dispatch based on the config:verify_ssl.
|
|
||||||
def dispatch_open(fullurl, data=None, timeout=None, verify_ssl=True):
|
|
||||||
opener = with_ssl if verify_ssl else without_ssl
|
|
||||||
timeout = timeout or cfg.get("config:connect_timeout", 1)
|
|
||||||
return opener.open(fullurl, data, timeout)
|
|
||||||
|
|
||||||
return dispatch_open
|
|
||||||
|
|
||||||
|
|
||||||
IS_WINDOWS = sys.platform == "win32"
|
|
||||||
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
|
|
||||||
_dyn_mapping_urlopener = Singleton(_urlopen)
|
|
||||||
|
|
||||||
|
|
||||||
def copy_files_to_artifacts(src, artifacts_dir):
|
|
||||||
"""
|
|
||||||
Copy file(s) to the given artifacts directory
|
|
||||||
|
|
||||||
Parameters:
|
|
||||||
src (str): the glob-friendly path expression for the file(s) to copy
|
|
||||||
artifacts_dir (str): the destination directory
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
fs.copy(src, artifacts_dir)
|
|
||||||
except Exception as err:
|
|
||||||
msg = (
|
|
||||||
f"Unable to copy files ({src}) to artifacts {artifacts_dir} due to "
|
|
||||||
f"exception: {str(err)}"
|
|
||||||
)
|
|
||||||
tty.warn(msg)
|
|
||||||
|
|
||||||
|
|
||||||
def win_quote(quote_str: str) -> str:
|
|
||||||
if IS_WINDOWS:
|
|
||||||
quote_str = f'"{quote_str}"'
|
|
||||||
return quote_str
|
|
||||||
|
|
||||||
|
|
||||||
def _spec_matches(spec, match_string):
|
|
||||||
return spec.intersects(match_string)
|
|
||||||
|
|
||||||
|
|
||||||
def _noop(x):
|
|
||||||
return x
|
|
||||||
|
|
||||||
|
|
||||||
def unpack_script(script_section, op=_noop):
|
|
||||||
script = []
|
|
||||||
for cmd in script_section:
|
|
||||||
if isinstance(cmd, list):
|
|
||||||
for subcmd in cmd:
|
|
||||||
script.append(op(subcmd))
|
|
||||||
else:
|
|
||||||
script.append(op(cmd))
|
|
||||||
|
|
||||||
return script
|
|
||||||
|
|
||||||
|
|
||||||
def ensure_expected_target_path(path: str) -> str:
|
|
||||||
"""Returns passed paths with all Windows path separators exchanged
|
|
||||||
for posix separators
|
|
||||||
|
|
||||||
TODO (johnwparent): Refactor config + cli read/write to deal only in posix style paths
|
|
||||||
"""
|
|
||||||
if path:
|
|
||||||
return path.replace("\\", "/")
|
|
||||||
return path
|
|
||||||
|
|
||||||
|
|
||||||
def update_env_scopes(
|
|
||||||
env: ev.Environment,
|
|
||||||
cli_scopes: List[str],
|
|
||||||
output_file: str,
|
|
||||||
transform_windows_paths: bool = False,
|
|
||||||
) -> None:
|
|
||||||
"""Add any config scopes from cli_scopes which aren't already included in the
|
|
||||||
environment, by reading the yaml, adding the missing includes, and writing the
|
|
||||||
updated yaml back to the same location.
|
|
||||||
"""
|
|
||||||
with open(env.manifest_path, "r", encoding="utf-8") as env_fd:
|
|
||||||
env_yaml_root = syaml.load(env_fd)
|
|
||||||
|
|
||||||
# Add config scopes to environment
|
|
||||||
env_includes = env_yaml_root["spack"].get("include", [])
|
|
||||||
include_scopes: List[str] = []
|
|
||||||
for scope in cli_scopes:
|
|
||||||
if scope not in include_scopes and scope not in env_includes:
|
|
||||||
include_scopes.insert(0, scope)
|
|
||||||
env_includes.extend(include_scopes)
|
|
||||||
env_yaml_root["spack"]["include"] = [
|
|
||||||
ensure_expected_target_path(i) if transform_windows_paths else i for i in env_includes
|
|
||||||
]
|
|
||||||
|
|
||||||
with open(output_file, "w", encoding="utf-8") as fd:
|
|
||||||
syaml.dump_config(env_yaml_root, fd, default_flow_style=False)
|
|
||||||
|
|
||||||
|
|
||||||
def write_pipeline_manifest(specs, src_prefix, dest_prefix, output_file):
|
|
||||||
"""Write out the file describing specs that should be copied"""
|
|
||||||
buildcache_copies = {}
|
|
||||||
|
|
||||||
for release_spec in specs:
|
|
||||||
release_spec_dag_hash = release_spec.dag_hash()
|
|
||||||
# TODO: This assumes signed version of the spec
|
|
||||||
buildcache_copies[release_spec_dag_hash] = [
|
|
||||||
{
|
|
||||||
"src": url_util.join(
|
|
||||||
src_prefix,
|
|
||||||
bindist.build_cache_relative_path(),
|
|
||||||
bindist.tarball_name(release_spec, ".spec.json.sig"),
|
|
||||||
),
|
|
||||||
"dest": url_util.join(
|
|
||||||
dest_prefix,
|
|
||||||
bindist.build_cache_relative_path(),
|
|
||||||
bindist.tarball_name(release_spec, ".spec.json.sig"),
|
|
||||||
),
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"src": url_util.join(
|
|
||||||
src_prefix,
|
|
||||||
bindist.build_cache_relative_path(),
|
|
||||||
bindist.tarball_path_name(release_spec, ".spack"),
|
|
||||||
),
|
|
||||||
"dest": url_util.join(
|
|
||||||
dest_prefix,
|
|
||||||
bindist.build_cache_relative_path(),
|
|
||||||
bindist.tarball_path_name(release_spec, ".spack"),
|
|
||||||
),
|
|
||||||
},
|
|
||||||
]
|
|
||||||
|
|
||||||
target_dir = os.path.dirname(output_file)
|
|
||||||
|
|
||||||
if not os.path.exists(target_dir):
|
|
||||||
os.makedirs(target_dir)
|
|
||||||
|
|
||||||
with open(output_file, "w", encoding="utf-8") as fd:
|
|
||||||
fd.write(json.dumps(buildcache_copies))
|
|
||||||
|
|
||||||
|
|
||||||
class CDashHandler:
|
|
||||||
"""
|
|
||||||
Class for managing CDash data and processing.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, ci_cdash):
|
|
||||||
# start with the gitlab ci configuration
|
|
||||||
self.url = ci_cdash.get("url")
|
|
||||||
self.build_group = ci_cdash.get("build-group")
|
|
||||||
self.project = ci_cdash.get("project")
|
|
||||||
self.site = ci_cdash.get("site")
|
|
||||||
|
|
||||||
# grab the authorization token when available
|
|
||||||
self.auth_token = os.environ.get("SPACK_CDASH_AUTH_TOKEN")
|
|
||||||
if self.auth_token:
|
|
||||||
tty.verbose("Using CDash auth token from environment")
|
|
||||||
|
|
||||||
# append runner description to the site if available
|
|
||||||
runner = os.environ.get("CI_RUNNER_DESCRIPTION")
|
|
||||||
if runner:
|
|
||||||
self.site += f" ({runner})"
|
|
||||||
|
|
||||||
def args(self):
|
|
||||||
return [
|
|
||||||
"--cdash-upload-url",
|
|
||||||
win_quote(self.upload_url),
|
|
||||||
"--cdash-build",
|
|
||||||
win_quote(self.build_name()),
|
|
||||||
"--cdash-site",
|
|
||||||
win_quote(self.site),
|
|
||||||
"--cdash-buildstamp",
|
|
||||||
win_quote(self.build_stamp),
|
|
||||||
]
|
|
||||||
|
|
||||||
def build_name(self, spec: Optional[spack.spec.Spec] = None) -> Optional[str]:
|
|
||||||
"""Returns the CDash build name.
|
|
||||||
|
|
||||||
A name will be generated if the `spec` is provided,
|
|
||||||
otherwise, the value will be retrieved from the environment
|
|
||||||
through the `SPACK_CDASH_BUILD_NAME` variable.
|
|
||||||
|
|
||||||
Returns: (str) given spec's CDash build name."""
|
|
||||||
if spec:
|
|
||||||
build_name = f"{spec.name}@{spec.version}%{spec.compiler} \
|
|
||||||
hash={spec.dag_hash()} arch={spec.architecture} ({self.build_group})"
|
|
||||||
tty.debug(f"Generated CDash build name ({build_name}) from the {spec.name}")
|
|
||||||
return build_name
|
|
||||||
|
|
||||||
env_build_name = os.environ.get("SPACK_CDASH_BUILD_NAME")
|
|
||||||
tty.debug(f"Using CDash build name ({env_build_name}) from the environment")
|
|
||||||
return env_build_name
|
|
||||||
|
|
||||||
@property # type: ignore
|
|
||||||
def build_stamp(self):
|
|
||||||
"""Returns the CDash build stamp.
|
|
||||||
|
|
||||||
The one defined by SPACK_CDASH_BUILD_STAMP environment variable
|
|
||||||
is preferred due to the representation of timestamps; otherwise,
|
|
||||||
one will be built.
|
|
||||||
|
|
||||||
Returns: (str) current CDash build stamp"""
|
|
||||||
build_stamp = os.environ.get("SPACK_CDASH_BUILD_STAMP")
|
|
||||||
if build_stamp:
|
|
||||||
tty.debug(f"Using build stamp ({build_stamp}) from the environment")
|
|
||||||
return build_stamp
|
|
||||||
|
|
||||||
build_stamp = cdash_build_stamp(self.build_group, time.time())
|
|
||||||
tty.debug(f"Generated new build stamp ({build_stamp})")
|
|
||||||
return build_stamp
|
|
||||||
|
|
||||||
@property # type: ignore
|
|
||||||
@memoized
|
|
||||||
def project_enc(self):
|
|
||||||
tty.debug(f"Encoding project ({type(self.project)}): {self.project})")
|
|
||||||
encode = urlencode({"project": self.project})
|
|
||||||
index = encode.find("=") + 1
|
|
||||||
return encode[index:]
|
|
||||||
|
|
||||||
@property
|
|
||||||
def upload_url(self):
|
|
||||||
url_format = f"{self.url}/submit.php?project={self.project_enc}"
|
|
||||||
return url_format
|
|
||||||
|
|
||||||
def copy_test_results(self, source, dest):
|
|
||||||
"""Copy test results to artifacts directory."""
|
|
||||||
reports = fs.join_path(source, "*_Test*.xml")
|
|
||||||
copy_files_to_artifacts(reports, dest)
|
|
||||||
|
|
||||||
def create_buildgroup(self, opener, headers, url, group_name, group_type):
|
|
||||||
data = {"newbuildgroup": group_name, "project": self.project, "type": group_type}
|
|
||||||
|
|
||||||
enc_data = json.dumps(data).encode("utf-8")
|
|
||||||
|
|
||||||
request = Request(url, data=enc_data, headers=headers)
|
|
||||||
|
|
||||||
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
|
|
||||||
response_code = response.getcode()
|
|
||||||
|
|
||||||
if response_code not in [200, 201]:
|
|
||||||
msg = f"Creating buildgroup failed (response code = {response_code})"
|
|
||||||
tty.warn(msg)
|
|
||||||
return None
|
|
||||||
|
|
||||||
response_text = response.read()
|
|
||||||
response_json = json.loads(response_text)
|
|
||||||
build_group_id = response_json["id"]
|
|
||||||
|
|
||||||
return build_group_id
|
|
||||||
|
|
||||||
def populate_buildgroup(self, job_names):
|
|
||||||
url = f"{self.url}/api/v1/buildgroup.php"
|
|
||||||
|
|
||||||
headers = {
|
|
||||||
"Authorization": f"Bearer {self.auth_token}",
|
|
||||||
"Content-Type": "application/json",
|
|
||||||
}
|
|
||||||
|
|
||||||
opener = build_opener(HTTPHandler)
|
|
||||||
|
|
||||||
parent_group_id = self.create_buildgroup(opener, headers, url, self.build_group, "Daily")
|
|
||||||
group_id = self.create_buildgroup(
|
|
||||||
opener, headers, url, f"Latest {self.build_group}", "Latest"
|
|
||||||
)
|
|
||||||
|
|
||||||
if not parent_group_id or not group_id:
|
|
||||||
msg = f"Failed to create or retrieve buildgroups for {self.build_group}"
|
|
||||||
tty.warn(msg)
|
|
||||||
return
|
|
||||||
|
|
||||||
data = {
|
|
||||||
"dynamiclist": [
|
|
||||||
{"match": name, "parentgroupid": parent_group_id, "site": self.site}
|
|
||||||
for name in job_names
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
enc_data = json.dumps(data).encode("utf-8")
|
|
||||||
|
|
||||||
request = Request(url, data=enc_data, headers=headers)
|
|
||||||
request.get_method = lambda: "PUT"
|
|
||||||
|
|
||||||
response = opener.open(request, timeout=SPACK_CDASH_TIMEOUT)
|
|
||||||
response_code = response.getcode()
|
|
||||||
|
|
||||||
if response_code != 200:
|
|
||||||
msg = f"Error response code ({response_code}) in populate_buildgroup"
|
|
||||||
tty.warn(msg)
|
|
||||||
|
|
||||||
def report_skipped(self, spec: spack.spec.Spec, report_dir: str, reason: Optional[str]):
|
|
||||||
"""Explicitly report skipping testing of a spec (e.g., it's CI
|
|
||||||
configuration identifies it as known to have broken tests or
|
|
||||||
the CI installation failed).
|
|
||||||
|
|
||||||
Args:
|
|
||||||
spec: spec being tested
|
|
||||||
report_dir: directory where the report will be written
|
|
||||||
reason: reason the test is being skipped
|
|
||||||
"""
|
|
||||||
configuration = CDashConfiguration(
|
|
||||||
upload_url=self.upload_url,
|
|
||||||
packages=[spec.name],
|
|
||||||
build=self.build_name(),
|
|
||||||
site=self.site,
|
|
||||||
buildstamp=self.build_stamp,
|
|
||||||
track=None,
|
|
||||||
)
|
|
||||||
reporter = CDash(configuration=configuration)
|
|
||||||
reporter.test_skipped_report(report_dir, spec, reason)
|
|
||||||
|
|
||||||
|
|
||||||
class PipelineType(Enum):
|
|
||||||
COPY_ONLY = 1
|
|
||||||
spack_copy_only = 1
|
|
||||||
PROTECTED_BRANCH = 2
|
|
||||||
spack_protected_branch = 2
|
|
||||||
PULL_REQUEST = 3
|
|
||||||
spack_pull_request = 3
|
|
||||||
|
|
||||||
|
|
||||||
class PipelineOptions:
|
|
||||||
"""A container for all pipeline options that can be specified (whether
|
|
||||||
via cli, config/yaml, or environment variables)"""
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
env: ev.Environment,
|
|
||||||
buildcache_destination: spack.mirrors.mirror.Mirror,
|
|
||||||
artifacts_root: str = "jobs_scratch_dir",
|
|
||||||
print_summary: bool = True,
|
|
||||||
output_file: Optional[str] = None,
|
|
||||||
check_index_only: bool = False,
|
|
||||||
broken_specs_url: Optional[str] = None,
|
|
||||||
rebuild_index: bool = True,
|
|
||||||
untouched_pruning_dependent_depth: Optional[int] = None,
|
|
||||||
prune_untouched: bool = False,
|
|
||||||
prune_up_to_date: bool = True,
|
|
||||||
prune_external: bool = True,
|
|
||||||
stack_name: Optional[str] = None,
|
|
||||||
pipeline_type: Optional[PipelineType] = None,
|
|
||||||
require_signing: bool = False,
|
|
||||||
cdash_handler: Optional["CDashHandler"] = None,
|
|
||||||
):
|
|
||||||
"""
|
|
||||||
Args:
|
|
||||||
env: Active spack environment
|
|
||||||
buildcache_destination: The mirror where built binaries should be pushed
|
|
||||||
artifacts_root: Path to location where artifacts should be stored
|
|
||||||
print_summary: Print a summary of the scheduled pipeline
|
|
||||||
output_file: Path where output file should be written
|
|
||||||
check_index_only: Only fetch the index or fetch all spec files
|
|
||||||
broken_specs_url: URL where broken specs (on develop) should be reported
|
|
||||||
rebuild_index: Generate a job to rebuild mirror index after rebuilds
|
|
||||||
untouched_pruning_dependent_depth: How many parents to traverse from changed pkg specs
|
|
||||||
prune_untouched: Prune jobs for specs that were unchanged in git history
|
|
||||||
prune_up_to_date: Prune specs from pipeline if binary exists on the mirror
|
|
||||||
prune_external: Prune specs from pipeline if they are external
|
|
||||||
stack_name: Name of spack stack
|
|
||||||
pipeline_type: Type of pipeline running (optional)
|
|
||||||
require_signing: Require buildcache to be signed (fail w/out signing key)
|
|
||||||
cdash_handler: Object for communicating build information with CDash
|
|
||||||
"""
|
|
||||||
self.env = env
|
|
||||||
self.buildcache_destination = buildcache_destination
|
|
||||||
self.artifacts_root = artifacts_root
|
|
||||||
self.print_summary = print_summary
|
|
||||||
self.output_file = output_file
|
|
||||||
self.check_index_only = check_index_only
|
|
||||||
self.broken_specs_url = broken_specs_url
|
|
||||||
self.rebuild_index = rebuild_index
|
|
||||||
self.untouched_pruning_dependent_depth = untouched_pruning_dependent_depth
|
|
||||||
self.prune_untouched = prune_untouched
|
|
||||||
self.prune_up_to_date = prune_up_to_date
|
|
||||||
self.prune_external = prune_external
|
|
||||||
self.stack_name = stack_name
|
|
||||||
self.pipeline_type = pipeline_type
|
|
||||||
self.require_signing = require_signing
|
|
||||||
self.cdash_handler = cdash_handler
|
|
||||||
|
|
||||||
|
|
||||||
class PipelineNode:
|
|
||||||
spec: spack.spec.Spec
|
|
||||||
parents: Set[str]
|
|
||||||
children: Set[str]
|
|
||||||
|
|
||||||
def __init__(self, spec: spack.spec.Spec):
|
|
||||||
self.spec = spec
|
|
||||||
self.parents = set()
|
|
||||||
self.children = set()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def key(self):
|
|
||||||
"""Return key of the stored spec"""
|
|
||||||
return PipelineDag.key(self.spec)
|
|
||||||
|
|
||||||
|
|
||||||
class PipelineDag:
|
|
||||||
"""Turn a list of specs into a simple directed graph, that doesn't keep track
|
|
||||||
of edge types."""
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def key(cls, spec: spack.spec.Spec) -> str:
|
|
||||||
return spec.dag_hash()
|
|
||||||
|
|
||||||
def __init__(self, specs: List[spack.spec.Spec]) -> None:
|
|
||||||
# Build dictionary of nodes
|
|
||||||
self.nodes: Dict[str, PipelineNode] = {
|
|
||||||
PipelineDag.key(s): PipelineNode(s)
|
|
||||||
for s in traverse.traverse_nodes(specs, deptype=dt.ALL_TYPES, root=True)
|
|
||||||
}
|
|
||||||
|
|
||||||
# Create edges
|
|
||||||
for edge in traverse.traverse_edges(
|
|
||||||
specs, deptype=dt.ALL_TYPES, root=False, cover="edges"
|
|
||||||
):
|
|
||||||
parent_key = PipelineDag.key(edge.parent)
|
|
||||||
child_key = PipelineDag.key(edge.spec)
|
|
||||||
|
|
||||||
self.nodes[parent_key].children.add(child_key)
|
|
||||||
self.nodes[child_key].parents.add(parent_key)
|
|
||||||
|
|
||||||
def prune(self, node_key: str):
|
|
||||||
"""Remove a node from the graph, and reconnect its parents and children"""
|
|
||||||
node = self.nodes[node_key]
|
|
||||||
for parent in node.parents:
|
|
||||||
self.nodes[parent].children.remove(node_key)
|
|
||||||
self.nodes[parent].children |= node.children
|
|
||||||
for child in node.children:
|
|
||||||
self.nodes[child].parents.remove(node_key)
|
|
||||||
self.nodes[child].parents |= node.parents
|
|
||||||
del self.nodes[node_key]
|
|
||||||
|
|
||||||
def traverse_nodes(
|
|
||||||
self, direction: str = "children"
|
|
||||||
) -> Generator[Tuple[int, PipelineNode], None, None]:
|
|
||||||
"""Yields (depth, node) from the pipeline graph. Traversal is topologically
|
|
||||||
ordered from the roots if ``direction`` is ``children``, or from the leaves
|
|
||||||
if ``direction`` is ``parents``. The yielded depth is the length of the
|
|
||||||
longest path from the starting point to the yielded node."""
|
|
||||||
if direction == "children":
|
|
||||||
get_in_edges = lambda node: node.parents
|
|
||||||
get_out_edges = lambda node: node.children
|
|
||||||
else:
|
|
||||||
get_in_edges = lambda node: node.children
|
|
||||||
get_out_edges = lambda node: node.parents
|
|
||||||
|
|
||||||
sort_key = lambda k: self.nodes[k].spec.name
|
|
||||||
|
|
||||||
out_edges = {k: sorted(get_out_edges(n), key=sort_key) for k, n in self.nodes.items()}
|
|
||||||
num_in_edges = {k: len(get_in_edges(n)) for k, n in self.nodes.items()}
|
|
||||||
|
|
||||||
# Populate a queue with all the nodes that have no incoming edges
|
|
||||||
nodes = deque(
|
|
||||||
sorted(
|
|
||||||
[(0, key) for key in self.nodes.keys() if num_in_edges[key] == 0],
|
|
||||||
key=lambda item: item[1],
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
while nodes:
|
|
||||||
# Remove the next node, n, from the queue and yield it
|
|
||||||
depth, n_key = nodes.pop()
|
|
||||||
yield (depth, self.nodes[n_key])
|
|
||||||
|
|
||||||
# Remove an in-edge from every node, m, pointed to by an
|
|
||||||
# out-edge from n. If any of those nodes are left with
|
|
||||||
# 0 remaining in-edges, add them to the queue.
|
|
||||||
for m in out_edges[n_key]:
|
|
||||||
num_in_edges[m] -= 1
|
|
||||||
if num_in_edges[m] == 0:
|
|
||||||
nodes.appendleft((depth + 1, m))
|
|
||||||
|
|
||||||
def get_dependencies(self, node: PipelineNode) -> List[PipelineNode]:
|
|
||||||
"""Returns a list of nodes corresponding to the direct dependencies
|
|
||||||
of the given node."""
|
|
||||||
return [self.nodes[k] for k in node.children]
|
|
||||||
|
|
||||||
|
|
||||||
class SpackCIConfig:
|
|
||||||
"""Spack CI object used to generate intermediate representation
|
|
||||||
used by the CI generator(s).
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, ci_config):
|
|
||||||
"""Given the information from the ci section of the config
|
|
||||||
and the staged jobs, set up meta data needed for generating Spack
|
|
||||||
CI IR.
|
|
||||||
"""
|
|
||||||
|
|
||||||
self.ci_config = ci_config
|
|
||||||
self.named_jobs = ["any", "build", "copy", "cleanup", "noop", "reindex", "signing"]
|
|
||||||
|
|
||||||
self.ir = {
|
|
||||||
"jobs": {},
|
|
||||||
"rebuild-index": self.ci_config.get("rebuild-index", True),
|
|
||||||
"broken-specs-url": self.ci_config.get("broken-specs-url", None),
|
|
||||||
"broken-tests-packages": self.ci_config.get("broken-tests-packages", []),
|
|
||||||
"target": self.ci_config.get("target", "gitlab"),
|
|
||||||
}
|
|
||||||
jobs = self.ir["jobs"]
|
|
||||||
|
|
||||||
for name in self.named_jobs:
|
|
||||||
# Skip the special named jobs
|
|
||||||
if name not in ["any", "build"]:
|
|
||||||
jobs[name] = self.__init_job("")
|
|
||||||
|
|
||||||
def __init_job(self, release_spec):
|
|
||||||
"""Initialize job object"""
|
|
||||||
job_object = {"spec": release_spec, "attributes": {}}
|
|
||||||
if release_spec:
|
|
||||||
job_vars = job_object["attributes"].setdefault("variables", {})
|
|
||||||
job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec.dag_hash()
|
|
||||||
job_vars["SPACK_JOB_SPEC_PKG_NAME"] = release_spec.name
|
|
||||||
job_vars["SPACK_JOB_SPEC_PKG_VERSION"] = release_spec.format("{version}")
|
|
||||||
job_vars["SPACK_JOB_SPEC_COMPILER_NAME"] = release_spec.format("{compiler.name}")
|
|
||||||
job_vars["SPACK_JOB_SPEC_COMPILER_VERSION"] = release_spec.format("{compiler.version}")
|
|
||||||
job_vars["SPACK_JOB_SPEC_ARCH"] = release_spec.format("{architecture}")
|
|
||||||
job_vars["SPACK_JOB_SPEC_VARIANTS"] = release_spec.format("{variants}")
|
|
||||||
|
|
||||||
return job_object
|
|
||||||
|
|
||||||
def __is_named(self, section):
|
|
||||||
"""Check if a pipeline-gen configuration section is for a named job,
|
|
||||||
and if so return the name otherwise return none.
|
|
||||||
"""
|
|
||||||
for _name in self.named_jobs:
|
|
||||||
keys = [f"{_name}-job", f"{_name}-job-remove"]
|
|
||||||
if any([key for key in keys if key in section]):
|
|
||||||
return _name
|
|
||||||
|
|
||||||
return None
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def __job_name(name, suffix=""):
|
|
||||||
"""Compute the name of a named job with appropriate suffix.
|
|
||||||
Valid suffixes are either '-remove' or empty string or None
|
|
||||||
"""
|
|
||||||
assert isinstance(name, str)
|
|
||||||
|
|
||||||
jname = name
|
|
||||||
if suffix:
|
|
||||||
jname = f"{name}-job{suffix}"
|
|
||||||
else:
|
|
||||||
jname = f"{name}-job"
|
|
||||||
|
|
||||||
return jname
|
|
||||||
|
|
||||||
def __apply_submapping(self, dest, spec, section):
|
|
||||||
"""Apply submapping setion to the IR dict"""
|
|
||||||
matched = False
|
|
||||||
only_first = section.get("match_behavior", "first") == "first"
|
|
||||||
|
|
||||||
for match_attrs in reversed(section["submapping"]):
|
|
||||||
attrs = cfg.InternalConfigScope._process_dict_keyname_overrides(match_attrs)
|
|
||||||
for match_string in match_attrs["match"]:
|
|
||||||
if _spec_matches(spec, match_string):
|
|
||||||
matched = True
|
|
||||||
if "build-job-remove" in match_attrs:
|
|
||||||
spack.config.remove_yaml(dest, attrs["build-job-remove"])
|
|
||||||
if "build-job" in match_attrs:
|
|
||||||
spack.schema.merge_yaml(dest, attrs["build-job"])
|
|
||||||
break
|
|
||||||
if matched and only_first:
|
|
||||||
break
|
|
||||||
|
|
||||||
return dest
|
|
||||||
|
|
||||||
# Create jobs for all the pipeline specs
|
|
||||||
def init_pipeline_jobs(self, pipeline: PipelineDag):
|
|
||||||
for _, node in pipeline.traverse_nodes():
|
|
||||||
dag_hash = node.spec.dag_hash()
|
|
||||||
self.ir["jobs"][dag_hash] = self.__init_job(node.spec)
|
|
||||||
|
|
||||||
# Generate IR from the configs
|
|
||||||
def generate_ir(self):
|
|
||||||
"""Generate the IR from the Spack CI configurations."""
|
|
||||||
|
|
||||||
jobs = self.ir["jobs"]
|
|
||||||
|
|
||||||
# Implicit job defaults
|
|
||||||
defaults = [
|
|
||||||
{
|
|
||||||
"build-job": {
|
|
||||||
"script": [
|
|
||||||
"cd {env_dir}",
|
|
||||||
"spack env activate --without-view .",
|
|
||||||
"spack ci rebuild",
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{"noop-job": {"script": ['echo "All specs already up to date, nothing to rebuild."']}},
|
|
||||||
]
|
|
||||||
|
|
||||||
# Job overrides
|
|
||||||
overrides = [
|
|
||||||
# Reindex script
|
|
||||||
{
|
|
||||||
"reindex-job": {
|
|
||||||
"script:": ["spack buildcache update-index --keys {index_target_mirror}"]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
# Cleanup script
|
|
||||||
{
|
|
||||||
"cleanup-job": {
|
|
||||||
"script:": ["spack -d mirror destroy {mirror_prefix}/$CI_PIPELINE_ID"]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
# Add signing job tags
|
|
||||||
{"signing-job": {"tags": ["aws", "protected", "notary"]}},
|
|
||||||
# Remove reserved tags
|
|
||||||
{"any-job-remove": {"tags": SPACK_RESERVED_TAGS}},
|
|
||||||
]
|
|
||||||
|
|
||||||
pipeline_gen = overrides + self.ci_config.get("pipeline-gen", []) + defaults
|
|
||||||
|
|
||||||
for section in reversed(pipeline_gen):
|
|
||||||
name = self.__is_named(section)
|
|
||||||
has_submapping = "submapping" in section
|
|
||||||
has_dynmapping = "dynamic-mapping" in section
|
|
||||||
section = cfg.InternalConfigScope._process_dict_keyname_overrides(section)
|
|
||||||
|
|
||||||
if name:
|
|
||||||
remove_job_name = self.__job_name(name, suffix="-remove")
|
|
||||||
merge_job_name = self.__job_name(name)
|
|
||||||
do_remove = remove_job_name in section
|
|
||||||
do_merge = merge_job_name in section
|
|
||||||
|
|
||||||
def _apply_section(dest, src):
|
|
||||||
if do_remove:
|
|
||||||
dest = spack.config.remove_yaml(dest, src[remove_job_name])
|
|
||||||
if do_merge:
|
|
||||||
dest = copy.copy(spack.schema.merge_yaml(dest, src[merge_job_name]))
|
|
||||||
|
|
||||||
if name == "build":
|
|
||||||
# Apply attributes to all build jobs
|
|
||||||
for _, job in jobs.items():
|
|
||||||
if job["spec"]:
|
|
||||||
_apply_section(job["attributes"], section)
|
|
||||||
elif name == "any":
|
|
||||||
# Apply section attributes too all jobs
|
|
||||||
for _, job in jobs.items():
|
|
||||||
_apply_section(job["attributes"], section)
|
|
||||||
else:
|
|
||||||
# Create a signing job if there is script and the job hasn't
|
|
||||||
# been initialized yet
|
|
||||||
if name == "signing" and name not in jobs:
|
|
||||||
if "signing-job" in section:
|
|
||||||
if "script" not in section["signing-job"]:
|
|
||||||
continue
|
|
||||||
else:
|
|
||||||
jobs[name] = self.__init_job("")
|
|
||||||
# Apply attributes to named job
|
|
||||||
_apply_section(jobs[name]["attributes"], section)
|
|
||||||
|
|
||||||
elif has_submapping:
|
|
||||||
# Apply section jobs with specs to match
|
|
||||||
for _, job in jobs.items():
|
|
||||||
if job["spec"]:
|
|
||||||
job["attributes"] = self.__apply_submapping(
|
|
||||||
job["attributes"], job["spec"], section
|
|
||||||
)
|
|
||||||
elif has_dynmapping:
|
|
||||||
mapping = section["dynamic-mapping"]
|
|
||||||
|
|
||||||
dynmap_name = mapping.get("name")
|
|
||||||
|
|
||||||
# Check if this section should be skipped
|
|
||||||
dynmap_skip = os.environ.get("SPACK_CI_SKIP_DYNAMIC_MAPPING")
|
|
||||||
if dynmap_name and dynmap_skip:
|
|
||||||
if re.match(dynmap_skip, dynmap_name):
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Get the endpoint
|
|
||||||
endpoint = mapping["endpoint"]
|
|
||||||
endpoint_url = urlparse(endpoint)
|
|
||||||
|
|
||||||
# Configure the request header
|
|
||||||
header = {"User-Agent": web_util.SPACK_USER_AGENT}
|
|
||||||
header.update(mapping.get("header", {}))
|
|
||||||
|
|
||||||
# Expand header environment variables
|
|
||||||
# ie. if tokens are passed
|
|
||||||
for value in header.values():
|
|
||||||
value = os.path.expandvars(value)
|
|
||||||
|
|
||||||
verify_ssl = mapping.get("verify_ssl", spack.config.get("config:verify_ssl", True))
|
|
||||||
timeout = mapping.get("timeout", spack.config.get("config:connect_timeout", 1))
|
|
||||||
|
|
||||||
required = mapping.get("require", [])
|
|
||||||
allowed = mapping.get("allow", [])
|
|
||||||
ignored = mapping.get("ignore", [])
|
|
||||||
|
|
||||||
# required keys are implicitly allowed
|
|
||||||
allowed = sorted(set(allowed + required))
|
|
||||||
ignored = sorted(set(ignored))
|
|
||||||
required = sorted(set(required))
|
|
||||||
|
|
||||||
# Make sure required things are not also ignored
|
|
||||||
assert not any([ikey in required for ikey in ignored])
|
|
||||||
|
|
||||||
def job_query(job):
|
|
||||||
job_vars = job["attributes"]["variables"]
|
|
||||||
query = (
|
|
||||||
"{SPACK_JOB_SPEC_PKG_NAME}@{SPACK_JOB_SPEC_PKG_VERSION}"
|
|
||||||
# The preceding spaces are required (ref. https://github.com/spack/spack-gantry/blob/develop/docs/api.md#allocation)
|
|
||||||
" {SPACK_JOB_SPEC_VARIANTS}"
|
|
||||||
" arch={SPACK_JOB_SPEC_ARCH}"
|
|
||||||
"%{SPACK_JOB_SPEC_COMPILER_NAME}@{SPACK_JOB_SPEC_COMPILER_VERSION}"
|
|
||||||
).format_map(job_vars)
|
|
||||||
return f"spec={quote(query)}"
|
|
||||||
|
|
||||||
for job in jobs.values():
|
|
||||||
if not job["spec"]:
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Create request for this job
|
|
||||||
query = job_query(job)
|
|
||||||
request = Request(
|
|
||||||
endpoint_url._replace(query=query).geturl(), headers=header, method="GET"
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
response = _dyn_mapping_urlopener(
|
|
||||||
request, verify_ssl=verify_ssl, timeout=timeout
|
|
||||||
)
|
|
||||||
except Exception as e:
|
|
||||||
# For now just ignore any errors from dynamic mapping and continue
|
|
||||||
# This is still experimental, and failures should not stop CI
|
|
||||||
# from running normally
|
|
||||||
tty.warn(f"Failed to fetch dynamic mapping for query:\n\t{query}")
|
|
||||||
tty.warn(f"{e}")
|
|
||||||
continue
|
|
||||||
|
|
||||||
config = json.load(codecs.getreader("utf-8")(response))
|
|
||||||
|
|
||||||
# Strip ignore keys
|
|
||||||
if ignored:
|
|
||||||
for key in ignored:
|
|
||||||
if key in config:
|
|
||||||
config.pop(key)
|
|
||||||
|
|
||||||
# Only keep allowed keys
|
|
||||||
clean_config = {}
|
|
||||||
if allowed:
|
|
||||||
for key in allowed:
|
|
||||||
if key in config:
|
|
||||||
clean_config[key] = config[key]
|
|
||||||
else:
|
|
||||||
clean_config = config
|
|
||||||
|
|
||||||
# Verify all of the required keys are present
|
|
||||||
if required:
|
|
||||||
missing_keys = []
|
|
||||||
for key in required:
|
|
||||||
if key not in clean_config.keys():
|
|
||||||
missing_keys.append(key)
|
|
||||||
|
|
||||||
if missing_keys:
|
|
||||||
tty.warn(f"Response missing required keys: {missing_keys}")
|
|
||||||
|
|
||||||
if clean_config:
|
|
||||||
job["attributes"] = spack.schema.merge_yaml(
|
|
||||||
job.get("attributes", {}), clean_config
|
|
||||||
)
|
|
||||||
|
|
||||||
for _, job in jobs.items():
|
|
||||||
if job["spec"]:
|
|
||||||
job["spec"] = job["spec"].name
|
|
||||||
|
|
||||||
return self.ir
|
|
||||||
|
|
||||||
|
|
||||||
class SpackCIError(spack.error.SpackError):
|
|
||||||
def __init__(self, msg):
|
|
||||||
super().__init__(msg)
|
|
||||||
@@ -1,36 +0,0 @@
-# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-# Holds all known formatters
-"""Generators that support writing out pipelines for various CI platforms,
-using a common pipeline graph definition.
-"""
-import spack.error
-
-_generators = {}
-
-
-def generator(name):
-    """Decorator to register a pipeline generator method.
-    A generator method should take PipelineDag, SpackCIConfig, and
-    PipelineOptions arguments, and should produce a pipeline file.
-    """
-
-    def _decorator(generate_method):
-        _generators[name] = generate_method
-        return generate_method
-
-    return _decorator
-
-
-def get_generator(name):
-    try:
-        return _generators[name]
-    except KeyError:
-        raise UnknownGeneratorException(name)
-
-
-class UnknownGeneratorException(spack.error.SpackError):
-    def __init__(self, generator_name):
-        super().__init__(f"No registered generator for {generator_name}")
@@ -1,416 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import copy
import os
import shutil
from typing import List, Optional

import ruamel.yaml

import llnl.util.tty as tty

import spack
import spack.binary_distribution as bindist
import spack.config as cfg
import spack.mirrors.mirror
import spack.schema
import spack.spec
import spack.util.spack_yaml as syaml

from .common import (
    SPACK_RESERVED_TAGS,
    PipelineDag,
    PipelineOptions,
    PipelineType,
    SpackCIConfig,
    SpackCIError,
    ensure_expected_target_path,
    unpack_script,
    update_env_scopes,
    write_pipeline_manifest,
)
from .generator_registry import generator

# See https://docs.gitlab.com/ee/ci/yaml/#retry for descriptions of conditions
JOB_RETRY_CONDITIONS = [
    # "always",
    "unknown_failure",
    "script_failure",
    "api_failure",
    "stuck_or_timeout_failure",
    "runner_system_failure",
    "runner_unsupported",
    "stale_schedule",
    # "job_execution_timeout",
    "archived_failure",
    "unmet_prerequisites",
    "scheduler_failure",
    "data_integrity_failure",
]

JOB_NAME_FORMAT = "{name}{@version} {/hash}"


def _remove_reserved_tags(tags):
    """Convenience function to strip reserved tags from jobs"""
    return [tag for tag in tags if tag not in SPACK_RESERVED_TAGS]


def get_job_name(spec: spack.spec.Spec, build_group: Optional[str] = None) -> str:
    """Given a spec and possibly a build group, return the job name. If the
    resulting name is longer than 255 characters, it will be truncated.

    Arguments:
        spec: Spec job will build
        build_group: Name of build group this job belongs to (a CDash notion)

    Returns: The job name
    """
    job_name = spec.format(JOB_NAME_FORMAT)

    if build_group:
        job_name = f"{job_name} {build_group}"

    return job_name[:255]


def maybe_generate_manifest(pipeline: PipelineDag, options: PipelineOptions, manifest_path):
    # TODO: Consider including only hashes of rebuilt specs in the manifest,
    # instead of full source and destination urls. Also, consider renaming
    # the variable that controls whether or not to write the manifest from
    # "SPACK_COPY_BUILDCACHE" to "SPACK_WRITE_PIPELINE_MANIFEST" or similar.
    spack_buildcache_copy = os.environ.get("SPACK_COPY_BUILDCACHE", None)
    if spack_buildcache_copy:
        buildcache_copy_src_prefix = options.buildcache_destination.fetch_url
        buildcache_copy_dest_prefix = spack_buildcache_copy

        if options.pipeline_type == PipelineType.COPY_ONLY:
            manifest_specs = [s for s in options.env.all_specs() if not s.external]
        else:
            manifest_specs = [n.spec for _, n in pipeline.traverse_nodes(direction="children")]

        write_pipeline_manifest(
            manifest_specs, buildcache_copy_src_prefix, buildcache_copy_dest_prefix, manifest_path
        )


@generator("gitlab")
def generate_gitlab_yaml(pipeline: PipelineDag, spack_ci: SpackCIConfig, options: PipelineOptions):
    """Given a pipeline graph, job attributes, and pipeline options,
    write a pipeline that can be consumed by GitLab to the given output file.

    Arguments:
        pipeline: An already pruned graph of jobs representing all the specs to build
        spack_ci: An object containing the configured attributes of all jobs in the pipeline
        options: An object containing all the pipeline options gathered from yaml, env, etc...
    """
    ci_project_dir = os.environ.get("CI_PROJECT_DIR") or os.getcwd()
    generate_job_name = os.environ.get("CI_JOB_NAME", "job-does-not-exist")
    generate_pipeline_id = os.environ.get("CI_PIPELINE_ID", "pipeline-does-not-exist")
    artifacts_root = options.artifacts_root
    if artifacts_root.startswith(ci_project_dir):
        artifacts_root = os.path.relpath(artifacts_root, ci_project_dir)
    pipeline_artifacts_dir = os.path.join(ci_project_dir, artifacts_root)
    output_file = options.output_file

    if not output_file:
        output_file = os.path.abspath(".gitlab-ci.yml")
    else:
        output_file_path = os.path.abspath(output_file)
        gen_ci_dir = os.path.dirname(output_file_path)
        if not os.path.exists(gen_ci_dir):
            os.makedirs(gen_ci_dir)

    spack_ci_ir = spack_ci.generate_ir()

    concrete_env_dir = os.path.join(pipeline_artifacts_dir, "concrete_environment")

    # Now that we've added the mirrors we know about, they should be properly
    # reflected in the environment manifest file, so copy that into the
    # concrete environment directory, along with the spack.lock file.
    if not os.path.exists(concrete_env_dir):
        os.makedirs(concrete_env_dir)
    shutil.copyfile(options.env.manifest_path, os.path.join(concrete_env_dir, "spack.yaml"))
    shutil.copyfile(options.env.lock_path, os.path.join(concrete_env_dir, "spack.lock"))

    update_env_scopes(
        options.env,
        [
            os.path.relpath(s.path, concrete_env_dir)
            for s in cfg.scopes().values()
            if not s.writable
            and isinstance(s, (cfg.DirectoryConfigScope))
            and os.path.exists(s.path)
        ],
        os.path.join(concrete_env_dir, "spack.yaml"),
        # Here transforming windows paths is only required in the special case
        # of copy_only_pipelines, a unique scenario where the generate job and
        # child pipelines are run on different platforms. To make this compatible
        # w/ Windows, we cannot write Windows style path separators that will be
        # consumed on by the Posix copy job runner.
        #
        # TODO (johnwparent): Refactor config + cli read/write to deal only in
        # posix style paths
        transform_windows_paths=(options.pipeline_type == PipelineType.COPY_ONLY),
    )

    job_log_dir = os.path.join(pipeline_artifacts_dir, "logs")
    job_repro_dir = os.path.join(pipeline_artifacts_dir, "reproduction")
    job_test_dir = os.path.join(pipeline_artifacts_dir, "tests")
    user_artifacts_dir = os.path.join(pipeline_artifacts_dir, "user_data")

    # We communicate relative paths to the downstream jobs to avoid issues in
    # situations where the CI_PROJECT_DIR varies between the pipeline
    # generation job and the rebuild jobs. This can happen when gitlab
    # checks out the project into a runner-specific directory, for example,
    # and different runners are picked for generate and rebuild jobs.

    rel_concrete_env_dir = os.path.relpath(concrete_env_dir, ci_project_dir)
    rel_job_log_dir = os.path.relpath(job_log_dir, ci_project_dir)
    rel_job_repro_dir = os.path.relpath(job_repro_dir, ci_project_dir)
    rel_job_test_dir = os.path.relpath(job_test_dir, ci_project_dir)
    rel_user_artifacts_dir = os.path.relpath(user_artifacts_dir, ci_project_dir)

    def main_script_replacements(cmd):
        return cmd.replace("{env_dir}", rel_concrete_env_dir)

    output_object = {}
    job_id = 0
    stage_id = 0
    stages: List[List] = []
    stage_names = []

    max_length_needs = 0
    max_needs_job = ""

    if not options.pipeline_type == PipelineType.COPY_ONLY:
        for level, node in pipeline.traverse_nodes(direction="parents"):
            stage_id = level
            if len(stages) == stage_id:
                stages.append([])
            stages[stage_id].append(node.spec)
            stage_name = f"stage-{level}"

            if stage_name not in stage_names:
                stage_names.append(stage_name)

            release_spec = node.spec
            release_spec_dag_hash = release_spec.dag_hash()

            job_object = spack_ci_ir["jobs"][release_spec_dag_hash]["attributes"]

            if not job_object:
                tty.warn(f"No match found for {release_spec}, skipping it")
                continue

            if options.pipeline_type is not None:
                # For spack pipelines "public" and "protected" are reserved tags
                job_object["tags"] = _remove_reserved_tags(job_object.get("tags", []))
                if options.pipeline_type == PipelineType.PROTECTED_BRANCH:
                    job_object["tags"].extend(["protected"])
                elif options.pipeline_type == PipelineType.PULL_REQUEST:
                    job_object["tags"].extend(["public"])

            if "script" not in job_object:
                raise AttributeError

            job_object["script"] = unpack_script(job_object["script"], op=main_script_replacements)

            if "before_script" in job_object:
                job_object["before_script"] = unpack_script(job_object["before_script"])

            if "after_script" in job_object:
                job_object["after_script"] = unpack_script(job_object["after_script"])

            build_group = options.cdash_handler.build_group if options.cdash_handler else None
            job_name = get_job_name(release_spec, build_group)

            dep_nodes = pipeline.get_dependencies(node)
            job_object["needs"] = [
                {"job": get_job_name(dep_node.spec, build_group), "artifacts": False}
                for dep_node in dep_nodes
            ]

            job_object["needs"].append(
                {"job": generate_job_name, "pipeline": f"{generate_pipeline_id}"}
            )

            job_vars = job_object["variables"]

            # Let downstream jobs know whether the spec needed rebuilding, regardless
            # whether DAG pruning was enabled or not.
            already_built = bindist.get_mirrors_for_spec(spec=release_spec, index_only=True)
            job_vars["SPACK_SPEC_NEEDS_REBUILD"] = "False" if already_built else "True"

            if options.cdash_handler:
                build_name = options.cdash_handler.build_name(release_spec)
                job_vars["SPACK_CDASH_BUILD_NAME"] = build_name
                build_stamp = options.cdash_handler.build_stamp
                job_vars["SPACK_CDASH_BUILD_STAMP"] = build_stamp

            job_object["artifacts"] = spack.schema.merge_yaml(
                job_object.get("artifacts", {}),
                {
                    "when": "always",
                    "paths": [
                        rel_job_log_dir,
                        rel_job_repro_dir,
                        rel_job_test_dir,
                        rel_user_artifacts_dir,
                    ],
                },
            )

            job_object["stage"] = stage_name
            job_object["retry"] = {"max": 2, "when": JOB_RETRY_CONDITIONS}
            job_object["interruptible"] = True

            length_needs = len(job_object["needs"])
            if length_needs > max_length_needs:
                max_length_needs = length_needs
                max_needs_job = job_name

            output_object[job_name] = job_object
            job_id += 1

    tty.debug(f"{job_id} build jobs generated in {stage_id} stages")

    if job_id > 0:
        tty.debug(f"The max_needs_job is {max_needs_job}, with {max_length_needs} needs")

    service_job_retries = {
        "max": 2,
        "when": ["runner_system_failure", "stuck_or_timeout_failure", "script_failure"],
    }

    # In some cases, pipeline generation should write a manifest. Currently
    # the only purpose is to specify a list of sources and destinations for
    # everything that should be copied.
    distinguish_stack = options.stack_name if options.stack_name else "rebuilt"
    manifest_path = os.path.join(
        pipeline_artifacts_dir, "specs_to_copy", f"copy_{distinguish_stack}_specs.json"
    )
    maybe_generate_manifest(pipeline, options, manifest_path)

    if options.pipeline_type == PipelineType.COPY_ONLY:
        stage_names.append("copy")
        sync_job = copy.deepcopy(spack_ci_ir["jobs"]["copy"]["attributes"])
        sync_job["stage"] = "copy"
        sync_job["needs"] = [{"job": generate_job_name, "pipeline": f"{generate_pipeline_id}"}]

        if "variables" not in sync_job:
            sync_job["variables"] = {}

        sync_job["variables"][
            "SPACK_COPY_ONLY_DESTINATION"
        ] = options.buildcache_destination.fetch_url

        pipeline_mirrors = spack.mirrors.mirror.MirrorCollection(binary=True)
        if "buildcache-source" not in pipeline_mirrors:
            raise SpackCIError("Copy-only pipelines require a mirror named 'buildcache-source'")

        buildcache_source = pipeline_mirrors["buildcache-source"].fetch_url
        sync_job["variables"]["SPACK_BUILDCACHE_SOURCE"] = buildcache_source
        sync_job["dependencies"] = []

        output_object["copy"] = sync_job
        job_id += 1

    if job_id > 0:
        if (
            "script" in spack_ci_ir["jobs"]["signing"]["attributes"]
            and options.pipeline_type == PipelineType.PROTECTED_BRANCH
        ):
            # External signing: generate a job to check and sign binary pkgs
            stage_names.append("stage-sign-pkgs")
            signing_job = spack_ci_ir["jobs"]["signing"]["attributes"]

            signing_job["script"] = unpack_script(signing_job["script"])

            signing_job["stage"] = "stage-sign-pkgs"
            signing_job["when"] = "always"
            signing_job["retry"] = {"max": 2, "when": ["always"]}
            signing_job["interruptible"] = True
            if "variables" not in signing_job:
                signing_job["variables"] = {}
            signing_job["variables"][
                "SPACK_BUILDCACHE_DESTINATION"
            ] = options.buildcache_destination.push_url
            signing_job["dependencies"] = []

            output_object["sign-pkgs"] = signing_job

        if options.rebuild_index:
            # Add a final job to regenerate the index
            stage_names.append("stage-rebuild-index")
            final_job = spack_ci_ir["jobs"]["reindex"]["attributes"]

            final_job["stage"] = "stage-rebuild-index"
            target_mirror = options.buildcache_destination.push_url
            final_job["script"] = unpack_script(
                final_job["script"],
                op=lambda cmd: cmd.replace("{index_target_mirror}", target_mirror),
            )

            final_job["when"] = "always"
            final_job["retry"] = service_job_retries
            final_job["interruptible"] = True
            final_job["dependencies"] = []

            output_object["rebuild-index"] = final_job

        output_object["stages"] = stage_names

        # Capture the version of Spack used to generate the pipeline, that can be
        # passed to `git checkout` for version consistency. If we aren't in a Git
        # repository, presume we are a Spack release and use the Git tag instead.
        spack_version = spack.get_version()
        version_to_clone = spack.get_spack_commit() or f"v{spack.spack_version}"

        rebuild_everything = not options.prune_up_to_date and not options.prune_untouched

        output_object["variables"] = {
            "SPACK_ARTIFACTS_ROOT": artifacts_root,
            "SPACK_CONCRETE_ENV_DIR": rel_concrete_env_dir,
            "SPACK_VERSION": spack_version,
            "SPACK_CHECKOUT_VERSION": version_to_clone,
            "SPACK_JOB_LOG_DIR": rel_job_log_dir,
            "SPACK_JOB_REPRO_DIR": rel_job_repro_dir,
            "SPACK_JOB_TEST_DIR": rel_job_test_dir,
            "SPACK_PIPELINE_TYPE": options.pipeline_type.name if options.pipeline_type else "None",
            "SPACK_CI_STACK_NAME": os.environ.get("SPACK_CI_STACK_NAME", "None"),
            "SPACK_REBUILD_CHECK_UP_TO_DATE": str(options.prune_up_to_date),
            "SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
            "SPACK_REQUIRE_SIGNING": str(options.require_signing),
        }

        if options.stack_name:
            output_object["variables"]["SPACK_CI_STACK_NAME"] = options.stack_name

        output_vars = output_object["variables"]
        for item, val in output_vars.items():
            output_vars[item] = ensure_expected_target_path(val)

    else:
        # No jobs were generated
        noop_job = spack_ci_ir["jobs"]["noop"]["attributes"]
        # If this job fails ignore the status and carry on
        noop_job["retry"] = 0
        noop_job["allow_failure"] = True

        tty.debug("No specs to rebuild, generating no-op job")
        output_object = {"no-specs-to-rebuild": noop_job}

    # Ensure the child pipeline always runs
    output_object["workflow"] = {"rules": [{"when": "always"}]}

    sorted_output = {}
    for output_key, output_value in sorted(output_object.items()):
        sorted_output[output_key] = output_value

    # Minimize yaml output size through use of anchors
    syaml.anchorify(sorted_output)

    with open(output_file, "w", encoding="utf-8") as f:
        ruamel.yaml.YAML().dump(sorted_output, f)
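Editorial note: the final anchorify/dump step above shrinks the generated YAML by letting identical subtrees be emitted once. An illustrative sketch, assuming only ruamel.yaml behaviour (anchors are emitted for objects that share identity), not Spack internals:

# Illustrative sketch only: ruamel.yaml writes an anchor/alias pair when two keys
# reference the same object, which is what the anchorify step relies on.
import sys

import ruamel.yaml

shared = {"tags": ["spack"], "retry": {"max": 2}}
pipeline = {"job-a": shared, "job-b": shared}  # identical subtrees share identity

ruamel.yaml.YAML().dump(pipeline, sys.stdout)
# job-a: &id001
#   tags:
#   - spack
#   retry:
#     max: 2
# job-b: *id001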
@@ -4,7 +4,6 @@
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)

 import argparse
-import difflib
 import importlib
 import os
 import re
@@ -24,10 +23,10 @@
 import spack.environment as ev
 import spack.error
 import spack.extensions
+import spack.parser
 import spack.paths
 import spack.repo
 import spack.spec
-import spack.spec_parser
 import spack.store
 import spack.traverse as traverse
 import spack.user_environment as uenv
@@ -126,8 +125,6 @@ def get_module(cmd_name):
         tty.debug("Imported {0} from built-in commands".format(pname))
     except ImportError:
         module = spack.extensions.get_module(cmd_name)
-        if not module:
-            raise CommandNotFoundError(cmd_name)

     attr_setdefault(module, SETUP_PARSER, lambda *args: None)  # null-op
     attr_setdefault(module, DESCRIPTION, "")
@@ -163,12 +160,12 @@ def quote_kvp(string: str) -> str:
     or ``name==``, and we assume the rest of the argument is the value. This covers the
     common cases of passign flags, e.g., ``cflags="-O2 -g"`` on the command line.
     """
-    match = spack.spec_parser.SPLIT_KVP.match(string)
+    match = spack.parser.SPLIT_KVP.match(string)
     if not match:
         return string

     key, delim, value = match.groups()
-    return f"{key}{delim}{spack.spec_parser.quote_if_needed(value)}"
+    return f"{key}{delim}{spack.parser.quote_if_needed(value)}"


 def parse_specs(
@@ -180,7 +177,7 @@ def parse_specs(
     args = [args] if isinstance(args, str) else args
     arg_string = " ".join([quote_kvp(arg) for arg in args])

-    specs = spack.spec_parser.parse(arg_string)
+    specs = spack.parser.parse(arg_string)
     if not concretize:
         return specs
@@ -694,24 +691,3 @@ def find_environment(args):
 def first_line(docstring):
     """Return the first line of the docstring."""
     return docstring.split("\n")[0]
-
-
-class CommandNotFoundError(spack.error.SpackError):
-    """Exception class thrown when a requested command is not recognized as
-    such.
-    """
-
-    def __init__(self, cmd_name):
-        msg = (
-            f"{cmd_name} is not a recognized Spack command or extension command; "
-            "check with `spack commands`."
-        )
-        long_msg = None
-
-        similar = difflib.get_close_matches(cmd_name, all_commands())
-
-        if 1 <= len(similar) <= 5:
-            long_msg = "\nDid you mean one of the following commands?\n  "
-            long_msg += "\n  ".join(similar)
-
-        super().__init__(msg, long_msg)
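Editorial note: the quote_kvp hunks above only swap the module that provides SPLIT_KVP and quote_if_needed; the behaviour is identical on both sides. A self-contained sketch of that behaviour, where the regex and the quoting rule are assumptions for illustration (the real ones live in spack.spec_parser, or spack.parser on the older branch):

import re

# Assumed shape of the key/value splitter, for illustration only.
SPLIT_KVP = re.compile(r"^(\w+)(==?)(.*)$")

def quote_if_needed(value: str) -> str:
    # Quote the value if it contains whitespace so it survives re-parsing.
    return f'"{value}"' if " " in value else value

def quote_kvp(string: str) -> str:
    match = SPLIT_KVP.match(string)
    if not match:
        return string
    key, delim, value = match.groups()
    return f"{key}{delim}{quote_if_needed(value)}"

print(quote_kvp("cflags=-O2 -g"))  # cflags="-O2 -g"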
@@ -16,7 +16,7 @@
 import spack.bootstrap.config
 import spack.bootstrap.core
 import spack.config
-import spack.mirrors.utils
+import spack.mirror
 import spack.spec
 import spack.stage
 import spack.util.path
@@ -29,7 +29,7 @@

 # Tarball to be downloaded if binary packages are requested in a local mirror
-BINARY_TARBALL = "https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.6/bootstrap-buildcache.tar.gz"
+BINARY_TARBALL = "https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.4/bootstrap-buildcache.tar.gz"

 #: Subdirectory where to create the mirror
 LOCAL_MIRROR_DIR = "bootstrap_cache"
@@ -51,9 +51,9 @@
     },
 }

-CLINGO_JSON = "$spack/share/spack/bootstrap/github-actions-v0.6/clingo.json"
+CLINGO_JSON = "$spack/share/spack/bootstrap/github-actions-v0.4/clingo.json"
-GNUPG_JSON = "$spack/share/spack/bootstrap/github-actions-v0.6/gnupg.json"
+GNUPG_JSON = "$spack/share/spack/bootstrap/github-actions-v0.4/gnupg.json"
-PATCHELF_JSON = "$spack/share/spack/bootstrap/github-actions-v0.6/patchelf.json"
+PATCHELF_JSON = "$spack/share/spack/bootstrap/github-actions-v0.4/patchelf.json"

 # Metadata for a generated source mirror
 SOURCE_METADATA = {
@@ -400,7 +400,7 @@ def _mirror(args):
         llnl.util.tty.set_msg_enabled(False)
         spec = spack.spec.Spec(spec_str).concretized()
         for node in spec.traverse():
-            spack.mirrors.utils.create(mirror_dir, [node])
+            spack.mirror.create(mirror_dir, [node])
         llnl.util.tty.set_msg_enabled(True)

    if args.binary_packages:
@@ -419,7 +419,7 @@ def write_metadata(subdir, metadata):
    metadata_rel_dir = os.path.join("metadata", subdir)
    metadata_yaml = os.path.join(args.root_dir, metadata_rel_dir, "metadata.yaml")
    llnl.util.filesystem.mkdirp(os.path.dirname(metadata_yaml))
-    with open(metadata_yaml, mode="w", encoding="utf-8") as f:
+    with open(metadata_yaml, mode="w") as f:
        spack.util.spack_yaml.dump(metadata, stream=f)
    return os.path.dirname(metadata_yaml), metadata_rel_dir
@@ -21,7 +21,7 @@
 import spack.deptypes as dt
 import spack.environment as ev
 import spack.error
-import spack.mirrors.mirror
+import spack.mirror
 import spack.oci.oci
 import spack.spec
 import spack.stage
@@ -392,7 +392,7 @@ def push_fn(args):
    roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots()

    mirror = args.mirror
-    assert isinstance(mirror, spack.mirrors.mirror.Mirror)
+    assert isinstance(mirror, spack.mirror.Mirror)

    push_url = mirror.push_url
@@ -731,7 +731,7 @@ def manifest_copy(manifest_file_list, dest_mirror=None):
    deduped_manifest = {}

    for manifest_path in manifest_file_list:
-        with open(manifest_path, encoding="utf-8") as fd:
+        with open(manifest_path) as fd:
            manifest = json.loads(fd.read())
            for spec_hash, copy_list in manifest.items():
                # Last duplicate hash wins
@@ -750,7 +750,7 @@ def manifest_copy(manifest_file_list, dest_mirror=None):
            copy_buildcache_file(copy_file["src"], dest)


-def update_index(mirror: spack.mirrors.mirror.Mirror, update_keys=False):
+def update_index(mirror: spack.mirror.Mirror, update_keys=False):
    # Special case OCI images for now.
    try:
        image_ref = spack.oci.oci.image_from_mirror(mirror)
@@ -253,7 +253,7 @@ def add_versions_to_package(pkg: PackageBase, version_lines: str, is_batch: bool
        if match:
            new_versions.append((Version(match.group(1)), ver_line))

-    with open(filename, "r+", encoding="utf-8") as f:
+    with open(filename, "r+") as f:
        contents = f.read()
        split_contents = version_statement_re.split(contents)
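Editorial note: many hunks in this compare differ only in whether open() is given an explicit encoding. The motivation, as a hedged aside: when encoding is omitted, Python falls back to the platform's preferred locale encoding, so the same file can decode differently across machines. A minimal sketch (the filename is a throwaway example):

import locale

# Default text encoding used by open() when no encoding= is passed; this is
# platform- and locale-dependent (e.g. cp1252 on some Windows setups).
print(locale.getpreferredencoding(False))

# Passing encoding="utf-8" pins the behaviour regardless of locale.
with open("example.txt", "w", encoding="utf-8") as f:
    f.write("written as UTF-8 on every platform\n")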
@@ -6,6 +6,7 @@
 import json
 import os
 import shutil
+import warnings
 from urllib.parse import urlparse, urlunparse

 import llnl.util.filesystem as fs
@@ -16,11 +17,10 @@
 import spack.ci as spack_ci
 import spack.cmd
 import spack.cmd.buildcache as buildcache
-import spack.cmd.common.arguments
 import spack.config as cfg
 import spack.environment as ev
 import spack.hash_types as ht
-import spack.mirrors.mirror
+import spack.mirror
 import spack.util.gpg as gpg_util
 import spack.util.timer as timer
 import spack.util.url as url_util
@@ -62,8 +62,22 @@ def setup_parser(subparser):
        "path to the file where generated jobs file should be written. "
        "default is .gitlab-ci.yml in the root of the repository",
    )
-    prune_dag_group = generate.add_mutually_exclusive_group()
-    prune_dag_group.add_argument(
+    generate.add_argument(
+        "--optimize",
+        action="store_true",
+        default=False,
+        help="(DEPRECATED) optimize the gitlab yaml file for size\n\n"
+        "run the generated document through a series of optimization passes "
+        "designed to reduce the size of the generated file",
+    )
+    generate.add_argument(
+        "--dependencies",
+        action="store_true",
+        default=False,
+        help="(DEPRECATED) disable DAG scheduling (use 'plain' dependencies)",
+    )
+    prune_group = generate.add_mutually_exclusive_group()
+    prune_group.add_argument(
        "--prune-dag",
        action="store_true",
        dest="prune_dag",
@@ -71,7 +85,7 @@ def setup_parser(subparser):
        help="skip up-to-date specs\n\n"
        "do not generate jobs for specs that are up-to-date on the mirror",
    )
-    prune_dag_group.add_argument(
+    prune_group.add_argument(
        "--no-prune-dag",
        action="store_false",
        dest="prune_dag",
@@ -79,23 +93,6 @@ def setup_parser(subparser):
        help="process up-to-date specs\n\n"
        "generate jobs for specs even when they are up-to-date on the mirror",
    )
-    prune_ext_group = generate.add_mutually_exclusive_group()
-    prune_ext_group.add_argument(
-        "--prune-externals",
-        action="store_true",
-        dest="prune_externals",
-        default=True,
-        help="skip external specs\n\n"
-        "do not generate jobs for specs that are marked as external",
-    )
-    prune_ext_group.add_argument(
-        "--no-prune-externals",
-        action="store_false",
-        dest="prune_externals",
-        default=True,
-        help="process external specs\n\n"
-        "generate jobs for specs even when they are marked as external",
-    )
    generate.add_argument(
        "--check-index-only",
        action="store_true",
@@ -111,18 +108,14 @@ def setup_parser(subparser):
    )
    generate.add_argument(
        "--artifacts-root",
-        default="jobs_scratch_dir",
+        default=None,
        help="path to the root of the artifacts directory\n\n"
-        "The spack ci module assumes it will normally be run from within your project "
-        "directory, wherever that is checked out to run your ci. The artifacts root directory "
-        "should specifiy a name that can safely be used for artifacts within your project "
-        "directory.",
+        "if provided, concrete environment files (spack.yaml, spack.lock) will be generated under "
+        "this directory. their location will be passed to generated child jobs through the "
+        "SPACK_CONCRETE_ENVIRONMENT_PATH variable",
    )
    generate.set_defaults(func=ci_generate)

-    spack.cmd.common.arguments.add_concretizer_args(generate)
-    spack.cmd.common.arguments.add_common_arguments(generate, ["jobs"])

    # Rebuild the buildcache index associated with the mirror in the
    # active, gitlab-enabled environment.
    index = subparsers.add_parser(
@@ -152,7 +145,6 @@ def setup_parser(subparser):
        help="stop stand-alone tests after the first failure",
    )
    rebuild.set_defaults(func=ci_rebuild)
-    spack.cmd.common.arguments.add_common_arguments(rebuild, ["jobs"])

    # Facilitate reproduction of a failed CI build job
    reproduce = subparsers.add_parser(
@@ -195,8 +187,42 @@ def ci_generate(args):
    before invoking this command. the value must be the CDash authorization token needed to create
    a build group and register all generated jobs under it
    """
+    if args.optimize:
+        warnings.warn(
+            "The --optimize option has been deprecated, and currently has no effect. "
+            "It will be removed in Spack v0.24."
+        )
+
+    if args.dependencies:
+        warnings.warn(
+            "The --dependencies option has been deprecated, and currently has no effect. "
+            "It will be removed in Spack v0.24."
+        )
+
    env = spack.cmd.require_active_env(cmd_name="ci generate")
-    spack_ci.generate_pipeline(env, args)
+
+    output_file = args.output_file
+    prune_dag = args.prune_dag
+    index_only = args.index_only
+    artifacts_root = args.artifacts_root
+
+    if not output_file:
+        output_file = os.path.abspath(".gitlab-ci.yml")
+    else:
+        output_file_path = os.path.abspath(output_file)
+        gen_ci_dir = os.path.dirname(output_file_path)
+        if not os.path.exists(gen_ci_dir):
+            os.makedirs(gen_ci_dir)
+
+    # Generate the jobs
+    spack_ci.generate_gitlab_ci_yaml(
+        env,
+        True,
+        output_file,
+        prune_dag=prune_dag,
+        check_index_only=index_only,
+        artifacts_root=artifacts_root,
+    )


 def ci_reindex(args):
@@ -214,7 +240,7 @@ def ci_reindex(args):
    ci_mirrors = yaml_root["mirrors"]
    mirror_urls = [url for url in ci_mirrors.values()]
    remote_mirror_url = mirror_urls[0]
-    mirror = spack.mirrors.mirror.Mirror(remote_mirror_url)
+    mirror = spack.mirror.Mirror(remote_mirror_url)

    buildcache.update_index(mirror, update_keys=True)
@@ -302,7 +328,7 @@ def ci_rebuild(args):
    full_rebuild = True if rebuild_everything and rebuild_everything.lower() == "true" else False

-    pipeline_mirrors = spack.mirrors.mirror.MirrorCollection(binary=True)
+    pipeline_mirrors = spack.mirror.MirrorCollection(binary=True)
    buildcache_destination = None
    if "buildcache-destination" not in pipeline_mirrors:
        tty.die("spack ci rebuild requires a mirror named 'buildcache-destination")
@@ -361,7 +387,7 @@ def ci_rebuild(args):
    # Write this job's spec json into the reproduction directory, and it will
    # also be used in the generated "spack install" command to install the spec
    tty.debug("job concrete spec path: {0}".format(job_spec_json_path))
-    with open(job_spec_json_path, "w", encoding="utf-8") as fd:
+    with open(job_spec_json_path, "w") as fd:
        fd.write(job_spec.to_json(hash=ht.dag_hash))

    # Write some other details to aid in reproduction into an artifact
@@ -371,7 +397,7 @@ def ci_rebuild(args):
        "job_spec_json": job_spec_json_file,
        "ci_project_dir": ci_project_dir,
    }
-    with open(repro_file, "w", encoding="utf-8") as fd:
+    with open(repro_file, "w") as fd:
        fd.write(json.dumps(repro_details))

    # Write information about spack into an artifact in the repro dir
@@ -407,19 +433,14 @@ def ci_rebuild(args):
    if not config["verify_ssl"]:
        spack_cmd.append("-k")

-    install_args = [
-        f'--use-buildcache={spack_ci.common.win_quote("package:never,dependencies:only")}'
-    ]
+    install_args = [f'--use-buildcache={spack_ci.win_quote("package:never,dependencies:only")}']

    can_verify = spack_ci.can_verify_binaries()
    verify_binaries = can_verify and spack_is_pr_pipeline is False
    if not verify_binaries:
        install_args.append("--no-check-signature")

-    if args.jobs:
-        install_args.append(f"-j{args.jobs}")
-
-    slash_hash = spack_ci.common.win_quote("/" + job_spec.dag_hash())
+    slash_hash = spack_ci.win_quote("/" + job_spec.dag_hash())

    # Arguments when installing the root from sources
    deps_install_args = install_args + ["--only=dependencies"]
@@ -584,7 +605,7 @@ def ci_rebuild(args):
    rebuild_timer.stop()
    try:
-        with open("install_timers.json", "w", encoding="utf-8") as timelog:
+        with open("install_timers.json", "w") as timelog:
            extra_attributes = {"name": ".ci-rebuild"}
            rebuild_timer.write_json(timelog, extra_attributes=extra_attributes)
    except Exception as e:
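Editorial note: the setup_parser hunks above pair a store_true and a store_false option on the same dest inside a mutually exclusive group. A minimal standalone sketch of that argparse pattern; the option names mirror the diff, but the parser and the default value are illustrative assumptions:

import argparse

parser = argparse.ArgumentParser("demo")
prune_group = parser.add_mutually_exclusive_group()
# Both options write to the same dest; the group forbids passing both at once.
prune_group.add_argument("--prune-dag", action="store_true", dest="prune_dag", default=True)
prune_group.add_argument("--no-prune-dag", action="store_false", dest="prune_dag", default=True)

print(parser.parse_args([]).prune_dag)                  # True (assumed default)
print(parser.parse_args(["--no-prune-dag"]).prune_dag)  # False
# parser.parse_args(["--prune-dag", "--no-prune-dag"]) exits with a usage error.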
@@ -743,7 +743,7 @@ def rst(args: Namespace, out: IO) -> None:
    # extract cross-refs of the form `_cmd-spack-<cmd>:` from rst files
    documented_commands: Set[str] = set()
    for filename in args.rst_files:
-        with open(filename, encoding="utf-8") as f:
+        with open(filename) as f:
            for line in f:
                match = re.match(r"\.\. _cmd-(spack-.*):", line)
                if match:
@@ -815,7 +815,7 @@ def prepend_header(args: Namespace, out: IO) -> None:
    if not args.header:
        return

-    with open(args.header, encoding="utf-8") as header:
+    with open(args.header) as header:
        out.write(header.read())
@@ -836,7 +836,7 @@ def _commands(parser: ArgumentParser, args: Namespace) -> None:

    if args.update:
        tty.msg(f"Updating file: {args.update}")
-        with open(args.update, "w", encoding="utf-8") as f:
+        with open(args.update, "w") as f:
            prepend_header(args, f)
            formatter(args, f)
@@ -14,8 +14,7 @@
 import spack.config
 import spack.deptypes as dt
 import spack.environment as ev
-import spack.mirrors.mirror
-import spack.mirrors.utils
+import spack.mirror
 import spack.reporters
 import spack.spec
 import spack.store
@@ -169,7 +168,7 @@ def installed_specs(args):
    else:
        packages = []
        for file in args.specfiles:
-            with open(file, "r", encoding="utf-8") as f:
+            with open(file, "r") as f:
                s = spack.spec.Spec.from_yaml(f)
                packages.append(s.format())
        return packages
@@ -529,7 +528,6 @@ def __call__(self, parser, namespace, values, option_string):
        # the const from the constructor or a value from the CLI.
        # Note that this is only called if the argument is actually
        # specified on the command line.
-        spack.config.CONFIG.ensure_scope_ordering()
        spack.config.set(self.config_path, self.const, scope="command_line")
@@ -691,31 +689,31 @@ def mirror_name_or_url(m):

    # If there's a \ or / in the name, it's interpreted as a path or url.
    if "/" in m or "\\" in m or m in (".", ".."):
-        return spack.mirrors.mirror.Mirror(m)
+        return spack.mirror.Mirror(m)

    # Otherwise, the named mirror is required to exist.
    try:
-        return spack.mirrors.utils.require_mirror_name(m)
+        return spack.mirror.require_mirror_name(m)
    except ValueError as e:
        raise argparse.ArgumentTypeError(f"{e}. Did you mean {os.path.join('.', m)}?") from e


 def mirror_url(url):
    try:
-        return spack.mirrors.mirror.Mirror.from_url(url)
+        return spack.mirror.Mirror.from_url(url)
    except ValueError as e:
        raise argparse.ArgumentTypeError(str(e)) from e


 def mirror_directory(path):
    try:
-        return spack.mirrors.mirror.Mirror.from_local_path(path)
+        return spack.mirror.Mirror.from_local_path(path)
    except ValueError as e:
        raise argparse.ArgumentTypeError(str(e)) from e


 def mirror_name(name):
    try:
-        return spack.mirrors.utils.require_mirror_name(name)
+        return spack.mirror.require_mirror_name(name)
    except ValueError as e:
        raise argparse.ArgumentTypeError(str(e)) from e
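Editorial note: the mirror_* helpers above are meant to be used as argparse type= callables; converting a ValueError into argparse.ArgumentTypeError is what turns a bad value into a clean usage error instead of a traceback. A small hedged sketch of the same pattern with a made-up converter:

import argparse

def positive_int(value: str) -> int:
    # Same idea as the mirror_* helpers: raise ArgumentTypeError so argparse
    # reports a usage error rather than propagating the exception.
    try:
        number = int(value)
    except ValueError as e:
        raise argparse.ArgumentTypeError(str(e)) from e
    if number <= 0:
        raise argparse.ArgumentTypeError(f"{value} is not a positive integer")
    return number

parser = argparse.ArgumentParser("demo")
parser.add_argument("--jobs", type=positive_int)
print(parser.parse_args(["--jobs", "4"]).jobs)  # 4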
@@ -14,7 +14,6 @@
 import spack.config
 import spack.environment as ev
 import spack.error
-import spack.schema
 import spack.schema.env
 import spack.spec
 import spack.store
@@ -567,7 +566,7 @@ def config_prefer_upstream(args):

    # Simply write the config to the specified file.
    existing = spack.config.get("packages", scope=scope)
-    new = spack.schema.merge_yaml(existing, pkgs)
+    new = spack.config.merge_yaml(existing, pkgs)
    spack.config.set("packages", new, scope)
    config_file = spack.config.CONFIG.get_config_filename(scope, section)
@@ -110,7 +110,7 @@ def write(self, pkg_path):
        all_deps.append(self.dependencies)

        # Write out a template for the file
-        with open(pkg_path, "w", encoding="utf-8") as pkg_file:
+        with open(pkg_path, "w") as pkg_file:
            pkg_file.write(
                package_template.format(
                    name=self.name,
@@ -76,7 +76,7 @@ def locate_package(name: str, repo: spack.repo.Repo) -> str:
    path = repo.filename_for_package_name(name)

    try:
-        with open(path, "r", encoding="utf-8"):
+        with open(path, "r"):
            return path
    except OSError as e:
        if e.errno == errno.ENOENT:
@@ -93,7 +93,7 @@ def locate_file(name: str, path: str) -> str:

    # Try to open direct match.
    try:
-        with open(file_path, "r", encoding="utf-8"):
+        with open(file_path, "r"):
            return file_path
    except OSError as e:
        if e.errno != errno.ENOENT:
@@ -865,7 +865,7 @@ def env_loads(args):
    args.recurse_dependencies = False

    loads_file = fs.join_path(env.path, "loads")
-    with open(loads_file, "w", encoding="utf-8") as f:
+    with open(loads_file, "w") as f:
        specs = env._get_environment_specs(recurse_dependencies=recurse_dependencies)

        spack.cmd.modules.loads(module_type, specs, args, f)
@@ -1053,7 +1053,7 @@ def env_depfile(args):

    # Finally write to stdout/file.
    if args.output:
-        with open(args.output, "w", encoding="utf-8") as f:
+        with open(args.output, "w") as f:
            f.write(makefile)
    else:
        sys.stdout.write(makefile)
@@ -8,7 +8,7 @@
 import tempfile

 import spack.binary_distribution
-import spack.mirrors.mirror
+import spack.mirror
 import spack.paths
 import spack.stage
 import spack.util.gpg
@@ -217,11 +217,11 @@ def gpg_publish(args):
    mirror = None
    if args.directory:
        url = spack.util.url.path_to_file_url(args.directory)
-        mirror = spack.mirrors.mirror.Mirror(url, url)
+        mirror = spack.mirror.Mirror(url, url)
    elif args.mirror_name:
-        mirror = spack.mirrors.mirror.MirrorCollection(binary=True).lookup(args.mirror_name)
+        mirror = spack.mirror.MirrorCollection(binary=True).lookup(args.mirror_name)
    elif args.mirror_url:
-        mirror = spack.mirrors.mirror.Mirror(args.mirror_url, args.mirror_url)
+        mirror = spack.mirror.Mirror(args.mirror_url, args.mirror_url)

    with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
        spack.binary_distribution._url_push_keys(
@@ -291,7 +291,7 @@ def _dump_log_on_error(e: InstallError):
        tty.error("'spack install' created no log.")
    else:
        sys.stderr.write("Full build log:\n")
-        with open(e.pkg.log_path, errors="replace", encoding="utf-8") as log:
+        with open(e.pkg.log_path, errors="replace") as log:
            shutil.copyfileobj(log, sys.stderr)
@@ -445,7 +445,7 @@ def concrete_specs_from_file(args):
    """Return the list of concrete specs read from files."""
    result = []
    for file in args.specfiles:
-        with open(file, "r", encoding="utf-8") as f:
+        with open(file, "r") as f:
            if file.endswith("yaml") or file.endswith("yml"):
                s = spack.spec.Spec.from_yaml(f)
            else:
@@ -191,7 +191,7 @@ def verify(args):

    for relpath in _licensed_files(args):
        path = os.path.join(args.root, relpath)
-        with open(path, encoding="utf-8") as f:
+        with open(path) as f:
            lines = [line for line in f][:license_lines]

        error = _check_license(lines, path)
@@ -340,7 +340,7 @@ def list(parser, args):
            return

        tty.msg("Updating file: %s" % args.update)
-        with open(args.update, "w", encoding="utf-8") as f:
+        with open(args.update, "w") as f:
            formatter(sorted_packages, f)

    elif args.count:
@@ -31,7 +31,7 @@ def line_to_rtf(str):
        return str.replace("\n", "\\par")

    contents = ""
-    with open(file_path, "r+", encoding="utf-8") as f:
+    with open(file_path, "r+") as f:
        for line in f.readlines():
            contents += line_to_rtf(line)
    return rtf_header.format(contents)
@@ -93,7 +93,7 @@ def make_installer(parser, args):
    rtf_spack_license = txt_to_rtf(spack_license)
    spack_license = posixpath.join(source_dir, "LICENSE.rtf")

-    with open(spack_license, "w", encoding="utf-8") as rtf_license:
+    with open(spack_license, "w") as rtf_license:
        written = rtf_license.write(rtf_spack_license)
    if written == 0:
        raise RuntimeError("Failed to generate properly formatted license file")
@@ -14,8 +14,7 @@
 import spack.concretize
 import spack.config
 import spack.environment as ev
-import spack.mirrors.mirror
-import spack.mirrors.utils
+import spack.mirror
 import spack.repo
 import spack.spec
 import spack.util.web as web_util
@@ -366,15 +365,15 @@ def mirror_add(args):
            connection["autopush"] = args.autopush
        if args.signed is not None:
            connection["signed"] = args.signed
-        mirror = spack.mirrors.mirror.Mirror(connection, name=args.name)
+        mirror = spack.mirror.Mirror(connection, name=args.name)
    else:
-        mirror = spack.mirrors.mirror.Mirror(args.url, name=args.name)
+        mirror = spack.mirror.Mirror(args.url, name=args.name)
-    spack.mirrors.utils.add(mirror, args.scope)
+    spack.mirror.add(mirror, args.scope)


 def mirror_remove(args):
    """remove a mirror by name"""
-    spack.mirrors.utils.remove(args.name, args.scope)
+    spack.mirror.remove(args.name, args.scope)


 def _configure_mirror(args):
@@ -383,7 +382,7 @@ def _configure_mirror(args):
    if args.name not in mirrors:
        tty.die(f"No mirror found with name {args.name}.")

-    entry = spack.mirrors.mirror.Mirror(mirrors[args.name], args.name)
+    entry = spack.mirror.Mirror(mirrors[args.name], args.name)
    direction = "fetch" if args.fetch else "push" if args.push else None
    changes = {}
    if args.url:
@@ -450,7 +449,7 @@ def mirror_set_url(args):
 def mirror_list(args):
    """print out available mirrors to the console"""

-    mirrors = spack.mirrors.mirror.MirrorCollection(scope=args.scope)
+    mirrors = spack.mirror.MirrorCollection(scope=args.scope)
    if not mirrors:
        tty.msg("No mirrors configured.")
        return
@@ -468,7 +467,7 @@ def specs_from_text_file(filename, concretize=False):
        concretize (bool): if True concretize the specs before returning
            the list.
    """
-    with open(filename, "r", encoding="utf-8") as f:
+    with open(filename, "r") as f:
        specs_in_file = f.readlines()
    specs_in_file = [s.strip() for s in specs_in_file]
    return spack.cmd.parse_specs(" ".join(specs_in_file), concretize=concretize)
@@ -490,9 +489,9 @@ def concrete_specs_from_user(args):

 def extend_with_additional_versions(specs, num_versions):
    if num_versions == "all":
-        mirror_specs = spack.mirrors.utils.get_all_versions(specs)
+        mirror_specs = spack.mirror.get_all_versions(specs)
    else:
-        mirror_specs = spack.mirrors.utils.get_matching_versions(specs, num_versions=num_versions)
+        mirror_specs = spack.mirror.get_matching_versions(specs, num_versions=num_versions)
    mirror_specs = [x.concretized() for x in mirror_specs]
    return mirror_specs

@@ -571,7 +570,7 @@ def concrete_specs_from_environment():

 def all_specs_with_all_versions():
    specs = [spack.spec.Spec(n) for n in spack.repo.all_package_names()]
-    mirror_specs = spack.mirrors.utils.get_all_versions(specs)
+    mirror_specs = spack.mirror.get_all_versions(specs)
    mirror_specs.sort(key=lambda s: (s.name, s.version))
    return mirror_specs

@@ -660,21 +659,19 @@ def _specs_and_action(args):


 def create_mirror_for_all_specs(mirror_specs, path, skip_unstable_versions):
-    mirror_cache, mirror_stats = spack.mirrors.utils.mirror_cache_and_stats(
+    mirror_cache, mirror_stats = spack.mirror.mirror_cache_and_stats(
        path, skip_unstable_versions=skip_unstable_versions
    )
    for candidate in mirror_specs:
        pkg_cls = spack.repo.PATH.get_pkg_class(candidate.name)
        pkg_obj = pkg_cls(spack.spec.Spec(candidate))
        mirror_stats.next_spec(pkg_obj.spec)
-        spack.mirrors.utils.create_mirror_from_package_object(pkg_obj, mirror_cache, mirror_stats)
+        spack.mirror.create_mirror_from_package_object(pkg_obj, mirror_cache, mirror_stats)
    process_mirror_stats(*mirror_stats.stats())


 def create_mirror_for_individual_specs(mirror_specs, path, skip_unstable_versions):
-    present, mirrored, error = spack.mirrors.utils.create(
-        path, mirror_specs, skip_unstable_versions
-    )
+    present, mirrored, error = spack.mirror.create(path, mirror_specs, skip_unstable_versions)
    tty.msg("Summary for mirror in {}".format(path))
    process_mirror_stats(present, mirrored, error)
@@ -684,7 +681,7 @@ def mirror_destroy(args):
    mirror_url = None

    if args.mirror_name:
-        result = spack.mirrors.mirror.MirrorCollection().lookup(args.mirror_name)
+        result = spack.mirror.MirrorCollection().lookup(args.mirror_name)
        mirror_url = result.push_url
    elif args.mirror_url:
        mirror_url = args.mirror_url
@@ -150,7 +150,7 @@ def pkg_source(args):
content = ph.canonical_source(spec)
else:
message = "Source for %s:" % filename
-with open(filename, encoding="utf-8") as f:
+with open(filename) as f:
content = f.read()

if sys.stdout.isatty():
@@ -94,7 +94,7 @@ def ipython_interpreter(args):
if "PYTHONSTARTUP" in os.environ:
startup_file = os.environ["PYTHONSTARTUP"]
if os.path.isfile(startup_file):
-with open(startup_file, encoding="utf-8") as startup:
+with open(startup_file) as startup:
exec(startup.read())

# IPython can also support running a script OR command, not both
@@ -126,7 +126,7 @@ def python_interpreter(args):
if "PYTHONSTARTUP" in os.environ:
startup_file = os.environ["PYTHONSTARTUP"]
if os.path.isfile(startup_file):
-with open(startup_file, encoding="utf-8") as startup:
+with open(startup_file) as startup:
console.runsource(startup.read(), startup_file, "exec")
if args.python_command:
propagate_exceptions_from(console)
@@ -19,48 +19,11 @@
level = "long"


-class StageFilter:
-"""
-Encapsulation of reasons to skip staging
-"""

-def __init__(self, exclusions, skip_installed):
-"""
-:param exclusions: A list of specs to skip if satisfied.
-:param skip_installed: A boolean indicating whether to skip already installed specs.
-"""
-self.exclusions = exclusions
-self.skip_installed = skip_installed

-def __call__(self, spec):
-"""filter action, true means spec should be filtered"""
-if spec.external:
-return True

-if self.skip_installed and spec.installed:
-return True

-if any(spec.satisfies(exclude) for exclude in self.exclusions):
-return True

-return False


def setup_parser(subparser):
arguments.add_common_arguments(subparser, ["no_checksum", "specs"])
subparser.add_argument(
"-p", "--path", dest="path", help="path to stage package, does not add to spack tree"
)
-subparser.add_argument(
-"-e",
-"--exclude",
-action="append",
-default=[],
-help="exclude packages that satisfy the specified specs",
-)
-subparser.add_argument(
-"-s", "--skip-installed", action="store_true", help="dont restage already installed specs"
-)
arguments.add_concretizer_args(subparser)


@@ -68,14 +31,11 @@ def stage(parser, args):
if args.no_checksum:
spack.config.set("config:checksum", False, scope="command_line")

-exclusion_specs = spack.cmd.parse_specs(args.exclude, concretize=False)
-filter = StageFilter(exclusion_specs, args.skip_installed)

if not args.specs:
env = ev.active_environment()
if not env:
tty.die("`spack stage` requires a spec or an active environment")
-return _stage_env(env, filter)
+return _stage_env(env)

specs = spack.cmd.parse_specs(args.specs, concretize=False)

@@ -89,11 +49,6 @@ def stage(parser, args):

specs = spack.cmd.matching_specs_from_env(specs)
for spec in specs:
-spec = spack.cmd.matching_spec_from_env(spec)

-if filter(spec):
-continue

pkg = spec.package

if custom_path:
@@ -102,13 +57,9 @@ def stage(parser, args):
_stage(pkg)


-def _stage_env(env: ev.Environment, filter):
+def _stage_env(env: ev.Environment):
tty.msg(f"Staging specs from environment {env.name}")
for spec in spack.traverse.traverse_nodes(env.concrete_roots()):

-if filter(spec):
-continue

_stage(spec.package)

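For context, the StageFilter removed in the hunk above is what backs the `--exclude` and `--skip-installed` options of `spack stage` on the base branch. A minimal sketch of how such a filter is applied, using only the names visible in the hunk (the example exclude spec "cmake" is made up):

# Sketch only: mirrors the StageFilter logic shown in the diff above.
exclusions = spack.cmd.parse_specs(["cmake"], concretize=False)  # hypothetical --exclude value
stage_filter = StageFilter(exclusions, skip_installed=True)
for spec in specs:
    if stage_filter(spec):  # True for external, already installed, or excluded specs
        continue
    _stage(spec.package)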
@@ -323,6 +323,8 @@ def process_files(file_list, is_args):
rewrite_and_print_output(output, args, pat, replacement)

packages_isort_args = (
+"--rm",
+"spack",
"--rm",
"spack.pkgkit",
"--rm",
@@ -415,8 +417,8 @@ def _run_import_check(
pretty_path = file if root_relative else cwd_relative(file, root, working_dir)

try:
-with open(file, "r", encoding="utf-8") as f:
-contents = f.read()
+with open(file, "r") as f:
+contents = open(file, "r").read()
parsed = ast.parse(contents)
except Exception:
exit_code = 1
@@ -448,7 +450,7 @@ def _run_import_check(
if not fix or not to_add and not to_remove:
continue

-with open(file, "r", encoding="utf-8") as f:
+with open(file, "r") as f:
lines = f.readlines()

if to_add:
@@ -468,7 +470,7 @@ def _run_import_check(
for statement in to_remove:
new_contents = new_contents.replace(f"{statement}\n", "")

-with open(file, "w", encoding="utf-8") as f:
+with open(file, "w") as f:
f.write(new_contents)

return exit_code
@@ -346,7 +346,7 @@ def _report_suite_results(test_suite, args, constraints):
tty.msg("{0} for test suite '{1}'{2}:".format(results_desc, test_suite.name, matching))

results = {}
-with open(test_suite.results_file, "r", encoding="utf-8") as f:
+with open(test_suite.results_file, "r") as f:
for line in f:
pkg_id, status = line.split()
results[pkg_id] = status
@@ -371,7 +371,7 @@ def _report_suite_results(test_suite, args, constraints):
spec = test_specs[pkg_id]
log_file = test_suite.log_file_for_spec(spec)
if os.path.isfile(log_file):
-with open(log_file, "r", encoding="utf-8") as f:
+with open(log_file, "r") as f:
msg += "\n{0}".format("".join(f.readlines()))
tty.msg(msg)

@@ -192,7 +192,7 @@ def view(parser, args):

if args.action in actions_link and args.projection_file:
# argparse confirms file exists
-with open(args.projection_file, "r", encoding="utf-8") as f:
+with open(args.projection_file, "r") as f:
projections_data = s_yaml.load(f)
validate(projections_data, spack.schema.projections.schema)
ordered_projections = projections_data["projections"]
@@ -469,7 +469,7 @@ def _compile_dummy_c_source(self) -> Optional[str]:
fout = os.path.join(tmpdir, "output")
fin = os.path.join(tmpdir, f"main.{ext}")

-with open(fin, "w", encoding="utf-8") as csource:
+with open(fin, "w") as csource:
csource.write(
"int main(int argc, char* argv[]) { (void)argc; (void)argv; return 0; }\n"
)
@@ -179,7 +179,7 @@ def _write_section(self, section: str) -> None:

try:
filesystem.mkdirp(self.path)
-with open(filename, "w", encoding="utf-8") as f:
+with open(filename, "w") as f:
syaml.dump_config(data, stream=f, default_flow_style=False)
except (syaml.SpackYAMLError, OSError) as e:
raise ConfigFileError(f"cannot write to '{filename}'") from e
@@ -314,7 +314,7 @@ def _write_section(self, section: str) -> None:
filesystem.mkdirp(parent)

tmp = os.path.join(parent, f".{os.path.basename(self.path)}.tmp")
-with open(tmp, "w", encoding="utf-8") as f:
+with open(tmp, "w") as f:
syaml.dump_config(data_to_write, stream=f, default_flow_style=False)
filesystem.rename(tmp, self.path)

@@ -431,19 +431,6 @@ def ensure_unwrapped(self) -> "Configuration":
"""Ensure we unwrap this object from any dynamic wrapper (like Singleton)"""
return self

-def highest(self) -> ConfigScope:
-"""Scope with highest precedence"""
-return next(reversed(self.scopes.values()))  # type: ignore

-@_config_mutator
-def ensure_scope_ordering(self):
-"""Ensure that scope order matches documented precedent"""
-# FIXME: We also need to consider that custom configurations and other orderings
-# may not be preserved correctly
-if "command_line" in self.scopes:
-# TODO (when dropping python 3.6): self.scopes.move_to_end
-self.scopes["command_line"] = self.remove_scope("command_line")

@_config_mutator
def push_scope(self, scope: ConfigScope) -> None:
"""Add a higher precedence scope to the Configuration."""
@@ -632,7 +619,7 @@ def _get_config_memoized(self, section: str, scope: Optional[str]) -> YamlConfig
if changed:
self.format_updates[section].append(scope)

-merged_section = spack.schema.merge_yaml(merged_section, data)
+merged_section = merge_yaml(merged_section, data)

# no config files -- empty config.
if section not in merged_section:
@@ -693,7 +680,7 @@ def set(self, path: str, value: Any, scope: Optional[str] = None) -> None:
while len(parts) > 1:
key = parts.pop(0)

-if spack.schema.override(key):
+if _override(key):
new = type(data[key])()
del data[key]
else:
@@ -706,7 +693,7 @@ def set(self, path: str, value: Any, scope: Optional[str] = None) -> None:
data[key] = new
data = new

-if spack.schema.override(parts[0]):
+if _override(parts[0]):
data.pop(parts[0], None)

# update new value
@@ -803,6 +790,30 @@ def config_paths_from_entry_points() -> List[Tuple[str, str]]:
return config_paths


+def _add_command_line_scopes(cfg: Configuration, command_line_scopes: List[str]) -> None:
+"""Add additional scopes from the --config-scope argument, either envs or dirs."""
+import spack.environment.environment as env  # circular import

+for i, path in enumerate(command_line_scopes):
+name = f"cmd_scope_{i}"

+if env.exists(path):  # managed environment
+manifest = env.EnvironmentManifestFile(env.root(path))
+elif env.is_env_dir(path):  # anonymous environment
+manifest = env.EnvironmentManifestFile(path)
+elif os.path.isdir(path):  # directory with config files
+cfg.push_scope(DirectoryConfigScope(name, path, writable=False))
+_add_platform_scope(cfg, name, path, writable=False)
+continue
+else:
+raise spack.error.ConfigError(f"Invalid configuration scope: {path}")

+for scope in manifest.env_config_scopes:
+scope.name = f"{name}:{scope.name}"
+scope.writable = False
+cfg.push_scope(scope)


def create() -> Configuration:
"""Singleton Configuration instance.

@@ -883,7 +894,7 @@ def add_from_file(filename: str, scope: Optional[str] = None) -> None:

value = data[section]
existing = get(section, scope=scope)
-new = spack.schema.merge_yaml(existing, value)
+new = merge_yaml(existing, value)

# We cannot call config.set directly (set is a type)
CONFIG.set(section, new, scope)
@@ -935,7 +946,7 @@ def add(fullpath: str, scope: Optional[str] = None) -> None:
value: List[str] = [value]  # type: ignore[no-redef]

# merge value into existing
-new = spack.schema.merge_yaml(existing, value)
+new = merge_yaml(existing, value)
CONFIG.set(path, new, scope)


@@ -1082,7 +1093,7 @@ def read_config_file(
# schema when it's not necessary) while allowing us to validate against a
# known schema when the top-level key could be incorrect.
try:
-with open(path, encoding="utf-8") as f:
+with open(path) as f:
tty.debug(f"Reading config from file {path}")
data = syaml.load_config(f)

@@ -1109,6 +1120,44 @@ def read_config_file(
raise ConfigFileError(str(e)) from e


+def _override(string: str) -> bool:
+"""Test if a spack YAML string is an override.

+See ``spack_yaml`` for details. Keys in Spack YAML can end in `::`,
+and if they do, their values completely replace lower-precedence
+configs instead of merging into them.

+"""
+return hasattr(string, "override") and string.override


+def _append(string: str) -> bool:
+"""Test if a spack YAML string is an override.

+See ``spack_yaml`` for details. Keys in Spack YAML can end in `+:`,
+and if they do, their values append lower-precedence
+configs.

+str, str : concatenate strings.
+[obj], [obj] : append lists.

+"""
+return getattr(string, "append", False)


+def _prepend(string: str) -> bool:
+"""Test if a spack YAML string is an override.

+See ``spack_yaml`` for details. Keys in Spack YAML can end in `+:`,
+and if they do, their values prepend lower-precedence
+configs.

+str, str : concatenate strings.
+[obj], [obj] : prepend lists. (default behavior)
+"""
+return getattr(string, "prepend", False)


def _mark_internal(data, name):
"""Add a simple name mark to raw YAML/JSON data.

@@ -1211,7 +1260,7 @@ def they_are(t):
unmerge = sk in dest
old_dest_value = dest.pop(sk, None)

-if unmerge and not spack.schema.override(sk):
+if unmerge and not _override(sk):
dest[sk] = remove_yaml(old_dest_value, sv)

return dest
@@ -1221,6 +1270,81 @@ def they_are(t):
return dest


+def merge_yaml(dest, source, prepend=False, append=False):
+"""Merges source into dest; entries in source take precedence over dest.

+This routine may modify dest and should be assigned to dest, in
+case dest was None to begin with, e.g.:

+dest = merge_yaml(dest, source)

+In the result, elements from lists from ``source`` will appear before
+elements of lists from ``dest``. Likewise, when iterating over keys
+or items in merged ``OrderedDict`` objects, keys from ``source`` will
+appear before keys from ``dest``.

+Config file authors can optionally end any attribute in a dict
+with `::` instead of `:`, and the key will override that of the
+parent instead of merging.

+`+:` will extend the default prepend merge strategy to include string concatenation
+`-:` will change the merge strategy to append, it also includes string concatentation
+"""

+def they_are(t):
+return isinstance(dest, t) and isinstance(source, t)

+# If source is None, overwrite with source.
+if source is None:
+return None

+# Source list is prepended (for precedence)
+if they_are(list):
+if append:
+# Make sure to copy ruamel comments
+dest[:] = [x for x in dest if x not in source] + source
+else:
+# Make sure to copy ruamel comments
+dest[:] = source + [x for x in dest if x not in source]
+return dest

+# Source dict is merged into dest.
+elif they_are(dict):
+# save dest keys to reinsert later -- this ensures that source items
+# come *before* dest in OrderdDicts
+dest_keys = [dk for dk in dest.keys() if dk not in source]

+for sk, sv in source.items():
+# always remove the dest items. Python dicts do not overwrite
+# keys on insert, so this ensures that source keys are copied
+# into dest along with mark provenance (i.e., file/line info).
+merge = sk in dest
+old_dest_value = dest.pop(sk, None)

+if merge and not _override(sk):
+dest[sk] = merge_yaml(old_dest_value, sv, _prepend(sk), _append(sk))
+else:
+# if sk ended with ::, or if it's new, completely override
+dest[sk] = copy.deepcopy(sv)

+# reinsert dest keys so they are last in the result
+for dk in dest_keys:
+dest[dk] = dest.pop(dk)

+return dest

+elif they_are(str):
+# Concatenate strings in prepend mode
+if prepend:
+return source + dest
+elif append:
+return dest + source

+# If we reach here source and dest are either different types or are
+# not both lists or dicts: replace with source.
+return copy.copy(source)


class ConfigPath:
quoted_string = "(?:\"[^\"]+\")|(?:'[^']+')"
unquoted_string = "[^:'\"]+"
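The default merge behavior documented in merge_yaml above can be seen with a small sketch. Plain Python dicts stand in for the marked spack_yaml objects, so only the ordinary prepend-merge path is exercised (the `::` / `+:` markers shown above require keys parsed by spack_yaml):

# Minimal sketch of the default merge behavior implemented by merge_yaml above.
dest = {"packages": {"all": {"compiler": ["gcc"]}}}
source = {"packages": {"all": {"compiler": ["clang"]}}}
merged = merge_yaml(dest, source)
# Dicts merge recursively and lists from source are prepended, so
# merged["packages"]["all"]["compiler"] == ["clang", "gcc"].
# A key written as "compiler::" in spack YAML would instead replace the
# lower-precedence list entirely (see _override above).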
@@ -33,7 +33,7 @@ def validate(configuration_file):
"""
import jsonschema

-with open(configuration_file, encoding="utf-8") as f:
+with open(configuration_file) as f:
config = syaml.load(f)

# Ensure we have a "container" attribute with sensible defaults set
@@ -27,7 +27,7 @@ def data():
if not _data:
json_dir = os.path.abspath(os.path.dirname(__file__))
json_file = os.path.join(json_dir, "images.json")
-with open(json_file, encoding="utf-8") as f:
+with open(json_file) as f:
_data = json.load(f)
return _data

@@ -211,7 +211,7 @@ def entries_to_specs(entries):
def read(path, apply_updates):
decode_exception_type = json.decoder.JSONDecodeError
try:
-with open(path, "r", encoding="utf-8") as json_file:
+with open(path, "r") as json_file:
json_data = json.load(json_file)

jsonschema.validate(json_data, manifest_schema)
@@ -760,7 +760,7 @@ def _read_from_file(self, filename):
Does not do any locking.
"""
try:
-with open(filename, "r", encoding="utf-8") as f:
+with open(filename, "r") as f:
# In the future we may use a stream of JSON objects, hence `raw_decode` for compat.
fdata, _ = JSONDecoder().raw_decode(f.read())
except Exception as e:
@@ -1031,12 +1031,12 @@ def _write(self, type, value, traceback):

# Write a temporary database file them move it into place
try:
-with open(temp_file, "w", encoding="utf-8") as f:
+with open(temp_file, "w") as f:
self._write_to_file(f)
fs.rename(temp_file, self._index_path)

if _use_uuid:
-with open(self._verifier_path, "w", encoding="utf-8") as f:
+with open(self._verifier_path, "w") as f:
new_verifier = str(uuid.uuid4())
f.write(new_verifier)
self.last_seen_verifier = new_verifier
@@ -1053,7 +1053,7 @@ def _read(self):
current_verifier = ""
if _use_uuid:
try:
-with open(self._verifier_path, "r", encoding="utf-8") as f:
+with open(self._verifier_path, "r") as f:
current_verifier = f.read()
except BaseException:
pass
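The temp-file-then-rename pattern in the _write hunk above is the usual way to make the index update atomic, so readers never see a half-written file. A generic, self-contained sketch of the same idea (not Spack code):

import os
import tempfile

def atomic_write(path: str, payload: str) -> None:
    # Write to a temporary file in the same directory, then rename over the
    # target; rename within one filesystem is atomic on POSIX.
    fd, tmp = tempfile.mkstemp(dir=os.path.dirname(path) or ".")
    try:
        with os.fdopen(fd, "w") as f:
            f.write(payload)
        os.replace(tmp, path)
    except BaseException:
        os.unlink(tmp)  # clean up the temporary file on failure
        raise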
@@ -3,7 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Data structures that represent Spack's dependency relationships."""
-from typing import Dict, List, Type
+from typing import Dict, List

import spack.deptypes as dt
import spack.spec
@@ -38,7 +38,7 @@ class Dependency:

def __init__(
self,
-pkg: Type["spack.package_base.PackageBase"],
+pkg: "spack.package_base.PackageBase",
spec: "spack.spec.Spec",
depflag: dt.DepFlag = dt.DEFAULT,
):
@@ -6,8 +6,6 @@

from typing import Iterable, List, Tuple, Union

-from typing_extensions import Literal

#: Type hint for the low-level dependency input (enum.Flag is too slow)
DepFlag = int

@@ -15,7 +13,7 @@
DepTypes = Union[str, List[str], Tuple[str, ...]]

#: Individual dependency types
-DepType = Literal["build", "link", "run", "test"]
+DepType = str  # Python 3.8: Literal["build", "link", "run", "test"]

# Flag values. NOTE: these values are not arbitrary, since hash computation imposes
# the order (link, run, build, test) when depending on the same package multiple times,
@@ -27,7 +27,6 @@
import spack.config
import spack.error
import spack.operating_systems.windows_os as winOs
-import spack.schema
import spack.spec
import spack.util.environment
import spack.util.spack_yaml
@@ -227,7 +226,7 @@ def update_configuration(
pkg_to_cfg[package_name] = pkg_config

pkgs_cfg = spack.config.get("packages", scope=scope)
-pkgs_cfg = spack.schema.merge_yaml(pkgs_cfg, pkg_to_cfg)
+pkgs_cfg = spack.config.merge_yaml(pkgs_cfg, pkg_to_cfg)
spack.config.set("packages", pkgs_cfg, scope=scope)

return all_new_specs
@@ -247,7 +246,7 @@ def set_virtuals_nonbuildable(virtuals: Set[str], scope: Optional[str] = None) -
# Update the provided scope
spack.config.set(
"packages",
-spack.schema.merge_yaml(spack.config.get("packages", scope=scope), new_config),
+spack.config.merge_yaml(spack.config.get("packages", scope=scope), new_config),
scope=scope,
)

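update_configuration above folds detected externals into the "packages" section via merge_yaml. The per-package payload it merges looks roughly like the sketch below; the package name, version, and prefix are illustrative, not taken from this diff:

# Illustrative shape of the pkg_to_cfg mapping merged into the packages config.
pkg_to_cfg = {
    "cmake": {
        "externals": [
            {"spec": "cmake@3.27.4", "prefix": "/usr"}  # hypothetical detected entry
        ]
    }
}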
@@ -198,6 +198,6 @@ def _detection_tests_yaml(
) -> Tuple[pathlib.Path, Dict[str, Any]]:
pkg_dir = pathlib.Path(repository.filename_for_package_name(pkg_name)).parent
detection_tests_yaml = pkg_dir / "detection_test.yaml"
-with open(str(detection_tests_yaml), encoding="utf-8") as f:
+with open(str(detection_tests_yaml)) as f:
content = spack_yaml.load(f)
return detection_tests_yaml, content
@@ -21,7 +21,6 @@ class OpenMpi(Package):
* ``conflicts``
* ``depends_on``
* ``extends``
-* ``license``
* ``patch``
* ``provides``
* ``resource``
@@ -35,12 +34,11 @@ class OpenMpi(Package):
import collections.abc
import os.path
import re
-from typing import Any, Callable, List, Optional, Tuple, Type, Union
+from typing import Any, Callable, List, Optional, Tuple, Union

import llnl.util.tty.color

import spack.deptypes as dt
-import spack.fetch_strategy
import spack.package_base
import spack.patch
import spack.spec
@@ -48,6 +46,7 @@ class OpenMpi(Package):
import spack.variant
from spack.dependency import Dependency
from spack.directives_meta import DirectiveError, DirectiveMeta
+from spack.fetch_strategy import from_kwargs
from spack.resource import Resource
from spack.version import (
GitVersion,
@@ -82,8 +81,8 @@ class OpenMpi(Package):
SpecType = str
DepType = Union[Tuple[str, ...], str]
WhenType = Optional[Union[spack.spec.Spec, str, bool]]
-Patcher = Callable[[Union[Type[spack.package_base.PackageBase], Dependency]], None]
+Patcher = Callable[[Union[spack.package_base.PackageBase, Dependency]], None]
-PatchesType = Union[Patcher, str, List[Union[Patcher, str]]]
+PatchesType = Optional[Union[Patcher, str, List[Union[Patcher, str]]]]


SUPPORTED_LANGUAGES = ("fortran", "cxx", "c")
@@ -219,7 +218,7 @@ def version(
return lambda pkg: _execute_version(pkg, ver, **kwargs)


-def _execute_version(pkg: Type[spack.package_base.PackageBase], ver: Union[str, int], **kwargs):
+def _execute_version(pkg, ver, **kwargs):
if (
(any(s in kwargs for s in spack.util.crypto.hashes) or "checksum" in kwargs)
and hasattr(pkg, "has_code")
@@ -250,12 +249,12 @@ def _execute_version(pkg: Type[spack.package_base.PackageBase], ver: Union[str,


def _depends_on(
-pkg: Type[spack.package_base.PackageBase],
+pkg: spack.package_base.PackageBase,
spec: spack.spec.Spec,
*,
when: WhenType = None,
type: DepType = dt.DEFAULT_TYPES,
-patches: Optional[PatchesType] = None,
+patches: PatchesType = None,
):
when_spec = _make_when_spec(when)
if not when_spec:
@@ -297,13 +296,6 @@ def _depends_on(
deps_by_name = pkg.dependencies.setdefault(when_spec, {})
dependency = deps_by_name.get(spec.name)

-if spec.dependencies():
-raise DirectiveError(
-f"the '^' sigil cannot be used in 'depends_on' directives. Please reformulate "
-f"the directive below as multiple directives:\n\n"
-f'\tdepends_on("{spec}", when="{when_spec}")\n'
-)

if not dependency:
dependency = Dependency(pkg, spec, depflag=depflag)
deps_by_name[spec.name] = dependency
@@ -337,7 +329,7 @@ def conflicts(conflict_spec: SpecType, when: WhenType = None, msg: Optional[str]
msg (str): optional user defined message
"""

-def _execute_conflicts(pkg: Type[spack.package_base.PackageBase]):
+def _execute_conflicts(pkg: spack.package_base.PackageBase):
# If when is not specified the conflict always holds
when_spec = _make_when_spec(when)
if not when_spec:
@@ -356,7 +348,7 @@ def depends_on(
spec: SpecType,
when: WhenType = None,
type: DepType = dt.DEFAULT_TYPES,
-patches: Optional[PatchesType] = None,
+patches: PatchesType = None,
):
"""Creates a dict of deps with specs defining when they apply.

@@ -378,16 +370,14 @@ def depends_on(
assert type == "build", "languages must be of 'build' type"
return _language(lang_spec_str=spec, when=when)

-def _execute_depends_on(pkg: Type[spack.package_base.PackageBase]):
+def _execute_depends_on(pkg: spack.package_base.PackageBase):
_depends_on(pkg, dep_spec, when=when, type=type, patches=patches)

return _execute_depends_on


@directive("disable_redistribute")
-def redistribute(
-source: Optional[bool] = None, binary: Optional[bool] = None, when: WhenType = None
-):
+def redistribute(source=None, binary=None, when: WhenType = None):
"""Can be used inside a Package definition to declare that
the package source and/or compiled binaries should not be
redistributed.
@@ -402,10 +392,7 @@ def redistribute(


def _execute_redistribute(
-pkg: Type[spack.package_base.PackageBase],
-source: Optional[bool],
-binary: Optional[bool],
-when: WhenType,
+pkg: spack.package_base.PackageBase, source=None, binary=None, when: WhenType = None
):
if source is None and binary is None:
return
@@ -481,7 +468,9 @@ def provides(*specs: SpecType, when: WhenType = None):
when: condition when this provides clause needs to be considered
"""

-def _execute_provides(pkg: Type[spack.package_base.PackageBase]):
+def _execute_provides(pkg: spack.package_base.PackageBase):
+import spack.parser  # Avoid circular dependency

when_spec = _make_when_spec(when)
if not when_spec:
return
@@ -527,7 +516,7 @@ def can_splice(
variants will be skipped by '*'.
"""

-def _execute_can_splice(pkg: Type[spack.package_base.PackageBase]):
+def _execute_can_splice(pkg: spack.package_base.PackageBase):
when_spec = _make_when_spec(when)
if isinstance(match_variants, str) and match_variants != "*":
raise ValueError(
@@ -568,10 +557,10 @@ def patch(
compressed URL patches)
"""

-def _execute_patch(
-pkg_or_dep: Union[Type[spack.package_base.PackageBase], Dependency]
-) -> None:
-pkg = pkg_or_dep.pkg if isinstance(pkg_or_dep, Dependency) else pkg_or_dep
+def _execute_patch(pkg_or_dep: Union[spack.package_base.PackageBase, Dependency]):
+pkg = pkg_or_dep
+if isinstance(pkg, Dependency):
+pkg = pkg.pkg

if hasattr(pkg, "has_code") and not pkg.has_code:
raise UnsupportedPackageDirective(
@@ -745,55 +734,58 @@ def _execute_variant(pkg):


@directive("resources")
-def resource(
-*,
-name: Optional[str] = None,
-destination: str = "",
-placement: Optional[str] = None,
-when: WhenType = None,
-# additional kwargs are as for `version()`
-**kwargs,
-):
-"""Define an external resource to be fetched and staged when building the package.
-Based on the keywords present in the dictionary the appropriate FetchStrategy will
-be used for the resource. Resources are fetched and staged in their own folder
-inside spack stage area, and then moved into the stage area of the package that
-needs them.
+def resource(**kwargs):
+"""Define an external resource to be fetched and staged when building the
+package. Based on the keywords present in the dictionary the appropriate
+FetchStrategy will be used for the resource. Resources are fetched and
+staged in their own folder inside spack stage area, and then moved into
+the stage area of the package that needs them.

-Keyword Arguments:
-name: name for the resource
-when: condition defining when the resource is needed
-destination: path, relative to the package stage area, to which resource should be moved
-placement: optionally rename the expanded resource inside the destination directory
+List of recognized keywords:

+* 'when' : (optional) represents the condition upon which the resource is
+needed
+* 'destination' : (optional) path where to move the resource. This path
+must be relative to the main package stage area.
+* 'placement' : (optional) gives the possibility to fine tune how the
+resource is moved into the main package stage area.
"""

def _execute_resource(pkg):
+when = kwargs.get("when")
when_spec = _make_when_spec(when)
if not when_spec:
return

+destination = kwargs.get("destination", "")
+placement = kwargs.get("placement", None)

# Check if the path is relative
if os.path.isabs(destination):
-msg = "The destination keyword of a resource directive can't be an absolute path.\n"
-msg += f"\tdestination : '{destination}\n'"
-raise RuntimeError(msg)
+message = (
+"The destination keyword of a resource directive " "can't be an absolute path.\n"
+)
+message += "\tdestination : '{dest}\n'".format(dest=destination)
+raise RuntimeError(message)

# Check if the path falls within the main package stage area
test_path = "stage_folder_root"
-# Normalized absolute path
-normalized_destination = os.path.normpath(os.path.join(test_path, destination))
+normalized_destination = os.path.normpath(
+os.path.join(test_path, destination)
+)  # Normalized absolute path

if test_path not in normalized_destination:
-msg = "Destination of a resource must be within the package stage directory.\n"
-msg += f"\tdestination : '{destination}'\n"
-raise RuntimeError(msg)
+message = (
+"The destination folder of a resource must fall "
+"within the main package stage directory.\n"
+)
+message += "\tdestination : '{dest}'\n".format(dest=destination)
+raise RuntimeError(message)

resources = pkg.resources.setdefault(when_spec, [])
-resources.append(
-Resource(name, spack.fetch_strategy.from_kwargs(**kwargs), destination, placement)
-)
+name = kwargs.get("name")
+fetcher = from_kwargs(**kwargs)
+resources.append(Resource(name, fetcher, destination, placement))

return _execute_resource


@@ -825,9 +817,7 @@ def _execute_maintainer(pkg):
return _execute_maintainer


-def _execute_license(
-pkg: Type[spack.package_base.PackageBase], license_identifier: str, when: WhenType
-):
+def _execute_license(pkg, license_identifier: str, when):
# If when is not specified the license always holds
when_spec = _make_when_spec(when)
if not when_spec:
@@ -891,7 +881,7 @@ def requires(*requirement_specs: str, policy="one_of", when=None, msg=None):
msg: optional user defined message
"""

-def _execute_requires(pkg: Type[spack.package_base.PackageBase]):
+def _execute_requires(pkg: spack.package_base.PackageBase):
if policy not in ("one_of", "any_of"):
err_msg = (
f"the 'policy' argument of the 'requires' directive in {pkg.name} is set "
@@ -916,7 +906,7 @@ def _execute_requires(pkg: Type[spack.package_base.PackageBase]):
def _language(lang_spec_str: str, *, when: Optional[Union[str, bool]] = None):
"""Temporary implementation of language virtuals, until compilers are proper dependencies."""

-def _execute_languages(pkg: Type[spack.package_base.PackageBase]):
+def _execute_languages(pkg: spack.package_base.PackageBase):
when_spec = _make_when_spec(when)
if not when_spec:
return
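As both versions of the resource() docstring above say, any remaining keywords select the fetch strategy (they are forwarded to from_kwargs). In a package recipe the directive is typically used roughly as below; the package name, URL, and checksum are made up for illustration:

class Example(Package):  # hypothetical package recipe
    resource(
        name="test-data",
        url="https://example.com/test-data.tar.gz",  # fetch kwargs go to from_kwargs()
        sha256="0000000000000000000000000000000000000000000000000000000000000000",
        destination="data",        # relative to the package stage area
        placement="test-data",     # rename the expanded resource inside destination
        when="+tests",             # only fetched when this condition holds
    )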
@@ -5,7 +5,7 @@

import collections.abc
import functools
-from typing import Any, Callable, Dict, List, Optional, Sequence, Set, Type, Union
+from typing import List, Set

import llnl.util.lang

@@ -25,13 +25,11 @@ class DirectiveMeta(type):

# Set of all known directives
_directive_dict_names: Set[str] = set()
-_directives_to_be_executed: List[Callable] = []
+_directives_to_be_executed: List[str] = []
-_when_constraints_from_context: List[spack.spec.Spec] = []
+_when_constraints_from_context: List[str] = []
_default_args: List[dict] = []

-def __new__(
-cls: Type["DirectiveMeta"], name: str, bases: tuple, attr_dict: dict
-) -> "DirectiveMeta":
+def __new__(cls, name, bases, attr_dict):
# Initialize the attribute containing the list of directives
# to be executed. Here we go reversed because we want to execute
# commands:
@@ -62,7 +60,7 @@ def __new__(

return super(DirectiveMeta, cls).__new__(cls, name, bases, attr_dict)

-def __init__(cls: "DirectiveMeta", name: str, bases: tuple, attr_dict: dict):
+def __init__(cls, name, bases, attr_dict):
# The instance is being initialized: if it is a package we must ensure
# that the directives are called to set it up.

@@ -83,27 +81,27 @@ def __init__(cls: "DirectiveMeta", name: str, bases: tuple, attr_dict: dict):
super(DirectiveMeta, cls).__init__(name, bases, attr_dict)

@staticmethod
-def push_to_context(when_spec: spack.spec.Spec) -> None:
+def push_to_context(when_spec):
"""Add a spec to the context constraints."""
DirectiveMeta._when_constraints_from_context.append(when_spec)

@staticmethod
-def pop_from_context() -> spack.spec.Spec:
+def pop_from_context():
"""Pop the last constraint from the context"""
return DirectiveMeta._when_constraints_from_context.pop()

@staticmethod
-def push_default_args(default_args: Dict[str, Any]) -> None:
+def push_default_args(default_args):
"""Push default arguments"""
DirectiveMeta._default_args.append(default_args)

@staticmethod
-def pop_default_args() -> dict:
+def pop_default_args():
"""Pop default arguments"""
return DirectiveMeta._default_args.pop()

@staticmethod
-def directive(dicts: Optional[Union[Sequence[str], str]] = None) -> Callable:
+def directive(dicts=None):
"""Decorator for Spack directives.

Spack directives allow you to modify a package while it is being
@@ -158,7 +156,7 @@ class Foo(Package):
DirectiveMeta._directive_dict_names |= set(dicts)

# This decorator just returns the directive functions
-def _decorator(decorated_function: Callable) -> Callable:
+def _decorator(decorated_function):
directive_names.append(decorated_function.__name__)

@functools.wraps(decorated_function)
@@ -141,7 +141,7 @@ def relative_path_for_spec(self, spec):
def write_spec(self, spec, path):
"""Write a spec out to a file."""
_check_concrete(spec)
-with open(path, "w", encoding="utf-8") as f:
+with open(path, "w") as f:
# The hash of the projection is the DAG hash which contains
# the full provenance, so it's availabe if we want it later
spec.to_json(f, hash=ht.dag_hash)
@@ -153,13 +153,13 @@ def write_host_environment(self, spec):
"""
env_file = self.env_metadata_path(spec)
environ = spack.spec.get_host_environment_metadata()
-with open(env_file, "w", encoding="utf-8") as fd:
+with open(env_file, "w") as fd:
sjson.dump(environ, fd)

def read_spec(self, path):
"""Read the contents of a file and parse them as a spec"""
try:
-with open(path, encoding="utf-8") as f:
+with open(path) as f:
extension = os.path.splitext(path)[-1].lower()
if extension == ".json":
spec = spack.spec.Spec.from_json(f)
@@ -482,7 +482,6 @@
display_specs,
environment_dir_from_name,
environment_from_name_or_dir,
-environment_path_scopes,
exists,
initialize_environment_dir,
installed_specs,
@@ -519,7 +518,6 @@
"display_specs",
"environment_dir_from_name",
"environment_from_name_or_dir",
-"environment_path_scopes",
"exists",
"initialize_environment_dir",
"installed_specs",
@@ -27,6 +27,7 @@
import spack.concretize
import spack.config
import spack.deptypes as dt
+import spack.environment
import spack.error
import spack.filesystem_view as fsv
import spack.hash_types as ht
@@ -162,7 +163,7 @@ def installed_specs():
Returns the specs of packages installed in the active environment or None
if no packages are installed.
"""
-env = active_environment()
+env = spack.environment.active_environment()
hashes = env.all_hashes() if env else None
return spack.store.STORE.db.query(hashes=hashes)

@@ -971,7 +972,7 @@ def _read(self):
self._construct_state_from_manifest()

if os.path.exists(self.lock_path):
-with open(self.lock_path, encoding="utf-8") as f:
+with open(self.lock_path) as f:
read_lock_version = self._read_lockfile(f)["_meta"]["lockfile-version"]

if read_lock_version == 1:
@@ -1053,7 +1054,7 @@ def _process_concrete_includes(self):

if self.included_concrete_envs:
if os.path.exists(self.lock_path):
-with open(self.lock_path, encoding="utf-8") as f:
+with open(self.lock_path) as f:
data = self._read_lockfile(f)

if included_concrete_name in data:
@@ -2332,7 +2333,7 @@ def write(self, regenerate: bool = True) -> None:
self.new_specs.clear()

def update_lockfile(self) -> None:
-with fs.write_tmp_and_move(self.lock_path, encoding="utf-8") as f:
+with fs.write_tmp_and_move(self.lock_path) as f:
sjson.dump(self._to_lockfile_dict(), stream=f)

def ensure_env_directory_exists(self, dot_env: bool = False) -> None:
@@ -2507,7 +2508,7 @@ def update_yaml(manifest, backup_file):
AssertionError: in case anything goes wrong during the update
"""
# Check if the environment needs update
-with open(manifest, encoding="utf-8") as f:
+with open(manifest) as f:
data = syaml.load(f)

top_level_key = _top_level_key(data)
@@ -2525,7 +2526,7 @@ def update_yaml(manifest, backup_file):
assert not os.path.exists(backup_file), msg.format(backup_file)

shutil.copy(manifest, backup_file)
-with open(manifest, "w", encoding="utf-8") as f:
+with open(manifest, "w") as f:
syaml.dump_config(data, f)
return True

@@ -2553,7 +2554,7 @@ def is_latest_format(manifest):
manifest (str): manifest file to be analyzed
"""
try:
-with open(manifest, encoding="utf-8") as f:
+with open(manifest) as f:
data = syaml.load(f)
except (OSError, IOError):
return True
@@ -2655,7 +2656,7 @@ def from_lockfile(manifest_dir: Union[pathlib.Path, str]) -> "EnvironmentManifes
# TBD: Should this be the abspath?
manifest_dir = pathlib.Path(manifest_dir)
lockfile = manifest_dir / lockfile_name
-with lockfile.open("r", encoding="utf-8") as f:
+with lockfile.open("r") as f:
data = sjson.load(f)
user_specs = data["roots"]

@@ -2682,7 +2683,7 @@ def __init__(self, manifest_dir: Union[pathlib.Path, str], name: Optional[str] =
msg = f"cannot find '{manifest_name}' in {self.manifest_dir}"
raise SpackEnvironmentError(msg)

-with self.manifest_file.open(encoding="utf-8") as f:
+with self.manifest_file.open() as f:
self.yaml_content = _read_yaml(f)

self.changed = False
@@ -3043,13 +3044,11 @@ def prepare_config_scope(self) -> None:
"""Add the manifest's scopes to the global configuration search path."""
for scope in self.env_config_scopes:
spack.config.CONFIG.push_scope(scope)
-spack.config.CONFIG.ensure_scope_ordering()

def deactivate_config_scope(self) -> None:
"""Remove any of the manifest's scopes from the global config path."""
for scope in self.env_config_scopes:
spack.config.CONFIG.remove_scope(scope.name)
-spack.config.CONFIG.ensure_scope_ordering()

@contextlib.contextmanager
def use_config(self):
@@ -3060,29 +3059,6 @@ def use_config(self):
self.deactivate_config_scope()


-def environment_path_scopes(name: str, path: str) -> Optional[List[spack.config.ConfigScope]]:
-"""Retrieve the suitably named environment path scopes
-
-Arguments:
-name: configuration scope name
-path: path to configuration file(s)
-
-Returns: list of environment scopes, if any, or None
-"""
-if exists(path): # managed environment
-manifest = EnvironmentManifestFile(root(path))
-elif is_env_dir(path): # anonymous environment
-manifest = EnvironmentManifestFile(path)
-else:
-return None
-
-for scope in manifest.env_config_scopes:
-scope.name = f"{name}:{scope.name}"
-scope.writable = False
-
-return manifest.env_config_scopes
-
-
class SpackEnvironmentError(spack.error.SpackError):
"""Superclass for all errors to do with Spack environments."""

@@ -192,10 +192,3 @@ def __reduce__(self):

def _make_stop_phase(msg, long_msg):
return StopPhase(msg, long_msg)
-
-
-class MirrorError(SpackError):
-"""Superclass of all mirror-creation related errors."""
-
-def __init__(self, msg, long_msg=None):
-super().__init__(msg, long_msg)
@@ -5,6 +5,7 @@
"""Service functions and classes to implement the hooks
for Spack's command extensions.
"""
+import difflib
import glob
import importlib
import os
@@ -16,6 +17,7 @@

import llnl.util.lang

+import spack.cmd
import spack.config
import spack.error
import spack.util.path
@@ -23,6 +25,9 @@
_extension_regexp = re.compile(r"spack-(\w[-\w]*)$")


+# TODO: For consistency we should use spack.cmd.python_name(), but
+# currently this would create a circular relationship between
+# spack.cmd and spack.extensions.
def _python_name(cmd_name):
return cmd_name.replace("-", "_")

@@ -206,7 +211,8 @@ def get_module(cmd_name):
module = load_command_extension(cmd_name, folder)
if module:
return module
-return None
+else:
+raise CommandNotFoundError(cmd_name)


def get_template_dirs():
@@ -218,6 +224,27 @@ def get_template_dirs():
return extensions


+class CommandNotFoundError(spack.error.SpackError):
+"""Exception class thrown when a requested command is not recognized as
+such.
+"""
+
+def __init__(self, cmd_name):
+msg = (
+"{0} is not a recognized Spack command or extension command;"
+" check with `spack commands`.".format(cmd_name)
+)
+long_msg = None
+
+similar = difflib.get_close_matches(cmd_name, spack.cmd.all_commands())
+
+if 1 <= len(similar) <= 5:
+long_msg = "\nDid you mean one of the following commands?\n "
+long_msg += "\n ".join(similar)
+
+super().__init__(msg, long_msg)
+
+
class ExtensionNamingError(spack.error.SpackError):
"""Exception class thrown when a configured extension does not follow
the expected naming convention.
@@ -12,8 +12,6 @@
import sys
from typing import Callable, Dict, Optional

-from typing_extensions import Literal
-
from llnl.string import comma_or
from llnl.util import tty
from llnl.util.filesystem import (
@@ -111,9 +109,6 @@ def view_copy(
tty.debug(f"Can't change the permissions for {dst}")


-#: Type alias for link types
-LinkType = Literal["hardlink", "hard", "copy", "relocate", "add", "symlink", "soft"]
-
#: supported string values for `link_type` in an env, mapped to canonical values
_LINK_TYPES = {
"hardlink": "hardlink",
@@ -128,7 +123,7 @@ def view_copy(
_VALID_LINK_TYPES = sorted(set(_LINK_TYPES.values()))


-def canonicalize_link_type(link_type: LinkType) -> str:
+def canonicalize_link_type(link_type: str) -> str:
"""Return canonical"""
canonical = _LINK_TYPES.get(link_type)
if not canonical:
@@ -138,7 +133,7 @@ def canonicalize_link_type(link_type: LinkType) -> str:
return canonical


-def function_for_link_type(link_type: LinkType) -> LinkCallbackType:
+def function_for_link_type(link_type: str) -> LinkCallbackType:
link_type = canonicalize_link_type(link_type)
if link_type == "hardlink":
return view_hardlink
@@ -147,7 +142,7 @@ def function_for_link_type(link_type: LinkType) -> LinkCallbackType:
elif link_type == "copy":
return view_copy

-assert False, "invalid link type"
+assert False, "invalid link type"  # need mypy Literal values


class FilesystemView:
@@ -171,7 +166,7 @@ def __init__(
projections: Optional[Dict] = None,
ignore_conflicts: bool = False,
verbose: bool = False,
-link_type: LinkType = "symlink",
+link_type: str = "symlink",
):
"""
Initialize a filesystem view under the given `root` directory with
@@ -297,7 +292,7 @@ def __init__(
projections: Optional[Dict] = None,
ignore_conflicts: bool = False,
verbose: bool = False,
-link_type: LinkType = "symlink",
+link_type: str = "symlink",
):
super().__init__(
root,
@@ -331,12 +326,12 @@ def __init__(
def write_projections(self):
if self.projections:
mkdirp(os.path.dirname(self.projections_path))
-with open(self.projections_path, "w", encoding="utf-8") as f:
+with open(self.projections_path, "w") as f:
f.write(s_yaml.dump_config({"projections": self.projections}))

def read_projections(self):
if os.path.exists(self.projections_path):
-with open(self.projections_path, "r", encoding="utf-8") as f:
+with open(self.projections_path, "r") as f:
projections_data = s_yaml.load(f)
spack.config.validate(projections_data, spack.schema.projections.schema)
return projections_data["projections"]
@@ -434,7 +429,7 @@ def needs_file(spec, file):
self.get_path_meta_folder(spec), spack.store.STORE.layout.manifest_file_name
)
try:
-with open(manifest_file, "r", encoding="utf-8") as f:
+with open(manifest_file, "r") as f:
manifest = s_json.load(f)
except (OSError, IOError):
# if we can't load it, assume it doesn't know about the file.
@@ -838,7 +833,7 @@ def get_projection_for_spec(self, spec):
#####################
def get_spec_from_file(filename):
try:
-with open(filename, "r", encoding="utf-8") as f:
+with open(filename, "r") as f:
return spack.spec.Spec.from_yaml(f)
except IOError:
return None
@@ -35,7 +35,6 @@ class _HookRunner:
"spack.hooks.drop_redundant_rpaths",
"spack.hooks.absolutify_elf_sonames",
"spack.hooks.permissions_setters",
-"spack.hooks.resolve_shared_libraries",
# after all mutations to the install prefix, write metadata
"spack.hooks.write_install_manifest",
# after all metadata is written
@@ -6,7 +6,7 @@
import llnl.util.tty as tty

import spack.binary_distribution as bindist
-import spack.mirrors.mirror
+import spack.mirror


def post_install(spec, explicit):
@@ -22,7 +22,7 @@ def post_install(spec, explicit):
return

# Push the package to all autopush mirrors
-for mirror in spack.mirrors.mirror.MirrorCollection(binary=True, autopush=True).values():
+for mirror in spack.mirror.MirrorCollection(binary=True, autopush=True).values():
signing_key = bindist.select_signing_key() if mirror.signed else None
with bindist.make_uploader(mirror=mirror, force=True, signing_key=signing_key) as uploader:
uploader.push_or_raise([spec])
@@ -142,7 +142,7 @@ def write_license_file(pkg, license_path):
os.makedirs(os.path.dirname(license_path))

# Output
-with open(license_path, "w", encoding="utf-8") as f:
+with open(license_path, "w") as f:
for line in txt.splitlines():
f.write("{0}{1}\n".format(pkg.license_comment, line))
f.close()
@@ -1,240 +0,0 @@
-# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-import fnmatch
-import io
-import os
-import re
-from typing import Dict, List, Union
-
-import llnl.util.tty as tty
-from llnl.util.filesystem import BaseDirectoryVisitor, visit_directory_tree
-from llnl.util.lang import stable_partition
-
-import spack.config
-import spack.error
-import spack.util.elf as elf
-
-#: Patterns for names of libraries that are allowed to be unresolved when *just* looking at RPATHs
-#: added by Spack. These are libraries outside of Spack's control, and assumed to be located in
-#: default search paths of the dynamic linker.
-ALLOW_UNRESOLVED = [
-# kernel
-"linux-vdso.so.*",
-"libselinux.so.*",
-# musl libc
-"ld-musl-*.so.*",
-# glibc
-"ld-linux*.so.*",
-"ld64.so.*",
-"libanl.so.*",
-"libc.so.*",
-"libdl.so.*",
-"libm.so.*",
-"libmemusage.so.*",
-"libmvec.so.*",
-"libnsl.so.*",
-"libnss_compat.so.*",
-"libnss_db.so.*",
-"libnss_dns.so.*",
-"libnss_files.so.*",
-"libnss_hesiod.so.*",
-"libpcprofile.so.*",
-"libpthread.so.*",
-"libresolv.so.*",
-"librt.so.*",
-"libSegFault.so.*",
-"libthread_db.so.*",
-"libutil.so.*",
-# gcc -- this is required even with gcc-runtime, because e.g. libstdc++ depends on libgcc_s,
-# but the binaries we copy from the compiler don't have an $ORIGIN rpath.
-"libasan.so.*",
-"libatomic.so.*",
-"libcc1.so.*",
-"libgcc_s.so.*",
-"libgfortran.so.*",
-"libgomp.so.*",
-"libitm.so.*",
-"liblsan.so.*",
-"libquadmath.so.*",
-"libssp.so.*",
-"libstdc++.so.*",
-"libtsan.so.*",
-"libubsan.so.*",
-# systemd
-"libudev.so.*",
-# cuda driver
-"libcuda.so.*",
-]
-
-
-def is_compatible(parent: elf.ElfFile, child: elf.ElfFile) -> bool:
-return (
-child.elf_hdr.e_type == elf.ELF_CONSTANTS.ET_DYN
-and parent.is_little_endian == child.is_little_endian
-and parent.is_64_bit == child.is_64_bit
-and parent.elf_hdr.e_machine == child.elf_hdr.e_machine
-)
-
-
-def candidate_matches(current_elf: elf.ElfFile, candidate_path: bytes) -> bool:
-try:
-with open(candidate_path, "rb") as g:
-return is_compatible(current_elf, elf.parse_elf(g))
-except (OSError, elf.ElfParsingError):
-return False
-
-
-class Problem:
-def __init__(
-self, resolved: Dict[bytes, bytes], unresolved: List[bytes], relative_rpaths: List[bytes]
-) -> None:
-self.resolved = resolved
-self.unresolved = unresolved
-self.relative_rpaths = relative_rpaths
-
-
-class ResolveSharedElfLibDepsVisitor(BaseDirectoryVisitor):
-def __init__(self, allow_unresolved_patterns: List[str]) -> None:
-self.problems: Dict[str, Problem] = {}
-self._allow_unresolved_regex = re.compile(
-"|".join(fnmatch.translate(x) for x in allow_unresolved_patterns)
-)
-
-def allow_unresolved(self, needed: bytes) -> bool:
-try:
-name = needed.decode("utf-8")
-except UnicodeDecodeError:
-return False
-return bool(self._allow_unresolved_regex.match(name))
-
-def visit_file(self, root: str, rel_path: str, depth: int) -> None:
-# We work with byte strings for paths.
-path = os.path.join(root, rel_path).encode("utf-8")
-
-# For $ORIGIN interpolation: should not have trailing dir seperator.
-origin = os.path.dirname(path)
-
-# Retrieve the needed libs + rpaths.
-try:
-with open(path, "rb") as f:
-parsed_elf = elf.parse_elf(f, interpreter=False, dynamic_section=True)
-except (OSError, elf.ElfParsingError):
-# Not dealing with an invalid ELF file.
-return
-
-# If there's no needed libs all is good
-if not parsed_elf.has_needed:
-return
-
-# Get the needed libs and rpaths (notice: byte strings)
-# Don't force an encoding cause paths are just a bag of bytes.
-needed_libs = parsed_elf.dt_needed_strs
-
-rpaths = parsed_elf.dt_rpath_str.split(b":") if parsed_elf.has_rpath else []
-
-# We only interpolate $ORIGIN, not $LIB and $PLATFORM, they're not really
-# supported in general. Also remove empty paths.
-rpaths = [x.replace(b"$ORIGIN", origin) for x in rpaths if x]
-
-# Do not allow relative rpaths (they are relative to the current working directory)
-rpaths, relative_rpaths = stable_partition(rpaths, os.path.isabs)
-
-# If there's a / in the needed lib, it's opened directly, otherwise it needs
-# a search.
-direct_libs, search_libs = stable_partition(needed_libs, lambda x: b"/" in x)
-
-# Do not allow relative paths in direct libs (they are relative to the current working
-# directory)
-direct_libs, unresolved = stable_partition(direct_libs, os.path.isabs)
-
-resolved: Dict[bytes, bytes] = {}
-
-for lib in search_libs:
-if self.allow_unresolved(lib):
-continue
-for rpath in rpaths:
-candidate = os.path.join(rpath, lib)
-if candidate_matches(parsed_elf, candidate):
-resolved[lib] = candidate
-break
-else:
-unresolved.append(lib)
-
-# Check if directly opened libs are compatible
-for lib in direct_libs:
-if candidate_matches(parsed_elf, lib):
-resolved[lib] = lib
-else:
-unresolved.append(lib)
-
-if unresolved or relative_rpaths:
-self.problems[rel_path] = Problem(resolved, unresolved, relative_rpaths)
-
-def visit_symlinked_file(self, root: str, rel_path: str, depth: int) -> None:
-pass
-
-def before_visit_dir(self, root: str, rel_path: str, depth: int) -> bool:
-# There can be binaries in .spack/test which shouldn't be checked.
-if rel_path == ".spack":
-return False
-return True
-
-def before_visit_symlinked_dir(self, root: str, rel_path: str, depth: int) -> bool:
-return False
-
-
-class CannotLocateSharedLibraries(spack.error.SpackError):
-pass
-
-
-def maybe_decode(byte_str: bytes) -> Union[str, bytes]:
-try:
-return byte_str.decode("utf-8")
-except UnicodeDecodeError:
-return byte_str
-
-
-def post_install(spec, explicit):
-"""Check whether shared libraries can be resolved in RPATHs."""
-policy = spack.config.get("config:shared_linking:missing_library_policy", "ignore")
-
-# Currently only supported for ELF files.
-if policy == "ignore" or spec.external or spec.platform not in ("linux", "freebsd"):
-return
-
-visitor = ResolveSharedElfLibDepsVisitor(
-[*ALLOW_UNRESOLVED, *spec.package.unresolved_libraries]
-)
-visit_directory_tree(spec.prefix, visitor)
-
-# All good?
-if not visitor.problems:
-return
-
-# For now just list the issues (print it in ldd style, except we don't recurse)
-output = io.StringIO()
-output.write("not all executables and libraries can resolve their dependencies:\n")
-for path, problem in visitor.problems.items():
-output.write(path)
-output.write("\n")
-for needed, full_path in problem.resolved.items():
-output.write("    ")
-if needed == full_path:
-output.write(maybe_decode(needed))
-else:
-output.write(f"{maybe_decode(needed)} => {maybe_decode(full_path)}")
-output.write("\n")
-for not_found in problem.unresolved:
-output.write(f"        {maybe_decode(not_found)} => not found\n")
-for relative_rpath in problem.relative_rpaths:
-output.write(f"        {maybe_decode(relative_rpath)} => relative rpath\n")
-
-message = output.getvalue().strip()
-
-if policy == "error":
-raise CannotLocateSharedLibraries(message)
-
-tty.warn(message)
@@ -81,7 +81,7 @@ def get_escaped_text_output(filename: str) -> List[str]:
Returns:
escaped text lines read from the file
"""
-with open(filename, encoding="utf-8") as f:
+with open(filename) as f:
# Ensure special characters are escaped as needed
expected = f.read()

@@ -458,7 +458,7 @@ def write_tested_status(self):
elif self.counts[TestStatus.PASSED] > 0:
status = TestStatus.PASSED

-with open(self.tested_file, "w", encoding="utf-8") as f:
+with open(self.tested_file, "w") as f:
f.write(f"{status.value}\n")


@@ -502,7 +502,7 @@ def test_part(pkg: Pb, test_name: str, purpose: str, work_dir: str = ".", verbos
for i, entry in enumerate(stack):
filename, lineno, function, text = entry
if spack.repo.is_package_file(filename):
-with open(filename, encoding="utf-8") as f:
+with open(filename) as f:
lines = f.readlines()
new_lineno = lineno - 2
text = lines[new_lineno]
@@ -822,7 +822,7 @@ def get_test_suite(name: str) -> Optional["TestSuite"]:

def write_test_suite_file(suite):
"""Write the test suite to its (JSON) lock file."""
-with open(suite.stage.join(test_suite_filename), "w", encoding="utf-8") as f:
+with open(suite.stage.join(test_suite_filename), "w") as f:
sjson.dump(suite.to_dict(), stream=f)


@@ -977,7 +977,7 @@ def test_status(self, spec: spack.spec.Spec, externals: bool) -> Optional[TestSt
status = TestStatus.NO_TESTS
return status

-with open(tests_status_file, "r", encoding="utf-8") as f:
+with open(tests_status_file, "r") as f:
value = (f.read()).strip("\n")
return TestStatus(int(value)) if value else TestStatus.NO_TESTS

@@ -1179,7 +1179,7 @@ def from_file(filename):
BaseException: sjson.SpackJSONError if problem parsing the file
"""
try:
-with open(filename, encoding="utf-8") as f:
+with open(filename) as f:
data = sjson.load(f)
test_suite = TestSuite.from_dict(data)
content_hash = os.path.basename(os.path.dirname(filename))
@@ -1196,7 +1196,7 @@ def _add_msg_to_file(filename, msg):
filename (str): path to the file
msg (str): message to be appended to the file
"""
-with open(filename, "a+", encoding="utf-8") as f:
+with open(filename, "a+") as f:
f.write(f"{msg}\n")

@@ -56,7 +56,7 @@
import spack.deptypes as dt
import spack.error
import spack.hooks
-import spack.mirrors.mirror
+import spack.mirror
import spack.package_base
import spack.package_prefs as prefs
import spack.repo
@@ -105,7 +105,7 @@ def __str__(self):
def _write_timer_json(pkg, timer, cache):
extra_attributes = {"name": pkg.name, "cache": cache, "hash": pkg.spec.dag_hash()}
try:
-with open(pkg.times_log_path, "w", encoding="utf-8") as timelog:
+with open(pkg.times_log_path, "w") as timelog:
timer.write_json(timelog, extra_attributes=extra_attributes)
except Exception as e:
tty.debug(str(e))
@@ -491,7 +491,7 @@ def _try_install_from_binary_cache(
timer: timer to keep track of binary install phases.
"""
# Early exit if no binary mirrors are configured.
-if not spack.mirrors.mirror.MirrorCollection(binary=True):
+if not spack.mirror.MirrorCollection(binary=True):
return False

tty.debug(f"Searching for binary cache of {package_id(pkg.spec)}")
@@ -692,7 +692,7 @@ def log(pkg: "spack.package_base.PackageBase") -> None:
if errors.getvalue():
error_file = os.path.join(target_dir, "errors.txt")
fs.mkdirp(target_dir)
-with open(error_file, "w", encoding="utf-8") as err:
+with open(error_file, "w") as err:
err.write(errors.getvalue())
tty.warn(f"Errors occurred when archiving files.\n\tSee: {error_file}")

@@ -2405,7 +2405,7 @@ def _real_install(self) -> None:

# Save just the changes to the environment. This file can be
# safely installed, since it does not contain secret variables.
-with open(pkg.env_mods_path, "w", encoding="utf-8") as env_mods_file:
+with open(pkg.env_mods_path, "w") as env_mods_file:
mods = self.env_mods.shell_modifications(explicit=True, env=self.unmodified_env)
env_mods_file.write(mods)

@@ -2414,7 +2414,7 @@ def _real_install(self) -> None:
configure_args = getattr(pkg, attr)()
configure_args = " ".join(configure_args)

-with open(pkg.configure_args_path, "w", encoding="utf-8") as args_file:
+with open(pkg.configure_args_path, "w") as args_file:
args_file.write(configure_args)

break
@@ -48,6 +48,7 @@
import spack.util.debug
import spack.util.environment
import spack.util.lock
+from spack.error import SpackError

#: names of profile statistics
stat_names = pstats.Stats.sort_arg_dict_default
@@ -857,33 +858,6 @@ def resolve_alias(cmd_name: str, cmd: List[str]) -> Tuple[str, List[str]]:
return cmd_name, cmd


-def add_command_line_scopes(
-cfg: spack.config.Configuration, command_line_scopes: List[str]
-) -> None:
-"""Add additional scopes from the --config-scope argument, either envs or dirs.
-
-Args:
-cfg: configuration instance
-command_line_scopes: list of configuration scope paths
-
-Raises:
-spack.error.ConfigError: if the path is an invalid configuration scope
-"""
-for i, path in enumerate(command_line_scopes):
-name = f"cmd_scope_{i}"
-scopes = ev.environment_path_scopes(name, path)
-if scopes is None:
-if os.path.isdir(path): # directory with config files
-cfg.push_scope(spack.config.DirectoryConfigScope(name, path, writable=False))
-spack.config._add_platform_scope(cfg, name, path, writable=False)
-continue
-else:
-raise spack.error.ConfigError(f"Invalid configuration scope: {path}")
-
-for scope in scopes:
-cfg.push_scope(scope)
-
-
def _main(argv=None):
"""Logic for the main entry point for the Spack command.

@@ -952,7 +926,7 @@ def _main(argv=None):

# Push scopes from the command line last
if args.config_scopes:
-add_command_line_scopes(spack.config.CONFIG, args.config_scopes)
+spack.config._add_command_line_scopes(spack.config.CONFIG, args.config_scopes)
spack.config.CONFIG.push_scope(spack.config.InternalConfigScope("command_line"))
setup_main_options(args)

@@ -1038,7 +1012,7 @@ def main(argv=None):
try:
return _main(argv)

-except spack.error.SpackError as e:
+except SpackError as e:
tty.debug(e)
e.die()  # gracefully die on any SpackErrors

@@ -2,20 +2,42 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+"""
+This file contains code for creating spack mirror directories. A
+mirror is an organized hierarchy containing specially named archive
+files. This enabled spack to know where to find files in a mirror if
+the main server for a particular package is down. Or, if the computer
+where spack is run is not connected to the internet, it allows spack
+to download packages directly from a mirror (e.g., on an intranet).
+"""
+import collections
import collections.abc
import operator
import os
+import os.path
+import sys
+import traceback
import urllib.parse
from typing import Any, Dict, Optional, Tuple, Union

+import llnl.url
+import llnl.util.symlink
import llnl.util.tty as tty
+from llnl.util.filesystem import mkdirp

+import spack.caches
import spack.config
+import spack.error
+import spack.fetch_strategy
+import spack.mirror
+import spack.oci.image
+import spack.repo
+import spack.spec
import spack.util.path
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
-from spack.error import MirrorError
+import spack.version

#: What schemes do we support
supported_url_schemes = ("file", "http", "https", "sftp", "ftp", "s3", "gs", "oci")
@@ -468,3 +490,380 @@ def __iter__(self):

def __len__(self):
return len(self._mirrors)
+
+
+def _determine_extension(fetcher):
+if isinstance(fetcher, spack.fetch_strategy.URLFetchStrategy):
+if fetcher.expand_archive:
+# If we fetch with a URLFetchStrategy, use URL's archive type
+ext = llnl.url.determine_url_file_extension(fetcher.url)
+
+if ext:
+# Remove any leading dots
+ext = ext.lstrip(".")
+else:
+msg = """\
+Unable to parse extension from {0}.
+
+If this URL is for a tarball but does not include the file extension
+in the name, you can explicitly declare it with the following syntax:
+
+version('1.2.3', 'hash', extension='tar.gz')
+
+If this URL is for a download like a .jar or .whl that does not need
+to be expanded, or an uncompressed installation script, you can tell
+Spack not to expand it with the following syntax:
+
+version('1.2.3', 'hash', expand=False)
+"""
+raise MirrorError(msg.format(fetcher.url))
+else:
+# If the archive shouldn't be expanded, don't check extension.
+ext = None
+else:
+# Otherwise we'll make a .tar.gz ourselves
+ext = "tar.gz"
+
+return ext
+
+
+class MirrorLayout:
+"""A ``MirrorLayout`` object describes the relative path of a mirror entry."""
+
+def __init__(self, path: str) -> None:
+self.path = path
+
+def __iter__(self):
+"""Yield all paths including aliases where the resource can be found."""
+yield self.path
+
+def make_alias(self, root: str) -> None:
+"""Make the entry ``root / self.path`` available under a human readable alias"""
+pass
+
+
+class DefaultLayout(MirrorLayout):
+def __init__(self, alias_path: str, digest_path: Optional[str] = None) -> None:
+# When we have a digest, it is used as the primary storage location. If not, then we use
+# the human-readable alias. In case of mirrors of a VCS checkout, we currently do not have
+# a digest, that's why an alias is required and a digest optional.
+super().__init__(path=digest_path or alias_path)
+self.alias = alias_path
+self.digest_path = digest_path
+
+def make_alias(self, root: str) -> None:
+"""Symlink a human readible path in our mirror to the actual storage location."""
+# We already use the human-readable path as the main storage location.
+if not self.digest_path:
+return
+
+alias, digest = os.path.join(root, self.alias), os.path.join(root, self.digest_path)
+
+alias_dir = os.path.dirname(alias)
+relative_dst = os.path.relpath(digest, start=alias_dir)
+
+mkdirp(alias_dir)
+tmp = f"{alias}.tmp"
+llnl.util.symlink.symlink(relative_dst, tmp)
+
+try:
+os.rename(tmp, alias)
+except OSError:
+# Clean up the temporary if possible
+try:
+os.unlink(tmp)
+except OSError:
+pass
+raise
+
+def __iter__(self):
+if self.digest_path:
+yield self.digest_path
+yield self.alias
+
+
+class OCILayout(MirrorLayout):
+"""Follow the OCI Image Layout Specification to archive blobs where paths are of the form
+``blobs/<algorithm>/<digest>``"""
+
+def __init__(self, digest: spack.oci.image.Digest) -> None:
+super().__init__(os.path.join("blobs", digest.algorithm, digest.digest))
+
+
+def default_mirror_layout(
+fetcher: "spack.fetch_strategy.FetchStrategy",
+per_package_ref: str,
+spec: Optional["spack.spec.Spec"] = None,
+) -> MirrorLayout:
+"""Returns a ``MirrorReference`` object which keeps track of the relative
+storage path of the resource associated with the specified ``fetcher``."""
+ext = None
+if spec:
+pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
+versions = pkg_cls.versions.get(spec.version, {})
+ext = versions.get("extension", None)
+# If the spec does not explicitly specify an extension (the default case),
+# then try to determine it automatically. An extension can only be
+# specified for the primary source of the package (e.g. the source code
+# identified in the 'version' declaration). Resources/patches don't have
+# an option to specify an extension, so it must be inferred for those.
+ext = ext or _determine_extension(fetcher)
+
+if ext:
+per_package_ref += ".%s" % ext
+
+global_ref = fetcher.mirror_id()
+if global_ref:
+global_ref = os.path.join("_source-cache", global_ref)
+if global_ref and ext:
+global_ref += ".%s" % ext
+
+return DefaultLayout(per_package_ref, global_ref)
+
+
+def get_all_versions(specs):
+"""Given a set of initial specs, return a new set of specs that includes
+each version of each package in the original set.
+
+Note that if any spec in the original set specifies properties other than
+version, this information will be omitted in the new set; for example; the
+new set of specs will not include variant settings.
+"""
+version_specs = []
+for spec in specs:
+pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
+# Skip any package that has no known versions.
+if not pkg_cls.versions:
+tty.msg("No safe (checksummed) versions for package %s" % pkg_cls.name)
+continue
+
+for version in pkg_cls.versions:
+version_spec = spack.spec.Spec(pkg_cls.name)
+version_spec.versions = spack.version.VersionList([version])
+version_specs.append(version_spec)
+
+return version_specs
+
+
+def get_matching_versions(specs, num_versions=1):
+"""Get a spec for EACH known version matching any spec in the list.
+For concrete specs, this retrieves the concrete version and, if more
+than one version per spec is requested, retrieves the latest versions
+of the package.
+"""
+matching = []
+for spec in specs:
+pkg = spec.package
+
+# Skip any package that has no known versions.
+if not pkg.versions:
+tty.msg("No safe (checksummed) versions for package %s" % pkg.name)
+continue
+
+pkg_versions = num_versions
+
+version_order = list(reversed(sorted(pkg.versions)))
+matching_spec = []
+if spec.concrete:
+matching_spec.append(spec)
+pkg_versions -= 1
+if spec.version in version_order:
+version_order.remove(spec.version)
+
+for v in version_order:
+# Generate no more than num_versions versions for each spec.
+if pkg_versions < 1:
+break
+
+# Generate only versions that satisfy the spec.
+if spec.concrete or v.intersects(spec.versions):
+s = spack.spec.Spec(pkg.name)
+s.versions = spack.version.VersionList([v])
+s.variants = spec.variants.copy()
+# This is needed to avoid hanging references during the
+# concretization phase
+s.variants.spec = s
+matching_spec.append(s)
+pkg_versions -= 1
+
+if not matching_spec:
+tty.warn("No known version matches spec: %s" % spec)
+matching.extend(matching_spec)
+
+return matching
+
+
+def create(path, specs, skip_unstable_versions=False):
+"""Create a directory to be used as a spack mirror, and fill it with
+package archives.
+
+Arguments:
+path: Path to create a mirror directory hierarchy in.
+specs: Any package versions matching these specs will be added \
+to the mirror.
+skip_unstable_versions: if true, this skips adding resources when
+they do not have a stable archive checksum (as determined by
+``fetch_strategy.stable_target``)
+
+Return Value:
+Returns a tuple of lists: (present, mirrored, error)
+
+* present: Package specs that were already present.
+* mirrored: Package specs that were successfully mirrored.
+* error: Package specs that failed to mirror due to some error.
+"""
+# automatically spec-ify anything in the specs array.
+specs = [s if isinstance(s, spack.spec.Spec) else spack.spec.Spec(s) for s in specs]
+
+mirror_cache, mirror_stats = mirror_cache_and_stats(path, skip_unstable_versions)
+for spec in specs:
+mirror_stats.next_spec(spec)
+create_mirror_from_package_object(spec.package, mirror_cache, mirror_stats)
+
+return mirror_stats.stats()
+
+
+def mirror_cache_and_stats(path, skip_unstable_versions=False):
+"""Return both a mirror cache and a mirror stats, starting from the path
+where a mirror ought to be created.
+
+Args:
+path (str): path to create a mirror directory hierarchy in.
+skip_unstable_versions: if true, this skips adding resources when
+they do not have a stable archive checksum (as determined by
+``fetch_strategy.stable_target``)
+"""
+# Get the absolute path of the root before we start jumping around.
+if not os.path.isdir(path):
+try:
+mkdirp(path)
+except OSError as e:
+raise MirrorError("Cannot create directory '%s':" % path, str(e))
+mirror_cache = spack.caches.MirrorCache(path, skip_unstable_versions=skip_unstable_versions)
+mirror_stats = MirrorStats()
+return mirror_cache, mirror_stats
+
+
+def add(mirror: Mirror, scope=None):
+"""Add a named mirror in the given scope"""
+mirrors = spack.config.get("mirrors", scope=scope)
+if not mirrors:
+mirrors = syaml.syaml_dict()
+
+if mirror.name in mirrors:
+tty.die("Mirror with name {} already exists.".format(mirror.name))
+
+items = [(n, u) for n, u in mirrors.items()]
+items.insert(0, (mirror.name, mirror.to_dict()))
+mirrors = syaml.syaml_dict(items)
+spack.config.set("mirrors", mirrors, scope=scope)
+
+
+def remove(name, scope):
+"""Remove the named mirror in the given scope"""
+mirrors = spack.config.get("mirrors", scope=scope)
+if not mirrors:
+mirrors = syaml.syaml_dict()
+
+if name not in mirrors:
+tty.die("No mirror with name %s" % name)
+
+mirrors.pop(name)
+spack.config.set("mirrors", mirrors, scope=scope)
+tty.msg("Removed mirror %s." % name)
+
+
+class MirrorStats:
+def __init__(self):
+self.present = {}
+self.new = {}
+self.errors = set()
+
+self.current_spec = None
+self.added_resources = set()
+self.existing_resources = set()
+
+def next_spec(self, spec):
+self._tally_current_spec()
+self.current_spec = spec
+
+def _tally_current_spec(self):
+if self.current_spec:
+if self.added_resources:
+self.new[self.current_spec] = len(self.added_resources)
+if self.existing_resources:
+self.present[self.current_spec] = len(self.existing_resources)
+self.added_resources = set()
+self.existing_resources = set()
+self.current_spec = None
+
+def stats(self):
+self._tally_current_spec()
+return list(self.present), list(self.new), list(self.errors)
+
+def already_existed(self, resource):
+# If an error occurred after caching a subset of a spec's
+# resources, a secondary attempt may consider them already added
+if resource not in self.added_resources:
+self.existing_resources.add(resource)
+
+def added(self, resource):
+self.added_resources.add(resource)
+
+def error(self):
+self.errors.add(self.current_spec)
+
+
+def create_mirror_from_package_object(pkg_obj, mirror_cache, mirror_stats):
+"""Add a single package object to a mirror.
+
+The package object is only required to have an associated spec
+with a concrete version.
+
+Args:
+pkg_obj (spack.package_base.PackageBase): package object with to be added.
+mirror_cache (spack.caches.MirrorCache): mirror where to add the spec.
+mirror_stats (spack.mirror.MirrorStats): statistics on the current mirror
+
+Return:
+True if the spec was added successfully, False otherwise
+"""
+tty.msg("Adding package {} to mirror".format(pkg_obj.spec.format("{name}{@version}")))
+num_retries = 3
+while num_retries > 0:
+try:
+# Includes patches and resources
+with pkg_obj.stage as pkg_stage:
+pkg_stage.cache_mirror(mirror_cache, mirror_stats)
+exception = None
+break
+except Exception as e:
+exc_tuple = sys.exc_info()
+exception = e
+num_retries -= 1
+if exception:
+if spack.config.get("config:debug"):
+traceback.print_exception(file=sys.stderr, *exc_tuple)
+else:
+tty.warn(
+"Error while fetching %s" % pkg_obj.spec.cformat("{name}{@version}"),
+getattr(exception, "message", exception),
+)
+mirror_stats.error()
+return False
+return True
+
+
+def require_mirror_name(mirror_name):
+"""Find a mirror by name and raise if it does not exist"""
+mirror = MirrorCollection().get(mirror_name)
+if not mirror:
+raise ValueError(f'no mirror named "{mirror_name}"')
+return mirror
+
+
+class MirrorError(spack.error.SpackError):
+"""Superclass of all mirror-creation related errors."""
+
+def __init__(self, msg, long_msg=None):
+super().__init__(msg, long_msg)
@@ -1,4 +0,0 @@
-# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -1,146 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import os.path
from typing import Optional

import llnl.url
import llnl.util.symlink
from llnl.util.filesystem import mkdirp

import spack.fetch_strategy
import spack.oci.image
import spack.repo
import spack.spec
from spack.error import MirrorError


class MirrorLayout:
    """A ``MirrorLayout`` object describes the relative path of a mirror entry."""

    def __init__(self, path: str) -> None:
        self.path = path

    def __iter__(self):
        """Yield all paths including aliases where the resource can be found."""
        yield self.path

    def make_alias(self, root: str) -> None:
        """Make the entry ``root / self.path`` available under a human readable alias"""
        pass


class DefaultLayout(MirrorLayout):
    def __init__(self, alias_path: str, digest_path: Optional[str] = None) -> None:
        # When we have a digest, it is used as the primary storage location. If not, then we use
        # the human-readable alias. In case of mirrors of a VCS checkout, we currently do not have
        # a digest, that's why an alias is required and a digest optional.
        super().__init__(path=digest_path or alias_path)
        self.alias = alias_path
        self.digest_path = digest_path

    def make_alias(self, root: str) -> None:
        """Symlink a human readible path in our mirror to the actual storage location."""
        # We already use the human-readable path as the main storage location.
        if not self.digest_path:
            return

        alias, digest = os.path.join(root, self.alias), os.path.join(root, self.digest_path)

        alias_dir = os.path.dirname(alias)
        relative_dst = os.path.relpath(digest, start=alias_dir)

        mkdirp(alias_dir)
        tmp = f"{alias}.tmp"
        llnl.util.symlink.symlink(relative_dst, tmp)

        try:
            os.rename(tmp, alias)
        except OSError:
            # Clean up the temporary if possible
            try:
                os.unlink(tmp)
            except OSError:
                pass
            raise

    def __iter__(self):
        if self.digest_path:
            yield self.digest_path
        yield self.alias


class OCILayout(MirrorLayout):
    """Follow the OCI Image Layout Specification to archive blobs where paths are of the form
    ``blobs/<algorithm>/<digest>``"""

    def __init__(self, digest: spack.oci.image.Digest) -> None:
        super().__init__(os.path.join("blobs", digest.algorithm, digest.digest))

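As a usage sketch (hypothetical paths; assumes the classes above are importable, e.g. from a Spack tree that still ships them), the digest path is the primary storage location and the alias is just a symlink to it:

```python
layout = DefaultLayout(
    alias_path="zlib/zlib-1.3.tar.gz",
    digest_path="_source-cache/archive/ab/abcd1234.tar.gz",
)
print(layout.path)   # "_source-cache/archive/ab/abcd1234.tar.gz" (primary storage)
print(list(layout))  # digest path first, then the human-readable alias
layout.make_alias("/tmp/my-mirror")  # symlinks zlib/zlib-1.3.tar.gz -> the digest entry
```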
def _determine_extension(fetcher):
    if isinstance(fetcher, spack.fetch_strategy.URLFetchStrategy):
        if fetcher.expand_archive:
            # If we fetch with a URLFetchStrategy, use URL's archive type
            ext = llnl.url.determine_url_file_extension(fetcher.url)

            if ext:
                # Remove any leading dots
                ext = ext.lstrip(".")
            else:
                msg = """\
Unable to parse extension from {0}.

If this URL is for a tarball but does not include the file extension
in the name, you can explicitly declare it with the following syntax:

    version('1.2.3', 'hash', extension='tar.gz')

If this URL is for a download like a .jar or .whl that does not need
to be expanded, or an uncompressed installation script, you can tell
Spack not to expand it with the following syntax:

    version('1.2.3', 'hash', expand=False)
"""
                raise MirrorError(msg.format(fetcher.url))
        else:
            # If the archive shouldn't be expanded, don't check extension.
            ext = None
    else:
        # Otherwise we'll make a .tar.gz ourselves
        ext = "tar.gz"

    return ext

def default_mirror_layout(
    fetcher: "spack.fetch_strategy.FetchStrategy",
    per_package_ref: str,
    spec: Optional["spack.spec.Spec"] = None,
) -> MirrorLayout:
    """Returns a ``MirrorReference`` object which keeps track of the relative
    storage path of the resource associated with the specified ``fetcher``."""
    ext = None
    if spec:
        pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
        versions = pkg_cls.versions.get(spec.version, {})
        ext = versions.get("extension", None)
    # If the spec does not explicitly specify an extension (the default case),
    # then try to determine it automatically. An extension can only be
    # specified for the primary source of the package (e.g. the source code
    # identified in the 'version' declaration). Resources/patches don't have
    # an option to specify an extension, so it must be inferred for those.
    ext = ext or _determine_extension(fetcher)

    if ext:
        per_package_ref += ".%s" % ext

    global_ref = fetcher.mirror_id()
    if global_ref:
        global_ref = os.path.join("_source-cache", global_ref)
    if global_ref and ext:
        global_ref += ".%s" % ext

    return DefaultLayout(per_package_ref, global_ref)
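To make the result concrete, here is a small sketch of the two references the function above composes, using hypothetical values in place of the caller-supplied name and the fetcher's `mirror_id()`:

```python
import os

ext = "tar.gz"                                          # as inferred by _determine_extension()
per_package_ref = "zlib/zlib-1.3" + "." + ext           # human-readable, per-package name
global_ref = os.path.join("_source-cache", "archive/ab/abcd1234") + "." + ext

layout = DefaultLayout(per_package_ref, global_ref)
print(layout.alias)  # zlib/zlib-1.3.tar.gz
print(layout.path)   # _source-cache/archive/ab/abcd1234.tar.gz
```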
@@ -1,258 +0,0 @@
# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import os.path
import traceback

import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp

import spack.caches
import spack.config
import spack.error
import spack.repo
import spack.spec
import spack.util.spack_yaml as syaml
import spack.version
from spack.error import MirrorError
from spack.mirrors.mirror import Mirror, MirrorCollection


def get_all_versions(specs):
    """Given a set of initial specs, return a new set of specs that includes
    each version of each package in the original set.

    Note that if any spec in the original set specifies properties other than
    version, this information will be omitted in the new set; for example, the
    new set of specs will not include variant settings.
    """
    version_specs = []
    for spec in specs:
        pkg_cls = spack.repo.PATH.get_pkg_class(spec.name)
        # Skip any package that has no known versions.
        if not pkg_cls.versions:
            tty.msg("No safe (checksummed) versions for package %s" % pkg_cls.name)
            continue

        for version in pkg_cls.versions:
            version_spec = spack.spec.Spec(pkg_cls.name)
            version_spec.versions = spack.version.VersionList([version])
            version_specs.append(version_spec)

    return version_specs

def get_matching_versions(specs, num_versions=1):
    """Get a spec for EACH known version matching any spec in the list.
    For concrete specs, this retrieves the concrete version and, if more
    than one version per spec is requested, retrieves the latest versions
    of the package.
    """
    matching = []
    for spec in specs:
        pkg = spec.package

        # Skip any package that has no known versions.
        if not pkg.versions:
            tty.msg("No safe (checksummed) versions for package %s" % pkg.name)
            continue

        pkg_versions = num_versions

        version_order = list(reversed(sorted(pkg.versions)))
        matching_spec = []
        if spec.concrete:
            matching_spec.append(spec)
            pkg_versions -= 1
            if spec.version in version_order:
                version_order.remove(spec.version)

        for v in version_order:
            # Generate no more than num_versions versions for each spec.
            if pkg_versions < 1:
                break

            # Generate only versions that satisfy the spec.
            if spec.concrete or v.intersects(spec.versions):
                s = spack.spec.Spec(pkg.name)
                s.versions = spack.version.VersionList([v])
                s.variants = spec.variants.copy()
                # This is needed to avoid hanging references during the
                # concretization phase
                s.variants.spec = s
                matching_spec.append(s)
                pkg_versions -= 1

        if not matching_spec:
            tty.warn("No known version matches spec: %s" % spec)
        matching.extend(matching_spec)

    return matching

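A standalone sketch of the selection policy above (newest versions first, at most ``num_versions`` per request); purely illustrative, using plain tuples instead of Spack version objects:

```python
def pick_versions(known_versions, wanted=None, num_versions=1):
    """Return up to ``num_versions`` of the newest known versions, optionally
    restricted to those equal to ``wanted``."""
    order = sorted(known_versions, reverse=True)  # newest first, like version_order above
    matching = [v for v in order if wanted is None or v == wanted]
    return matching[:num_versions]


print(pick_versions([(1, 2), (1, 3), (2, 0)], num_versions=2))  # [(2, 0), (1, 3)]
```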
def create(path, specs, skip_unstable_versions=False):
    """Create a directory to be used as a spack mirror, and fill it with
    package archives.

    Arguments:
        path: Path to create a mirror directory hierarchy in.
        specs: Any package versions matching these specs will be added \
            to the mirror.
        skip_unstable_versions: if true, this skips adding resources when
            they do not have a stable archive checksum (as determined by
            ``fetch_strategy.stable_target``)

    Return Value:
        Returns a tuple of lists: (present, mirrored, error)

        * present: Package specs that were already present.
        * mirrored: Package specs that were successfully mirrored.
        * error: Package specs that failed to mirror due to some error.
    """
    # automatically spec-ify anything in the specs array.
    specs = [s if isinstance(s, spack.spec.Spec) else spack.spec.Spec(s) for s in specs]

    mirror_cache, mirror_stats = mirror_cache_and_stats(path, skip_unstable_versions)
    for spec in specs:
        mirror_stats.next_spec(spec)
        create_mirror_from_package_object(spec.package, mirror_cache, mirror_stats)

    return mirror_stats.stats()

def mirror_cache_and_stats(path, skip_unstable_versions=False):
    """Return both a mirror cache and a mirror stats, starting from the path
    where a mirror ought to be created.

    Args:
        path (str): path to create a mirror directory hierarchy in.
        skip_unstable_versions: if true, this skips adding resources when
            they do not have a stable archive checksum (as determined by
            ``fetch_strategy.stable_target``)
    """
    # Get the absolute path of the root before we start jumping around.
    if not os.path.isdir(path):
        try:
            mkdirp(path)
        except OSError as e:
            raise MirrorError("Cannot create directory '%s':" % path, str(e))
    mirror_cache = spack.caches.MirrorCache(path, skip_unstable_versions=skip_unstable_versions)
    mirror_stats = MirrorStats()
    return mirror_cache, mirror_stats

def add(mirror: Mirror, scope=None):
    """Add a named mirror in the given scope"""
    mirrors = spack.config.get("mirrors", scope=scope)
    if not mirrors:
        mirrors = syaml.syaml_dict()

    if mirror.name in mirrors:
        tty.die("Mirror with name {} already exists.".format(mirror.name))

    items = [(n, u) for n, u in mirrors.items()]
    items.insert(0, (mirror.name, mirror.to_dict()))
    mirrors = syaml.syaml_dict(items)
    spack.config.set("mirrors", mirrors, scope=scope)


def remove(name, scope):
    """Remove the named mirror in the given scope"""
    mirrors = spack.config.get("mirrors", scope=scope)
    if not mirrors:
        mirrors = syaml.syaml_dict()

    if name not in mirrors:
        tty.die("No mirror with name %s" % name)

    mirrors.pop(name)
    spack.config.set("mirrors", mirrors, scope=scope)
    tty.msg("Removed mirror %s." % name)

class MirrorStats:
    def __init__(self):
        self.present = {}
        self.new = {}
        self.errors = set()

        self.current_spec = None
        self.added_resources = set()
        self.existing_resources = set()

    def next_spec(self, spec):
        self._tally_current_spec()
        self.current_spec = spec

    def _tally_current_spec(self):
        if self.current_spec:
            if self.added_resources:
                self.new[self.current_spec] = len(self.added_resources)
            if self.existing_resources:
                self.present[self.current_spec] = len(self.existing_resources)
            self.added_resources = set()
            self.existing_resources = set()
        self.current_spec = None

    def stats(self):
        self._tally_current_spec()
        return list(self.present), list(self.new), list(self.errors)

    def already_existed(self, resource):
        # If an error occurred after caching a subset of a spec's
        # resources, a secondary attempt may consider them already added
        if resource not in self.added_resources:
            self.existing_resources.add(resource)

    def added(self, resource):
        self.added_resources.add(resource)

    def error(self):
        self.errors.add(self.current_spec)

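A small usage sketch of the class above (hypothetical resource names; in Spack the specs are ``Spec`` objects rather than strings), showing how the per-spec tallies are produced:

```python
stats = MirrorStats()
stats.next_spec("zlib@1.3")
stats.added("archive/ab/abcd1234.tar.gz")
stats.next_spec("libpng@1.6.39")      # tallies zlib as "new" before switching specs
stats.already_existed("archive/cd/cdef5678.tar.gz")

present, new, errors = stats.stats()
print(present)  # ["libpng@1.6.39"]
print(new)      # ["zlib@1.3"]
print(errors)   # []
```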
def create_mirror_from_package_object(
    pkg_obj, mirror_cache: "spack.caches.MirrorCache", mirror_stats: MirrorStats
) -> bool:
    """Add a single package object to a mirror.

    The package object is only required to have an associated spec
    with a concrete version.

    Args:
        pkg_obj (spack.package_base.PackageBase): package object with to be added.
        mirror_cache: mirror where to add the spec.
        mirror_stats: statistics on the current mirror

    Return:
        True if the spec was added successfully, False otherwise
    """
    tty.msg("Adding package {} to mirror".format(pkg_obj.spec.format("{name}{@version}")))
    max_retries = 3
    for num_retries in range(max_retries):
        try:
            # Includes patches and resources
            with pkg_obj.stage as pkg_stage:
                pkg_stage.cache_mirror(mirror_cache, mirror_stats)
            break
        except Exception as e:
            if num_retries + 1 == max_retries:
                if spack.config.get("config:debug"):
                    traceback.print_exc()
                else:
                    tty.warn(
                        "Error while fetching %s" % pkg_obj.spec.format("{name}{@version}"), str(e)
                    )
                mirror_stats.error()
                return False
    return True


def require_mirror_name(mirror_name):
    """Find a mirror by name and raise if it does not exist"""
    mirror = MirrorCollection().get(mirror_name)
    if not mirror:
        raise ValueError(f'no mirror named "{mirror_name}"')
    return mirror
@@ -48,7 +48,6 @@
 import spack.error
 import spack.paths
 import spack.projections as proj
-import spack.schema
 import spack.schema.environment
 import spack.spec
 import spack.store
@@ -217,7 +216,7 @@ def root_path(name, module_set_name):
     roots = spack.config.get(f"modules:{module_set_name}:roots", {})
 
     # Merge config values into the defaults so we prefer configured values
-    roots = spack.schema.merge_yaml(defaults, roots)
+    roots = spack.config.merge_yaml(defaults, roots)
 
     path = roots.get(name, os.path.join(spack.paths.share_path, name))
     return spack.util.path.canonicalize_path(path)
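The intent is the same in both variants of the hunk above: configured module roots take precedence over the built-in defaults. A minimal stand-in for that merge (not Spack's ``merge_yaml``, which also handles nested YAML structures):

```python
defaults = {"tcl": "$spack/share/spack/modules", "lmod": "$spack/share/spack/lmod"}
configured = {"lmod": "/opt/modules/lmod"}

roots = {**defaults, **configured}  # configured values win on key conflicts
print(roots["lmod"])  # /opt/modules/lmod
print(roots["tcl"])   # $spack/share/spack/modules
```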
@@ -228,7 +227,7 @@ def generate_module_index(root, modules, overwrite=False):
     if overwrite or not os.path.exists(index_path):
         entries = syaml.syaml_dict()
     else:
-        with open(index_path, encoding="utf-8") as index_file:
+        with open(index_path) as index_file:
             yaml_content = syaml.load(index_file)
             entries = yaml_content["module_index"]
 
@@ -237,7 +236,7 @@ def generate_module_index(root, modules, overwrite=False):
         entries[m.spec.dag_hash()] = entry
     index = {"module_index": entries}
     llnl.util.filesystem.mkdirp(root)
-    with open(index_path, "w", encoding="utf-8") as index_file:
+    with open(index_path, "w") as index_file:
         syaml.dump(index, default_flow_style=False, stream=index_file)
 
 
@@ -257,7 +256,7 @@ def read_module_index(root):
     index_path = os.path.join(root, "module-index.yaml")
     if not os.path.exists(index_path):
         return {}
-    with open(index_path, encoding="utf-8") as index_file:
+    with open(index_path) as index_file:
         return _read_module_index(index_file)
 
 
@@ -606,7 +605,7 @@ def configure_options(self):
             return msg
 
         if os.path.exists(pkg.install_configure_args_path):
-            with open(pkg.install_configure_args_path, encoding="utf-8") as args_file:
+            with open(pkg.install_configure_args_path) as args_file:
                 return spack.util.path.padding_filter(args_file.read())
 
         # Returning a false-like value makes the default templates skip
@@ -625,10 +624,10 @@ def environment_modifications(self):
         """List of environment modifications to be processed."""
         # Modifications guessed by inspecting the spec prefix
         prefix_inspections = syaml.syaml_dict()
-        spack.schema.merge_yaml(
+        spack.config.merge_yaml(
             prefix_inspections, spack.config.get("modules:prefix_inspections", {})
         )
-        spack.schema.merge_yaml(
+        spack.config.merge_yaml(
             prefix_inspections,
             spack.config.get(f"modules:{self.conf.name}:prefix_inspections", {}),
         )
@@ -901,7 +900,7 @@ def write(self, overwrite=False):
         # Render the template
         text = template.render(context)
         # Write it to file
-        with open(self.layout.filename, "w", encoding="utf-8") as f:
+        with open(self.layout.filename, "w") as f:
             f.write(text)
 
         # Set the file permissions of the module to match that of the package
@@ -940,7 +939,7 @@ def update_module_hiddenness(self, remove=False):
 
         if modulerc_exists:
             # retrieve modulerc content
-            with open(modulerc_path, encoding="utf-8") as f:
+            with open(modulerc_path) as f:
                 content = f.readlines()
                 content = "".join(content).split("\n")
             # remove last empty item if any
@@ -975,7 +974,7 @@ def update_module_hiddenness(self, remove=False):
         elif content != self.modulerc_header:
             # ensure file ends with a newline character
             content.append("")
-            with open(modulerc_path, "w", encoding="utf-8") as f:
+            with open(modulerc_path, "w") as f:
                 f.write("\n".join(content))
 
     def remove(self):
@@ -7,6 +7,8 @@
 import urllib.parse
 from typing import Optional, Union
 
+import spack.spec
+
 # notice: Docker is more strict (no uppercase allowed). We parse image names *with* uppercase
 # and normalize, so: example.com/Organization/Name -> example.com/organization/name. Tags are
 # case sensitive though.
@@ -193,7 +195,7 @@ def __eq__(self, __value: object) -> bool:
         )
 
 
-def ensure_valid_tag(tag: str) -> str:
+def _ensure_valid_tag(tag: str) -> str:
     """Ensure a tag is valid for an OCI registry."""
     sanitized = re.sub(r"[^\w.-]", "_", tag)
     if len(sanitized) > 128:
@@ -201,6 +203,20 @@ def ensure_valid_tag(tag: str) -> str:
     return sanitized
 
 
+def default_tag(spec: "spack.spec.Spec") -> str:
+    """Return a valid, default image tag for a spec."""
+    return _ensure_valid_tag(f"{spec.name}-{spec.version}-{spec.dag_hash()}.spack")
+
+
+#: Default OCI index tag
+default_index_tag = "index.spack"
+
+
+def tag_is_spec(tag: str) -> bool:
+    """Check if a tag is likely a Spec"""
+    return tag.endswith(".spack") and tag != default_index_tag
+
+
 def default_config(architecture: str, os: str):
     return {
         "architecture": architecture,
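For reference, a quick sketch of how the helpers added above behave. The spec values are hypothetical; in Spack, ``spec.name``, ``spec.version`` and ``spec.dag_hash()`` come from a concrete ``Spec``:

```python
# Hypothetical concrete-spec values:
name, version, dag_hash = "zlib", "1.3", "abcdef123456"

tag = f"{name}-{version}-{dag_hash}.spack"  # what default_tag() builds before sanitization
print(tag)                                  # zlib-1.3-abcdef123456.spack

default_index_tag = "index.spack"


def tag_is_spec(tag: str) -> bool:          # same check as the helper added above
    return tag.endswith(".spack") and tag != default_index_tag


print(tag_is_spec(tag))                # True
print(tag_is_spec(default_index_tag))  # False
```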
@@ -16,8 +16,7 @@
 import llnl.util.tty as tty
 
 import spack.fetch_strategy
-import spack.mirrors.layout
-import spack.mirrors.mirror
+import spack.mirror
 import spack.oci.opener
 import spack.stage
 import spack.util.url
@@ -214,7 +213,7 @@ def upload_manifest(
     return digest, size
 
 
-def image_from_mirror(mirror: spack.mirrors.mirror.Mirror) -> ImageReference:
+def image_from_mirror(mirror: spack.mirror.Mirror) -> ImageReference:
     """Given an OCI based mirror, extract the URL and image name from it"""
     url = mirror.push_url
     if not url.startswith("oci://"):
@@ -386,8 +385,5 @@ def make_stage(
     # is the `oci-layout` and `index.json` files, which are
     # required by the spec.
     return spack.stage.Stage(
-        fetch_strategy,
-        mirror_paths=spack.mirrors.layout.OCILayout(digest),
-        name=digest.digest,
-        keep=keep,
+        fetch_strategy, mirror_paths=spack.mirror.OCILayout(digest), name=digest.digest, keep=keep
     )
Some files were not shown because too many files have changed in this diff.